diff --git a/venv/lib/python3.10/site-packages/_pytest/__init__.py b/venv/lib/python3.10/site-packages/_pytest/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8eb8ec9605c73aeba33b5a2031d59a84d6841225 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/__init__.py @@ -0,0 +1,13 @@ +from __future__ import annotations + + +__all__ = ["__version__", "version_tuple"] + +try: + from ._version import version as __version__ + from ._version import version_tuple +except ImportError: # pragma: no cover + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = "unknown" + version_tuple = (0, 0, "unknown") diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..903ba67bc28816899a980de213edb646d1b3956e Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/_argcomplete.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/_argcomplete.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef698cd152bd546404675c15e7b45204fd432e09 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/_argcomplete.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1dbf84c68cf9281df6ca120a1c833c1d96317b2f Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/cacheprovider.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/cacheprovider.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16fa5d1eccda5213e5c11c490ad350fcef4cb1d0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/cacheprovider.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/capture.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/capture.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1596718df6b1c391f065e2fe81b97a6d74181bdc Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/capture.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d980d713c26784cdd8242eef548b80045b567e24 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/debugging.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/debugging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a9ab315d7f76f21c8e363436ac176b304f63831c Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/debugging.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/_pytest/__pycache__/deprecated.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/deprecated.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc1757601ac14517b5b9deb14e8a12f2c5f2a153 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/deprecated.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/doctest.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/doctest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f8b32d5525349b81a243ad6277d33f04254dae8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/doctest.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/faulthandler.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/faulthandler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8dd47dae256fbe54d23f391bcb6f453c9a3f2274 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/faulthandler.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/fixtures.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/fixtures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ccd7883a42ac0ac5ec9d60504ae6a754fd57a026 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/fixtures.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/freeze_support.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/freeze_support.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2d74a05d2460379091ea717ada5dd931749b802 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/freeze_support.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/helpconfig.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/helpconfig.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4d6b079a1e1b9a4cf8beaf2751d97253d376322 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/helpconfig.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/hookspec.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/hookspec.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce95dbd98b05c3fd7259a2d07beccb399469b9da Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/hookspec.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/junitxml.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/junitxml.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a808db6aa6af2b147a7d4ab338e42a588b01721 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/junitxml.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/legacypath.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/legacypath.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bd231f86842ccfe320a7fb3b34d700e2647170ec Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/_pytest/__pycache__/legacypath.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/logging.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/logging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..749e67169a6de8ece2791d60b1a5dc7caa4afe65 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/logging.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/main.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/main.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fc18d5e051e5e91440b36d95d4ed8e072a3cc7b Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/main.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/monkeypatch.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/monkeypatch.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c360ac45841c7a439917fef31782769d5b7a4d16 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/monkeypatch.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/nodes.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/nodes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..05a32dd3ebf887b276c62e53018f6f855dfaac2f Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/nodes.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/outcomes.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/outcomes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77ce61e3dfd7e949538b2316a5b114cb2a012ef8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/outcomes.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/pastebin.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pastebin.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a5304e557d5cc4423fd3fd7db2e9193f8220116 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pastebin.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/pathlib.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pathlib.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..736b1f337b2cd0b5fe8f637f53e8ac946e53b369 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pathlib.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6c2e0ad26f27e3b905947b4c031b0af232cf6fc Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester_assertions.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester_assertions.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..ef1850b364fc95b165c3b2304c08e6846de34359 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/pytester_assertions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/python.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/python.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2aec6566e601f9aedd35115afbd6be1ec0c0a65 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/python.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/python_api.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/python_api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b5f5436a46433e8baec752c58656df02854b36a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/python_api.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/raises.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/raises.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e3879cded0b87d93227001df807853fbc961e6f Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/raises.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/recwarn.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/recwarn.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c37702688307895599b9860869aae2abe6e802cf Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/recwarn.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/reports.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/reports.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67bb67a3b085a306808f4a6ef864dab35825d41a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/reports.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/runner.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/runner.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db7cda3bbecd87be195c20006bdd7db7859ba891 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/runner.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/scope.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/scope.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f3ed94bb18243f7acf26bc45dfb6ee8d2f60d7bd Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/scope.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/setuponly.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/setuponly.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3d01781fdf2f5c735e7a50012e623deebf489820 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/setuponly.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/setupplan.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/setupplan.cpython-310.pyc new file 
mode 100644 index 0000000000000000000000000000000000000000..1f8ed1b9fb353ff900c50b05fbadb4e9e4948982 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/setupplan.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/skipping.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/skipping.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36b51a4a5ab20535f8a50b73f272dad219cddd8c Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/skipping.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/stash.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/stash.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa47d0e667ade8988b698e553a46df5e02f8c1d8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/stash.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/stepwise.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/stepwise.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9452a4c0cca1fda8cbea91d592a9f91a8d46547a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/stepwise.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/terminal.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/terminal.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4bba04c274d873432ee7692a9bd0818e86315b54 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/terminal.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/threadexception.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/threadexception.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a804d8cf7354d2ffb08280190f59e3b23a7d17c6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/threadexception.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/timing.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/timing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dbf45dbe5b7935234af5717bd2e146ffd077b2f2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/timing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/tmpdir.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/tmpdir.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5717a4c87f24862a56eb6b8caf06b83c96e1c04c Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/tmpdir.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/tracemalloc.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/tracemalloc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..873c1e4c056091db225fb722a94815f031404cb3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/tracemalloc.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/unittest.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/_pytest/__pycache__/unittest.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6790fb0211bed8fa33a0f5dcf66d8f884073fec8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/unittest.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/unraisableexception.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/unraisableexception.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6b2e9d06efb0c713efceb5d5e38cee0edeb7907 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/unraisableexception.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/warning_types.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/warning_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c4f25f2a279d62e323ea1644cc78ed8829c0736 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/warning_types.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/__pycache__/warnings.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/__pycache__/warnings.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a06d9a49f5ed4afd1ffd0a38c2e65c7503e21af7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/__pycache__/warnings.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_argcomplete.py b/venv/lib/python3.10/site-packages/_pytest/_argcomplete.py new file mode 100644 index 0000000000000000000000000000000000000000..59426ef949ed9276b5708f9f44e6893f2333f2e1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,117 @@ +"""Allow bash-completion for argparse with argcomplete if installed. + +Needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*').completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= + +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK. + +INSTALL/DEBUGGING +================= + +To include this support in another application that has setup.py generated +scripts: + +- Add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point. 
+ +- Include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + Call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument(). + +If things do not work right away: + +- Switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 + +- Run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? + will echo 0 if the magic line has been found, 1 if not. + +- Sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" + +from __future__ import annotations + +import argparse +from glob import glob +import os +import sys +from typing import Any + + +class FastFilesCompleter: + """Fast file completer class.""" + + def __init__(self, directories: bool = True) -> None: + self.directories = directories + + def __call__(self, prefix: str, **kwargs: Any) -> list[str]: + # Only called on non option completions. + if os.sep in prefix[1:]: + prefix_dir = len(os.path.dirname(prefix) + os.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if "*" not in prefix and "?" not in prefix: + # We are on unix, otherwise no bash. + if not prefix or prefix[-1] == os.sep: + globbed.extend(glob(prefix + ".*")) + prefix += "*" + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += "/" + # Append stripping the prefix (like bash, not like compgen). + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get("_ARGCOMPLETE"): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter: FastFilesCompleter | None = FastFilesCompleter() + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + argcomplete.autocomplete(parser, always_complete_options=False) + +else: + + def try_argcomplete(parser: argparse.ArgumentParser) -> None: + pass + + filescompleter = None diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/__init__.py b/venv/lib/python3.10/site-packages/_pytest/_code/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7f67a2e3e0a6f34e04444d9410f517d59d21c422 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,26 @@ +"""Python inspection/code generation API.""" + +from __future__ import annotations + +from .code import Code +from .code import ExceptionInfo +from .code import filter_traceback +from .code import Frame +from .code import getfslineno +from .code import Traceback +from .code import TracebackEntry +from .source import getrawcode +from .source import Source + + +__all__ = [ + "Code", + "ExceptionInfo", + "Frame", + "Source", + "Traceback", + "TracebackEntry", + "filter_traceback", + "getfslineno", + "getrawcode", +] diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41f934ce5faad763788781ff92884e9415a7e2fd Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/code.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/code.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..d1af81a45d4474d0e3db9298725f757f88d62815 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/code.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/source.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/source.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb2691b3cda75467eee22c1183beeb53811be1e4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_code/__pycache__/source.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/code.py b/venv/lib/python3.10/site-packages/_pytest/_code/code.py new file mode 100644 index 0000000000000000000000000000000000000000..f1241f14136f7b1c5bfceaf9e6972fa3e239ef44 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_code/code.py @@ -0,0 +1,1567 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import inspect +from inspect import CO_VARARGS +from inspect import CO_VARKEYWORDS +from io import StringIO +import os +from pathlib import Path +import re +import sys +from traceback import extract_tb +from traceback import format_exception +from traceback import format_exception_only +from traceback import FrameSummary +from types import CodeType +from types import FrameType +from types import TracebackType +from typing import Any +from typing import ClassVar +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import overload +from typing import SupportsIndex +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +import pluggy + +import _pytest +from _pytest._code.source import findsource +from _pytest._code.source import getrawcode +from _pytest._code.source import getstatementrange_ast +from _pytest._code.source import Source +from _pytest._io import TerminalWriter +from _pytest._io.saferepr import safeformat +from _pytest._io.saferepr import saferepr +from _pytest.compat import get_real_func +from _pytest.deprecated import check_ispytest +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +TracebackStyle = Literal["long", "short", "line", "no", "native", "value", "auto"] + +EXCEPTION_OR_MORE = Union[type[BaseException], tuple[type[BaseException], ...]] + + +class Code: + """Wrapper around Python code objects.""" + + __slots__ = ("raw",) + + def __init__(self, obj: CodeType) -> None: + self.raw = obj + + @classmethod + def from_function(cls, obj: object) -> Code: + return cls(getrawcode(obj)) + + def __eq__(self, other): + return self.raw == other.raw + + # Ignore type because of https://github.com/python/mypy/issues/4266. 
+ __hash__ = None # type: ignore + + @property + def firstlineno(self) -> int: + return self.raw.co_firstlineno - 1 + + @property + def name(self) -> str: + return self.raw.co_name + + @property + def path(self) -> Path | str: + """Return a path object pointing to source code, or an ``str`` in + case of ``OSError`` / non-existing file.""" + if not self.raw.co_filename: + return "" + try: + p = absolutepath(self.raw.co_filename) + # maybe don't try this checking + if not p.exists(): + raise OSError("path check failed.") + return p + except OSError: + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? + return self.raw.co_filename + + @property + def fullsource(self) -> Source | None: + """Return a _pytest._code.Source object for the full source file of the code.""" + full, _ = findsource(self.raw) + return full + + def source(self) -> Source: + """Return a _pytest._code.Source object for the code object's source only.""" + # return source only for that part of code + return Source(self.raw) + + def getargs(self, var: bool = False) -> tuple[str, ...]: + """Return a tuple with the argument names for the code object. + + If 'var' is set True also return the names of the variable and + keyword arguments when present. + """ + # Handy shortcut for getting args. + raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + + +class Frame: + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + __slots__ = ("raw",) + + def __init__(self, frame: FrameType) -> None: + self.raw = frame + + @property + def lineno(self) -> int: + return self.raw.f_lineno - 1 + + @property + def f_globals(self) -> dict[str, Any]: + return self.raw.f_globals + + @property + def f_locals(self) -> dict[str, Any]: + return self.raw.f_locals + + @property + def code(self) -> Code: + return Code(self.raw.f_code) + + @property + def statement(self) -> Source: + """Statement this frame is at.""" + if self.code.fullsource is None: + return Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """Evaluate 'code' in the frame. + + 'vars' are optional additional local variables. + + Returns the result of the evaluation. + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def repr(self, object: object) -> str: + """Return a 'safe' (non-recursive, one-line) string repr for 'object'.""" + return saferepr(object) + + def getargs(self, var: bool = False): + """Return a list of tuples (name, value) for all arguments. + + If 'var' is set True, also include the variable and keyword arguments + when present. 
+ """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + + +class TracebackEntry: + """A single entry in a Traceback.""" + + __slots__ = ("_rawentry", "_repr_style") + + def __init__( + self, + rawentry: TracebackType, + repr_style: Literal["short", "long"] | None = None, + ) -> None: + self._rawentry: Final = rawentry + self._repr_style: Final = repr_style + + def with_repr_style( + self, repr_style: Literal["short", "long"] | None + ) -> TracebackEntry: + return TracebackEntry(self._rawentry, repr_style) + + @property + def lineno(self) -> int: + return self._rawentry.tb_lineno - 1 + + def get_python_framesummary(self) -> FrameSummary: + # Python's built-in traceback module implements all the nitty gritty + # details to get column numbers of out frames. + stack_summary = extract_tb(self._rawentry, limit=1) + return stack_summary[0] + + # Column and end line numbers introduced in python 3.11 + if sys.version_info < (3, 11): + + @property + def end_lineno_relative(self) -> int | None: + return None + + @property + def colno(self) -> int | None: + return None + + @property + def end_colno(self) -> int | None: + return None + else: + + @property + def end_lineno_relative(self) -> int | None: + frame_summary = self.get_python_framesummary() + if frame_summary.end_lineno is None: # pragma: no cover + return None + return frame_summary.end_lineno - 1 - self.frame.code.firstlineno + + @property + def colno(self) -> int | None: + """Starting byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().colno + + @property + def end_colno(self) -> int | None: + """Ending byte offset of the expression in the traceback entry.""" + return self.get_python_framesummary().end_colno + + @property + def frame(self) -> Frame: + return Frame(self._rawentry.tb_frame) + + @property + def relline(self) -> int: + return self.lineno - self.frame.code.firstlineno + + def __repr__(self) -> str: + return f"" + + @property + def statement(self) -> Source: + """_pytest._code.Source object for the current statement.""" + source = self.frame.code.fullsource + assert source is not None + return source.getstatement(self.lineno) + + @property + def path(self) -> Path | str: + """Path to the source code.""" + return self.frame.code.path + + @property + def locals(self) -> dict[str, Any]: + """Locals of underlying frame.""" + return self.frame.f_locals + + def getfirstlinesource(self) -> int: + return self.frame.code.firstlineno + + def getsource( + self, astcache: dict[str | Path, ast.AST] | None = None + ) -> Source | None: + """Return failing source code.""" + # we use the passed in astcache to not reparse asttrees + # within exception info printing + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast( + self.lineno, source, astnode=astnode + ) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None and astcache is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self, excinfo: ExceptionInfo[BaseException] | None) -> bool: + """Return True if the current frame has a var __tracebackhide__ + resolving to True. 
+ + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + Mostly for internal use. + """ + tbh: bool | Callable[[ExceptionInfo[BaseException] | None], bool] = False + for maybe_ns_dct in (self.frame.f_locals, self.frame.f_globals): + # in normal cases, f_locals and f_globals are dictionaries + # however via `exec(...)` / `eval(...)` they can be other types + # (even incorrect types!). + # as such, we suppress all exceptions while accessing __tracebackhide__ + try: + tbh = maybe_ns_dct["__tracebackhide__"] + except Exception: + pass + else: + break + if tbh and callable(tbh): + return tbh(excinfo) + return tbh + + def __str__(self) -> str: + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except BaseException: + line = "???" + # This output does not quite match Python's repr for traceback entries, + # but changing it to do so would break certain plugins. See + # https://github.com/pytest-dev/pytest/pull/7535/ for details. + return f" File '{self.path}':{self.lineno + 1} in {name}\n {line}\n" + + @property + def name(self) -> str: + """co_name of underlying code.""" + return self.frame.code.raw.co_name + + +class Traceback(list[TracebackEntry]): + """Traceback objects encapsulate and offer higher level access to Traceback entries.""" + + def __init__( + self, + tb: TracebackType | Iterable[TracebackEntry], + ) -> None: + """Initialize from given python traceback object and ExceptionInfo.""" + if isinstance(tb, TracebackType): + + def f(cur: TracebackType) -> Iterable[TracebackEntry]: + cur_: TracebackType | None = cur + while cur_ is not None: + yield TracebackEntry(cur_) + cur_ = cur_.tb_next + + super().__init__(f(tb)) + else: + super().__init__(tb) + + def cut( + self, + path: os.PathLike[str] | str | None = None, + lineno: int | None = None, + firstlineno: int | None = None, + excludepath: os.PathLike[str] | None = None, + ) -> Traceback: + """Return a Traceback instance wrapping part of this Traceback. + + By providing any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined. + + This allows cutting the first part of a Traceback instance e.g. + for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback). + """ + path_ = None if path is None else os.fspath(path) + excludepath_ = None if excludepath is None else os.fspath(excludepath) + for x in self: + code = x.frame.code + codepath = code.path + if path is not None and str(codepath) != path_: + continue + if ( + excludepath is not None + and isinstance(codepath, Path) + and excludepath_ in (str(p) for p in codepath.parents) # type: ignore[operator] + ): + continue + if lineno is not None and x.lineno != lineno: + continue + if firstlineno is not None and x.frame.code.firstlineno != firstlineno: + continue + return Traceback(x._rawentry) + return self + + @overload + def __getitem__(self, key: SupportsIndex) -> TracebackEntry: ... + + @overload + def __getitem__(self, key: slice) -> Traceback: ... + + def __getitem__(self, key: SupportsIndex | slice) -> TracebackEntry | Traceback: + if isinstance(key, slice): + return self.__class__(super().__getitem__(key)) + else: + return super().__getitem__(key) + + def filter( + self, + excinfo_or_fn: ExceptionInfo[BaseException] | Callable[[TracebackEntry], bool], + /, + ) -> Traceback: + """Return a Traceback instance with certain items removed. 
+ + If the filter is an `ExceptionInfo`, removes all the ``TracebackEntry``s + which are hidden (see ishidden() above). + + Otherwise, the filter is a function that gets a single argument, a + ``TracebackEntry`` instance, and should return True when the item should + be added to the ``Traceback``, False when not. + """ + if isinstance(excinfo_or_fn, ExceptionInfo): + fn = lambda x: not x.ishidden(excinfo_or_fn) # noqa: E731 + else: + fn = excinfo_or_fn + return Traceback(filter(fn, self)) + + def recursionindex(self) -> int | None: + """Return the index of the frame/TracebackEntry where recursion originates if + appropriate, None if no recursion occurred.""" + cache: dict[tuple[Any, int, int], list[dict[str, Any]]] = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + # XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + values = cache.setdefault(key, []) + # Since Python 3.13 f_locals is a proxy, freeze it. + loc = dict(entry.frame.f_locals) + if values: + for otherloc in values: + if otherloc == loc: + return i + values.append(loc) + return None + + +def stringify_exception( + exc: BaseException, include_subexception_msg: bool = True +) -> str: + try: + notes = getattr(exc, "__notes__", []) + except KeyError: + # Workaround for https://github.com/python/cpython/issues/98778 on + # Python <= 3.9, and some 3.10 and 3.11 patch versions. + HTTPError = getattr(sys.modules.get("urllib.error", None), "HTTPError", ()) + if sys.version_info < (3, 12) and isinstance(exc, HTTPError): + notes = [] + else: # pragma: no cover + # exception not related to above bug, reraise + raise + if not include_subexception_msg and isinstance(exc, BaseExceptionGroup): + message = exc.message + else: + message = str(exc) + + return "\n".join( + [ + message, + *notes, + ] + ) + + +E = TypeVar("E", bound=BaseException, covariant=True) + + +@final +@dataclasses.dataclass +class ExceptionInfo(Generic[E]): + """Wraps sys.exc_info() objects and offers help for navigating the traceback.""" + + _assert_start_repr: ClassVar = "AssertionError('assert " + + _excinfo: tuple[type[E], E, TracebackType] | None + _striptext: str + _traceback: Traceback | None + + def __init__( + self, + excinfo: tuple[type[E], E, TracebackType] | None, + striptext: str = "", + traceback: Traceback | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._excinfo = excinfo + self._striptext = striptext + self._traceback = traceback + + @classmethod + def from_exception( + cls, + # Ignoring error: "Cannot use a covariant type variable as a parameter". + # This is OK to ignore because this class is (conceptually) readonly. + # See https://github.com/python/mypy/issues/7049. + exception: E, # type: ignore[misc] + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Return an ExceptionInfo for an existing exception. + + The exception must have a non-``None`` ``__traceback__`` attribute, + otherwise this function fails with an assertion error. This means that + the exception must have been raised, or added a traceback with the + :py:meth:`~BaseException.with_traceback()` method. + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + + .. 
versionadded:: 7.4 + """ + assert exception.__traceback__, ( + "Exceptions passed to ExcInfo.from_exception(...)" + " must have a non-None __traceback__." + ) + exc_info = (type(exception), exception, exception.__traceback__) + return cls.from_exc_info(exc_info, exprinfo) + + @classmethod + def from_exc_info( + cls, + exc_info: tuple[type[E], E, TracebackType], + exprinfo: str | None = None, + ) -> ExceptionInfo[E]: + """Like :func:`from_exception`, but using old-style exc_info tuple.""" + _striptext = "" + if exprinfo is None and isinstance(exc_info[1], AssertionError): + exprinfo = getattr(exc_info[1], "msg", None) + if exprinfo is None: + exprinfo = saferepr(exc_info[1]) + if exprinfo and exprinfo.startswith(cls._assert_start_repr): + _striptext = "AssertionError: " + + return cls(exc_info, _striptext, _ispytest=True) + + @classmethod + def from_current(cls, exprinfo: str | None = None) -> ExceptionInfo[BaseException]: + """Return an ExceptionInfo matching the current traceback. + + .. warning:: + + Experimental API + + :param exprinfo: + A text string helping to determine if we should strip + ``AssertionError`` from the output. Defaults to the exception + message/``__str__()``. + """ + tup = sys.exc_info() + assert tup[0] is not None, "no current exception" + assert tup[1] is not None, "no current exception" + assert tup[2] is not None, "no current exception" + exc_info = (tup[0], tup[1], tup[2]) + return ExceptionInfo.from_exc_info(exc_info, exprinfo) + + @classmethod + def for_later(cls) -> ExceptionInfo[E]: + """Return an unfilled ExceptionInfo.""" + return cls(None, _ispytest=True) + + def fill_unfilled(self, exc_info: tuple[type[E], E, TracebackType]) -> None: + """Fill an unfilled ExceptionInfo created with ``for_later()``.""" + assert self._excinfo is None, "ExceptionInfo was already filled" + self._excinfo = exc_info + + @property + def type(self) -> type[E]: + """The exception class.""" + assert self._excinfo is not None, ( + ".type can only be used after the context manager exits" + ) + return self._excinfo[0] + + @property + def value(self) -> E: + """The exception value.""" + assert self._excinfo is not None, ( + ".value can only be used after the context manager exits" + ) + return self._excinfo[1] + + @property + def tb(self) -> TracebackType: + """The exception raw traceback.""" + assert self._excinfo is not None, ( + ".tb can only be used after the context manager exits" + ) + return self._excinfo[2] + + @property + def typename(self) -> str: + """The type name of the exception.""" + assert self._excinfo is not None, ( + ".typename can only be used after the context manager exits" + ) + return self.type.__name__ + + @property + def traceback(self) -> Traceback: + """The traceback.""" + if self._traceback is None: + self._traceback = Traceback(self.tb) + return self._traceback + + @traceback.setter + def traceback(self, value: Traceback) -> None: + self._traceback = value + + def __repr__(self) -> str: + if self._excinfo is None: + return "" + return f"<{self.__class__.__name__} {saferepr(self._excinfo[1])} tblen={len(self.traceback)}>" + + def exconly(self, tryshort: bool = False) -> str: + """Return the exception as a string. + + When 'tryshort' resolves to True, and the exception is an + AssertionError, only the actual exception part of the exception + representation is returned (so 'AssertionError: ' is removed from + the beginning). 
+ """ + + def _get_single_subexc( + eg: BaseExceptionGroup[BaseException], + ) -> BaseException | None: + if len(eg.exceptions) != 1: + return None + if isinstance(e := eg.exceptions[0], BaseExceptionGroup): + return _get_single_subexc(e) + return e + + if ( + tryshort + and isinstance(self.value, BaseExceptionGroup) + and (subexc := _get_single_subexc(self.value)) is not None + ): + return f"{subexc!r} [single exception in {type(self.value).__name__}]" + + lines = format_exception_only(self.type, self.value) + text = "".join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext) :] + return text + + def errisinstance(self, exc: EXCEPTION_OR_MORE) -> bool: + """Return True if the exception is an instance of exc. + + Consider using ``isinstance(excinfo.value, exc)`` instead. + """ + return isinstance(self.value, exc) + + def _getreprcrash(self) -> ReprFileLocation | None: + # Find last non-hidden traceback entry that led to the exception of the + # traceback, or None if all hidden. + for i in range(-1, -len(self.traceback) - 1, -1): + entry = self.traceback[i] + if not entry.ishidden(self): + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + exconly = self.exconly(tryshort=True) + return ReprFileLocation(path, lineno + 1, exconly) + return None + + def getrepr( + self, + showlocals: bool = False, + style: TracebackStyle = "long", + abspath: bool = False, + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] = True, + funcargs: bool = False, + truncate_locals: bool = True, + truncate_args: bool = True, + chain: bool = True, + ) -> ReprExceptionInfo | ExceptionChainRepr: + """Return str()able representation of this exception info. + + :param bool showlocals: + Show locals per traceback entry. + Ignored if ``style=="native"``. + + :param str style: + long|short|line|no|native|value traceback style. + + :param bool abspath: + If paths should be changed to absolute or left unchanged. + + :param tbfilter: + A filter for traceback entries. + + * If false, don't hide any entries. + * If true, hide internal entries and entries that contain a local + variable ``__tracebackhide__ = True``. + * If a callable, delegates the filtering to the callable. + + Ignored if ``style`` is ``"native"``. + + :param bool funcargs: + Show fixtures ("funcargs" for legacy purposes) per traceback entry. + + :param bool truncate_locals: + With ``showlocals==True``, make sure locals can be safely represented as strings. + + :param bool truncate_args: + With ``showargs==True``, make sure args can be safely represented as strings. + + :param bool chain: + If chained exceptions in Python 3 should be shown. + + .. versionchanged:: 3.9 + + Added the ``chain`` parameter. + """ + if style == "native": + return ReprExceptionInfo( + reprtraceback=ReprTracebackNative( + format_exception( + self.type, + self.value, + self.traceback[0]._rawentry if self.traceback else None, + ) + ), + reprcrash=self._getreprcrash(), + ) + + fmt = FormattedExcinfo( + showlocals=showlocals, + style=style, + abspath=abspath, + tbfilter=tbfilter, + funcargs=funcargs, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + chain=chain, + ) + return fmt.repr_excinfo(self) + + def match(self, regexp: str | re.Pattern[str]) -> Literal[True]: + """Check whether the regular expression `regexp` matches the string + representation of the exception using :func:`python:re.search`. + + If it matches `True` is returned, otherwise an `AssertionError` is raised. 
+ """ + __tracebackhide__ = True + value = stringify_exception(self.value) + msg = f"Regex pattern did not match.\n Regex: {regexp!r}\n Input: {value!r}" + if regexp == value: + msg += "\n Did you mean to `re.escape()` the regex?" + assert re.search(regexp, value), msg + # Return True to allow for "assert excinfo.match()". + return True + + def _group_contains( + self, + exc_group: BaseExceptionGroup[BaseException], + expected_exception: EXCEPTION_OR_MORE, + match: str | re.Pattern[str] | None, + target_depth: int | None = None, + current_depth: int = 1, + ) -> bool: + """Return `True` if a `BaseExceptionGroup` contains a matching exception.""" + if (target_depth is not None) and (current_depth > target_depth): + # already descended past the target depth + return False + for exc in exc_group.exceptions: + if isinstance(exc, BaseExceptionGroup): + if self._group_contains( + exc, expected_exception, match, target_depth, current_depth + 1 + ): + return True + if (target_depth is not None) and (current_depth != target_depth): + # not at the target depth, no match + continue + if not isinstance(exc, expected_exception): + continue + if match is not None: + value = stringify_exception(exc) + if not re.search(match, value): + continue + return True + return False + + def group_contains( + self, + expected_exception: EXCEPTION_OR_MORE, + *, + match: str | re.Pattern[str] | None = None, + depth: int | None = None, + ) -> bool: + """Check whether a captured exception group contains a matching exception. + + :param Type[BaseException] | Tuple[Type[BaseException]] expected_exception: + The expected exception type, or a tuple if one of multiple possible + exception types are expected. + + :param str | re.Pattern[str] | None match: + If specified, a string containing a regular expression, + or a regular expression object, that is tested against the string + representation of the exception and its `PEP-678 ` `__notes__` + using :func:`re.search`. + + To match a literal string that may contain :ref:`special characters + `, the pattern can first be escaped with :func:`re.escape`. + + :param Optional[int] depth: + If `None`, will search for a matching exception at any nesting depth. + If >= 1, will only match an exception if it's at the specified depth (depth = 1 being + the exceptions contained within the topmost exception group). + + .. versionadded:: 8.0 + + .. warning:: + This helper makes it easy to check for the presence of specific exceptions, + but it is very bad for checking that the group does *not* contain + *any other exceptions*. + You should instead consider using :class:`pytest.RaisesGroup` + + """ + msg = "Captured exception is not an instance of `BaseExceptionGroup`" + assert isinstance(self.value, BaseExceptionGroup), msg + msg = "`depth` must be >= 1 if specified" + assert (depth is None) or (depth >= 1), msg + return self._group_contains(self.value, expected_exception, match, depth) + + +if TYPE_CHECKING: + from typing_extensions import TypeAlias + + # Type alias for the `tbfilter` setting: + # bool: If True, it should be filtered using Traceback.filter() + # callable: A callable that takes an ExceptionInfo and returns the filtered traceback. 
+ TracebackFilter: TypeAlias = Union[ + bool, Callable[[ExceptionInfo[BaseException]], Traceback] + ] + + +@dataclasses.dataclass +class FormattedExcinfo: + """Presenting information about failing Functions and Generators.""" + + # for traceback entries + flow_marker: ClassVar = ">" + fail_marker: ClassVar = "E" + + showlocals: bool = False + style: TracebackStyle = "long" + abspath: bool = True + tbfilter: TracebackFilter = True + funcargs: bool = False + truncate_locals: bool = True + truncate_args: bool = True + chain: bool = True + astcache: dict[str | Path, ast.AST] = dataclasses.field( + default_factory=dict, init=False, repr=False + ) + + def _getindent(self, source: Source) -> int: + # Figure out indent for the given source. + try: + s = str(source.getstatement(len(source) - 1)) + except KeyboardInterrupt: + raise + except BaseException: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except BaseException: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry: TracebackEntry) -> Source | None: + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def repr_args(self, entry: TracebackEntry) -> ReprFuncArgs | None: + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + if self.truncate_args: + str_repr = saferepr(argvalue) + else: + str_repr = saferepr(argvalue, maxsize=None) + args.append((argname, str_repr)) + return ReprFuncArgs(args) + return None + + def get_source( + self, + source: Source | None, + line_index: int = -1, + excinfo: ExceptionInfo[BaseException] | None = None, + short: bool = False, + end_line_index: int | None = None, + colno: int | None = None, + end_colno: int | None = None, + ) -> list[str]: + """Return formatted and marked up source lines.""" + lines = [] + if source is not None and line_index < 0: + line_index += len(source) + if source is None or line_index >= len(source.lines) or line_index < 0: + # `line_index` could still be outside `range(len(source.lines))` if + # we're processing AST with pathological position attributes. + source = Source("???") + line_index = 0 + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index].strip(), + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + lines.extend( + self.get_highlight_arrows_for_line( + raw_line=source.raw_lines[line_index], + line=source.lines[line_index], + lineno=line_index, + end_lineno=end_line_index, + colno=colno, + end_colno=end_colno, + ) + ) + for line in source.lines[line_index + 1 :]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_highlight_arrows_for_line( + self, + line: str, + raw_line: str, + lineno: int | None, + end_lineno: int | None, + colno: int | None, + end_colno: int | None, + ) -> list[str]: + """Return characters highlighting a source line. 
+ + Example with colno and end_colno pointing to the bar expression: + "foo() + bar()" + returns " ^^^^^" + """ + if lineno != end_lineno: + # Don't handle expressions that span multiple lines. + return [] + if colno is None or end_colno is None: + # Can't do anything without column information. + return [] + + num_stripped_chars = len(raw_line) - len(line) + + start_char_offset = _byte_offset_to_character_offset(raw_line, colno) + end_char_offset = _byte_offset_to_character_offset(raw_line, end_colno) + num_carets = end_char_offset - start_char_offset + # If the highlight would span the whole line, it is redundant, don't + # show it. + if num_carets >= len(line.strip()): + return [] + + highlights = " " + highlights += " " * (start_char_offset - num_stripped_chars + 1) + highlights += "^" * num_carets + return [highlights] + + def get_exconly( + self, + excinfo: ExceptionInfo[BaseException], + indent: int = 4, + markall: bool = False, + ) -> list[str]: + lines = [] + indentstr = " " * indent + # Get the real exception information out. + exlines = excinfo.exconly(tryshort=True).split("\n") + failindent = self.fail_marker + indentstr[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indentstr + return lines + + def repr_locals(self, locals: Mapping[str, object]) -> ReprLocals | None: + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == "__builtins__": + lines.append("__builtins__ = <builtins>") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. + if self.truncate_locals: + str_repr = saferepr(value) + else: + str_repr = safeformat(value) + # if len(str_repr) < 70 or not isinstance(value, (list, tuple, dict)): + lines.append(f"{name:<10} = {str_repr}") + # else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + return None + + def repr_traceback_entry( + self, + entry: TracebackEntry | None, + excinfo: ExceptionInfo[BaseException] | None = None, + ) -> ReprEntry: + lines: list[str] = [] + style = ( + entry._repr_style + if entry is not None and entry._repr_style is not None + else self.style + ) + if style in ("short", "long") and entry is not None: + source = self._getentrysource(entry) + if source is None: + source = Source("???") + line_index = 0 + end_line_index, colno, end_colno = None, None, None + else: + line_index = entry.relline + end_line_index = entry.end_lineno_relative + colno = entry.colno + end_colno = entry.end_colno + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source( + source=source, + line_index=line_index, + excinfo=excinfo, + short=short, + end_line_index=end_line_index, + colno=colno, + end_colno=end_colno, + ) + lines.extend(s) + if short: + message = f"in {entry.name}" + else: + message = (excinfo and excinfo.typename) or "" + entry_path = entry.path + path = self._makepath(entry_path) + reprfileloc = ReprFileLocation(path, entry.lineno + 1, message) + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style) + elif style == "value": + if excinfo: + lines.extend(str(excinfo.value).split("\n")) + return ReprEntry(lines, None, None, None, style) + else: + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None,
None, style) + + def _makepath(self, path: Path | str) -> str: + if not self.abspath and isinstance(path, Path): + try: + np = bestrelpath(Path.cwd(), path) + except OSError: + return str(path) + if len(np) < len(str(path)): + return np + return str(path) + + def repr_traceback(self, excinfo: ExceptionInfo[BaseException]) -> ReprTraceback: + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + + if isinstance(excinfo.value, RecursionError): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + if not traceback: + if extraline is None: + extraline = "All traceback entries are hidden. Pass `--full-trace` to see hidden and internal frames." + entries = [self.repr_traceback_entry(None, excinfo)] + return ReprTraceback(entries, extraline, style=self.style) + + last = traceback[-1] + if self.style == "value": + entries = [self.repr_traceback_entry(last, excinfo)] + return ReprTraceback(entries, None, style=self.style) + + entries = [ + self.repr_traceback_entry(entry, excinfo if last == entry else None) + for entry in traceback + ] + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback( + self, traceback: Traceback + ) -> tuple[Traceback, str | None]: + """Truncate the given recursive traceback trying to find the starting + point of the recursion. + + The detection is done by going through each traceback entry and + finding the point in which the locals of the frame are equal to the + locals of a previous frame (see ``recursionindex()``). + + Handle the situation where the recursion process might raise an + exception (for example comparing numpy arrays using equality raises a + TypeError), in which case we do our best to warn the user of the + error and show a limited traceback. + """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline: str | None = ( + "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n" + " The following exception happened when comparing locals in the stack frame:\n" + f" {type(e).__name__}: {e!s}\n" + f" Displaying first and last {max_frames} stack frames out of {len(traceback)}." + ) + # Type ignored because adding two instances of a List subtype + # currently incorrectly has type List instead of the subtype. + traceback = traceback[:max_frames] + traceback[-max_frames:] # type: ignore + else: + if recursionindex is not None: + extraline = "!!! Recursion detected (same locals & position)" + traceback = traceback[: recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo(self, excinfo: ExceptionInfo[BaseException]) -> ExceptionChainRepr: + repr_chain: list[tuple[ReprTraceback, ReprFileLocation | None, str | None]] = [] + e: BaseException | None = excinfo.value + excinfo_: ExceptionInfo[BaseException] | None = excinfo + descr = None + seen: set[int] = set() + while e is not None and id(e) not in seen: + seen.add(id(e)) + + if excinfo_: + # Fall back to native traceback as a temporary workaround until + # full support for exception groups added to ExceptionInfo. 
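Aside: the exception-group fallback just below delegates to stdlib-style native formatting. A minimal standalone sketch, assuming the `format_exception` used by this module is `traceback.format_exception` (the sketch itself is illustrative, not part of the diff):

    import traceback

    try:
        1 / 0
    except ZeroDivisionError as exc:
        # The same (type, value, tb) triple the fallback passes through.
        lines = traceback.format_exception(type(exc), exc, exc.__traceback__)
    print("".join(lines), end="")  # multi-line native traceback text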
+ # See https://github.com/pytest-dev/pytest/issues/9159 + reprtraceback: ReprTraceback | ReprTracebackNative + if isinstance(e, BaseExceptionGroup): + # don't filter any sub-exceptions since they shouldn't have any internal frames + traceback = filter_excinfo_traceback(self.tbfilter, excinfo) + reprtraceback = ReprTracebackNative( + format_exception( + type(excinfo.value), + excinfo.value, + traceback[0]._rawentry, + ) + ) + else: + reprtraceback = self.repr_traceback(excinfo_) + reprcrash = excinfo_._getreprcrash() + else: + # Fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work. + reprtraceback = ReprTracebackNative(format_exception(type(e), e, None)) + reprcrash = None + repr_chain += [(reprtraceback, reprcrash, descr)] + + if e.__cause__ is not None and self.chain: + e = e.__cause__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "The above exception was the direct cause of the following exception:" + elif ( + e.__context__ is not None and not e.__suppress_context__ and self.chain + ): + e = e.__context__ + excinfo_ = ExceptionInfo.from_exception(e) if e.__traceback__ else None + descr = "During handling of the above exception, another exception occurred:" + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +@dataclasses.dataclass(eq=False) +class TerminalRepr: + def __str__(self) -> str: + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = StringIO() + tw = TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self) -> str: + return f"<{self.__class__} instance at {id(self):0x}>" + + def toterminal(self, tw: TerminalWriter) -> None: + raise NotImplementedError() + + +# This class is abstract -- only subclasses are instantiated. +@dataclasses.dataclass(eq=False) +class ExceptionRepr(TerminalRepr): + # Provided by subclasses. + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + sections: list[tuple[str, str, str]] = dataclasses.field( + init=False, default_factory=list + ) + + def addsection(self, name: str, content: str, sep: str = "-") -> None: + self.sections.append((name, content, sep)) + + def toterminal(self, tw: TerminalWriter) -> None: + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +@dataclasses.dataclass(eq=False) +class ExceptionChainRepr(ExceptionRepr): + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]] + + def __init__( + self, + chain: Sequence[tuple[ReprTraceback, ReprFileLocation | None, str | None]], + ) -> None: + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain. 
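Aside: the `while e is not None` loop above follows Python's built-in exception links (`__cause__`, `__context__`, `__suppress_context__`). A minimal standalone sketch of that traversal (illustrative only):

    try:
        try:
            {}["missing"]
        except KeyError as inner:
            raise ValueError("lookup failed") from inner  # sets __cause__
    except ValueError as outer:
        e = outer
        while e is not None:
            print(type(e).__name__)  # ValueError, then KeyError
            if e.__cause__ is not None:
                e = e.__cause__  # "direct cause" branch
            elif e.__context__ is not None and not e.__suppress_context__:
                e = e.__context__  # "during handling" branch
            else:
                e = None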
+ super().__init__( + reprtraceback=chain[-1][0], + reprcrash=chain[-1][1], + ) + self.chain = chain + + def toterminal(self, tw: TerminalWriter) -> None: + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprExceptionInfo(ExceptionRepr): + reprtraceback: ReprTraceback + reprcrash: ReprFileLocation | None + + def toterminal(self, tw: TerminalWriter) -> None: + self.reprtraceback.toterminal(tw) + super().toterminal(tw) + + +@dataclasses.dataclass(eq=False) +class ReprTraceback(TerminalRepr): + reprentries: Sequence[ReprEntry | ReprEntryNative] + extraline: str | None + style: TracebackStyle + + entrysep: ClassVar = "_ " + + def toterminal(self, tw: TerminalWriter) -> None: + # The entries might have different styles. + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i + 1] + if entry.style == "long" or ( + entry.style == "short" and next_entry.style == "long" + ): + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + + +class ReprTracebackNative(ReprTraceback): + def __init__(self, tblines: Sequence[str]) -> None: + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + self.style = "native" + + +@dataclasses.dataclass(eq=False) +class ReprEntryNative(TerminalRepr): + lines: Sequence[str] + + style: ClassVar[TracebackStyle] = "native" + + def toterminal(self, tw: TerminalWriter) -> None: + tw.write("".join(self.lines)) + + +@dataclasses.dataclass(eq=False) +class ReprEntry(TerminalRepr): + lines: Sequence[str] + reprfuncargs: ReprFuncArgs | None + reprlocals: ReprLocals | None + reprfileloc: ReprFileLocation | None + style: TracebackStyle + + def _write_entry_lines(self, tw: TerminalWriter) -> None: + """Write the source code portions of a list of traceback entries with syntax highlighting. + + Usually entries are lines like these: + + " x = 1" + "> assert x == 2" + "E assert 1 == 2" + + This function takes care of rendering the "source" portions of it (the lines without + the "E" prefix) using syntax highlighting, taking care to not highlighting the ">" + character, as doing so might break line continuations. 
+ """ + if not self.lines: + return + + if self.style == "value": + # Using tw.write instead of tw.line for testing purposes due to TWMock implementation; + # lines written with TWMock.line and TWMock._write_source cannot be distinguished + # from each other, whereas lines written with TWMock.write are marked with TWMock.WRITE + for line in self.lines: + tw.write(line) + tw.write("\n") + return + + # separate indents and source lines that are not failures: we want to + # highlight the code but not the indentation, which may contain markers + # such as "> assert 0" + fail_marker = f"{FormattedExcinfo.fail_marker} " + indent_size = len(fail_marker) + indents: list[str] = [] + source_lines: list[str] = [] + failure_lines: list[str] = [] + for index, line in enumerate(self.lines): + is_failure_line = line.startswith(fail_marker) + if is_failure_line: + # from this point on all lines are considered part of the failure + failure_lines.extend(self.lines[index:]) + break + else: + indents.append(line[:indent_size]) + source_lines.append(line[indent_size:]) + + tw._write_source(source_lines, indents) + + # failure lines are always completely red and bold + for line in failure_lines: + tw.line(line, bold=True, red=True) + + def toterminal(self, tw: TerminalWriter) -> None: + if self.style == "short": + if self.reprfileloc: + self.reprfileloc.toterminal(tw) + self._write_entry_lines(tw) + if self.reprlocals: + self.reprlocals.toterminal(tw, indent=" " * 8) + return + + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + + self._write_entry_lines(tw) + + if self.reprlocals: + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self) -> str: + return "{}\n{}\n{}".format( + "\n".join(self.lines), self.reprlocals, self.reprfileloc + ) + + +@dataclasses.dataclass(eq=False) +class ReprFileLocation(TerminalRepr): + path: str + lineno: int + message: str + + def __post_init__(self) -> None: + self.path = str(self.path) + + def toterminal(self, tw: TerminalWriter) -> None: + # Filename and lineno output for each entry, using an output format + # that most editors understand. + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(f":{self.lineno}: {msg}") + + +@dataclasses.dataclass(eq=False) +class ReprLocals(TerminalRepr): + lines: Sequence[str] + + def toterminal(self, tw: TerminalWriter, indent="") -> None: + for line in self.lines: + tw.line(indent + line) + + +@dataclasses.dataclass(eq=False) +class ReprFuncArgs(TerminalRepr): + args: Sequence[tuple[str, object]] + + def toterminal(self, tw: TerminalWriter) -> None: + if self.args: + linesofar = "" + for name, value in self.args: + ns = f"{name} = {value}" + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getfslineno(obj: object) -> tuple[str | Path, int]: + """Return source location (path, lineno) for the given object. + + If the source cannot be determined return ("", -1). + + The line number is 0-based. + """ + # xxx let decorators etc specify a sane ordering + # NOTE: this used to be done in _pytest.compat.getfslineno, initially added + # in 6ec13a2b9. It ("place_as") appears to be something very custom. 
+ obj = get_real_func(obj) + if hasattr(obj, "place_as"): + obj = obj.place_as + + try: + code = Code.from_function(obj) + except TypeError: + try: + fn = inspect.getsourcefile(obj) or inspect.getfile(obj) # type: ignore[arg-type] + except TypeError: + return "", -1 + + fspath = (fn and absolutepath(fn)) or "" + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except OSError: + pass + return fspath, lineno + + return code.path, code.firstlineno + + +def _byte_offset_to_character_offset(str, offset): + """Converts a byte based offset in a string to a code-point.""" + as_utf8 = str.encode("utf-8") + return len(as_utf8[:offset].decode("utf-8", errors="replace")) + + +# Relative paths that we use to filter traceback entries from appearing to the user; +# see filter_traceback. +# note: if we need to add more paths than what we have now we should probably use a list +# for better maintenance. + +_PLUGGY_DIR = Path(pluggy.__file__.rstrip("oc")) +# pluggy is either a package or a single module depending on the version +if _PLUGGY_DIR.name == "__init__.py": + _PLUGGY_DIR = _PLUGGY_DIR.parent +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def filter_traceback(entry: TracebackEntry) -> bool: + """Return True if a TracebackEntry instance should be included in tracebacks. + + We hide traceback entries of: + + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code. + # See https://bitbucket.org/pytest-dev/py/issues/71. + raw_filename = entry.frame.code.raw.co_filename + is_generated = "<" in raw_filename and ">" in raw_filename + if is_generated: + return False + + # entry.path might point to a non-existing file, in which case it will + # also return a str object. See #1133. + p = Path(entry.path) + + parents = p.parents + if _PLUGGY_DIR in parents: + return False + if _PYTEST_DIR in parents: + return False + + return True + + +def filter_excinfo_traceback( + tbfilter: TracebackFilter, excinfo: ExceptionInfo[BaseException] +) -> Traceback: + """Filter the exception traceback in ``excinfo`` according to ``tbfilter``.""" + if callable(tbfilter): + return tbfilter(excinfo) + elif tbfilter: + return excinfo.traceback.filter(excinfo) + else: + return excinfo.traceback diff --git a/venv/lib/python3.10/site-packages/_pytest/_code/source.py b/venv/lib/python3.10/site-packages/_pytest/_code/source.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f7201a40f755a3181d2686b6089d7cee79e26c --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_code/source.py @@ -0,0 +1,225 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import ast +from bisect import bisect_right +from collections.abc import Iterable +from collections.abc import Iterator +import inspect +import textwrap +import tokenize +import types +from typing import overload +import warnings + + +class Source: + """An immutable object holding a source code fragment. + + When using Source(...), the source lines are deindented. 
+ """ + + def __init__(self, obj: object = None) -> None: + if not obj: + self.lines: list[str] = [] + self.raw_lines: list[str] = [] + elif isinstance(obj, Source): + self.lines = obj.lines + self.raw_lines = obj.raw_lines + elif isinstance(obj, (tuple, list)): + self.lines = deindent(x.rstrip("\n") for x in obj) + self.raw_lines = list(x.rstrip("\n") for x in obj) + elif isinstance(obj, str): + self.lines = deindent(obj.split("\n")) + self.raw_lines = obj.split("\n") + else: + try: + rawcode = getrawcode(obj) + src = inspect.getsource(rawcode) + except TypeError: + src = inspect.getsource(obj) # type: ignore[arg-type] + self.lines = deindent(src.split("\n")) + self.raw_lines = src.split("\n") + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Source): + return NotImplemented + return self.lines == other.lines + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @overload + def __getitem__(self, key: int) -> str: ... + + @overload + def __getitem__(self, key: slice) -> Source: ... + + def __getitem__(self, key: int | slice) -> str | Source: + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start : key.stop] + newsource.raw_lines = self.raw_lines[key.start : key.stop] + return newsource + + def __iter__(self) -> Iterator[str]: + return iter(self.lines) + + def __len__(self) -> int: + return len(self.lines) + + def strip(self) -> Source: + """Return new Source object with trailing and leading blank lines removed.""" + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end - 1].strip(): + end -= 1 + source = Source() + source.raw_lines = self.raw_lines + source.lines[:] = self.lines[start:end] + return source + + def indent(self, indent: str = " " * 4) -> Source: + """Return a copy of the source object with all lines indented by the + given indent-string.""" + newsource = Source() + newsource.raw_lines = self.raw_lines + newsource.lines = [(indent + line) for line in self.lines] + return newsource + + def getstatement(self, lineno: int) -> Source: + """Return Source statement which contains the given linenumber + (counted from 0).""" + start, end = self.getstatementrange(lineno) + return self[start:end] + + def getstatementrange(self, lineno: int) -> tuple[int, int]: + """Return (start, end) tuple which spans the minimal statement region + which containing the given lineno.""" + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self) -> Source: + """Return a new Source object deindented.""" + newsource = Source() + newsource.lines[:] = deindent(self.lines) + newsource.raw_lines = self.raw_lines + return newsource + + def __str__(self) -> str: + return "\n".join(self.lines) + + +# +# helper functions +# + + +def findsource(obj) -> tuple[Source | None, int]: + try: + sourcelines, lineno = inspect.findsource(obj) + except Exception: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + source.raw_lines = sourcelines + return source, lineno + + +def getrawcode(obj: object, trycall: bool = True) -> types.CodeType: + """Return code object for given function.""" + try: + return obj.__code__ # type: 
ignore[attr-defined,no-any-return] + except AttributeError: + pass + if trycall: + call = getattr(obj, "__call__", None) + if call and not isinstance(obj, type): + return getrawcode(call, trycall=False) + raise TypeError(f"could not get code object for {obj!r}") + + +def deindent(lines: Iterable[str]) -> list[str]: + return textwrap.dedent("\n".join(lines)).splitlines() + + +def get_statement_startend2(lineno: int, node: ast.AST) -> tuple[int, int | None]: + # Flatten all statements and except handlers into one lineno-list. + # AST's line numbers start indexing at 1. + values: list[int] = [] + for x in ast.walk(node): + if isinstance(x, (ast.stmt, ast.ExceptHandler)): + # The lineno points to the class/def, so need to include the decorators. + if isinstance(x, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)): + for d in x.decorator_list: + values.append(d.lineno - 1) + values.append(x.lineno - 1) + for name in ("finalbody", "orelse"): + val: list[ast.stmt] | None = getattr(x, name, None) + if val: + # Treat the finally/orelse part as its own statement. + values.append(val[0].lineno - 1 - 1) + values.sort() + insert_index = bisect_right(values, lineno) + start = values[insert_index - 1] + if insert_index >= len(values): + end = None + else: + end = values[insert_index] + return start, end + + +def getstatementrange_ast( + lineno: int, + source: Source, + assertion: bool = False, + astnode: ast.AST | None = None, +) -> tuple[ast.AST, int, int]: + if astnode is None: + content = str(source) + # See #4260: + # Don't produce duplicate warnings when compiling source to find AST. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + astnode = ast.parse(content, "source", "exec") + + start, end = get_statement_startend2(lineno, astnode) + # We need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # Make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself. + block_finder = inspect.BlockFinder() + # If we start with an indented line, put blockfinder to "started" mode. + block_finder.started = ( + bool(source.lines[start]) and source.lines[start][0].isspace() + ) + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # The end might still point to a comment or empty line, correct it. 
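Aside: a minimal standalone sketch of the lineno-to-statement-start mapping that `get_statement_startend2` performs above (illustrative only):

    import ast
    from bisect import bisect_right

    src = "x = 1\nif x:\n    y = 2\nz = 3\n"
    tree = ast.parse(src)
    starts = sorted(
        node.lineno - 1  # ast linenos are 1-based; Source indexes from 0
        for node in ast.walk(tree)
        if isinstance(node, ast.stmt)
    )
    lineno = 2  # the "y = 2" line, 0-based
    start = starts[bisect_right(starts, lineno) - 1]
    print(start)  # 2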
+ while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__init__.py b/venv/lib/python3.10/site-packages/_pytest/_io/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b0155b18b605326ba0a3104deaefde938b7d651a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_io/__init__.py @@ -0,0 +1,10 @@ +from __future__ import annotations + +from .terminalwriter import get_terminal_width +from .terminalwriter import TerminalWriter + + +__all__ = [ + "TerminalWriter", + "get_terminal_width", +] diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a3dad19a649d5e8a02a181fb65fb6e10b29838b Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/pprint.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/pprint.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7c827cd5d127ae1f5fb3880369d210a04660a66 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/pprint.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/saferepr.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/saferepr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed16d6363bd14954b0c051413c1e9b1d95e97494 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/saferepr.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..010db9180b8de4195df0859a0ed8b3f997d5ca6a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/terminalwriter.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ddde49f34e670ad8793a9f383e0b1e53d119ca92 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_io/__pycache__/wcwidth.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/pprint.py b/venv/lib/python3.10/site-packages/_pytest/_io/pprint.py new file mode 100644 index 0000000000000000000000000000000000000000..28f069092061928a1c06aaba94b6e8ba4f03075f --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_io/pprint.py @@ -0,0 +1,673 @@ +# mypy: allow-untyped-defs +# This module was imported from the cpython standard library +# (https://github.com/python/cpython/) at commit +# c5140945c723ae6c4b7ee81ff720ac8ea4b52cfd (python3.12). +# +# +# Original Author: Fred L. Drake, Jr. +# fdrake@acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. 
I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. +from __future__ import annotations + +import collections as _collections +from collections.abc import Callable +from collections.abc import Iterator +import dataclasses as _dataclasses +from io import StringIO as _StringIO +import re +import types as _types +from typing import Any +from typing import IO + + +class _safe_key: + """Helper function for key functions when sorting unorderable objects. + + The wrapped-object will fallback to a Py2.x style comparison for + unorderable types (sorting first comparing the type name and then by + the obj ids). Does not work recursively, so dict.items() must have + _safe_key applied to both the key and the value. + + """ + + __slots__ = ["obj"] + + def __init__(self, obj): + self.obj = obj + + def __lt__(self, other): + try: + return self.obj < other.obj + except TypeError: + return (str(type(self.obj)), id(self.obj)) < ( + str(type(other.obj)), + id(other.obj), + ) + + +def _safe_tuple(t): + """Helper function for comparing 2-tuples""" + return _safe_key(t[0]), _safe_key(t[1]) + + +class PrettyPrinter: + def __init__( + self, + indent: int = 4, + width: int = 80, + depth: int | None = None, + ) -> None: + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + """ + if indent < 0: + raise ValueError("indent must be >= 0") + if depth is not None and depth <= 0: + raise ValueError("depth must be > 0") + if not width: + raise ValueError("width must be != 0") + self._depth = depth + self._indent_per_level = indent + self._width = width + + def pformat(self, object: Any) -> str: + sio = _StringIO() + self._format(object, sio, 0, 0, set(), 0) + return sio.getvalue() + + def _format( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + objid = id(object) + if objid in context: + stream.write(_recursion(object)) + return + + p = self._dispatch.get(type(object).__repr__, None) + if p is not None: + context.add(objid) + p(self, object, stream, indent, allowance, context, level + 1) + context.remove(objid) + elif ( + _dataclasses.is_dataclass(object) + and not isinstance(object, type) + and object.__dataclass_params__.repr # type:ignore[attr-defined] + and + # Check dataclass has generated repr method. 
+ hasattr(object.__repr__, "__wrapped__") + and "__create_fn__" in object.__repr__.__wrapped__.__qualname__ + ): + context.add(objid) + self._pprint_dataclass( + object, stream, indent, allowance, context, level + 1 + ) + context.remove(objid) + else: + stream.write(self._repr(object, context, level)) + + def _pprint_dataclass( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + cls_name = object.__class__.__name__ + items = [ + (f.name, getattr(object, f.name)) + for f in _dataclasses.fields(object) + if f.repr + ] + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch: dict[ + Callable[..., str], + Callable[[PrettyPrinter, Any, IO[str], int, int, set[int], int], None], + ] = {} + + def _pprint_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("{") + items = sorted(object.items(), key=_safe_tuple) + self._format_dict_items(items, stream, indent, allowance, context, level) + write("}") + + _dispatch[dict.__repr__] = _pprint_dict + + def _pprint_ordered_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + "(") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict + + def _pprint_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("[") + self._format_items(object, stream, indent, allowance, context, level) + stream.write("]") + + _dispatch[list.__repr__] = _pprint_list + + def _pprint_tuple( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("(") + self._format_items(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[tuple.__repr__] = _pprint_tuple + + def _pprint_set( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object): + stream.write(repr(object)) + return + typ = object.__class__ + if typ is set: + stream.write("{") + endchar = "}" + else: + stream.write(typ.__name__ + "({") + endchar = "})" + object = sorted(object, key=_safe_key) + self._format_items(object, stream, indent, allowance, context, level) + stream.write(endchar) + + _dispatch[set.__repr__] = _pprint_set + _dispatch[frozenset.__repr__] = _pprint_set + + def _pprint_str( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if not len(object): + write(repr(object)) + return + chunks = [] + lines = object.splitlines(True) + if level == 1: + indent += 1 + allowance += 1 + max_width1 = max_width = self._width - indent + for i, line in enumerate(lines): + rep = repr(line) + if i == len(lines) - 1: + max_width1 -= allowance + if len(rep) <= max_width1: + chunks.append(rep) + else: + # A list of alternating (non-space, space) strings + parts = re.findall(r"\S*\s*", line) + assert parts + assert not parts[-1] + parts.pop() # drop empty last part + max_width2 = max_width + current 
= "" + for j, part in enumerate(parts): + candidate = current + part + if j == len(parts) - 1 and i == len(lines) - 1: + max_width2 -= allowance + if len(repr(candidate)) > max_width2: + if current: + chunks.append(repr(current)) + current = part + else: + current = candidate + if current: + chunks.append(repr(current)) + if len(chunks) == 1: + write(rep) + return + if level == 1: + write("(") + for i, rep in enumerate(chunks): + if i > 0: + write("\n" + " " * indent) + write(rep) + if level == 1: + write(")") + + _dispatch[str.__repr__] = _pprint_str + + def _pprint_bytes( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + if len(object) <= 4: + write(repr(object)) + return + parens = level == 1 + if parens: + indent += 1 + allowance += 1 + write("(") + delim = "" + for rep in _wrap_bytes_repr(object, self._width - indent, allowance): + write(delim) + write(rep) + if not delim: + delim = "\n" + " " * indent + if parens: + write(")") + + _dispatch[bytes.__repr__] = _pprint_bytes + + def _pprint_bytearray( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + write = stream.write + write("bytearray(") + self._pprint_bytes( + bytes(object), stream, indent + 10, allowance + 1, context, level + 1 + ) + write(")") + + _dispatch[bytearray.__repr__] = _pprint_bytearray + + def _pprint_mappingproxy( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write("mappingproxy(") + self._format(object.copy(), stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy + + def _pprint_simplenamespace( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if type(object) is _types.SimpleNamespace: + # The SimpleNamespace repr is "namespace" instead of the class + # name, so we do the same here. For subclasses; use the class name. + cls_name = "namespace" + else: + cls_name = object.__class__.__name__ + items = object.__dict__.items() + stream.write(cls_name + "(") + self._format_namespace_items(items, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace + + def _format_dict_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(self._repr(key, context, level)) + write(": ") + self._format(ent, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _format_namespace_items( + self, + items: list[tuple[Any, Any]], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + for key, ent in items: + write(delimnl) + write(key) + write("=") + if id(ent) in context: + # Special-case representation of recursion to match standard + # recursive dataclass repr. 
+ write("...") + else: + self._format( + ent, + stream, + item_indent + len(key) + 1, + 1, + context, + level, + ) + + write(",") + + write("\n" + " " * indent) + + def _format_items( + self, + items: list[Any], + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not items: + return + + write = stream.write + item_indent = indent + self._indent_per_level + delimnl = "\n" + " " * item_indent + + for item in items: + write(delimnl) + self._format(item, stream, item_indent, 1, context, level) + write(",") + + write("\n" + " " * indent) + + def _repr(self, object: Any, context: set[int], level: int) -> str: + return self._safe_repr(object, context.copy(), self._depth, level) + + def _pprint_default_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + rdf = self._repr(object.default_factory, context, level) + stream.write(f"{object.__class__.__name__}({rdf}, ") + self._pprint_dict(object, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict + + def _pprint_counter( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + + if object: + stream.write("{") + items = object.most_common() + self._format_dict_items(items, stream, indent, allowance, context, level) + stream.write("}") + + stream.write(")") + + _dispatch[_collections.Counter.__repr__] = _pprint_counter + + def _pprint_chain_map( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + if not len(object.maps) or (len(object.maps) == 1 and not len(object.maps[0])): + stream.write(repr(object)) + return + + stream.write(object.__class__.__name__ + "(") + self._format_items(object.maps, stream, indent, allowance, context, level) + stream.write(")") + + _dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map + + def _pprint_deque( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + stream.write(object.__class__.__name__ + "(") + if object.maxlen is not None: + stream.write(f"maxlen={object.maxlen}, ") + stream.write("[") + + self._format_items(object, stream, indent, allowance + 1, context, level) + stream.write("])") + + _dispatch[_collections.deque.__repr__] = _pprint_deque + + def _pprint_user_dict( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserDict.__repr__] = _pprint_user_dict + + def _pprint_user_list( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserList.__repr__] = _pprint_user_list + + def _pprint_user_string( + self, + object: Any, + stream: IO[str], + indent: int, + allowance: int, + context: set[int], + level: int, + ) -> None: + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserString.__repr__] = _pprint_user_string + + def _safe_repr( + self, object: Any, context: set[int], maxlevels: int | None, level: int + ) -> str: + typ = type(object) + if typ in 
_builtin_scalars: + return repr(object) + + r = getattr(typ, "__repr__", None) + + if issubclass(typ, dict) and r is dict.__repr__: + if not object: + return "{}" + objid = id(object) + if maxlevels and level >= maxlevels: + return "{...}" + if objid in context: + return _recursion(object) + context.add(objid) + components: list[str] = [] + append = components.append + level += 1 + for k, v in sorted(object.items(), key=_safe_tuple): + krepr = self._safe_repr(k, context, maxlevels, level) + vrepr = self._safe_repr(v, context, maxlevels, level) + append(f"{krepr}: {vrepr}") + context.remove(objid) + return "{{{}}}".format(", ".join(components)) + + if (issubclass(typ, list) and r is list.__repr__) or ( + issubclass(typ, tuple) and r is tuple.__repr__ + ): + if issubclass(typ, list): + if not object: + return "[]" + format = "[%s]" + elif len(object) == 1: + format = "(%s,)" + else: + if not object: + return "()" + format = "(%s)" + objid = id(object) + if maxlevels and level >= maxlevels: + return format % "..." + if objid in context: + return _recursion(object) + context.add(objid) + components = [] + append = components.append + level += 1 + for o in object: + orepr = self._safe_repr(o, context, maxlevels, level) + append(orepr) + context.remove(objid) + return format % ", ".join(components) + + return repr(object) + + +_builtin_scalars = frozenset( + {str, bytes, bytearray, float, complex, bool, type(None), int} +) + + +def _recursion(object: Any) -> str: + return f"<Recursion on {type(object).__name__} with id={id(object)}>" + + +def _wrap_bytes_repr(object: Any, width: int, allowance: int) -> Iterator[str]: + current = b"" + last = len(object) // 4 * 4 + for i in range(0, len(object), 4): + part = object[i : i + 4] + candidate = current + part + if i == last: + width -= allowance + if len(repr(candidate)) > width: + if current: + yield repr(current) + current = part + else: + current = candidate + if current: + yield repr(current) diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/saferepr.py b/venv/lib/python3.10/site-packages/_pytest/_io/saferepr.py new file mode 100644 index 0000000000000000000000000000000000000000..cee70e332f9802a5963bfed8149ac997e5b30de2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_io/saferepr.py @@ -0,0 +1,130 @@ +from __future__ import annotations + +import pprint +import reprlib + + +def _try_repr_or_str(obj: object) -> str: + try: + return repr(obj) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException: + return f'{type(obj).__name__}("{obj}")' + + +def _format_repr_exception(exc: BaseException, obj: object) -> str: + try: + exc_info = _try_repr_or_str(exc) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as inner_exc: + exc_info = f"unpresentable exception ({_try_repr_or_str(inner_exc)})" + return ( + f"<[{exc_info} raised in repr()] {type(obj).__name__} object at 0x{id(obj):x}>" + ) + + +def _ellipsize(s: str, maxsize: int) -> str: + if len(s) > maxsize: + i = max(0, (maxsize - 3) // 2) + j = max(0, maxsize - 3 - i) + return s[:i] + "..." + s[len(s) - j :] + return s + + +class SafeRepr(reprlib.Repr): + """ + repr.Repr that limits the resulting size of repr() and includes + information on exceptions raised during the call. + """ + + def __init__(self, maxsize: int | None, use_ascii: bool = False) -> None: + """ + :param maxsize: + If not None, will truncate the resulting repr to that specific size, using ellipsis + somewhere in the middle to hide the extra text. + If None, will not impose any size limits on the returning repr.
+ """ + super().__init__() + # ``maxstring`` is used by the superclass, and needs to be an int; using a + # very large number in case maxsize is None, meaning we want to disable + # truncation. + self.maxstring = maxsize if maxsize is not None else 1_000_000_000 + self.maxsize = maxsize + self.use_ascii = use_ascii + + def repr(self, x: object) -> str: + try: + if self.use_ascii: + s = ascii(x) + else: + s = super().repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + def repr_instance(self, x: object, level: int) -> str: + try: + s = repr(x) + except (KeyboardInterrupt, SystemExit): + raise + except BaseException as exc: + s = _format_repr_exception(exc, x) + if self.maxsize is not None: + s = _ellipsize(s, self.maxsize) + return s + + +def safeformat(obj: object) -> str: + """Return a pretty printed string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info. + """ + try: + return pprint.pformat(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) + + +# Maximum size of overall repr of objects to display during assertion errors. +DEFAULT_REPR_MAX_SIZE = 240 + + +def saferepr( + obj: object, maxsize: int | None = DEFAULT_REPR_MAX_SIZE, use_ascii: bool = False +) -> str: + """Return a size-limited safe repr-string for the given object. + + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. + + This function is a wrapper around the Repr/reprlib functionality of the + stdlib. + """ + return SafeRepr(maxsize, use_ascii).repr(obj) + + +def saferepr_unlimited(obj: object, use_ascii: bool = True) -> str: + """Return an unlimited-size safe repr-string for the given object. + + As with saferepr, failing __repr__ functions of user instances + will be represented with a short exception info. + + This function is a wrapper around simple repr. + + Note: a cleaner solution would be to alter ``saferepr``this way + when maxsize=None, but that might affect some other code. + """ + try: + if use_ascii: + return ascii(obj) + return repr(obj) + except Exception as exc: + return _format_repr_exception(exc, obj) diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/terminalwriter.py b/venv/lib/python3.10/site-packages/_pytest/_io/terminalwriter.py new file mode 100644 index 0000000000000000000000000000000000000000..fd808f8b3b728ad7f89afd05b045a36ea31dc8cb --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_io/terminalwriter.py @@ -0,0 +1,254 @@ +"""Helper functions for writing to terminals and files.""" + +from __future__ import annotations + +from collections.abc import Sequence +import os +import shutil +import sys +from typing import final +from typing import Literal +from typing import TextIO + +import pygments +from pygments.formatters.terminal import TerminalFormatter +from pygments.lexer import Lexer +from pygments.lexers.diff import DiffLexer +from pygments.lexers.python import PythonLexer + +from ..compat import assert_never +from .wcwidth import wcswidth + + +# This code was initially copied from py 1.8.1, file _io/terminalwriter.py. + + +def get_terminal_width() -> int: + width, _ = shutil.get_terminal_size(fallback=(80, 24)) + + # The Windows get_terminal_size may be bogus, let's sanify a bit. 
+ if width < 40: + width = 80 + + return width + + +def should_do_markup(file: TextIO) -> bool: + if os.environ.get("PY_COLORS") == "1": + return True + if os.environ.get("PY_COLORS") == "0": + return False + if os.environ.get("NO_COLOR"): + return False + if os.environ.get("FORCE_COLOR"): + return True + return ( + hasattr(file, "isatty") and file.isatty() and os.environ.get("TERM") != "dumb" + ) + + +@final +class TerminalWriter: + _esctable = dict( + black=30, + red=31, + green=32, + yellow=33, + blue=34, + purple=35, + cyan=36, + white=37, + Black=40, + Red=41, + Green=42, + Yellow=43, + Blue=44, + Purple=45, + Cyan=46, + White=47, + bold=1, + light=2, + blink=5, + invert=7, + ) + + def __init__(self, file: TextIO | None = None) -> None: + if file is None: + file = sys.stdout + if hasattr(file, "isatty") and file.isatty() and sys.platform == "win32": + try: + import colorama + except ImportError: + pass + else: + file = colorama.AnsiToWin32(file).stream + assert file is not None + self._file = file + self.hasmarkup = should_do_markup(file) + self._current_line = "" + self._terminal_width: int | None = None + self.code_highlight = True + + @property + def fullwidth(self) -> int: + if self._terminal_width is not None: + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value: int) -> None: + self._terminal_width = value + + @property + def width_of_current_line(self) -> int: + """Return an estimate of the width so far in the current line.""" + return wcswidth(self._current_line) + + def markup(self, text: str, **markup: bool) -> str: + for name in markup: + if name not in self._esctable: + raise ValueError(f"unknown markup: {name!r}") + if self.hasmarkup: + esc = [self._esctable[name] for name, on in markup.items() if on] + if esc: + text = "".join(f"\x1b[{cod}m" for cod in esc) + text + "\x1b[0m" + return text + + def sep( + self, + sepchar: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + if fullwidth is None: + fullwidth = self.fullwidth + # The goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth. + if sys.platform == "win32": + # If we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width). + # So let's be defensive to avoid empty lines in the output. + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1) + fill = sepchar * N + line = f"{fill} {title} {fill}" + else: + # we want len(sepchar)*N <= fullwidth + # i.e. N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # In some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line. 
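Aside: the title-fill arithmetic in `sep()` above, worked as a standalone example (illustrative only):

    sepchar, title, fullwidth = "=", "FAILURES", 40
    # We want 2 + 2*len(sepchar)*N + len(title) <= fullwidth; solve for N:
    n = max((fullwidth - len(title) - 2) // (2 * len(sepchar)), 1)
    line = f"{sepchar * n} {title} {sepchar * n}"
    print(line)       # 15 '=' characters on each side of " FAILURES "
    print(len(line))  # 40, i.e. exactly fullwidth here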
+ if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **markup) + + def write(self, msg: str, *, flush: bool = False, **markup: bool) -> None: + if msg: + current_line = msg.rsplit("\n", 1)[-1] + if "\n" in msg: + self._current_line = current_line + else: + self._current_line += current_line + + msg = self.markup(msg, **markup) + + try: + self._file.write(msg) + except UnicodeEncodeError: + # Some environments don't support printing general Unicode + # strings, due to misconfiguration or otherwise; in that case, + # print the string escaped to ASCII. + # When the Unicode situation improves we should consider + # letting the error propagate instead of masking it (see #7475 + # for one brief attempt). + msg = msg.encode("unicode-escape").decode("ascii") + self._file.write(msg) + + if flush: + self.flush() + + def line(self, s: str = "", **markup: bool) -> None: + self.write(s, **markup) + self.write("\n") + + def flush(self) -> None: + self._file.flush() + + def _write_source(self, lines: Sequence[str], indents: Sequence[str] = ()) -> None: + """Write lines of source code possibly highlighted. + + Keeping this private for now because the API is clunky. We should discuss how + to evolve the terminal writer so we can have more precise color support, for example + being able to write part of a line in one color and the rest in another, and so on. + """ + if indents and len(indents) != len(lines): + raise ValueError( + f"indents size ({len(indents)}) should have same size as lines ({len(lines)})" + ) + if not indents: + indents = [""] * len(lines) + source = "\n".join(lines) + new_lines = self._highlight(source).splitlines() + for indent, new_line in zip(indents, new_lines): + self.line(indent + new_line) + + def _get_pygments_lexer(self, lexer: Literal["python", "diff"]) -> Lexer: + if lexer == "python": + return PythonLexer() + elif lexer == "diff": + return DiffLexer() + else: + assert_never(lexer) + + def _get_pygments_formatter(self) -> TerminalFormatter: + from _pytest.config.exceptions import UsageError + + theme = os.getenv("PYTEST_THEME") + theme_mode = os.getenv("PYTEST_THEME_MODE", "dark") + + try: + return TerminalFormatter(bg=theme_mode, style=theme) + except pygments.util.ClassNotFound as e: + raise UsageError( + f"PYTEST_THEME environment variable has an invalid value: '{theme}'. " + "Hint: See available pygments styles with `pygmentize -L styles`." + ) from e + except pygments.util.OptionError as e: + raise UsageError( + f"PYTEST_THEME_MODE environment variable has an invalid value: '{theme_mode}'. " + "The allowed values are 'dark' (default) and 'light'." + ) from e + + def _highlight( + self, source: str, lexer: Literal["diff", "python"] = "python" + ) -> str: + """Highlight the given source if we have markup support.""" + if not source or not self.hasmarkup or not self.code_highlight: + return source + + pygments_lexer = self._get_pygments_lexer(lexer) + pygments_formatter = self._get_pygments_formatter() + + highlighted: str = pygments.highlight( + source, pygments_lexer, pygments_formatter + ) + # pygments terminal formatter may add a newline when there wasn't one. + # We don't want this, remove. + if highlighted[-1] == "\n" and source[-1] != "\n": + highlighted = highlighted[:-1] + + # Some lexers will not set the initial color explicitly + # which may lead to the previous color being propagated to the + # start of the expression, so reset first. 
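Aside: the pygments API that `_highlight` wraps, called directly (these are real pygments calls; the PYTEST_THEME handling above is omitted from the sketch):

    import pygments
    from pygments.formatters.terminal import TerminalFormatter
    from pygments.lexers.python import PythonLexer

    source = "assert x == 2\n"
    colored = pygments.highlight(source, PythonLexer(), TerminalFormatter(bg="dark"))
    print(repr(colored))  # the source wrapped in ANSI escape sequences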
+ highlighted = "\x1b[0m" + highlighted + + return highlighted diff --git a/venv/lib/python3.10/site-packages/_pytest/_io/wcwidth.py b/venv/lib/python3.10/site-packages/_pytest/_io/wcwidth.py new file mode 100644 index 0000000000000000000000000000000000000000..23886ff1581a16aa97e5c375e62261622e24c169 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_io/wcwidth.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from functools import lru_cache +import unicodedata + + +@lru_cache(100) +def wcwidth(c: str) -> int: + """Determine how many columns are needed to display a character in a terminal. + + Returns -1 if the character is not printable. + Returns 0, 1 or 2 for other characters. + """ + o = ord(c) + + # ASCII fast path. + if 0x20 <= o < 0x07F: + return 1 + + # Some Cf/Zp/Zl characters which should be zero-width. + if ( + o == 0x0000 + or 0x200B <= o <= 0x200F + or 0x2028 <= o <= 0x202E + or 0x2060 <= o <= 0x2063 + ): + return 0 + + category = unicodedata.category(c) + + # Control characters. + if category == "Cc": + return -1 + + # Combining characters with zero width. + if category in ("Me", "Mn"): + return 0 + + # Full/Wide east asian characters. + if unicodedata.east_asian_width(c) in ("F", "W"): + return 2 + + return 1 + + +def wcswidth(s: str) -> int: + """Determine how many columns are needed to display a string in a terminal. + + Returns -1 if the string contains non-printable characters. + """ + width = 0 + for c in unicodedata.normalize("NFC", s): + wc = wcwidth(c) + if wc < 0: + return -1 + width += wc + return width diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/__init__.py b/venv/lib/python3.10/site-packages/_pytest/_py/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da33c4d27f4a66e58df77cb144d1d62f607795c6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/error.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/error.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cebc2887434a3d92bd2343e96c0a4c23227b0601 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/error.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/path.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/path.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..655ae94d2dbc95deadcb5e397448cd7c1765e967 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/_py/__pycache__/path.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/error.py b/venv/lib/python3.10/site-packages/_pytest/_py/error.py new file mode 100644 index 0000000000000000000000000000000000000000..dace23764ffb4da9744cf23b668c9e7011674c67 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_py/error.py @@ -0,0 +1,119 @@ +"""create errno-specific classes for IO or os calls.""" + +from __future__ import annotations + +from collections.abc import Callable +import errno +import os +import sys +from typing import 
TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + P = ParamSpec("P") + +R = TypeVar("R") + + +class Error(EnvironmentError): + def __repr__(self) -> str: + return "{}.{} {!r}: {} ".format( + self.__class__.__module__, + self.__class__.__name__, + self.__class__.__doc__, + " ".join(map(str, self.args)), + # repr(self.args) + ) + + def __str__(self) -> str: + s = "[{}]: {}".format( + self.__class__.__doc__, + " ".join(map(str, self.args)), + ) + return s + + +_winerrnomap = { + 2: errno.ENOENT, + 3: errno.ENOENT, + 17: errno.EEXIST, + 18: errno.EXDEV, + 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable + 22: errno.ENOTDIR, + 20: errno.ENOTDIR, + 267: errno.ENOTDIR, + 5: errno.EACCES, # anything better? +} + + +class ErrorMaker: + """lazily provides Exception classes for each possible POSIX errno + (as defined per the 'errno' module). All such instances + subclass EnvironmentError. + """ + + _errno2class: dict[int, type[Error]] = {} + + def __getattr__(self, name: str) -> type[Error]: + if name[0] == "_": + raise AttributeError(name) + eno = getattr(errno, name) + cls = self._geterrnoclass(eno) + setattr(self, name, cls) + return cls + + def _geterrnoclass(self, eno: int) -> type[Error]: + try: + return self._errno2class[eno] + except KeyError: + clsname = errno.errorcode.get(eno, f"UnknownErrno{eno}") + errorcls = type( + clsname, + (Error,), + {"__module__": "py.error", "__doc__": os.strerror(eno)}, + ) + self._errno2class[eno] = errorcls + return errorcls + + def checked_call( + self, func: Callable[P, R], *args: P.args, **kwargs: P.kwargs + ) -> R: + """Call a function and raise an errno-exception if applicable.""" + __tracebackhide__ = True + try: + return func(*args, **kwargs) + except Error: + raise + except OSError as value: + if not hasattr(value, "errno"): + raise + if sys.platform == "win32": + try: + # error: Invalid index type "Optional[int]" for "dict[int, int]"; expected type "int" [index] + # OK to ignore because we catch the KeyError below. 
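Aside: a usage sketch for the errno-specific classes above, assuming the module is importable as `_pytest._py.error` (the path argument is invented for illustration):

    from _pytest._py import error

    try:
        error.checked_call(open, "/definitely/not/there")
    except error.ENOENT as exc:
        # Message reads roughly "[No such file or directory]: open('/definitely/not/there',)"
        print(exc)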
+ cls = self._geterrnoclass(_winerrnomap[value.errno]) # type:ignore[index] + except KeyError: + raise value + else: + # we are not on Windows, or we got a proper OSError + if value.errno is None: + cls = type( + "UnknownErrnoNone", + (Error,), + {"__module__": "py.error", "__doc__": None}, + ) + else: + cls = self._geterrnoclass(value.errno) + + raise cls(f"{func.__name__}{args!r}") + + +_error_maker = ErrorMaker() +checked_call = _error_maker.checked_call + + +def __getattr__(attr: str) -> type[Error]: + return getattr(_error_maker, attr) # type: ignore[no-any-return] diff --git a/venv/lib/python3.10/site-packages/_pytest/_py/path.py b/venv/lib/python3.10/site-packages/_pytest/_py/path.py new file mode 100644 index 0000000000000000000000000000000000000000..e353c1a9b5217c9a54f3e661f40845107f18ef74 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_py/path.py @@ -0,0 +1,1475 @@ +# mypy: allow-untyped-defs +"""local path implementation.""" + +from __future__ import annotations + +import atexit +from collections.abc import Callable +from contextlib import contextmanager +import fnmatch +import importlib.util +import io +import os +from os.path import abspath +from os.path import dirname +from os.path import exists +from os.path import isabs +from os.path import isdir +from os.path import isfile +from os.path import islink +from os.path import normpath +import posixpath +from stat import S_ISDIR +from stat import S_ISLNK +from stat import S_ISREG +import sys +from typing import Any +from typing import cast +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import uuid +import warnings + +from . import error + + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, "_name", False) == "nt") + + +class Checkers: + _depend_on_existence = "exists", "link", "dir", "file" + + def __init__(self, path): + self.path = path + + def dotfile(self): + return self.path.basename.startswith(".") + + def ext(self, arg): + if not arg.startswith("."): + arg = "." 
+ arg + return self.path.ext == arg + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + from .._code.source import getrawcode + + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == "not": + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError(f"no {name!r} checker available for {self.path!r}") + try: + if getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (error.ENOENT, error.ENOTDIR, error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = "not" + name + if name in kw: + if not kw.get(name): + return False + return True + + _statcache: Stat + + def _stat(self) -> Stat: + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + +class NeverRaised(Exception): + pass + + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, str): + fil = FNMatcher(fil) + if isinstance(rec, str): + self.rec: Callable[[LocalPath], bool] = FNMatcher(rec) + elif not hasattr(rec, "__call__") and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = cast(Callable[[Any], Any], sorted) if sort else (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort( + [p for p in entries if p.check(dir=1) and (rec is None or rec(p))] + ) + if not self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + yield from self.gen(subdir) + + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if ( + pattern.find(path.sep) == -1 + and iswin32 + and pattern.find(posixpath.sep) != -1 + ): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = "*" + path.sep + pattern + return fnmatch.fnmatch(name, pattern) + + +def map_as_list(func, iter): + return list(map(func, iter)) + + +class Stat: + if TYPE_CHECKING: + + @property + def size(self) -> int: ... 
+ + @property + def mtime(self) -> float: ... + + def __getattr__(self, name: str) -> Any: + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + + entry = error.checked_call(pwd.getpwuid, self.uid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + @property + def group(self): + """Return group name of file.""" + if iswin32: + raise NotImplementedError("XXX win32") + import grp + + entry = error.checked_call(grp.getgrgid, self.gid) # type:ignore[attr-defined,unused-ignore] + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + + +def getuserid(user): + import pwd + + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] # type:ignore[attr-defined,unused-ignore] + return user + + +def getgroupid(group): + import grp + + if not isinstance(group, int): + group = grp.getgrnam(group)[2] # type:ignore[attr-defined,unused-ignore] + return group + + +class LocalPath: + """Object oriented interface to os.path and other local filesystem + related information. + """ + + class ImportMismatchError(ImportError): + """raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + + def __init__(self, path=None, expanduser=False): + """Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = error.checked_call(os.getcwd) + else: + try: + path = os.fspath(path) + except TypeError: + raise ValueError( + "can only pass None, Path instances " + "or non-empty strings to LocalPath" + ) + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + if sys.platform != "win32": + + def chown(self, user, group, rec=0): + """Change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. 
+ """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + error.checked_call(os.chown, str(x), uid, gid) + error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self) -> str: + """Return value of a symbolic link.""" + # https://github.com/python/mypy/issues/12278 + return error.checked_call(os.readlink, self.strpath) # type: ignore[arg-type,return-value,unused-ignore] + + def mklinkto(self, oldname): + """Posix style hard link to another name.""" + error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """Create a symbolic link with the given value (pointing to another name).""" + if absolute: + error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(("..",) * n + (relsource,)) + error.checked_call(os.symlink, target, self.strpath) + + def __div__(self, other): + return self.join(os.fspath(other)) + + __truediv__ = __div__ # py3k + + @property + def basename(self): + """Basename part of path.""" + return self._getbyspec("basename")[0] + + @property + def dirname(self): + """Dirname part of path.""" + return self._getbyspec("dirname")[0] + + @property + def purebasename(self): + """Pure base name of the path.""" + return self._getbyspec("purebasename")[0] + + @property + def ext(self): + """Extension of the path (including the '.').""" + return self._getbyspec("ext")[0] + + def read_binary(self): + """Read and return a bytestring from reading the path.""" + with self.open("rb") as f: + return f.read() + + def read_text(self, encoding): + """Read and return a Unicode string from reading the path.""" + with self.open("r", encoding=encoding) as f: + return f.read() + + def read(self, mode="r"): + """Read and return a bytestring from reading the path.""" + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """Read and return a list of lines from the path. if cr is False, the + newline will be removed from the end of each line.""" + mode = "r" + + if not cr: + content = self.read(mode) + return content.split("\n") + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """(deprecated) return object unpickled from self.read()""" + f = self.open("rb") + try: + import pickle + + return error.checked_call(pickle.load, f) + finally: + f.close() + + def move(self, target): + """Move this path to target.""" + if target.relto(self): + raise error.EINVAL(target, "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def fnmatch(self, pattern): + """Return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. 
+ """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """Return a string which is the relative part of the path + to the given 'relpath'. + """ + if not isinstance(relpath, (str, LocalPath)): + raise TypeError(f"{relpath!r}: not a string or path object") + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + # assert strrelpath[-1] == self.sep + # assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, "_name", None) == "nt": + if os.path.normcase(strself).startswith(os.path.normcase(strrelpath)): + return strself[len(strrelpath) :] + elif strself.startswith(strrelpath): + return strself[len(strrelpath) :] + return "" + + def ensure_dir(self, *args): + """Ensure the path joined with args is a directory.""" + return self.ensure(*args, dir=True) + + def bestrelpath(self, dest): + """Return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + lst = [os.pardir] * n + if reldest: + lst.append(reldest) + target = dest.sep.join(lst) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """Return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + lst = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + lst.append(current) + if not reverse: + lst.reverse() + return lst + + def common(self, other): + """Return the common part shared with the other path + or None if there is no common part. + """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """Return new path object with 'other' added to the basename""" + return self.new(basename=self.basename + str(other)) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """Yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. 
+ """ + yield from Visitor(fil, rec, ignore, bf, sort).gen(self) + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, "__call__"): + warnings.warn( + DeprecationWarning( + "listdir(sort=callable) is deprecated and breaks on python3" + ), + stacklevel=3, + ) + res.sort(sort) + else: + res.sort() + + def __fspath__(self): + return self.strpath + + def __hash__(self): + s = self.strpath + if iswin32: + s = s.lower() + return hash(s) + + def __eq__(self, other): + s1 = os.fspath(self) + try: + s2 = os.fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return os.fspath(self) < os.fspath(other) + + def __gt__(self, other): + return os.fspath(self) > os.fspath(other) + + def samefile(self, other): + """Return True if 'other' references the same file as 'self'.""" + other = os.fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if not hasattr(os.path, "samefile"): + return False + return error.checked_call(os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """Remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + import shutil + + error.checked_call( + shutil.rmtree, self.strpath, ignore_errors=ignore_errors + ) + else: + error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """Return hexdigest of hashvalue for this file.""" + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError(f"Don't know how to compute {hashtype!r} hash") + f = self.open("rb") + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """Create a modified version of this path. + the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, basename, purebasename, ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext" + ) + if "basename" in kw: + if "purebasename" in kw or "ext" in kw: + raise ValueError(f"invalid specification {kw!r}") + else: + pb = kw.setdefault("purebasename", purebasename) + try: + ext = kw["ext"] + except KeyError: + pass + else: + if ext and not ext.startswith("."): + ext = "." 
+ ext + kw["basename"] = pb + ext + + if "dirname" in kw and not kw["dirname"]: + kw["dirname"] = drive + else: + kw.setdefault("dirname", dirname) + kw.setdefault("sep", self.sep) + obj.strpath = normpath("{dirname}{sep}{basename}".format(**kw)) + return obj + + def _getbyspec(self, spec: str) -> list[str]: + """See new for what 'spec' can be.""" + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(",")) + for name in args: + if name == "drive": + res.append(parts[0]) + elif name == "dirname": + res.append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == "basename": + res.append(basename) + else: + i = basename.rfind(".") + if i == -1: + purebasename, ext = basename, "" + else: + purebasename, ext = basename[:i], basename[i:] + if name == "purebasename": + res.append(purebasename) + elif name == "ext": + res.append(ext) + else: + raise ValueError(f"invalid part specification {name!r}") + return res + + def dirpath(self, *args, **kwargs): + """Return the directory path joined with any given path arguments.""" + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return self.new(basename="").join(*args, **kwargs) + + def join(self, *args: os.PathLike[str], abs: bool = False) -> LocalPath: + """Return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [os.fspath(arg) for arg in args] + strpath = self.strpath + if abs: + newargs: list[str] = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + # special case for when we have e.g. strpath == "/" + actual_sep = "" if strpath.endswith(sep) else sep + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip("/") + arg = arg.replace("/", sep) + strpath = strpath + actual_sep + arg + actual_sep = sep + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode="r", ensure=False, encoding=None): + """Return an opened file with the given mode. + + If ensure is True, create parent directories if needed. + """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return error.checked_call( + io.open, + self.strpath, + mode, + encoding=encoding, + ) + return error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + """Check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. 
+ + valid checkers:: + + file = 1 # is a file + file = 0 # is not a file (may not even exist) + dir = 1 # is a dir + link = 1 # is a link + exists = 1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + if not kw: + kw = {"exists": 1} + return Checkers(self)._evaluate(kw) + + _patternchars = set("*?[" + os.sep) + + def listdir(self, fil=None, sort=None): + """List directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, str): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = FNMatcher(fil) + names = error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self) -> int: + """Return size of the underlying file object""" + return self.stat().size + + def mtime(self) -> float: + """Return last modification time of the path.""" + return self.stat().mtime + + def copy(self, target, mode=False, stat=False): + """Copy path to target. + + If mode is True, will copy permission from path to target. + If stat is True, copy permission, last modification + time, last access time, and flags from path to target. + """ + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self != target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + if stat: + copystat(self, target) + else: + + def rec(p): + return p.check(link=0) + + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + if stat: + copystat(x, newx) + + def rename(self, target): + """Rename this path to target.""" + target = os.fspath(target) + return error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """Pickle object into path location""" + f = self.open("wb") + import pickle + + try: + error.checked_call(pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """Create & return the directory joined with args.""" + p = self.join(*args) + error.checked_call(os.mkdir, os.fspath(p)) + return p + + def write_binary(self, data, ensure=False): + """Write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("wb") as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """Write text data into path using the specified encoding. + If ensure is True create missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open("w", encoding=encoding) as f: + f.write(data) + + def write(self, data, mode="w", ensure=False): + """Write data into path. If ensure is True create + missing parent directories. 
+ """ + if ensure: + self.dirpath().ensure(dir=1) + if "b" in mode: + if not isinstance(data, bytes): + raise ValueError("can only process bytes") + else: + if not isinstance(data, str): + if not isinstance(data, bytes): + data = str(data) + else: + data = data.decode(sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """Ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get("dir", 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open("wb").close() + return p + + @overload + def stat(self, raising: Literal[True] = ...) -> Stat: ... + + @overload + def stat(self, raising: Literal[False]) -> Stat | None: ... + + def stat(self, raising: bool = True) -> Stat | None: + """Return an os.stat() tuple.""" + if raising: + return Stat(self, error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self) -> Stat: + """Return an os.lstat() tuple.""" + return Stat(self, error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """Set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return error.checked_call(os.utime, self.strpath, mtime) + try: + return error.checked_call(os.utime, self.strpath, (-1, mtime)) + except error.EINVAL: + return error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """Change directory to self and return old current directory""" + try: + old = self.__class__() + except error.ENOENT: + old = None + error.checked_call(os.chdir, self.strpath) + return old + + @contextmanager + def as_cwd(self): + """ + Return a context manager, which changes to the path's dir during the + managed "with" context. + On __enter__ it returns the old dir, which might be ``None``. + """ + old = self.chdir() + try: + yield old + finally: + if old is not None: + old.chdir() + + def realpath(self): + """Return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """Return last access time of the path.""" + return self.stat().atime + + def __repr__(self): + return f"local({self.strpath!r})" + + def __str__(self): + """Return string representation of the Path.""" + return self.strpath + + def chmod(self, mode, rec=0): + """Change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. 
+ """ + if not isinstance(mode, int): + raise TypeError(f"mode {mode!r} must be an integer") + if rec: + for x in self.visit(rec=rec): + error.checked_call(os.chmod, str(x), mode) + error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath cannot be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join("__init__.py").exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """Return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + + Special value of ensuresyspath=="importlib" is intended + purely for using in pytest, it is capable only of importing + separate .py files outside packages, e.g. for test suite + without any __init__.py file. It effectively allows having + same-named test modules in different places and offers + mild opt-in via this option. Note that it works only in + recent versions of python. + """ + if not self.check(): + raise error.ENOENT(self) + + if ensuresyspath == "importlib": + if modname is None: + modname = self.purebasename + spec = importlib.util.spec_from_file_location(modname, str(self)) + if spec is None or spec.loader is None: + raise ImportError(f"Can't find module {modname} at location {self!s}") + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) + return mod + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # be in a namespace package ... 
too icky to check + modfile = mod.__file__ + assert modfile is not None + if modfile[-4:] in (".pyc", ".pyo"): + modfile = modfile[:-1] + elif modfile.endswith("$py.class"): + modfile = modfile[:-9] + ".py" + if modfile.endswith(os.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except error.ENOENT: + issame = False + if not issame: + ignore = os.getenv("PY_IGNORE_IMPORTMISMATCH") + if ignore != "1": + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + import types + + mod = types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + with open(str(self), "rb") as f: + exec(f.read(), mod.__dict__) + except BaseException: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv: os.PathLike[str], **popen_opts: Any) -> str: + """Return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. + """ + from subprocess import PIPE + from subprocess import Popen + + popen_opts.pop("stdout", None) + popen_opts.pop("stderr", None) + proc = Popen( + [str(self)] + [str(arg) for arg in argv], + **popen_opts, + stdout=PIPE, + stderr=PIPE, + ) + stdout: str | bytes + stdout, stderr = proc.communicate() + ret = proc.wait() + if isinstance(stdout, bytes): + stdout = stdout.decode(sys.getdefaultencoding()) + if ret != 0: + if isinstance(stderr, bytes): + stderr = stderr.decode(sys.getdefaultencoding()) + raise RuntimeError( + ret, + ret, + str(self), + stdout, + stderr, + ) + return stdout + + @classmethod + def sysfind(cls, name, checker=None, paths=None): + """Return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = os.environ["Path"].split(";") + if "" not in paths and "." not in paths: + paths.append(".") + try: + systemroot = os.environ["SYSTEMROOT"] + except KeyError: + pass + else: + paths = [ + path.replace("%SystemRoot%", systemroot) for path in paths + ] + else: + paths = os.environ["PATH"].split(":") + tryadd = [] + if iswin32: + tryadd += os.environ["PATHEXT"].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except error.EACCES: + pass + return None + + @classmethod + def _gethomedir(cls): + try: + x = os.environ["HOME"] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ["HOMEPATH"] + except KeyError: + return None + return cls(x) + + # """ + # special class constructors for local filesystem paths + # """ + @classmethod + def get_temproot(cls): + """Return the system's temporary directory + (where tempfiles are usually created in) + """ + import tempfile + + return local(tempfile.gettempdir()) + + @classmethod + def mkdtemp(cls, rootdir=None): + """Return a Path object pointing to a fresh new temporary directory + (which we created ourselves). 
+ """ + import tempfile + + if rootdir is None: + rootdir = cls.get_temproot() + path = error.checked_call(tempfile.mkdtemp, dir=str(rootdir)) + return cls(path) + + @classmethod + def make_numbered_dir( + cls, prefix="session-", rootdir=None, keep=3, lock_timeout=172800 + ): # two days + """Return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. If .lock files are used (lock_timeout non-zero), + algorithm is multi-process safe. + """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = prefix.lower() + + def parse_num(path): + """Parse the number out of a path (if it matches the prefix)""" + nbasename = path.basename.lower() + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix) :]) + except ValueError: + pass + + def create_lockfile(path): + """Exclusively create lockfile. Throws when failed""" + mypid = os.getpid() + lockfile = path.join(".lock") + if hasattr(lockfile, "mksymlinkto"): + lockfile.mksymlinkto(str(mypid)) + else: + fd = error.checked_call( + os.open, str(lockfile), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644 + ) + with os.fdopen(fd, "w") as f: + f.write(str(mypid)) + return lockfile + + def atexit_remove_lockfile(lockfile): + """Ensure lockfile is removed at process exit""" + mypid = os.getpid() + + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except error.Error: + pass + + atexit.register(try_remove_lockfile) + + # compute the maximum number currently in use with the prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum + 1)) + if lock_timeout: + lockfile = create_lockfile(udir) + atexit_remove_lockfile(lockfile) + except (error.EEXIST, error.ENOENT, error.EBUSY): + # race condition (1): another thread/process created the dir + # in the meantime - try again + # race condition (2): another thread/process spuriously acquired + # lock treating empty directory as candidate + # for removal - try again + # race condition (3): another thread/process tried to create the lock at + # the same time (happened in Python 3.3 on Windows) + # https://ci.appveyor.com/project/pytestbot/py/build/1.0.21/job/ffi85j4c0lqwsfwa + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + def get_mtime(path): + """Read file modification time""" + try: + return path.lstat().mtime + except error.Error: + pass + + garbage_prefix = prefix + "garbage-" + + def is_garbage(path): + """Check if path denotes directory scheduled for removal""" + bn = path.basename + return bn.startswith(garbage_prefix) + + # prune old directories + udir_time = get_mtime(udir) + if keep and udir_time: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + try: + # try acquiring lock to remove directory as exclusive user + if lock_timeout: + create_lockfile(path) + except (error.EEXIST, error.ENOENT, error.EBUSY): + path_time = get_mtime(path) + if not 
path_time: + # assume directory doesn't exist now + continue + if abs(udir_time - path_time) < lock_timeout: + # assume directory with lockfile exists + # and lock timeout hasn't expired yet + continue + + # path dir locked for exclusive use + # and scheduled for removal to avoid another thread/process + # treating it as a new directory or removal candidate + garbage_path = rootdir.join(garbage_prefix + str(uuid.uuid4())) + try: + path.rename(garbage_path) + garbage_path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + if is_garbage(path): + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except Exception: # this might be error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ["USER"] # linux, et al + except KeyError: + try: + username = os.environ["USERNAME"] # windows + except KeyError: + username = "current" + + src = str(udir) + dest = src[: src.rfind("-")] + "-" + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + + +def copymode(src, dest): + """Copy permission from src to dst.""" + import shutil + + shutil.copymode(src, dest) + + +def copystat(src, dest): + """Copy permission, last modification time, + last access time, and flags from src to dst.""" + import shutil + + shutil.copystat(str(src), str(dest)) + + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open("rb") + try: + fdest = dest.open("wb") + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == "_"): + name = name.replace("_", "") + return not name or name.isalnum() + + +local = LocalPath diff --git a/venv/lib/python3.10/site-packages/_pytest/_version.py b/venv/lib/python3.10/site-packages/_pytest/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..1eaa448c79dae775e7486742bdf373c3b3d1f51a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+ COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '8.4.2' +__version_tuple__ = version_tuple = (8, 4, 2) + +__commit_id__ = commit_id = None diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/__init__.py b/venv/lib/python3.10/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..22f3ca8e258cc48effeb34154821f8b1e5cf151b --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,208 @@ +# mypy: allow-untyped-defs +"""Support for presenting detailed information in failing assertions.""" + +from __future__ import annotations + +from collections.abc import Generator +import sys +from typing import Any +from typing import Protocol +from typing import TYPE_CHECKING + +from _pytest.assertion import rewrite +from _pytest.assertion import truncate +from _pytest.assertion import util +from _pytest.assertion.rewrite import assertstate_key +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item + + +if TYPE_CHECKING: + from _pytest.main import Session + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--assert", + action="store", + dest="assertmode", + choices=("rewrite", "plain"), + default="rewrite", + metavar="MODE", + help=( + "Control assertion debugging tools.\n" + "'plain' performs no assertion debugging.\n" + "'rewrite' (the default) rewrites assert statements in test modules" + " on import to provide assert expression information." + ), + ) + parser.addini( + "enable_assertion_pass_hook", + type="bool", + default=False, + help="Enables the pytest_assertion_pass hook. " + "Make sure to delete any previously generated pyc cache files.", + ) + + parser.addini( + "truncation_limit_lines", + default=None, + help="Set threshold of LINES after which truncation will take effect", + ) + parser.addini( + "truncation_limit_chars", + default=None, + help=("Set threshold of CHARS after which truncation will take effect"), + ) + + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_ASSERTIONS, + help=( + "Specify a verbosity level for assertions, overriding the main level. " + "Higher levels will provide more detailed explanation when an assertion fails." + ), + ) + + +def register_assert_rewrite(*names: str) -> None: + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :param names: The module names to register. + """ + for name in names: + if not isinstance(name, str): + msg = "expected module names as *args, got {0} instead" # type: ignore[unreachable] + raise TypeError(msg.format(repr(names))) + rewrite_hook: RewriteHook + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + rewrite_hook = hook + break + else: + rewrite_hook = DummyRewriteHook() + rewrite_hook.mark_rewrite(*names) + + +class RewriteHook(Protocol): + def mark_rewrite(self, *names: str) -> None: ... 
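# Usage sketch for the registration helper above (module names here are
# hypothetical, for illustration only): a plugin registers its helper modules
# *before* importing them, so the AssertionRewritingHook found on
# sys.meta_path -- or the no-op DummyRewriteHook below -- marks them for
# rewriting on import.
import pytest

pytest.register_assert_rewrite("myplugin.helpers")

from myplugin import helpers  # noqa: E402  # imported only after registration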
+ + +class DummyRewriteHook: + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names: str) -> None: + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config: Config, mode) -> None: + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook: rewrite.AssertionRewritingHook | None = None + + +def install_importhook(config: Config) -> rewrite.AssertionRewritingHook: + """Try to install the rewrite hook, raise SystemError if it fails.""" + config.stash[assertstate_key] = AssertionState(config, "rewrite") + config.stash[assertstate_key].hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config.stash[assertstate_key].trace("installed rewrite import hook") + + def undo() -> None: + hook = config.stash[assertstate_key].hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session: Session) -> None: + # This hook is only called when test modules are collected + # so for example not in the managing process of pytest-xdist + # (which does not collect test modules). + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + """Setup the pytest_assertrepr_compare and pytest_assertion_pass hooks. + + The rewrite module will use util._reprcompare if it exists to use custom + reporting via the pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + ihook = item.ihook + + def callbinrepr(op, left: object, right: object) -> str | None: + """Call the pytest_assertrepr_compare hook and prepare the result. + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. 
+ """ + hook_result = ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right + ) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = "\n~".join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + return None + + saved_assert_hooks = util._reprcompare, util._assertion_pass + util._reprcompare = callbinrepr + util._config = item.config + + if ihook.pytest_assertion_pass.get_hookimpls(): + + def call_assertion_pass_hook(lineno: int, orig: str, expl: str) -> None: + ihook.pytest_assertion_pass(item=item, lineno=lineno, orig=orig, expl=expl) + + util._assertion_pass = call_assertion_pass_hook + + try: + return (yield) + finally: + util._reprcompare, util._assertion_pass = saved_assert_hooks + util._config = None + + +def pytest_sessionfinish(session: Session) -> None: + assertstate = session.config.stash.get(assertstate_key, None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +def pytest_assertrepr_compare( + config: Config, op: str, left: Any, right: Any +) -> list[str] | None: + return util.assertrepr_compare(config=config, op=op, left=left, right=right) diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8cb396fd23a4b95f443d88cbbf7bc1dcf9b58416 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d94f43f01e5a8630abc3bf2b311ef0b0c2032eda Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/rewrite.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/truncate.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/truncate.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..afd803bc1c71bbc16fc15414c04bf981113ce296 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/truncate.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/util.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ab822b3a8e0198322504af014caa2ba197f0fe7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/assertion/__pycache__/util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/rewrite.py b/venv/lib/python3.10/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 0000000000000000000000000000000000000000..c4782c7c5a8a5c9c18bb5225263e819a02657db4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,1216 @@ +"""Rewrite assertion AST to produce nice error messages.""" + +from __future__ import annotations + +import ast +from collections import defaultdict +from collections.abc 
import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +import errno +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import io +import itertools +import marshal +import os +from pathlib import Path +from pathlib import PurePath +import struct +import sys +import tokenize +import types +from typing import IO +from typing import TYPE_CHECKING + +from _pytest._io.saferepr import DEFAULT_REPR_MAX_SIZE +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest._version import version +from _pytest.assertion import util +from _pytest.config import Config +from _pytest.fixtures import FixtureFunctionDefinition +from _pytest.main import Session +from _pytest.pathlib import absolutepath +from _pytest.pathlib import fnmatch_ex +from _pytest.stash import StashKey + + +# fmt: off +from _pytest.assertion.util import format_explanation as _format_explanation # noqa:F401, isort:skip +# fmt:on + +if TYPE_CHECKING: + from _pytest.assertion import AssertionState + + +class Sentinel: + pass + + +assertstate_key = StashKey["AssertionState"]() + +# pytest caches rewritten pycs in pycache dirs +PYTEST_TAG = f"{sys.implementation.cache_tag}-pytest-{version}" +PYC_EXT = ".py" + ((__debug__ and "c") or "o") +PYC_TAIL = "." + PYTEST_TAG + PYC_EXT + +# Special marker that denotes we have just left a scope definition +_SCOPE_END_MARKER = Sentinel() + + +class AssertionRewritingHook(importlib.abc.MetaPathFinder, importlib.abc.Loader): + """PEP302/PEP451 import hook which rewrites asserts.""" + + def __init__(self, config: Config) -> None: + self.config = config + try: + self.fnpats = config.getini("python_files") + except ValueError: + self.fnpats = ["test_*.py", "*_test.py"] + self.session: Session | None = None + self._rewritten_names: dict[str, Path] = {} + self._must_rewrite: set[str] = set() + # flag to guard against trying to rewrite a pyc file while we are already writing another pyc file, + # which might result in infinite recursion (#3506) + self._writing_pyc = False + self._basenames_to_check_rewrite = {"conftest"} + self._marked_for_rewrite_cache: dict[str, bool] = {} + self._session_paths_checked = False + + def set_session(self, session: Session | None) -> None: + self.session = session + self._session_paths_checked = False + + # Indirection so we can mock calls to find_spec originated from the hook during testing + _find_spec = importlib.machinery.PathFinder.find_spec + + def find_spec( + self, + name: str, + path: Sequence[str | bytes] | None = None, + target: types.ModuleType | None = None, + ) -> importlib.machinery.ModuleSpec | None: + if self._writing_pyc: + return None + state = self.config.stash[assertstate_key] + if self._early_rewrite_bailout(name, state): + return None + state.trace(f"find_module called for: {name}") + + # Type ignored because mypy is confused about the `self` binding here. + spec = self._find_spec(name, path) # type: ignore + + if spec is None and path is not None: + # With --import-mode=importlib, PathFinder cannot find spec without modifying `sys.path`, + # causing inability to assert rewriting (#12659). + # At this point, try using the file path to find the module spec. 
+ for _path_str in path: + spec = importlib.util.spec_from_file_location(name, _path_str) + if spec is not None: + break + + if ( + # the import machinery could not find a file to import + spec is None + # this is a namespace package (without `__init__.py`) + # there's nothing to rewrite there + or spec.origin is None + # we can only rewrite source files + or not isinstance(spec.loader, importlib.machinery.SourceFileLoader) + # if the file doesn't exist, we can't rewrite it + or not os.path.exists(spec.origin) + ): + return None + else: + fn = spec.origin + + if not self._should_rewrite(name, fn, state): + return None + + return importlib.util.spec_from_file_location( + name, + fn, + loader=self, + submodule_search_locations=spec.submodule_search_locations, + ) + + def create_module( + self, spec: importlib.machinery.ModuleSpec + ) -> types.ModuleType | None: + return None # default behaviour is fine + + def exec_module(self, module: types.ModuleType) -> None: + assert module.__spec__ is not None + assert module.__spec__.origin is not None + fn = Path(module.__spec__.origin) + state = self.config.stash[assertstate_key] + + self._rewritten_names[module.__name__] = fn + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = get_cache_dir(fn) + if write: + ok = try_makedirs(cache_dir) + if not ok: + write = False + state.trace(f"read only directory: {cache_dir}") + + cache_name = fn.name[:-3] + PYC_TAIL + pyc = cache_dir / cache_name + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn, pyc, state.trace) + if co is None: + state.trace(f"rewriting {fn!r}") + source_stat, co = _rewrite_test(fn, self.config) + if write: + self._writing_pyc = True + try: + _write_pyc(state, co, source_stat, pyc) + finally: + self._writing_pyc = False + else: + state.trace(f"found cached rewritten pyc for {fn}") + exec(co, module.__dict__) + + def _early_rewrite_bailout(self, name: str, state: AssertionState) -> bool: + """A fast way to get out of rewriting modules. + + Profiling has shown that the call to PathFinder.find_spec (inside of + the find_spec from this class) is a major slowdown, so, this method + tries to filter what we're sure won't be rewritten before getting to + it. + """ + if self.session is not None and not self._session_paths_checked: + self._session_paths_checked = True + for initial_path in self.session._initialpaths: + # Make something as c:/projects/my_project/path.py -> + # ['c:', 'projects', 'my_project', 'path.py'] + parts = str(initial_path).split(os.sep) + # add 'path' to basenames to be checked. + self._basenames_to_check_rewrite.add(os.path.splitext(parts[-1])[0]) + + # Note: conftest already by default in _basenames_to_check_rewrite. + parts = name.split(".") + if parts[-1] in self._basenames_to_check_rewrite: + return False + + # For matching the name it must be as if it was a filename. 
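+ # e.g. name "pkg.sub.test_mod" becomes PurePath("pkg/sub/test_mod.py"), + # which is then matched against the configured python_files patterns + # ("test_*.py" and "*_test.py" by default).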
+ path = PurePath(*parts).with_suffix(".py") + + for pat in self.fnpats: + # if the pattern contains subdirectories ("tests/**.py" for example) we can't bail out based + # on the name alone because we need to match against the full path + if os.path.dirname(pat): + return False + if fnmatch_ex(pat, path): + return False + + if self._is_marked_for_rewrite(name, state): + return False + + state.trace(f"early skip of rewriting module: {name}") + return True + + def _should_rewrite(self, name: str, fn: str, state: AssertionState) -> bool: + # always rewrite conftest files + if os.path.basename(fn) == "conftest.py": + state.trace(f"rewriting conftest file: {fn!r}") + return True + + if self.session is not None: + if self.session.isinitpath(absolutepath(fn)): + state.trace(f"matched test file (was specified on cmdline): {fn!r}") + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + fn_path = PurePath(fn) + for pat in self.fnpats: + if fnmatch_ex(pat, fn_path): + state.trace(f"matched test file {fn!r}") + return True + + return self._is_marked_for_rewrite(name, state) + + def _is_marked_for_rewrite(self, name: str, state: AssertionState) -> bool: + try: + return self._marked_for_rewrite_cache[name] + except KeyError: + for marked in self._must_rewrite: + if name == marked or name.startswith(marked + "."): + state.trace(f"matched marked file {name!r} (from {marked!r})") + self._marked_for_rewrite_cache[name] = True + return True + + self._marked_for_rewrite_cache[name] = False + return False + + def mark_rewrite(self, *names: str) -> None: + """Mark import names as needing to be rewritten. + + The named module or package as well as any nested modules will + be rewritten on import. + """ + already_imported = ( + set(names).intersection(sys.modules).difference(self._rewritten_names) + ) + for name in already_imported: + mod = sys.modules[name] + if not AssertionRewriter.is_rewrite_disabled( + mod.__doc__ or "" + ) and not isinstance(mod.__loader__, type(self)): + self._warn_already_imported(name) + self._must_rewrite.update(names) + self._marked_for_rewrite_cache.clear() + + def _warn_already_imported(self, name: str) -> None: + from _pytest.warning_types import PytestAssertRewriteWarning + + self.config.issue_config_time_warning( + PytestAssertRewriteWarning( + f"Module already imported so cannot be rewritten; {name}" + ), + stacklevel=5, + ) + + def get_data(self, pathname: str | bytes) -> bytes: + """Optional PEP302 get_data API.""" + with open(pathname, "rb") as f: + return f.read() + + if sys.version_info >= (3, 10): + if sys.version_info >= (3, 12): + from importlib.resources.abc import TraversableResources + else: + from importlib.abc import TraversableResources + + def get_resource_reader(self, name: str) -> TraversableResources: + if sys.version_info < (3, 11): + from importlib.readers import FileReader + else: + from importlib.resources.readers import FileReader + + return FileReader(types.SimpleNamespace(path=self._rewritten_names[name])) + + +def _write_pyc_fp( + fp: IO[bytes], source_stat: os.stat_result, co: types.CodeType +) -> None: + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason to deviate. 
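+ # The 16 header bytes written below are what _read_pyc() validates on load: + # a 4-byte magic number, 4 zero flag bytes (PEP 552, i.e. timestamp-based + # invalidation), then the source mtime and size as 32-bit little-endian + # values, followed by the marshalled code object.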
+    fp.write(importlib.util.MAGIC_NUMBER)
+    # https://www.python.org/dev/peps/pep-0552/
+    flags = b"\x00\x00\x00\x00"
+    fp.write(flags)
+    # as of now, bytecode header expects 32-bit numbers for size and mtime (#4903)
+    mtime = int(source_stat.st_mtime) & 0xFFFFFFFF
+    size = source_stat.st_size & 0xFFFFFFFF
+    # "<LL" stands for 2 unsigned longs, little-endian.
+    fp.write(struct.pack("<LL", mtime, size))
+    fp.write(marshal.dumps(co))
+
+
+def _write_pyc(
+    state: AssertionState,
+    co: types.CodeType,
+    source_stat: os.stat_result,
+    pyc: Path,
+) -> bool:
+    proc_pyc = f"{pyc}.{os.getpid()}"
+    try:
+        with open(proc_pyc, "wb") as fp:
+            _write_pyc_fp(fp, source_stat, co)
+    except OSError as e:
+        state.trace(f"error writing pyc file at {proc_pyc}: errno={e.errno}")
+        return False
+
+    try:
+        os.replace(proc_pyc, pyc)
+    except OSError as e:
+        state.trace(f"error writing pyc file at {pyc}: {e}")
+        # we ignore any failure to write the cache file
+        # there are many reasons, permission-denied, pycache dir being a
+        # file etc.
+        return False
+    return True
+
+
+def _rewrite_test(fn: Path, config: Config) -> tuple[os.stat_result, types.CodeType]:
+    """Read and rewrite *fn* and return the code object."""
+    stat = os.stat(fn)
+    source = fn.read_bytes()
+    strfn = str(fn)
+    tree = ast.parse(source, filename=strfn)
+    rewrite_asserts(tree, source, strfn, config)
+    co = compile(tree, strfn, "exec", dont_inherit=True)
+    return stat, co
+
+
+def _read_pyc(
+    source: Path, pyc: Path, trace: Callable[[str], None] = lambda x: None
+) -> types.CodeType | None:
+    """Possibly read a pytest pyc containing rewritten code.
+
+    Return rewritten code if successful or None if not.
+    """
+    try:
+        fp = open(pyc, "rb")
+    except OSError:
+        return None
+    with fp:
+        try:
+            stat_result = os.stat(source)
+            mtime = int(stat_result.st_mtime)
+            size = stat_result.st_size
+            data = fp.read(16)
+        except OSError as e:
+            trace(f"_read_pyc({source}): OSError {e}")
+            return None
+        # Check for invalid or out of date pyc file.
+        if len(data) != (16):
+            trace(f"_read_pyc({source}): invalid pyc (too short)")
+            return None
+        if data[:4] != importlib.util.MAGIC_NUMBER:
+            trace(f"_read_pyc({source}): invalid pyc (bad magic number)")
+            return None
+        if data[4:8] != b"\x00\x00\x00\x00":
+            trace(f"_read_pyc({source}): invalid pyc (unsupported flags)")
+            return None
+        mtime_data = data[8:12]
+        if int.from_bytes(mtime_data, "little") != mtime & 0xFFFFFFFF:
+            trace(f"_read_pyc({source}): out of date")
+            return None
+        size_data = data[12:16]
+        if int.from_bytes(size_data, "little") != size & 0xFFFFFFFF:
+            trace(f"_read_pyc({source}): invalid pyc (incorrect size)")
+            return None
+        try:
+            co = marshal.load(fp)
+        except Exception as e:
+            trace(f"_read_pyc({source}): marshal.load error {e}")
+            return None
+        if not isinstance(co, types.CodeType):
+            trace(f"_read_pyc({source}): not a code object")
+            return None
+        return co
+
+
+def rewrite_asserts(
+    mod: ast.Module,
+    source: bytes,
+    module_path: str | None = None,
+    config: Config | None = None,
+) -> None:
+    """Rewrite the assert statements in mod."""
+    AssertionRewriter(module_path, config, source).run(mod)
+
+
+def _saferepr(obj: object) -> str:
+    r"""Get a safe repr of an object for assertion error messages.
+
+    The assertion formatting (util.format_explanation()) requires
+    newlines to be escaped since they are a special character for it.
+    Normally assertion.util.format_explanation() does this but for a
+    custom repr it is possible to contain one of the special escape
+    sequences, especially '\n{' and '\n}' are likely to be present in
+    JSON reprs.
+ """ + if isinstance(obj, types.MethodType): + # for bound methods, skip redundant information + return obj.__name__ + + maxsize = _get_maxsize_for_saferepr(util._config) + if not maxsize: + return saferepr_unlimited(obj).replace("\n", "\\n") + return saferepr(obj, maxsize=maxsize).replace("\n", "\\n") + + +def _get_maxsize_for_saferepr(config: Config | None) -> int | None: + """Get `maxsize` configuration for saferepr based on the given config object.""" + if config is None: + verbosity = 0 + else: + verbosity = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + if verbosity >= 2: + return None + if verbosity >= 1: + return DEFAULT_REPR_MAX_SIZE * 10 + return DEFAULT_REPR_MAX_SIZE + + +def _format_assertmsg(obj: object) -> str: + r"""Format the custom assertion message given. + + For strings this simply replaces newlines with '\n~' so that + util.format_explanation() will preserve them instead of escaping + newlines. For other objects saferepr() is used first. + """ + # reprlib appears to have a bug which means that if a string + # contains a newline it gets escaped, however if an object has a + # .__repr__() which contains newlines it does not get escaped. + # However in either case we want to preserve the newline. + replaces = [("\n", "\n~"), ("%", "%%")] + if not isinstance(obj, str): + obj = saferepr(obj, _get_maxsize_for_saferepr(util._config)) + replaces.append(("\\n", "\n~")) + + for r1, r2 in replaces: + obj = obj.replace(r1, r2) + + return obj + + +def _should_repr_global_name(obj: object) -> bool: + if callable(obj): + # For pytest fixtures the __repr__ method provides more information than the function name. + return isinstance(obj, FixtureFunctionDefinition) + + try: + return not hasattr(obj, "__name__") + except Exception: + return True + + +def _format_boolop(explanations: Iterable[str], is_or: bool) -> str: + explanation = "(" + ((is_or and " or ") or " and ").join(explanations) + ")" + return explanation.replace("%", "%%") + + +def _call_reprcompare( + ops: Sequence[str], + results: Sequence[bool], + expls: Sequence[str], + each_obj: Sequence[object], +) -> str: + for i, res, expl in zip(range(len(ops)), results, expls): + try: + done = not res + except Exception: + done = True + if done: + break + if util._reprcompare is not None: + custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1]) + if custom is not None: + return custom + return expl + + +def _call_assertion_pass(lineno: int, orig: str, expl: str) -> None: + if util._assertion_pass is not None: + util._assertion_pass(lineno, orig, expl) + + +def _check_if_assertion_pass_impl() -> bool: + """Check if any plugins implement the pytest_assertion_pass hook + in order not to generate explanation unnecessarily (might be expensive).""" + return True if util._assertion_pass else False + + +UNARY_MAP = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"} + +BINOP_MAP = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in", + ast.MatMult: "@", +} + + +def traverse_node(node: ast.AST) -> Iterator[ast.AST]: + """Recursively yield node and all its children in depth-first order.""" + yield node + for child in 
ast.iter_child_nodes(node): + yield from traverse_node(child) + + +@functools.lru_cache(maxsize=1) +def _get_assertion_exprs(src: bytes) -> dict[int, str]: + """Return a mapping from {lineno: "assertion test expression"}.""" + ret: dict[int, str] = {} + + depth = 0 + lines: list[str] = [] + assert_lineno: int | None = None + seen_lines: set[int] = set() + + def _write_and_reset() -> None: + nonlocal depth, lines, assert_lineno, seen_lines + assert assert_lineno is not None + ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\") + depth = 0 + lines = [] + assert_lineno = None + seen_lines = set() + + tokens = tokenize.tokenize(io.BytesIO(src).readline) + for tp, source, (lineno, offset), _, line in tokens: + if tp == tokenize.NAME and source == "assert": + assert_lineno = lineno + elif assert_lineno is not None: + # keep track of depth for the assert-message `,` lookup + if tp == tokenize.OP and source in "([{": + depth += 1 + elif tp == tokenize.OP and source in ")]}": + depth -= 1 + + if not lines: + lines.append(line[offset:]) + seen_lines.add(lineno) + # a non-nested comma separates the expression from the message + elif depth == 0 and tp == tokenize.OP and source == ",": + # one line assert with message + if lineno in seen_lines and len(lines) == 1: + offset_in_trimmed = offset + len(lines[-1]) - len(line) + lines[-1] = lines[-1][:offset_in_trimmed] + # multi-line assert with message + elif lineno in seen_lines: + lines[-1] = lines[-1][:offset] + # multi line assert with escaped newline before message + else: + lines.append(line[:offset]) + _write_and_reset() + elif tp in {tokenize.NEWLINE, tokenize.ENDMARKER}: + _write_and_reset() + elif lines and lineno not in seen_lines: + lines.append(line) + seen_lines.add(lineno) + + return ret + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and rewrite them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it rewrites the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false and calls pytest_assertion_pass hook + if expression is true. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :expl_stmts: The AST statements which will be executed to get + data from the assertion. 
This is the code which will construct + the detailed assertion message that is used in the AssertionError + or for the pytest_assertion_pass hook. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + :scope: A tuple containing the current scope used for variables_overwrite. + + :variables_overwrite: A dict filled with references to variables + that change value within an assert. This happens when a variable is + reassigned with the walrus operator + + This state, except the variables_overwrite, is reset on every new assert + statement visited and used by the other visitors. + """ + + def __init__( + self, module_path: str | None, config: Config | None, source: bytes + ) -> None: + super().__init__() + self.module_path = module_path + self.config = config + if config is not None: + self.enable_assertion_pass_hook = config.getini( + "enable_assertion_pass_hook" + ) + else: + self.enable_assertion_pass_hook = False + self.source = source + self.scope: tuple[ast.AST, ...] = () + self.variables_overwrite: defaultdict[tuple[ast.AST, ...], dict[str, str]] = ( + defaultdict(dict) + ) + + def run(self, mod: ast.Module) -> None: + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + + # We'll insert some special imports at the top of the module, but after any + # docstrings and __future__ imports, so first figure out where that is. + doc = getattr(mod, "docstring", None) + expect_docstring = doc is None + if doc is not None and self.is_rewrite_disabled(doc): + return + pos = 0 + item = None + for item in mod.body: + if ( + expect_docstring + and isinstance(item, ast.Expr) + and isinstance(item.value, ast.Constant) + and isinstance(item.value.value, str) + ): + doc = item.value.value + if self.is_rewrite_disabled(doc): + return + expect_docstring = False + elif ( + isinstance(item, ast.ImportFrom) + and item.level == 0 + and item.module == "__future__" + ): + pass + else: + break + pos += 1 + # Special case: for a decorated function, set the lineno to that of the + # first decorator, not the `def`. Issue #4984. + if isinstance(item, ast.FunctionDef) and item.decorator_list: + lineno = item.decorator_list[0].lineno + else: + lineno = item.lineno + # Now actually insert the special imports. + if sys.version_info >= (3, 10): + aliases = [ + ast.alias("builtins", "@py_builtins", lineno=lineno, col_offset=0), + ast.alias( + "_pytest.assertion.rewrite", + "@pytest_ar", + lineno=lineno, + col_offset=0, + ), + ] + else: + aliases = [ + ast.alias("builtins", "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar"), + ] + imports = [ + ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases + ] + mod.body[pos:pos] = imports + + # Collect asserts. 
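# -- Editor's sketch (illustrative only; not part of the vendored pytest code) --
# The traversal below walks every statement list in the module and splices in
# the replacement statements for each assert. A much simplified, runnable
# analogue using ast.NodeTransformer (pytest walks manually so it can track
# scopes for the walrus-operator bookkeeping):
import ast

class DropAsserts(ast.NodeTransformer):
    # Toy transform: turn every `assert` into `pass`; pytest instead expands
    # it into an if/raise carrying a detailed explanation.
    def visit_Assert(self, node: ast.Assert) -> ast.stmt:
        return ast.copy_location(ast.Pass(), node)

tree = ast.parse("assert 1 + 1 == 2")
tree = ast.fix_missing_locations(DropAsserts().visit(tree))
assert isinstance(tree.body[0], ast.Pass)
# -- end editor's sketch --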
+ self.scope = (mod,) + nodes: list[ast.AST | Sentinel] = [mod] + while nodes: + node = nodes.pop() + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)): + self.scope = tuple((*self.scope, node)) + nodes.append(_SCOPE_END_MARKER) + if node == _SCOPE_END_MARKER: + self.scope = self.scope[:-1] + continue + assert isinstance(node, ast.AST) + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new: list[ast.AST] = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif ( + isinstance(field, ast.AST) + # Don't recurse into expressions as they can't contain + # asserts. + and not isinstance(field, ast.expr) + ): + nodes.append(field) + + @staticmethod + def is_rewrite_disabled(docstring: str) -> bool: + return "PYTEST_DONT_REWRITE" in docstring + + def variable(self) -> str: + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr: ast.expr) -> ast.Name: + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.copy_location(ast.Name(name, ast.Load()), expr) + + def display(self, expr: ast.expr) -> ast.expr: + """Call saferepr on the expression.""" + return self.helper("_saferepr", expr) + + def helper(self, name: str, *args: ast.expr) -> ast.expr: + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, name, ast.Load()) + return ast.Call(attr, list(args), []) + + def builtin(self, name: str) -> ast.Attribute: + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr: ast.expr) -> str: + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self) -> None: + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). + """ + self.explanation_specifiers: dict[str, ast.expr] = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr: ast.expr) -> ast.Name: + """Format the %-formatted string with current format context. + + The expl_expr should be an str ast.expr instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .expl_stmts and + return the ast.Name instance of the formatted string. 
+ """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys: list[ast.expr | None] = [ast.Constant(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + if self.enable_assertion_pass_hook: + self.format_variables.append(name) + self.expl_stmts.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node: ast.AST) -> tuple[ast.Name, str]: + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_: ast.Assert) -> list[ast.stmt]: + """Return the AST statements to replace the ast.Assert instance. + + This rewrites the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + """ + if isinstance(assert_.test, ast.Tuple) and len(assert_.test.elts) >= 1: + import warnings + + from _pytest.warning_types import PytestAssertRewriteWarning + + # TODO: This assert should not be needed. + assert self.module_path is not None + warnings.warn_explicit( + PytestAssertRewriteWarning( + "assertion is always true, perhaps remove parentheses?" + ), + category=None, + filename=self.module_path, + lineno=assert_.lineno, + ) + + self.statements: list[ast.stmt] = [] + self.variables: list[str] = [] + self.variable_counter = itertools.count() + + if self.enable_assertion_pass_hook: + self.format_variables: list[str] = [] + + self.stack: list[dict[str, ast.expr]] = [] + self.expl_stmts: list[ast.stmt] = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. 
+        top_condition, explanation = self.visit(assert_.test)
+
+        negation = ast.UnaryOp(ast.Not(), top_condition)
+
+        if self.enable_assertion_pass_hook:  # Experimental pytest_assertion_pass hook
+            msg = self.pop_format_context(ast.Constant(explanation))
+
+            # Failed
+            if assert_.msg:
+                assertmsg = self.helper("_format_assertmsg", assert_.msg)
+                gluestr = "\n>assert "
+            else:
+                assertmsg = ast.Constant("")
+                gluestr = "assert "
+            err_explanation = ast.BinOp(ast.Constant(gluestr), ast.Add(), msg)
+            err_msg = ast.BinOp(assertmsg, ast.Add(), err_explanation)
+            err_name = ast.Name("AssertionError", ast.Load())
+            fmt = self.helper("_format_explanation", err_msg)
+            exc = ast.Call(err_name, [fmt], [])
+            raise_ = ast.Raise(exc, None)
+            statements_fail = []
+            statements_fail.extend(self.expl_stmts)
+            statements_fail.append(raise_)
+
+            # Passed
+            fmt_pass = self.helper("_format_explanation", msg)
+            orig = _get_assertion_exprs(self.source)[assert_.lineno]
+            hook_call_pass = ast.Expr(
+                self.helper(
+                    "_call_assertion_pass",
+                    ast.Constant(assert_.lineno),
+                    ast.Constant(orig),
+                    fmt_pass,
+                )
+            )
+            # If any hooks implement assert_pass hook
+            hook_impl_test = ast.If(
+                self.helper("_check_if_assertion_pass_impl"),
+                [*self.expl_stmts, hook_call_pass],
+                [],
+            )
+            statements_pass: list[ast.stmt] = [hook_impl_test]
+
+            # Test for assertion condition
+            main_test = ast.If(negation, statements_fail, statements_pass)
+            self.statements.append(main_test)
+            if self.format_variables:
+                variables: list[ast.expr] = [
+                    ast.Name(name, ast.Store()) for name in self.format_variables
+                ]
+                clear_format = ast.Assign(variables, ast.Constant(None))
+                self.statements.append(clear_format)
+
+        else:  # Original assertion rewriting
+            # Create failure message.
+            body = self.expl_stmts
+            self.statements.append(ast.If(negation, body, []))
+            if assert_.msg:
+                assertmsg = self.helper("_format_assertmsg", assert_.msg)
+                explanation = "\n>assert " + explanation
+            else:
+                assertmsg = ast.Constant("")
+                explanation = "assert " + explanation
+            template = ast.BinOp(assertmsg, ast.Add(), ast.Constant(explanation))
+            msg = self.pop_format_context(template)
+            fmt = self.helper("_format_explanation", msg)
+            err_name = ast.Name("AssertionError", ast.Load())
+            exc = ast.Call(err_name, [fmt], [])
+            raise_ = ast.Raise(exc, None)
+
+            body.append(raise_)
+
+        # Clear temporary variables by setting them to None.
+        if self.variables:
+            variables = [ast.Name(name, ast.Store()) for name in self.variables]
+            clear = ast.Assign(variables, ast.Constant(None))
+            self.statements.append(clear)
+        # Fix locations (line numbers/column offsets).
+        for stmt in self.statements:
+            for node in traverse_node(stmt):
+                if getattr(node, "lineno", None) is None:
+                    # apply the assertion location to all generated ast nodes without source location
+                    # and preserve the location of existing nodes or generated nodes with a correct location.
+                    ast.copy_location(node, assert_)
+        return self.statements
+
+    def visit_NamedExpr(self, name: ast.NamedExpr) -> tuple[ast.NamedExpr, str]:
+        # This method handles the 'walrus operator' repr of the target
+        # name if it's a local variable or _should_repr_global_name()
+        # thinks it's acceptable.
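# -- Editor's sketch (illustrative only; not part of the vendored pytest code) --
# At runtime, the IfExp assembled below behaves roughly like this hypothetical
# helper: local names get their repr displayed, other names may fall back to
# the bare identifier:
def display_name(identifier, value, local_names, should_repr_global):
    if identifier in local_names or should_repr_global(value):
        return repr(value)
    return identifier

assert display_name("x", 3, {"x"}, lambda v: False) == "3"
assert display_name("os", object(), set(), lambda v: not hasattr(v, "__name__")) != "os"
# -- end editor's sketch --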
+ locs = ast.Call(self.builtin("locals"), [], []) + target_id = name.target.id + inlocs = ast.Compare(ast.Constant(target_id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(target_id)) + return name, self.explanation_param(expr) + + def visit_Name(self, name: ast.Name) -> tuple[ast.Name, str]: + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. + locs = ast.Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Constant(name.id), [ast.In()], [locs]) + dorepr = self.helper("_should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Constant(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop: ast.BoolOp) -> tuple[ast.Name, str]: + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.expl_stmts + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuiting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner: list[ast.stmt] = [] + # cond is set in a prior loop iteration below + self.expl_stmts.append(ast.If(cond, fail_inner, [])) # noqa: F821 + self.expl_stmts = fail_inner + # Check if the left operand is a ast.NamedExpr and the value has already been visited + if ( + isinstance(v, ast.Compare) + and isinstance(v.left, ast.NamedExpr) + and v.left.target.id + in [ + ast_expr.id + for ast_expr in boolop.values[:i] + if hasattr(ast_expr, "id") + ] + ): + pytest_temp = self.variable() + self.variables_overwrite[self.scope][v.left.target.id] = v.left # type:ignore[assignment] + v.left.target.id = pytest_temp + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Constant(expl)) + call = ast.Call(app, [expl_format], []) + self.expl_stmts.append(ast.Expr(call)) + if i < levels: + cond: ast.expr = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner: list[ast.stmt] = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.expl_stmts = fail_save + expl_template = self.helper("_format_boolop", expl_list, ast.Constant(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary: ast.UnaryOp) -> tuple[ast.Name, str]: + pattern = UNARY_MAP[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.copy_location(ast.UnaryOp(unary.op, operand_res), unary)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop: ast.BinOp) -> tuple[ast.Name, str]: + symbol = BINOP_MAP[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = f"({left_expl} {symbol} {right_expl})" + res = self.assign( + ast.copy_location(ast.BinOp(left_expr, binop.op, right_expr), binop) + ) + return res, explanation + + def visit_Call(self, call: ast.Call) -> tuple[ast.Name, str]: + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in 
call.args: + if isinstance(arg, ast.Name) and arg.id in self.variables_overwrite.get( + self.scope, {} + ): + arg = self.variables_overwrite[self.scope][arg.id] # type:ignore[assignment] + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + if isinstance( + keyword.value, ast.Name + ) and keyword.value.id in self.variables_overwrite.get(self.scope, {}): + keyword.value = self.variables_overwrite[self.scope][keyword.value.id] # type:ignore[assignment] + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: # **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "{}({})".format(func_expl, ", ".join(arg_expls)) + new_call = ast.copy_location(ast.Call(new_func, new_args, new_kwargs), call) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = f"{res_expl}\n{{{res_expl} = {expl}\n}}" + return res, outer_expl + + def visit_Starred(self, starred: ast.Starred) -> tuple[ast.Starred, str]: + # A Starred node can appear in a function call. + res, expl = self.visit(starred.value) + new_starred = ast.Starred(res, starred.ctx) + return new_starred, "*" + expl + + def visit_Attribute(self, attr: ast.Attribute) -> tuple[ast.Name, str]: + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign( + ast.copy_location(ast.Attribute(value, attr.attr, ast.Load()), attr) + ) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp: ast.Compare) -> tuple[ast.expr, str]: + self.push_format_context() + # We first check if we have overwritten a variable in the previous assert + if isinstance( + comp.left, ast.Name + ) and comp.left.id in self.variables_overwrite.get(self.scope, {}): + comp.left = self.variables_overwrite[self.scope][comp.left.id] # type:ignore[assignment] + if isinstance(comp.left, ast.NamedExpr): + self.variables_overwrite[self.scope][comp.left.target.id] = comp.left # type:ignore[assignment] + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (ast.Compare, ast.BoolOp)): + left_expl = f"({left_expl})" + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names: list[ast.expr] = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls: list[ast.expr] = [] + syms: list[ast.expr] = [] + results = [left_res] + for i, op, next_operand in it: + if ( + isinstance(next_operand, ast.NamedExpr) + and isinstance(left_res, ast.Name) + and next_operand.target.id == left_res.id + ): + next_operand.target.id = self.variable() + self.variables_overwrite[self.scope][left_res.id] = next_operand # type:ignore[assignment] + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (ast.Compare, ast.BoolOp)): + next_expl = f"({next_expl})" + results.append(next_res) + sym = BINOP_MAP[op.__class__] + syms.append(ast.Constant(sym)) + expl = f"{left_expl} {sym} {next_expl}" + expls.append(ast.Constant(expl)) + res_expr = ast.copy_location(ast.Compare(left_res, [op], [next_res]), comp) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, 
next_expl
+        # Use pytest.assertion.util._reprcompare if that's available.
+        expl_call = self.helper(
+            "_call_reprcompare",
+            ast.Tuple(syms, ast.Load()),
+            ast.Tuple(load_names, ast.Load()),
+            ast.Tuple(expls, ast.Load()),
+            ast.Tuple(results, ast.Load()),
+        )
+        if len(comp.ops) > 1:
+            res: ast.expr = ast.BoolOp(ast.And(), load_names)
+        else:
+            res = load_names[0]
+
+        return res, self.explanation_param(self.pop_format_context(expl_call))
+
+
+def try_makedirs(cache_dir: Path) -> bool:
+    """Attempt to create the given directory and its sub-directories.
+
+    Returns True if successful or if the directory already exists.
+    """
+    try:
+        os.makedirs(cache_dir, exist_ok=True)
+    except (FileNotFoundError, NotADirectoryError, FileExistsError):
+        # One of the path components was not a directory:
+        # - we're in a zip file
+        # - it is a file
+        return False
+    except PermissionError:
+        return False
+    except OSError as e:
+        # as of now, EROFS doesn't have an equivalent OSError-subclass
+        #
+        # squashfuse_ll returns ENOSYS "OSError: [Errno 38] Function not
+        # implemented" for a read-only error
+        if e.errno in {errno.EROFS, errno.ENOSYS}:
+            return False
+        raise
+    return True
+
+
+def get_cache_dir(file_path: Path) -> Path:
+    """Return the cache directory to write .pyc files for the given .py file path."""
+    if sys.pycache_prefix:
+        # given:
+        #   prefix = '/tmp/pycs'
+        #   path = '/home/user/proj/test_app.py'
+        # we want:
+        #   '/tmp/pycs/home/user/proj'
+        return Path(sys.pycache_prefix) / Path(*file_path.parts[1:-1])
+    else:
+        # classic pycache directory
+        return file_path.parent / "__pycache__"
diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/truncate.py b/venv/lib/python3.10/site-packages/_pytest/assertion/truncate.py
new file mode 100644
index 0000000000000000000000000000000000000000..4854a62ba6b73c76dc1fa16300f6c85d920b4938
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/_pytest/assertion/truncate.py
@@ -0,0 +1,137 @@
+"""Utilities for truncating assertion output.
+
+Current default behaviour is to truncate assertion explanations at a
+configurable number of terminal lines (8 by default), unless running with an
+assertions verbosity level of at least 2 or running on CI.
+"""
+
+from __future__ import annotations
+
+from _pytest.assertion import util
+from _pytest.config import Config
+from _pytest.nodes import Item
+
+
+DEFAULT_MAX_LINES = 8
+DEFAULT_MAX_CHARS = DEFAULT_MAX_LINES * 80
+USAGE_MSG = "use '-vv' to show"
+
+
+def truncate_if_required(explanation: list[str], item: Item) -> list[str]:
+    """Truncate this assertion explanation if the given test item is eligible."""
+    should_truncate, max_lines, max_chars = _get_truncation_parameters(item)
+    if should_truncate:
+        return _truncate_explanation(
+            explanation,
+            max_lines=max_lines,
+            max_chars=max_chars,
+        )
+    return explanation
+
+
+def _get_truncation_parameters(item: Item) -> tuple[bool, int, int]:
+    """Return the truncation parameters related to the given item, as (should truncate, max lines, max chars)."""
+    # We do not need to truncate if one of the following conditions is met:
+    # 1. Verbosity level is 2 or more;
+    # 2. Test is being run in a CI environment;
+    # 3. Both truncation_limit_lines and truncation_limit_chars
+    #    .ini parameters are set to 0 explicitly.
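# -- Editor's sketch (illustrative only; not part of the vendored pytest code) --
# Both limits come from ini options; a project could set, e.g. in pytest.ini:
#
#     [pytest]
#     truncation_limit_lines = 10
#     truncation_limit_chars = 1000
#
# Setting both to 0 disables truncation. The decision reduces to:
def wants_truncation(max_lines: int, max_chars: int, verbosity: int, on_ci: bool) -> bool:
    return verbosity < 2 and not on_ci and (max_lines > 0 or max_chars > 0)

assert wants_truncation(8, 640, verbosity=0, on_ci=False)
assert not wants_truncation(0, 0, verbosity=0, on_ci=False)    # limits zeroed out
assert not wants_truncation(8, 640, verbosity=2, on_ci=False)  # -vv
assert not wants_truncation(8, 640, verbosity=0, on_ci=True)   # CI
# -- end editor's sketch --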
+    max_lines = item.config.getini("truncation_limit_lines")
+    max_lines = int(max_lines if max_lines is not None else DEFAULT_MAX_LINES)
+
+    max_chars = item.config.getini("truncation_limit_chars")
+    max_chars = int(max_chars if max_chars is not None else DEFAULT_MAX_CHARS)
+
+    verbose = item.config.get_verbosity(Config.VERBOSITY_ASSERTIONS)
+
+    should_truncate = verbose < 2 and not util.running_on_ci()
+    should_truncate = should_truncate and (max_lines > 0 or max_chars > 0)
+
+    return should_truncate, max_lines, max_chars
+
+
+def _truncate_explanation(
+    input_lines: list[str],
+    max_lines: int,
+    max_chars: int,
+) -> list[str]:
+    """Truncate given list of strings that makes up the assertion explanation.
+
+    Truncates to either max_lines, or max_chars - whichever the input reaches
+    first, taking the truncation explanation into account. The remaining lines
+    will be replaced by a usage message.
+    """
+    # Check if truncation required
+    input_char_count = len("".join(input_lines))
+    # The length of the truncation explanation depends on the number of lines
+    # removed but is at least 68 characters:
+    # The real value is
+    # 64 (for the base message:
+    # '...\n...Full output truncated (1 line hidden), use '-vv' to show")'
+    # )
+    # + 1 (for plural)
+    # + int(math.log10(len(input_lines) - max_lines)) (number of hidden lines, at least 1)
+    # + 3 for the '...' added to the truncated line
+    # But if there's more than 100 lines it's very likely that we're going to
+    # truncate, so we don't need the exact value using log10.
+    tolerable_max_chars = (
+        max_chars + 70  # 64 + 1 (for plural) + 2 (for '99') + 3 for '...'
+    )
+    # The truncation explanation adds two lines to the output
+    tolerable_max_lines = max_lines + 2
+    if (
+        len(input_lines) <= tolerable_max_lines
+        and input_char_count <= tolerable_max_chars
+    ):
+        return input_lines
+    # Truncate first to max_lines, and then truncate to max_chars if necessary
+    if max_lines > 0:
+        truncated_explanation = input_lines[:max_lines]
+    else:
+        truncated_explanation = input_lines
+    truncated_char = True
+    # We reevaluate the need to truncate chars following removal of some lines
+    if len("".join(truncated_explanation)) > tolerable_max_chars and max_chars > 0:
+        truncated_explanation = _truncate_by_char_count(
+            truncated_explanation, max_chars
+        )
+    else:
+        truncated_char = False
+
+    if truncated_explanation == input_lines:
+        # No truncation happened, so we do not need to add any explanations
+        return truncated_explanation
+
+    truncated_line_count = len(input_lines) - len(truncated_explanation)
+    if truncated_explanation[-1]:
+        # Add ellipsis and take into account part-truncated final line
+        truncated_explanation[-1] = truncated_explanation[-1] + "..."
+        if truncated_char:
+            # It's possible that we did not remove any char from this line
+            truncated_line_count += 1
+    else:
+        # Add proper ellipsis when we were able to fit a full line exactly
+        truncated_explanation[-1] = "..."
+ return [ + *truncated_explanation, + "", + f"...Full output truncated ({truncated_line_count} line" + f"{'' if truncated_line_count == 1 else 's'} hidden), {USAGE_MSG}", + ] + + +def _truncate_by_char_count(input_lines: list[str], max_chars: int) -> list[str]: + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/venv/lib/python3.10/site-packages/_pytest/assertion/util.py b/venv/lib/python3.10/site-packages/_pytest/assertion/util.py new file mode 100644 index 0000000000000000000000000000000000000000..c545e6cd20c6e2c47c0b183b5e1dbe44c286a6f2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/assertion/util.py @@ -0,0 +1,621 @@ +# mypy: allow-untyped-defs +"""Utilities for assertion debugging.""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import os +import pprint +from typing import Any +from typing import Literal +from typing import Protocol +from unicodedata import normalize + +from _pytest import outcomes +import _pytest._code +from _pytest._io.pprint import PrettyPrinter +from _pytest._io.saferepr import saferepr +from _pytest._io.saferepr import saferepr_unlimited +from _pytest.config import Config + + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. +_reprcompare: Callable[[str, object, object], str | None] | None = None + +# Works similarly as _reprcompare attribute. Is populated with the hook call +# when pytest_runtest_setup is called. +_assertion_pass: Callable[[int, str, str], None] | None = None + +# Config object which is assigned during pytest_runtest_protocol. +_config: Config | None = None + + +class _HighlightFunc(Protocol): + def __call__(self, source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Apply highlighting to the given source.""" + + +def dummy_highlighter(source: str, lexer: Literal["diff", "python"] = "python") -> str: + """Dummy highlighter that returns the text unprocessed. + + Needed for _notin_text, as the diff gets post-processed to only show the "+" part. + """ + return source + + +def format_explanation(explanation: str) -> str: + r"""Format an explanation. + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. 
+ """ + lines = _split_explanation(explanation) + result = _format_lines(lines) + return "\n".join(result) + + +def _split_explanation(explanation: str) -> list[str]: + r"""Return a list of individual lines in the explanation. + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or "").split("\n") + lines = [raw_lines[0]] + for values in raw_lines[1:]: + if values and values[0] in ["{", "}", "~", ">"]: + lines.append(values) + else: + lines[-1] += "\\n" + values + return lines + + +def _format_lines(lines: Sequence[str]) -> list[str]: + """Format the individual lines. + + This will replace the '{', '}' and '~' characters of our mini formatting + language with the proper 'where ...', 'and ...' and ' + ...' text, taking + care of indentation along the way. + + Return a list of formatted lines. + """ + result = list(lines[:1]) + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith("{"): + if stackcnt[-1]: + s = "and " + else: + s = "where " + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(" +" + " " * (len(stack) - 1) + s + line[1:]) + elif line.startswith("}"): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ["~", ">"] + stack[-1] += 1 + indent = len(stack) if line.startswith("~") else len(stack) - 1 + result.append(" " * indent + line[1:]) + assert len(stack) == 1 + return result + + +def issequence(x: Any) -> bool: + return isinstance(x, collections.abc.Sequence) and not isinstance(x, str) + + +def istext(x: Any) -> bool: + return isinstance(x, str) + + +def isdict(x: Any) -> bool: + return isinstance(x, dict) + + +def isset(x: Any) -> bool: + return isinstance(x, (set, frozenset)) + + +def isnamedtuple(obj: Any) -> bool: + return isinstance(obj, tuple) and getattr(obj, "_fields", None) is not None + + +def isdatacls(obj: Any) -> bool: + return getattr(obj, "__dataclass_fields__", None) is not None + + +def isattrs(obj: Any) -> bool: + return getattr(obj, "__attrs_attrs__", None) is not None + + +def isiterable(obj: Any) -> bool: + try: + iter(obj) + return not istext(obj) + except Exception: + return False + + +def has_default_eq( + obj: object, +) -> bool: + """Check if an instance of an object contains the default eq + + First, we check if the object's __eq__ attribute has __code__, + if so, we check the equally of the method code filename (__code__.co_filename) + to the default one generated by the dataclass and attr module + for dataclasses the default co_filename is , for attrs class, the __eq__ should contain "attrs eq generated" + """ + # inspired from https://github.com/willmcgugan/rich/blob/07d51ffc1aee6f16bd2e5a25b4e82850fb9ed778/rich/pretty.py#L68 + if hasattr(obj.__eq__, "__code__") and hasattr(obj.__eq__.__code__, "co_filename"): + code_filename = obj.__eq__.__code__.co_filename + + if isattrs(obj): + return "attrs generated " in code_filename + + return code_filename == "" # data class + return True + + +def assertrepr_compare( + config, op: str, left: Any, right: Any, use_ascii: bool = False +) -> list[str] | None: + """Return specialised explanations for some operators/operands.""" + verbose = config.get_verbosity(Config.VERBOSITY_ASSERTIONS) + + # Strings which normalize equal are often hard to distinguish when printed; use ascii() to make this easier. + # See issue #3246. 
+ use_ascii = ( + isinstance(left, str) + and isinstance(right, str) + and normalize("NFD", left) == normalize("NFD", right) + ) + + if verbose > 1: + left_repr = saferepr_unlimited(left, use_ascii=use_ascii) + right_repr = saferepr_unlimited(right, use_ascii=use_ascii) + else: + # XXX: "15 chars indentation" is wrong + # ("E AssertionError: assert "); should use term width. + maxsize = ( + 80 - 15 - len(op) - 2 + ) // 2 # 15 chars indentation, 1 space around op + + left_repr = saferepr(left, maxsize=maxsize, use_ascii=use_ascii) + right_repr = saferepr(right, maxsize=maxsize, use_ascii=use_ascii) + + summary = f"{left_repr} {op} {right_repr}" + highlighter = config.get_terminal_writer()._highlight + + explanation = None + try: + if op == "==": + explanation = _compare_eq_any(left, right, highlighter, verbose) + elif op == "not in": + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + elif op == "!=": + if isset(left) and isset(right): + explanation = ["Both sets are equal"] + elif op == ">=": + if isset(left) and isset(right): + explanation = _compare_gte_set(left, right, highlighter, verbose) + elif op == "<=": + if isset(left) and isset(right): + explanation = _compare_lte_set(left, right, highlighter, verbose) + elif op == ">": + if isset(left) and isset(right): + explanation = _compare_gt_set(left, right, highlighter, verbose) + elif op == "<": + if isset(left) and isset(right): + explanation = _compare_lt_set(left, right, highlighter, verbose) + + except outcomes.Exit: + raise + except Exception: + repr_crash = _pytest._code.ExceptionInfo.from_current()._getreprcrash() + explanation = [ + f"(pytest_assertion plugin: representation of details failed: {repr_crash}.", + " Probably an object has a faulty __repr__.)", + ] + + if not explanation: + return None + + if explanation[0] != "": + explanation = ["", *explanation] + return [summary, *explanation] + + +def _compare_eq_any( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + explanation = [] + if istext(left) and istext(right): + explanation = _diff_text(left, right, highlighter, verbose) + else: + from _pytest.python_api import ApproxBase + + if isinstance(left, ApproxBase) or isinstance(right, ApproxBase): + # Although the common order should be obtained == expected, this ensures both ways + approx_side = left if isinstance(left, ApproxBase) else right + other_side = right if isinstance(left, ApproxBase) else left + + explanation = approx_side._repr_compare(other_side) + elif type(left) is type(right) and ( + isdatacls(left) or isattrs(left) or isnamedtuple(left) + ): + # Note: unlike dataclasses/attrs, namedtuples compare only the + # field values, not the type or field names. But this branch + # intentionally only handles the same-type case, which was often + # used in older code bases before dataclasses/attrs were available. 
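# -- Editor's sketch (illustrative only; not part of the vendored pytest code) --
# The namedtuple caveat above, demonstrated: equality ignores the type and the
# field names; only the positional values matter.
from collections import namedtuple

Point = namedtuple("Point", "x y")
Pair = namedtuple("Pair", "a b")
assert Point(1, 2) == Pair(1, 2)   # different types and field names, still equal
assert Point(1, 2) == (1, 2)       # plain tuples compare equal as well
# -- end editor's sketch --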
+ explanation = _compare_eq_cls(left, right, highlighter, verbose) + elif issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, highlighter, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, highlighter, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, highlighter, verbose) + + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, highlighter, verbose) + explanation.extend(expl) + + return explanation + + +def _diff_text( + left: str, right: str, highlighter: _HighlightFunc, verbose: int = 0 +) -> list[str]: + """Return the explanation for the diff between text. + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + """ + from difflib import ndiff + + explanation: list[str] = [] + + if verbose < 1: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [ + f"Skipping {i} identical leading characters in diff, use -v to show" + ] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [ + f"Skipping {i} identical trailing " + "characters in diff, use -v to show" + ] + left = left[:-i] + right = right[:-i] + keepends = True + if left.isspace() or right.isspace(): + left = repr(str(left)) + right = repr(str(right)) + explanation += ["Strings contain only whitespace, escaping them using repr()"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.strip("\n") + for line in ndiff(right.splitlines(keepends), left.splitlines(keepends)) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_iterable( + left: Iterable[Any], + right: Iterable[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + if verbose <= 0 and not running_on_ci(): + return ["Use -v to get more diff"] + # dynamic import to speedup pytest + import difflib + + left_formatting = PrettyPrinter().pformat(left).splitlines() + right_formatting = PrettyPrinter().pformat(right).splitlines() + + explanation = ["", "Full diff:"] + # "right" is the expected base against which we compare "left", + # see https://github.com/pytest-dev/pytest/issues/3333 + explanation.extend( + highlighter( + "\n".join( + line.rstrip() + for line in difflib.ndiff(right_formatting, left_formatting) + ), + lexer="diff", + ).splitlines() + ) + return explanation + + +def _compare_eq_sequence( + left: Sequence[Any], + right: Sequence[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + comparing_bytes = isinstance(left, bytes) and isinstance(right, bytes) + explanation: list[str] = [] + len_left = len(left) + len_right = len(right) + for i in range(min(len_left, len_right)): + if left[i] != right[i]: + if comparing_bytes: + # when comparing bytes, we want to see their ascii representation + # instead of their numeric values (#5260) + # using a slice gives us the ascii representation: + # >>> s = b'foo' + # >>> s[0] + # 102 + # >>> s[0:1] + # b'f' + left_value = left[i : i + 1] + right_value = right[i : i + 1] + else: + left_value = left[i] + right_value = right[i] + + 
explanation.append( + f"At index {i} diff:" + f" {highlighter(repr(left_value))} != {highlighter(repr(right_value))}" + ) + break + + if comparing_bytes: + # when comparing bytes, it doesn't help to show the "sides contain one or more + # items" longer explanation, so skip it + + return explanation + + len_diff = len_left - len_right + if len_diff: + if len_diff > 0: + dir_with_more = "Left" + extra = saferepr(left[len_right]) + else: + len_diff = 0 - len_diff + dir_with_more = "Right" + extra = saferepr(right[len_left]) + + if len_diff == 1: + explanation += [ + f"{dir_with_more} contains one more item: {highlighter(extra)}" + ] + else: + explanation += [ + f"{dir_with_more} contains {len_diff} more items, first extra item: {highlighter(extra)}" + ] + return explanation + + +def _compare_eq_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = [] + explanation.extend(_set_one_sided_diff("left", left, right, highlighter)) + explanation.extend(_set_one_sided_diff("right", right, left, highlighter)) + return explanation + + +def _compare_gt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_gte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_lt_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation = _compare_lte_set(left, right, highlighter) + if not explanation: + return ["Both sets are equal"] + return explanation + + +def _compare_gte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("right", right, left, highlighter) + + +def _compare_lte_set( + left: AbstractSet[Any], + right: AbstractSet[Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + return _set_one_sided_diff("left", left, right, highlighter) + + +def _set_one_sided_diff( + posn: str, + set1: AbstractSet[Any], + set2: AbstractSet[Any], + highlighter: _HighlightFunc, +) -> list[str]: + explanation = [] + diff = set1 - set2 + if diff: + explanation.append(f"Extra items in the {posn} set:") + for item in diff: + explanation.append(highlighter(saferepr(item))) + return explanation + + +def _compare_eq_dict( + left: Mapping[Any, Any], + right: Mapping[Any, Any], + highlighter: _HighlightFunc, + verbose: int = 0, +) -> list[str]: + explanation: list[str] = [] + set_left = set(left) + set_right = set(right) + common = set_left.intersection(set_right) + same = {k: left[k] for k in common if left[k] == right[k]} + if same and verbose < 2: + explanation += [f"Omitting {len(same)} identical items, use -vv to show"] + elif same: + explanation += ["Common items:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + diff = {k for k in common if left[k] != right[k]} + if diff: + explanation += ["Differing items:"] + for k in diff: + explanation += [ + highlighter(saferepr({k: left[k]})) + + " != " + + highlighter(saferepr({k: right[k]})) + ] + extra_left = set_left - set_right + len_extra_left = len(extra_left) + if len_extra_left: + explanation.append( + f"Left contains {len_extra_left} more item{'' if len_extra_left == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: left[k] for k in extra_left})).splitlines() + ) + extra_right = 
set_right - set_left + len_extra_right = len(extra_right) + if len_extra_right: + explanation.append( + f"Right contains {len_extra_right} more item{'' if len_extra_right == 1 else 's'}:" + ) + explanation.extend( + highlighter(pprint.pformat({k: right[k] for k in extra_right})).splitlines() + ) + return explanation + + +def _compare_eq_cls( + left: Any, right: Any, highlighter: _HighlightFunc, verbose: int +) -> list[str]: + if not has_default_eq(left): + return [] + if isdatacls(left): + import dataclasses + + all_fields = dataclasses.fields(left) + fields_to_check = [info.name for info in all_fields if info.compare] + elif isattrs(left): + all_fields = left.__attrs_attrs__ + fields_to_check = [field.name for field in all_fields if getattr(field, "eq")] + elif isnamedtuple(left): + fields_to_check = left._fields + else: + assert False + + indent = " " + same = [] + diff = [] + for field in fields_to_check: + if getattr(left, field) == getattr(right, field): + same.append(field) + else: + diff.append(field) + + explanation = [] + if same or diff: + explanation += [""] + if same and verbose < 2: + explanation.append(f"Omitting {len(same)} identical items, use -vv to show") + elif same: + explanation += ["Matching attributes:"] + explanation += highlighter(pprint.pformat(same)).splitlines() + if diff: + explanation += ["Differing attributes:"] + explanation += highlighter(pprint.pformat(diff)).splitlines() + for field in diff: + field_left = getattr(left, field) + field_right = getattr(right, field) + explanation += [ + "", + f"Drill down into differing attribute {field}:", + f"{indent}{field}: {highlighter(repr(field_left))} != {highlighter(repr(field_right))}", + ] + explanation += [ + indent + line + for line in _compare_eq_any( + field_left, field_right, highlighter, verbose + ) + ] + return explanation + + +def _notin_text(term: str, text: str, verbose: int = 0) -> list[str]: + index = text.find(term) + head = text[:index] + tail = text[index + len(term) :] + correct_text = head + tail + diff = _diff_text(text, correct_text, dummy_highlighter, verbose) + newdiff = [f"{saferepr(term, maxsize=42)} is contained here:"] + for line in diff: + if line.startswith("Skipping"): + continue + if line.startswith("- "): + continue + if line.startswith("+ "): + newdiff.append(" " + line[2:]) + else: + newdiff.append(line) + return newdiff + + +def running_on_ci() -> bool: + """Check if we're currently running on a CI system.""" + env_vars = ["CI", "BUILD_NUMBER"] + return any(var in os.environ for var in env_vars) diff --git a/venv/lib/python3.10/site-packages/_pytest/cacheprovider.py b/venv/lib/python3.10/site-packages/_pytest/cacheprovider.py new file mode 100644 index 0000000000000000000000000000000000000000..dea60109b51930ea9c255af8ee64b5997080bb76 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,625 @@ +# mypy: allow-untyped-defs +"""Implementation of the cache provider.""" + +# This plugin was not named "cache" to avoid conflicts with the external +# pytest-cache version. 
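# -- Editor's sketch (illustrative only; not part of the vendored pytest code) --
# Typical consumption of the `cache` fixture implemented in this file: keys are
# "/"-separated (the first segment should name your plugin or application) and
# values must be JSON-serializable:
def test_expensive_data(cache):
    data = cache.get("myplugin/data", None)
    if data is None:
        data = {"answer": 42}  # stand-in for an expensive computation
        cache.set("myplugin/data", data)
    assert data["answer"] == 42
# -- end editor's sketch --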
+from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Iterable +import dataclasses +import errno +import json +import os +from pathlib import Path +import tempfile +from typing import final + +from .pathlib import resolve_from_str +from .pathlib import rm_rf +from .reports import CollectReport +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.nodes import Directory +from _pytest.nodes import File +from _pytest.reports import TestReport + + +README_CONTENT = """\ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information. +""" + +CACHEDIR_TAG_CONTENT = b"""\ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by pytest. +# For information about cache directory tags, see: +# https://bford.info/cachedir/spec.html +""" + + +@final +@dataclasses.dataclass +class Cache: + """Instance of the `cache` fixture.""" + + _cachedir: Path = dataclasses.field(repr=False) + _config: Config = dataclasses.field(repr=False) + + # Sub-directory under cache-dir for directories created by `mkdir()`. + _CACHE_PREFIX_DIRS = "d" + + # Sub-directory under cache-dir for values created by `set()`. + _CACHE_PREFIX_VALUES = "v" + + def __init__( + self, cachedir: Path, config: Config, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._cachedir = cachedir + self._config = config + + @classmethod + def for_config(cls, config: Config, *, _ispytest: bool = False) -> Cache: + """Create the Cache instance for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + cachedir = cls.cache_dir_from_config(config, _ispytest=True) + if config.getoption("cacheclear") and cachedir.is_dir(): + cls.clear_cache(cachedir, _ispytest=True) + return cls(cachedir, config, _ispytest=True) + + @classmethod + def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None: + """Clear the sub-directories used to hold cached directories and values. + + :meta private: + """ + check_ispytest(_ispytest) + for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES): + d = cachedir / prefix + if d.is_dir(): + rm_rf(d) + + @staticmethod + def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path: + """Get the path to the cache directory for a Config. + + :meta private: + """ + check_ispytest(_ispytest) + return resolve_from_str(config.getini("cache_dir"), config.rootpath) + + def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None: + """Issue a cache warning. 
+ + :meta private: + """ + check_ispytest(_ispytest) + import warnings + + from _pytest.warning_types import PytestCacheWarning + + warnings.warn( + PytestCacheWarning(fmt.format(**args) if args else fmt), + self._config.hook, + stacklevel=3, + ) + + def _mkdir(self, path: Path) -> None: + self._ensure_cache_dir_and_supporting_files() + path.mkdir(exist_ok=True, parents=True) + + def mkdir(self, name: str) -> Path: + """Return a directory path object with the given name. + + If the directory does not yet exist, it will be created. You can use + it to manage files to e.g. store/retrieve database dumps across test + sessions. + + .. versionadded:: 7.0 + + :param name: + Must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + path = Path(name) + if len(path.parts) > 1: + raise ValueError("name is not allowed to contain path separators") + res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path) + self._mkdir(res) + return res + + def _getvaluepath(self, key: str) -> Path: + return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key)) + + def get(self, key: str, default): + """Return the cached value for the given key. + + If no value was yet cached or the value cannot be read, the specified + default is returned. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: + The value to return in case of a cache-miss or invalid cache value. + """ + path = self._getvaluepath(key) + try: + with path.open("r", encoding="UTF-8") as f: + return json.load(f) + except (ValueError, OSError): + return default + + def set(self, key: str, value: object) -> None: + """Save value for the given key. + + :param key: + Must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: + Must be of any combination of basic python types, + including nested types like lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + self._mkdir(path.parent) + except OSError as exc: + self.warn( + f"could not create cache path {path}: {exc}", + _ispytest=True, + ) + return + data = json.dumps(value, ensure_ascii=False, indent=2) + try: + f = path.open("w", encoding="UTF-8") + except OSError as exc: + self.warn( + f"cache could not write path {path}: {exc}", + _ispytest=True, + ) + else: + with f: + f.write(data) + + def _ensure_cache_dir_and_supporting_files(self) -> None: + """Create the cache dir and its supporting files.""" + if self._cachedir.is_dir(): + return + + self._cachedir.parent.mkdir(parents=True, exist_ok=True) + with tempfile.TemporaryDirectory( + prefix="pytest-cache-files-", + dir=self._cachedir.parent, + ) as newpath: + path = Path(newpath) + + # Reset permissions to the default, see #12308. + # Note: there's no way to get the current umask atomically, eek. + umask = os.umask(0o022) + os.umask(umask) + path.chmod(0o777 - umask) + + with open(path.joinpath("README.md"), "x", encoding="UTF-8") as f: + f.write(README_CONTENT) + with open(path.joinpath(".gitignore"), "x", encoding="UTF-8") as f: + f.write("# Created by pytest automatically.\n*\n") + with open(path.joinpath("CACHEDIR.TAG"), "xb") as f: + f.write(CACHEDIR_TAG_CONTENT) + + try: + path.rename(self._cachedir) + except OSError as e: + # If 2 concurrent pytests both race to the rename, the loser + # gets "Directory not empty" from the rename. 
In this case, + # everything is handled so just continue (while letting the + # temporary directory be cleaned up). + # On Windows, the error is a FileExistsError which translates to EEXIST. + if e.errno not in (errno.ENOTEMPTY, errno.EEXIST): + raise + else: + # Create a directory in place of the one we just moved so that + # `TemporaryDirectory`'s cleanup doesn't complain. + # + # TODO: pass ignore_cleanup_errors=True when we no longer support python < 3.10. + # See https://github.com/python/cpython/issues/74168. Note that passing + # delete=False would do the wrong thing in case of errors and isn't supported + # until python 3.12. + path.mkdir() + + +class LFPluginCollWrapper: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + self._collected_at_least_one_failure = False + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> Generator[None, CollectReport, CollectReport]: + res = yield + if isinstance(collector, (Session, Directory)): + # Sort any lf-paths to the beginning. + lf_paths = self.lfplugin._last_failed_paths + + # Use stable sort to prioritize last failed. + def sort_key(node: nodes.Item | nodes.Collector) -> bool: + return node.path in lf_paths + + res.result = sorted( + res.result, + key=sort_key, + reverse=True, + ) + + elif isinstance(collector, File): + if collector.path in self.lfplugin._last_failed_paths: + result = res.result + lastfailed = self.lfplugin.lastfailed + + # Only filter with known failures. + if not self._collected_at_least_one_failure: + if not any(x.nodeid in lastfailed for x in result): + return res + self.lfplugin.config.pluginmanager.register( + LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip" + ) + self._collected_at_least_one_failure = True + + session = collector.session + result[:] = [ + x + for x in result + if x.nodeid in lastfailed + # Include any passed arguments (not trivial to filter). + or session.isinitpath(x.path) + # Keep all sub-collectors. 
+ or isinstance(x, nodes.Collector) + ] + + return res + + +class LFPluginCollSkipfiles: + def __init__(self, lfplugin: LFPlugin) -> None: + self.lfplugin = lfplugin + + @hookimpl + def pytest_make_collect_report( + self, collector: nodes.Collector + ) -> CollectReport | None: + if isinstance(collector, File): + if collector.path not in self.lfplugin._last_failed_paths: + self.lfplugin._skipped_files += 1 + + return CollectReport( + collector.nodeid, "passed", longrepr=None, result=[] + ) + return None + + +class LFPlugin: + """Plugin which implements the --lf (run last-failing) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + active_keys = "lf", "failedfirst" + self.active = any(config.getoption(key) for key in active_keys) + assert config.cache + self.lastfailed: dict[str, bool] = config.cache.get("cache/lastfailed", {}) + self._previously_failed_count: int | None = None + self._report_status: str | None = None + self._skipped_files = 0 # count skipped files during collection due to --lf + + if config.getoption("lf"): + self._last_failed_paths = self.get_last_failed_paths() + config.pluginmanager.register( + LFPluginCollWrapper(self), "lfplugin-collwrapper" + ) + + def get_last_failed_paths(self) -> set[Path]: + """Return a set with all Paths of the previously failed nodeids and + their parents.""" + rootpath = self.config.rootpath + result = set() + for nodeid in self.lastfailed: + path = rootpath / nodeid.split("::")[0] + result.add(path) + result.update(path.parents) + return {x for x in result if x.exists()} + + def pytest_report_collectionfinish(self) -> str | None: + if self.active and self.config.get_verbosity() >= 0: + return f"run-last-failure: {self._report_status}" + return None + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if (report.when == "call" and report.passed) or report.skipped: + self.lastfailed.pop(report.nodeid, None) + elif report.failed: + self.lastfailed[report.nodeid] = True + + def pytest_collectreport(self, report: CollectReport) -> None: + passed = report.outcome in ("passed", "skipped") + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update((item.nodeid, True) for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> Generator[None]: + res = yield + + if not self.active: + return res + + if self.lastfailed: + previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + self._previously_failed_count = len(previously_failed) + + if not previously_failed: + # Running a subset of all tests with recorded failures + # only outside of it. 
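+ # (i.e. none of the recorded failures are part of the current
+ # selection, so nothing is filtered or reordered here; only the
+ # collection-finish status line reports the count.)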
+ self._report_status = ( + f"{len(self.lastfailed)} known failures not in selected tests" + ) + else: + if self.config.getoption("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: # --failedfirst + items[:] = previously_failed + previously_passed + + noun = "failure" if self._previously_failed_count == 1 else "failures" + suffix = " first" if self.config.getoption("failedfirst") else "" + self._report_status = ( + f"rerun previous {self._previously_failed_count} {noun}{suffix}" + ) + + if self._skipped_files > 0: + files_noun = "file" if self._skipped_files == 1 else "files" + self._report_status += f" (skipped {self._skipped_files} {files_noun})" + else: + self._report_status = "no previously failed tests, " + if self.config.getoption("last_failed_no_failures") == "none": + self._report_status += "deselecting all items." + config.hook.pytest_deselected(items=items[:]) + items[:] = [] + else: + self._report_status += "not deselecting items." + + return res + + def pytest_sessionfinish(self, session: Session) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + assert config.cache is not None + saved_lastfailed = config.cache.get("cache/lastfailed", {}) + if saved_lastfailed != self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +class NFPlugin: + """Plugin which implements the --nf (run new-first) option.""" + + def __init__(self, config: Config) -> None: + self.config = config + self.active = config.option.newfirst + assert config.cache is not None + self.cached_nodeids = set(config.cache.get("cache/nodeids", [])) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> Generator[None]: + res = yield + + if self.active: + new_items: dict[str, nodes.Item] = {} + other_items: dict[str, nodes.Item] = {} + for item in items: + if item.nodeid not in self.cached_nodeids: + new_items[item.nodeid] = item + else: + other_items[item.nodeid] = item + + items[:] = self._get_increasing_order( + new_items.values() + ) + self._get_increasing_order(other_items.values()) + self.cached_nodeids.update(new_items) + else: + self.cached_nodeids.update(item.nodeid for item in items) + + return res + + def _get_increasing_order(self, items: Iterable[nodes.Item]) -> list[nodes.Item]: + return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True) + + def pytest_sessionfinish(self) -> None: + config = self.config + if config.getoption("cacheshow") or hasattr(config, "workerinput"): + return + + if config.getoption("collectonly"): + return + + assert config.cache is not None + config.cache.set("cache/nodeids", sorted(self.cached_nodeids)) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--lf", + "--last-failed", + action="store_true", + dest="lf", + help="Rerun only the tests that failed at the last run (or all if none failed)", + ) + group.addoption( + "--ff", + "--failed-first", + action="store_true", + dest="failedfirst", + help="Run all tests, but run the last failures first. 
" + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown.", + ) + group.addoption( + "--nf", + "--new-first", + action="store_true", + dest="newfirst", + help="Run tests from new files first, then the rest of the tests " + "sorted by file mtime", + ) + group.addoption( + "--cache-show", + action="append", + nargs="?", + dest="cacheshow", + help=( + "Show cache contents, don't perform collection or tests. " + "Optional argument: glob (default: '*')." + ), + ) + group.addoption( + "--cache-clear", + action="store_true", + dest="cacheclear", + help="Remove all cache contents at start of test run", + ) + cache_dir_default = ".pytest_cache" + if "TOX_ENV_DIR" in os.environ: + cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default) + parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path") + group.addoption( + "--lfnf", + "--last-failed-no-failures", + action="store", + dest="last_failed_no_failures", + choices=("all", "none"), + default="all", + help="With ``--lf``, determines whether to execute tests when there " + "are no previously (known) failures or when no " + "cached ``lastfailed`` data was found. " + "``all`` (the default) runs the full test suite again. " + "``none`` just emits a message about no known failures and exits successfully.", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.cacheshow and not config.option.help: + from _pytest.main import wrap_session + + return wrap_session(config, cacheshow) + return None + + +@hookimpl(tryfirst=True) +def pytest_configure(config: Config) -> None: + config.cache = Cache.for_config(config, _ispytest=True) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + config.pluginmanager.register(NFPlugin(config), "nfplugin") + + +@fixture +def cache(request: FixtureRequest) -> Cache: + """Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be ``/`` separated strings, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. + """ + assert request.config.cache is not None + return request.config.cache + + +def pytest_report_header(config: Config) -> str | None: + """Display cachedir with --cache-show and if non-default.""" + if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache": + assert config.cache is not None + cachedir = config.cache._cachedir + # TODO: evaluate generating upward relative paths + # starting with .., ../.. 
if sensible + + try: + displaypath = cachedir.relative_to(config.rootpath) + except ValueError: + displaypath = cachedir + return f"cachedir: {displaypath}" + return None + + +def cacheshow(config: Config, session: Session) -> int: + from pprint import pformat + + assert config.cache is not None + + tw = TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.is_dir(): + tw.line("cache is empty") + return 0 + + glob = config.option.cacheshow[0] + if glob is None: + glob = "*" + + dummy = object() + basedir = config.cache._cachedir + vdir = basedir / Cache._CACHE_PREFIX_VALUES + tw.sep("-", f"cache values for {glob!r}") + for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()): + key = str(valpath.relative_to(vdir)) + val = config.cache.get(key, dummy) + if val is dummy: + tw.line(f"{key} contains unreadable content, will be ignored") + else: + tw.line(f"{key} contains:") + for line in pformat(val).splitlines(): + tw.line(" " + line) + + ddir = basedir / Cache._CACHE_PREFIX_DIRS + if ddir.is_dir(): + contents = sorted(ddir.rglob(glob)) + tw.sep("-", f"cache directories for {glob!r}") + for p in contents: + # if p.is_dir(): + # print("%s/" % p.relative_to(basedir)) + if p.is_file(): + key = str(p.relative_to(basedir)) + tw.line(f"{key} is a file of length {p.stat().st_size}") + return 0 diff --git a/venv/lib/python3.10/site-packages/_pytest/capture.py b/venv/lib/python3.10/site-packages/_pytest/capture.py new file mode 100644 index 0000000000000000000000000000000000000000..6d98676be5f20166929164e39fd0eeb17a24db20 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/capture.py @@ -0,0 +1,1144 @@ +# mypy: allow-untyped-defs +"""Per-test stdout/stderr capturing mechanism.""" + +from __future__ import annotations + +import abc +import collections +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +import io +from io import UnsupportedOperation +import os +import sys +from tempfile import TemporaryFile +from types import TracebackType +from typing import Any +from typing import AnyStr +from typing import BinaryIO +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from typing_extensions import Self + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import SubRequest +from _pytest.nodes import Collector +from _pytest.nodes import File +from _pytest.nodes import Item +from _pytest.reports import CollectReport + + +_CaptureMethod = Literal["fd", "sys", "no", "tee-sys"] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--capture", + action="store", + default="fd", + metavar="method", + choices=["fd", "sys", "no", "tee-sys"], + help="Per-test capturing method: one of fd|sys|no|tee-sys", + ) + group._addoption( # private to use reserved lower-case short option + "-s", + action="store_const", + const="no", + dest="capture", + help="Shortcut for --capture=no", + ) + + +def _colorama_workaround() -> None: + """Ensure colorama is imported so that it attaches to the correct stdio + handles on Windows. 
+ + colorama uses the terminal on import time. So if something does the + first import of colorama while I/O capture is active, colorama will + fail in various ways. + """ + if sys.platform.startswith("win32"): + try: + import colorama # noqa: F401 + except ImportError: + pass + + +def _readline_workaround() -> None: + """Ensure readline is imported early so it attaches to the correct stdio handles. + + This isn't a problem with the default GNU readline implementation, but in + some configurations, Python uses libedit instead (on macOS, and for prebuilt + binaries such as used by uv). + + In theory this is only needed if readline.backend == "libedit", but the + workaround consists of importing readline here, so we already worked around + the issue by the time we could check if we need to. + """ + try: + import readline # noqa: F401 + except ImportError: + pass + + +def _windowsconsoleio_workaround(stream: TextIO) -> None: + """Workaround for Windows Unicode console handling. + + Python 3.6 implemented Unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". + + :param stream: + In practice ``sys.stdout`` or ``sys.stderr``, but given + here as parameter for unittesting purposes. + + See https://github.com/pytest-dev/py/issues/103. + """ + if not sys.platform.startswith("win32") or hasattr(sys, "pypy_version_info"): + return + + # Bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666). + if not hasattr(stream, "buffer"): # type: ignore[unreachable,unused-ignore] + return + + raw_stdout = stream.buffer.raw if hasattr(stream.buffer, "raw") else stream.buffer + + if not isinstance(raw_stdout, io._WindowsConsoleIO): # type: ignore[attr-defined,unused-ignore] + return + + def _reopen_stdio(f, mode): + if not hasattr(stream.buffer, "raw") and mode[0] == "w": + buffering = 0 + else: + buffering = -1 + + return io.TextIOWrapper( + open(os.dup(f.fileno()), mode, buffering), + f.encoding, + f.errors, + f.newlines, + f.line_buffering, + ) + + sys.stdin = _reopen_stdio(sys.stdin, "rb") + sys.stdout = _reopen_stdio(sys.stdout, "wb") + sys.stderr = _reopen_stdio(sys.stderr, "wb") + + +@hookimpl(wrapper=True) +def pytest_load_initial_conftests(early_config: Config) -> Generator[None]: + ns = early_config.known_args_namespace + if ns.capture == "fd": + _windowsconsoleio_workaround(sys.stdout) + _colorama_workaround() + _readline_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # Make sure that capturemanager is properly reset at final shutdown. + early_config.add_cleanup(capman.stop_global_capturing) + + # Finally trigger conftest loading but while capturing (issue #93). 
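+ # If conftest loading raises, the except clause below replays the
+ # captured output on the real streams before re-raising.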
+ capman.start_global_capturing() + try: + try: + yield + finally: + capman.suspend_global_capture() + except BaseException: + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + raise + + +# IO Helpers. + + +class EncodedFile(io.TextIOWrapper): + __slots__ = () + + @property + def name(self) -> str: + # Ensure that file.name is a string. Workaround for a Python bug + # fixed in >=3.7.4: https://bugs.python.org/issue36015 + return repr(self.buffer) + + @property + def mode(self) -> str: + # TextIOWrapper doesn't expose a mode, but at least some of our + # tests check it. + assert hasattr(self.buffer, "mode") + return cast(str, self.buffer.mode.replace("b", "")) + + +class CaptureIO(io.TextIOWrapper): + def __init__(self) -> None: + super().__init__(io.BytesIO(), encoding="UTF-8", newline="", write_through=True) + + def getvalue(self) -> str: + assert isinstance(self.buffer, io.BytesIO) + return self.buffer.getvalue().decode("UTF-8") + + +class TeeCaptureIO(CaptureIO): + def __init__(self, other: TextIO) -> None: + self._other = other + super().__init__() + + def write(self, s: str) -> int: + super().write(s) + return self._other.write(s) + + +class DontReadFromInput(TextIO): + @property + def encoding(self) -> str: + assert sys.__stdin__ is not None + return sys.__stdin__.encoding + + def read(self, size: int = -1) -> str: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + readline = read + + def __next__(self) -> str: + return self.readline() + + def readlines(self, hint: int | None = -1) -> list[str]: + raise OSError( + "pytest: reading from stdin while output is captured! Consider using `-s`." + ) + + def __iter__(self) -> Iterator[str]: + return self + + def fileno(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no fileno()") + + def flush(self) -> None: + raise UnsupportedOperation("redirected stdin is pseudofile, has no flush()") + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def readable(self) -> bool: + return False + + def seek(self, offset: int, whence: int = 0) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no seek(int)") + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + raise UnsupportedOperation("redirected stdin is pseudofile, has no tell()") + + def truncate(self, size: int | None = None) -> int: + raise UnsupportedOperation("cannot truncate stdin") + + def write(self, data: str) -> int: + raise UnsupportedOperation("cannot write to stdin") + + def writelines(self, lines: Iterable[str]) -> None: + raise UnsupportedOperation("Cannot write to stdin") + + def writable(self) -> bool: + return False + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + type: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass + + @property + def buffer(self) -> BinaryIO: + # The str/bytes doesn't actually matter in this type, so OK to fake. + return self # type: ignore[return-value] + + +# Capture classes. 
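+# Editor's note (not upstream): the capture classes below share a small
+# lifecycle protocol -- start() -> snap()/suspend()/resume() -> done() --
+# which MultiCapture drives for stdin/stdout/stderr. A rough sketch of the
+# protocol, assuming file descriptor 1 (stdout):
+#
+#     cap = FDCapture(1)
+#     cap.start()        # fd 1 now points at a temporary file
+#     print("hello")
+#     text = cap.snap()  # "hello\n"; the buffer is reset
+#     cap.done()         # original descriptor restored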
+ + +class CaptureBase(abc.ABC, Generic[AnyStr]): + EMPTY_BUFFER: AnyStr + + @abc.abstractmethod + def __init__(self, fd: int) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def start(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def done(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def suspend(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def resume(self) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def writeorg(self, data: AnyStr) -> None: + raise NotImplementedError() + + @abc.abstractmethod + def snap(self) -> AnyStr: + raise NotImplementedError() + + +patchsysdict = {0: "stdin", 1: "stdout", 2: "stderr"} + + +class NoCapture(CaptureBase[str]): + EMPTY_BUFFER = "" + + def __init__(self, fd: int) -> None: + pass + + def start(self) -> None: + pass + + def done(self) -> None: + pass + + def suspend(self) -> None: + pass + + def resume(self) -> None: + pass + + def snap(self) -> str: + return "" + + def writeorg(self, data: str) -> None: + pass + + +class SysCaptureBase(CaptureBase[AnyStr]): + def __init__( + self, fd: int, tmpfile: TextIO | None = None, *, tee: bool = False + ) -> None: + name = patchsysdict[fd] + self._old: TextIO = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() if not tee else TeeCaptureIO(self._old) + self.tmpfile = tmpfile + self._state = "initialized" + + def repr(self, class_name: str) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + class_name, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def __repr__(self) -> str: + return "<{} {} _old={} _state={!r} tmpfile={!r}>".format( + self.__class__.__name__, + self.name, + (hasattr(self, "_old") and repr(self._old)) or "", + self._state, + self.tmpfile, + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + self._assert_state("start", ("initialized",)) + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + def done(self) -> None: + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + setattr(sys, self.name, self._old) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + setattr(sys, self.name, self.tmpfile) + self._state = "started" + + +class SysCaptureBinary(SysCaptureBase[bytes]): + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: bytes) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.flush() + self._old.buffer.write(data) + self._old.buffer.flush() + + +class SysCapture(SysCaptureBase[str]): + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + assert isinstance(self.tmpfile, CaptureIO) + res = 
self.tmpfile.getvalue() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + self._assert_state("writeorg", ("started", "suspended")) + self._old.write(data) + self._old.flush() + + +class FDCaptureBase(CaptureBase[AnyStr]): + def __init__(self, targetfd: int) -> None: + self.targetfd = targetfd + + try: + os.fstat(targetfd) + except OSError: + # FD capturing is conceptually simple -- create a temporary file, + # redirect the FD to it, redirect back when done. But when the + # target FD is invalid it throws a wrench into this lovely scheme. + # + # Tests themselves shouldn't care if the FD is valid, FD capturing + # should work regardless of external circumstances. So falling back + # to just sys capturing is not a good option. + # + # Further complications are the need to support suspend() and the + # possibility of FD reuse (e.g. the tmpfile getting the very same + # target FD). The following approach is robust, I believe. + self.targetfd_invalid: int | None = os.open(os.devnull, os.O_RDWR) + os.dup2(self.targetfd_invalid, targetfd) + else: + self.targetfd_invalid = None + self.targetfd_save = os.dup(targetfd) + + if targetfd == 0: + self.tmpfile = open(os.devnull, encoding="utf-8") + self.syscapture: CaptureBase[str] = SysCapture(targetfd) + else: + self.tmpfile = EncodedFile( + TemporaryFile(buffering=0), + encoding="utf-8", + errors="replace", + newline="", + write_through=True, + ) + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, self.tmpfile) + else: + self.syscapture = NoCapture(targetfd) + + self._state = "initialized" + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.targetfd} oldfd={self.targetfd_save} " + f"_state={self._state!r} tmpfile={self.tmpfile!r}>" + ) + + def _assert_state(self, op: str, states: tuple[str, ...]) -> None: + assert self._state in states, ( + "cannot {} in state {!r}: expected one of {}".format( + op, self._state, ", ".join(states) + ) + ) + + def start(self) -> None: + """Start capturing on targetfd using memorized tmpfile.""" + self._assert_state("start", ("initialized",)) + os.dup2(self.tmpfile.fileno(), self.targetfd) + self.syscapture.start() + self._state = "started" + + def done(self) -> None: + """Stop capturing, restore streams, return original capture file, + seeked to position zero.""" + self._assert_state("done", ("initialized", "started", "suspended", "done")) + if self._state == "done": + return + os.dup2(self.targetfd_save, self.targetfd) + os.close(self.targetfd_save) + if self.targetfd_invalid is not None: + if self.targetfd_invalid != self.targetfd: + os.close(self.targetfd) + os.close(self.targetfd_invalid) + self.syscapture.done() + self.tmpfile.close() + self._state = "done" + + def suspend(self) -> None: + self._assert_state("suspend", ("started", "suspended")) + if self._state == "suspended": + return + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + self._state = "suspended" + + def resume(self) -> None: + self._assert_state("resume", ("started", "suspended")) + if self._state == "started": + return + self.syscapture.resume() + os.dup2(self.tmpfile.fileno(), self.targetfd) + self._state = "started" + + +class FDCaptureBinary(FDCaptureBase[bytes]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces `bytes`. 
+ """ + + EMPTY_BUFFER = b"" + + def snap(self) -> bytes: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.buffer.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res # type: ignore[return-value] + + def writeorg(self, data: bytes) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + os.write(self.targetfd_save, data) + + +class FDCapture(FDCaptureBase[str]): + """Capture IO to/from a given OS-level file descriptor. + + snap() produces text. + """ + + EMPTY_BUFFER = "" + + def snap(self) -> str: + self._assert_state("snap", ("started", "suspended")) + self.tmpfile.seek(0) + res = self.tmpfile.read() + self.tmpfile.seek(0) + self.tmpfile.truncate() + return res + + def writeorg(self, data: str) -> None: + """Write to original file descriptor.""" + self._assert_state("writeorg", ("started", "suspended")) + # XXX use encoding of original stream + os.write(self.targetfd_save, data.encode("utf-8")) + + +# MultiCapture + + +# Generic NamedTuple only supported since Python 3.11. +if sys.version_info >= (3, 11) or TYPE_CHECKING: + + @final + class CaptureResult(NamedTuple, Generic[AnyStr]): + """The result of :method:`caplog.readouterr() `.""" + + out: AnyStr + err: AnyStr + +else: + + class CaptureResult( + collections.namedtuple("CaptureResult", ["out", "err"]), # noqa: PYI024 + Generic[AnyStr], + ): + """The result of :method:`caplog.readouterr() `.""" + + __slots__ = () + + +class MultiCapture(Generic[AnyStr]): + _state = None + _in_suspended = False + + def __init__( + self, + in_: CaptureBase[AnyStr] | None, + out: CaptureBase[AnyStr] | None, + err: CaptureBase[AnyStr] | None, + ) -> None: + self.in_: CaptureBase[AnyStr] | None = in_ + self.out: CaptureBase[AnyStr] | None = out + self.err: CaptureBase[AnyStr] | None = err + + def __repr__(self) -> str: + return ( + f"" + ) + + def start_capturing(self) -> None: + self._state = "started" + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self) -> tuple[AnyStr, AnyStr]: + """Pop current snapshot out/err capture and flush to orig streams.""" + out, err = self.readouterr() + if out: + assert self.out is not None + self.out.writeorg(out) + if err: + assert self.err is not None + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_: bool = False) -> None: + self._state = "suspended" + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self) -> None: + self._state = "started" + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if self._in_suspended: + assert self.in_ is not None + self.in_.resume() + self._in_suspended = False + + def stop_capturing(self) -> None: + """Stop capturing and reset capturing streams.""" + if self._state == "stopped": + raise ValueError("was already stopped") + self._state = "stopped" + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def is_started(self) -> bool: + """Whether actively capturing -- not suspended or stopped.""" + return self._state == "started" + + def readouterr(self) -> CaptureResult[AnyStr]: + out = self.out.snap() if self.out else "" + err = self.err.snap() if self.err else "" + # TODO: This type error is real, need to fix. 
+ return CaptureResult(out, err) # type: ignore[arg-type] + + +def _get_multicapture(method: _CaptureMethod) -> MultiCapture[str]: + if method == "fd": + return MultiCapture(in_=FDCapture(0), out=FDCapture(1), err=FDCapture(2)) + elif method == "sys": + return MultiCapture(in_=SysCapture(0), out=SysCapture(1), err=SysCapture(2)) + elif method == "no": + return MultiCapture(in_=None, out=None, err=None) + elif method == "tee-sys": + return MultiCapture( + in_=None, out=SysCapture(1, tee=True), err=SysCapture(2, tee=True) + ) + raise ValueError(f"unknown capturing method: {method!r}") + + +# CaptureManager and CaptureFixture + + +class CaptureManager: + """The capture plugin. + + Manages that the appropriate capture method is enabled/disabled during + collection and each test phase (setup, call, teardown). After each of + those points, the captured output is obtained and attached to the + collection/runtest report. + + There are two levels of capture: + + * global: enabled by default and can be suppressed by the ``-s`` + option. This is always enabled/disabled during collection and each test + phase. + + * fixture: when a test function or one of its fixture depend on the + ``capsys`` or ``capfd`` fixtures. In this case special handling is + needed to ensure the fixtures take precedence over the global capture. + """ + + def __init__(self, method: _CaptureMethod) -> None: + self._method: Final = method + self._global_capturing: MultiCapture[str] | None = None + self._capture_fixture: CaptureFixture[Any] | None = None + + def __repr__(self) -> str: + return ( + f"" + ) + + def is_capturing(self) -> str | bool: + if self.is_globally_capturing(): + return "global" + if self._capture_fixture: + return f"fixture {self._capture_fixture.request.fixturename}" + return False + + # Global capturing control + + def is_globally_capturing(self) -> bool: + return self._method != "no" + + def start_global_capturing(self) -> None: + assert self._global_capturing is None + self._global_capturing = _get_multicapture(self._method) + self._global_capturing.start_capturing() + + def stop_global_capturing(self) -> None: + if self._global_capturing is not None: + self._global_capturing.pop_outerr_to_orig() + self._global_capturing.stop_capturing() + self._global_capturing = None + + def resume_global_capture(self) -> None: + # During teardown of the python process, and on rare occasions, capture + # attributes can be `None` while trying to resume global capture. + if self._global_capturing is not None: + self._global_capturing.resume_capturing() + + def suspend_global_capture(self, in_: bool = False) -> None: + if self._global_capturing is not None: + self._global_capturing.suspend_capturing(in_=in_) + + def suspend(self, in_: bool = False) -> None: + # Need to undo local capsys-et-al if it exists before disabling global capture. 
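+ # The fixture capture is layered on top of the global capture, so
+ # suspend it first; resume() below restores in the reverse order.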
+ self.suspend_fixture() + self.suspend_global_capture(in_) + + def resume(self) -> None: + self.resume_global_capture() + self.resume_fixture() + + def read_global_capture(self) -> CaptureResult[str]: + assert self._global_capturing is not None + return self._global_capturing.readouterr() + + # Fixture Control + + def set_fixture(self, capture_fixture: CaptureFixture[Any]) -> None: + if self._capture_fixture: + current_fixture = self._capture_fixture.request.fixturename + requested_fixture = capture_fixture.request.fixturename + capture_fixture.request.raiseerror( + f"cannot use {requested_fixture} and {current_fixture} at the same time" + ) + self._capture_fixture = capture_fixture + + def unset_fixture(self) -> None: + self._capture_fixture = None + + def activate_fixture(self) -> None: + """If the current item is using ``capsys`` or ``capfd``, activate + them so they take precedence over the global capture.""" + if self._capture_fixture: + self._capture_fixture._start() + + def deactivate_fixture(self) -> None: + """Deactivate the ``capsys`` or ``capfd`` fixture of this item, if any.""" + if self._capture_fixture: + self._capture_fixture.close() + + def suspend_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._suspend() + + def resume_fixture(self) -> None: + if self._capture_fixture: + self._capture_fixture._resume() + + # Helper context managers + + @contextlib.contextmanager + def global_and_fixture_disabled(self) -> Generator[None]: + """Context manager to temporarily disable global and current fixture capturing.""" + do_fixture = self._capture_fixture and self._capture_fixture._is_started() + if do_fixture: + self.suspend_fixture() + do_global = self._global_capturing and self._global_capturing.is_started() + if do_global: + self.suspend_global_capture() + try: + yield + finally: + if do_global: + self.resume_global_capture() + if do_fixture: + self.resume_fixture() + + @contextlib.contextmanager + def item_capture(self, when: str, item: Item) -> Generator[None]: + self.resume_global_capture() + self.activate_fixture() + try: + yield + finally: + self.deactivate_fixture() + self.suspend_global_capture(in_=False) + + out, err = self.read_global_capture() + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + # Hooks + + @hookimpl(wrapper=True) + def pytest_make_collect_report( + self, collector: Collector + ) -> Generator[None, CollectReport, CollectReport]: + if isinstance(collector, File): + self.resume_global_capture() + try: + rep = yield + finally: + self.suspend_global_capture() + out, err = self.read_global_capture() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + rep = yield + return rep + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: Item) -> Generator[None]: + with self.item_capture("setup", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: Item) -> Generator[None]: + with self.item_capture("call", item): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: Item) -> Generator[None]: + with self.item_capture("teardown", item): + return (yield) + + @hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self) -> None: + self.stop_global_capturing() + + @hookimpl(tryfirst=True) + def pytest_internalerror(self) -> None: + self.stop_global_capturing() + + +class CaptureFixture(Generic[AnyStr]): + """Object returned by the :fixture:`capsys`, 
:fixture:`capsysbinary`, + :fixture:`capfd` and :fixture:`capfdbinary` fixtures.""" + + def __init__( + self, + captureclass: type[CaptureBase[AnyStr]], + request: SubRequest, + *, + config: dict[str, Any] | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.captureclass: type[CaptureBase[AnyStr]] = captureclass + self.request = request + self._config = config if config else {} + self._capture: MultiCapture[AnyStr] | None = None + self._captured_out: AnyStr = self.captureclass.EMPTY_BUFFER + self._captured_err: AnyStr = self.captureclass.EMPTY_BUFFER + + def _start(self) -> None: + if self._capture is None: + self._capture = MultiCapture( + in_=None, + out=self.captureclass(1, **self._config), + err=self.captureclass(2, **self._config), + ) + self._capture.start_capturing() + + def close(self) -> None: + if self._capture is not None: + out, err = self._capture.pop_outerr_to_orig() + self._captured_out += out + self._captured_err += err + self._capture.stop_capturing() + self._capture = None + + def readouterr(self) -> CaptureResult[AnyStr]: + """Read and return the captured output so far, resetting the internal + buffer. + + :returns: + The captured content as a namedtuple with ``out`` and ``err`` + string attributes. + """ + captured_out, captured_err = self._captured_out, self._captured_err + if self._capture is not None: + out, err = self._capture.readouterr() + captured_out += out + captured_err += err + self._captured_out = self.captureclass.EMPTY_BUFFER + self._captured_err = self.captureclass.EMPTY_BUFFER + return CaptureResult(captured_out, captured_err) + + def _suspend(self) -> None: + """Suspend this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.suspend_capturing() + + def _resume(self) -> None: + """Resume this fixture's own capturing temporarily.""" + if self._capture is not None: + self._capture.resume_capturing() + + def _is_started(self) -> bool: + """Whether actively capturing -- not disabled or closed.""" + if self._capture is not None: + return self._capture.is_started() + return False + + @contextlib.contextmanager + def disabled(self) -> Generator[None]: + """Temporarily disable capturing while inside the ``with`` block.""" + capmanager: CaptureManager = self.request.config.pluginmanager.getplugin( + "capturemanager" + ) + with capmanager.global_and_fixture_disabled(): + yield + + +# The fixtures. + + +@fixture +def capsys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capsys): + print("hello") + captured = capsys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capteesys(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable simultaneous text capturing and pass-through of writes + to ``sys.stdout`` and ``sys.stderr`` as defined by ``--capture=``. 
+ + + The captured output is made available via ``capteesys.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + The output is also passed-through, allowing it to be "live-printed", + reported, or both as defined by ``--capture=``. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_output(capteesys): + print("hello") + captured = capteesys.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture( + SysCapture, request, config=dict(tee=True), _ispytest=True + ) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capsysbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to ``sys.stdout`` and ``sys.stderr``. + + The captured output is made available via ``capsysbinary.readouterr()`` + method calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``bytes`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. code-block:: python + + def test_output(capsysbinary): + print("hello") + captured = capsysbinary.readouterr() + assert captured.out == b"hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(SysCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfd(request: SubRequest) -> Generator[CaptureFixture[str]]: + r"""Enable text capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``text`` objects. + + Returns an instance of :class:`CaptureFixture[str] `. + + Example: + + .. code-block:: python + + def test_system_echo(capfd): + os.system('echo "hello"') + captured = capfd.readouterr() + assert captured.out == "hello\n" + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCapture, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() + + +@fixture +def capfdbinary(request: SubRequest) -> Generator[CaptureFixture[bytes]]: + r"""Enable bytes capturing of writes to file descriptors ``1`` and ``2``. + + The captured output is made available via ``capfd.readouterr()`` method + calls, which return a ``(out, err)`` namedtuple. + ``out`` and ``err`` will be ``byte`` objects. + + Returns an instance of :class:`CaptureFixture[bytes] `. + + Example: + + .. 
code-block:: python + + def test_system_echo(capfdbinary): + os.system('echo "hello"') + captured = capfdbinary.readouterr() + assert captured.out == b"hello\n" + + """ + capman: CaptureManager = request.config.pluginmanager.getplugin("capturemanager") + capture_fixture = CaptureFixture(FDCaptureBinary, request, _ispytest=True) + capman.set_fixture(capture_fixture) + capture_fixture._start() + yield capture_fixture + capture_fixture.close() + capman.unset_fixture() diff --git a/venv/lib/python3.10/site-packages/_pytest/compat.py b/venv/lib/python3.10/site-packages/_pytest/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..bef8c317bb94722a1571efe168aa3ed176cee49e --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/compat.py @@ -0,0 +1,333 @@ +# mypy: allow-untyped-defs +"""Python version compatibility code.""" + +from __future__ import annotations + +from collections.abc import Callable +import enum +import functools +import inspect +from inspect import Parameter +from inspect import Signature +import os +from pathlib import Path +import sys +from typing import Any +from typing import Final +from typing import NoReturn + +import py + + +if sys.version_info >= (3, 14): + from annotationlib import Format + + +#: constant to prepare valuing pylib path replacements/lazy proxies later on +# intended for removal in pytest 8.0 or 9.0 + +# fmt: off +# intentional space to create a fake difference for the verification +LEGACY_PATH = py.path. local +# fmt: on + + +def legacy_path(path: str | os.PathLike[str]) -> LEGACY_PATH: + """Internal wrapper to prepare lazy proxies for legacy_path instances""" + return LEGACY_PATH(path) + + +# fmt: off +# Singleton type for NOTSET, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class NotSetType(enum.Enum): + token = 0 +NOTSET: Final = NotSetType.token +# fmt: on + + +def iscoroutinefunction(func: object) -> bool: + """Return True if func is a coroutine function (a function defined with async + def syntax, and doesn't contain yield), or a function decorated with + @asyncio.coroutine. + + Note: copied and modified from Python 3.5's builtin coroutines.py to avoid + importing asyncio directly, which in turns also initializes the "logging" + module as a side-effect (see issue #8). 
+ """ + return inspect.iscoroutinefunction(func) or getattr(func, "_is_coroutine", False) + + +def is_async_function(func: object) -> bool: + """Return True if the given function seems to be an async function or + an async generator.""" + return iscoroutinefunction(func) or inspect.isasyncgenfunction(func) + + +def signature(obj: Callable[..., Any]) -> Signature: + """Return signature without evaluating annotations.""" + if sys.version_info >= (3, 14): + return inspect.signature(obj, annotation_format=Format.STRING) + return inspect.signature(obj) + + +def getlocation(function, curdir: str | os.PathLike[str] | None = None) -> str: + function = get_real_func(function) + fn = Path(inspect.getfile(function)) + lineno = function.__code__.co_firstlineno + if curdir is not None: + try: + relfn = fn.relative_to(curdir) + except ValueError: + pass + else: + return f"{relfn}:{lineno + 1}" + return f"{fn}:{lineno + 1}" + + +def num_mock_patch_args(function) -> int: + """Return number of arguments used up by mock arguments (if any).""" + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + + mock_sentinel = getattr(sys.modules.get("mock"), "DEFAULT", object()) + ut_mock_sentinel = getattr(sys.modules.get("unittest.mock"), "DEFAULT", object()) + + return len( + [ + p + for p in patchings + if not p.attribute_name + and (p.new is mock_sentinel or p.new is ut_mock_sentinel) + ] + ) + + +def getfuncargnames( + function: Callable[..., object], + *, + name: str = "", + cls: type | None = None, +) -> tuple[str, ...]: + """Return the names of a function's mandatory arguments. + + Should return the names of all function arguments that: + * Aren't bound to an instance or type as in instance or class methods. + * Don't have default values. + * Aren't bound with functools.partial. + * Aren't replaced with mocks. + + The cls arguments indicate that the function should be treated as a bound + method even though it's not unless the function is a static method. + + The name parameter should be the original name in which the function was collected. + """ + # TODO(RonnyPfannschmidt): This function should be refactored when we + # revisit fixtures. The fixture mechanism should ask the node for + # the fixture names, and not try to obtain directly from the + # function object well after collection has occurred. + + # The parameters attribute of a Signature object contains an + # ordered mapping of parameter names to Parameter instances. This + # creates a tuple of the names of the parameters that don't have + # defaults. + try: + parameters = signature(function).parameters.values() + except (ValueError, TypeError) as e: + from _pytest.outcomes import fail + + fail( + f"Could not determine arguments of {function!r}: {e}", + pytrace=False, + ) + + arg_names = tuple( + p.name + for p in parameters + if ( + p.kind is Parameter.POSITIONAL_OR_KEYWORD + or p.kind is Parameter.KEYWORD_ONLY + ) + and p.default is Parameter.empty + ) + if not name: + name = function.__name__ + + # If this function should be treated as a bound method even though + # it's passed as an unbound method or function, and its first parameter + # wasn't defined as positional only, remove the first parameter name. + if not any(p.kind is Parameter.POSITIONAL_ONLY for p in parameters) and ( + # Not using `getattr` because we don't want to resolve the staticmethod. + # Not using `cls.__dict__` because we want to check the entire MRO. 
+ cls + and not isinstance( + inspect.getattr_static(cls, name, default=None), staticmethod + ) + ): + arg_names = arg_names[1:] + # Remove any names that will be replaced with mocks. + if hasattr(function, "__wrapped__"): + arg_names = arg_names[num_mock_patch_args(function) :] + return arg_names + + +def get_default_arg_names(function: Callable[..., Any]) -> tuple[str, ...]: + # Note: this code intentionally mirrors the code at the beginning of + # getfuncargnames, to get the arguments which were excluded from its result + # because they had default values. + return tuple( + p.name + for p in signature(function).parameters.values() + if p.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + and p.default is not Parameter.empty + ) + + +_non_printable_ascii_translate_table = { + i: f"\\x{i:02x}" for i in range(128) if i not in range(32, 127) +} +_non_printable_ascii_translate_table.update( + {ord("\t"): "\\t", ord("\r"): "\\r", ord("\n"): "\\n"} +) + + +def ascii_escaped(val: bytes | str) -> str: + r"""If val is pure ASCII, return it as an str, otherwise, escape + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> r'\xc3\xb4\xc5\xd6' + + and escapes strings into a sequence of escaped unicode ids, e.g.: + + r'4\nV\U00043efa\x0eMXWB\x1e\u3028\u15fd\xcd\U0007d944' + + Note: + The obvious "v.decode('unicode-escape')" will return + valid UTF-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a UTF-8 string. + """ + if isinstance(val, bytes): + ret = val.decode("ascii", "backslashreplace") + else: + ret = val.encode("unicode_escape").decode("ascii") + return ret.translate(_non_printable_ascii_translate_table) + + +def get_real_func(obj): + """Get the real function object of the (possibly) wrapped object by + :func:`functools.wraps`, or :func:`functools.partial`.""" + obj = inspect.unwrap(obj) + + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + return func + + +def safe_getattr(object: Any, name: str, default: Any) -> Any: + """Like getattr but return default upon any Exception or any OutcomeException. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + It catches OutcomeException because of #2490 (issue #580), new outcomes + are derived from BaseException instead of Exception (for more details + check #2707). + """ + from _pytest.outcomes import TEST_OUTCOME + + try: + return getattr(object, name, default) + except TEST_OUTCOME: + return default + + +def safe_isclass(obj: object) -> bool: + """Ignore any exception via isinstance on Python 3.""" + try: + return inspect.isclass(obj) + except Exception: + return False + + +def get_user_id() -> int | None: + """Return the current process's real user id or None if it could not be + determined. + + :return: The user id or None if it could not be determined. + """ + # mypy follows the version and platform checking expectation of PEP 484: + # https://mypy.readthedocs.io/en/stable/common_issues.html?highlight=platform#python-version-and-system-platform-checks + # Containment checks are too complex for mypy v1.5.0 and cause failure. + if sys.platform == "win32" or sys.platform == "emscripten": + # win32 does not have a getuid() function. + # Emscripten has a return 0 stub. 
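+ # Neither platform can report a meaningful uid, so return None.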
+ return None + else: + # On other platforms, a return value of -1 is assumed to indicate that + # the current process's real user id could not be determined. + ERROR = -1 + uid = os.getuid() + return uid if uid != ERROR else None + + +# Perform exhaustiveness checking. +# +# Consider this example: +# +# MyUnion = Union[int, str] +# +# def handle(x: MyUnion) -> int: +# if isinstance(x, int): +# return 1 +# elif isinstance(x, str): +# return 2 +# else: +# raise Exception('unreachable') +# +# Now suppose we add a new variant: +# +# MyUnion = Union[int, str, bytes] +# +# After doing this, we must remember to go and update the handle +# function to handle the new variant. +# +# With `assert_never` we can do better: +# +# # raise Exception('unreachable') +# return assert_never(x) +# +# Now, if we forget to handle the new variant, the type-checker will emit a +# compile-time error, instead of the runtime error we would have gotten +# previously. +# +# This also works for Enums (if you use `is` to compare) and Literals. +def assert_never(value: NoReturn) -> NoReturn: + assert False, f"Unhandled value: {value} ({type(value).__name__})" + + +class CallableBool: + """ + A bool-like object that can also be called, returning its true/false value. + + Used for backwards compatibility in cases where something was supposed to be a method + but was implemented as a simple attribute by mistake (see `TerminalReporter.isatty`). + + Do not use in new code. + """ + + def __init__(self, value: bool) -> None: + self._value = value + + def __bool__(self) -> bool: + return self._value + + def __call__(self) -> bool: + return self._value diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__init__.py b/venv/lib/python3.10/site-packages/_pytest/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..468018fadc0ad45523905041b627fb066afb4a06 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/config/__init__.py @@ -0,0 +1,2029 @@ +# mypy: allow-untyped-defs +"""Command line options, ini-file and conftest.py processing.""" + +from __future__ import annotations + +import argparse +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +import contextlib +import copy +import dataclasses +import enum +from functools import lru_cache +import glob +import importlib.metadata +import inspect +import os +import pathlib +import re +import shlex +import sys +from textwrap import dedent +import types +from types import FunctionType +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import IO +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy +from pluggy import HookimplMarker +from pluggy import HookimplOpts +from pluggy import HookspecMarker +from pluggy import HookspecOpts +from pluggy import PluginManager + +from .compat import PathAwareHookProxy +from .exceptions import PrintHelp as PrintHelp +from .exceptions import UsageError as UsageError +from .findpaths import determine_setup +from _pytest import __version__ +import _pytest._code +from _pytest._code import ExceptionInfo +from _pytest._code import filter_traceback +from _pytest._code.code import TracebackStyle +from _pytest._io import TerminalWriter +from _pytest.config.argparsing import Argument +from _pytest.config.argparsing import Parser +import
_pytest.deprecated +import _pytest.hookspec +from _pytest.outcomes import fail +from _pytest.outcomes import Skipped +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportMode +from _pytest.pathlib import resolve_package_path +from _pytest.pathlib import safe_exists +from _pytest.stash import Stash +from _pytest.warning_types import PytestConfigWarning +from _pytest.warning_types import warn_explicit_for + + +if TYPE_CHECKING: + from _pytest.assertion.rewrite import AssertionRewritingHook + from _pytest.cacheprovider import Cache + from _pytest.terminal import TerminalReporter + +_PluggyPlugin = object +"""A type to represent plugin objects. + +Plugins can be any namespace, so we can't narrow it down much, but we use an +alias to make the intent clear. + +Ideally this type would be provided by pluggy itself. +""" + + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + + +@final +class ExitCode(enum.IntEnum): + """Encodes the valid exit codes used by pytest. + + Currently users and plugins may supply other exit codes as well. + + .. versionadded:: 5.0 + """ + + #: Tests passed. + OK = 0 + #: Tests failed. + TESTS_FAILED = 1 + #: pytest was interrupted. + INTERRUPTED = 2 + #: An internal error got in the way. + INTERNAL_ERROR = 3 + #: pytest was misused. + USAGE_ERROR = 4 + #: pytest couldn't find tests. + NO_TESTS_COLLECTED = 5 + + +class ConftestImportFailure(Exception): + def __init__( + self, + path: pathlib.Path, + *, + cause: Exception, + ) -> None: + self.path = path + self.cause = cause + + def __str__(self) -> str: + return f"{type(self.cause).__name__}: {self.cause} (from {self.path})" + + +def filter_traceback_for_conftest_import_failure( + entry: _pytest._code.TracebackEntry, +) -> bool: + """Filter traceback entries which point to pytest internals or importlib. + + Make a special case for importlib because we use it to import test modules and conftest files + in _pytest.pathlib.import_path. + """ + return filter_traceback(entry) and "importlib" not in str(entry.path).split(os.sep) + + +def main( + args: list[str] | os.PathLike[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> int | ExitCode: + """Perform an in-process test run. + + :param args: + List of command line arguments. If `None` or not given, defaults to reading + arguments directly from the process command line (:data:`sys.argv`). + :param plugins: List of plugin objects to be auto-registered during initialization. + + :returns: An exit code.
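+ + Example (an editor's illustration, not part of the original docstring; + the ``tests/`` path is hypothetical): + + .. code-block:: python + + import pytest + + exit_code = pytest.main(["-q", "tests/"]) + assert exit_code == pytest.ExitCode.OK + + ``pytest.main`` re-exports this function, and since ``ExitCode`` is an + ``enum.IntEnum`` the result also compares equal to the integer ``0``.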
+ """ + old_pytest_version = os.environ.get("PYTEST_VERSION") + try: + os.environ["PYTEST_VERSION"] = __version__ + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + exc_info = ExceptionInfo.from_exception(e.cause) + tw = TerminalWriter(sys.stderr) + tw.line(f"ImportError while loading conftest '{e.path}'.", red=True) + exc_info.traceback = exc_info.traceback.filter( + filter_traceback_for_conftest_import_failure + ) + exc_repr = ( + exc_info.getrepr(style="short", chain=False) + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + for line in formatted_tb.splitlines(): + tw.line(line.rstrip(), red=True) + return ExitCode.USAGE_ERROR + else: + try: + ret: ExitCode | int = config.hook.pytest_cmdline_main(config=config) + try: + return ExitCode(ret) + except ValueError: + return ret + finally: + config._ensure_unconfigure() + except UsageError as e: + tw = TerminalWriter(sys.stderr) + for msg in e.args: + tw.line(f"ERROR: {msg}\n", red=True) + return ExitCode.USAGE_ERROR + finally: + if old_pytest_version is None: + os.environ.pop("PYTEST_VERSION", None) + else: + os.environ["PYTEST_VERSION"] = old_pytest_version + + +def console_main() -> int: + """The CLI entry point of pytest. + + This function is not meant for programmable use; use `main()` instead. + """ + # https://docs.python.org/3/library/signal.html#note-on-sigpipe + try: + code = main() + sys.stdout.flush() + return code + except BrokenPipeError: + # Python flushes standard streams on exit; redirect remaining output + # to devnull to avoid another BrokenPipeError at shutdown + devnull = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull, sys.stdout.fileno()) + return 1 # Python exits with error code 1 on EPIPE + + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +def filename_arg(path: str, optname: str) -> str: + """Argparse type validator for filename arguments. + + :path: Path of filename. + :optname: Name of the option. + """ + if os.path.isdir(path): + raise UsageError(f"{optname} must be a filename, given: {path}") + return path + + +def directory_arg(path: str, optname: str) -> str: + """Argparse type validator for directory arguments. + + :path: Path of directory. + :optname: Name of the option. + """ + if not os.path.isdir(path): + raise UsageError(f"{optname} must be a directory, given: {path}") + return path + + +# Plugins that cannot be disabled via "-p no:X" currently. +essential_plugins = ( + "mark", + "main", + "runner", + "fixtures", + "helpconfig", # Provides -p. +) + +default_plugins = ( + *essential_plugins, + "python", + "terminal", + "debugging", + "unittest", + "capture", + "skipping", + "legacypath", + "tmpdir", + "monkeypatch", + "recwarn", + "pastebin", + "assertion", + "junitxml", + "doctest", + "cacheprovider", + "freeze_support", + "setuponly", + "setupplan", + "stepwise", + "unraisableexception", + "threadexception", + "warnings", + "logging", + "reports", + "faulthandler", +) + +builtin_plugins = { + *default_plugins, + "pytester", + "pytester_assertions", +} + + +def get_config( + args: list[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + # subsequent calls to main will create a fresh instance + pluginmanager = PytestPluginManager() + config = Config( + pluginmanager, + invocation_params=Config.InvocationParams( + args=args or (), + plugins=plugins, + dir=pathlib.Path.cwd(), + ), + ) + + if args is not None: + # Handle any "-p no:plugin" args. 
+ pluginmanager.consider_preparse(args, exclude_only=True) + + for spec in default_plugins: + pluginmanager.import_plugin(spec) + + return config + + +def get_plugin_manager() -> PytestPluginManager: + """Obtain a new instance of the + :py:class:`pytest.PytestPluginManager`, with default plugins + already loaded. + + This function can be used by integrations with other tools, like IDEs + hooking into pytest to run tests. + """ + return get_config().pluginmanager + + +def _prepareconfig( + args: list[str] | os.PathLike[str] | None = None, + plugins: Sequence[str | _PluggyPlugin] | None = None, +) -> Config: + if args is None: + args = sys.argv[1:] + elif isinstance(args, os.PathLike): + args = [os.fspath(args)] + elif not isinstance(args, list): + msg = ( # type:ignore[unreachable] + "`args` parameter expected to be a list of strings, got: {!r} (type: {})" + ) + raise TypeError(msg.format(args, type(args))) + + config = get_config(args, plugins) + pluginmanager = config.pluginmanager + try: + if plugins: + for plugin in plugins: + if isinstance(plugin, str): + pluginmanager.consider_pluginarg(plugin) + else: + pluginmanager.register(plugin) + config = pluginmanager.hook.pytest_cmdline_parse( + pluginmanager=pluginmanager, args=args + ) + return config + except BaseException: + config._ensure_unconfigure() + raise + + +def _get_directory(path: pathlib.Path) -> pathlib.Path: + """Get the directory of a path - itself if already a directory.""" + if path.is_file(): + return path.parent + else: + return path + + +def _get_legacy_hook_marks( + method: Any, + hook_type: str, + opt_names: tuple[str, ...], +) -> dict[str, bool]: + if TYPE_CHECKING: + # abuse typeguard from importlib to avoid massive method type union that's lacking an alias + assert inspect.isroutine(method) + known_marks: set[str] = {m.name for m in getattr(method, "pytestmark", [])} + must_warn: list[str] = [] + opts: dict[str, bool] = {} + for opt_name in opt_names: + opt_attr = getattr(method, opt_name, AttributeError) + if opt_attr is not AttributeError: + must_warn.append(f"{opt_name}={opt_attr}") + opts[opt_name] = True + elif opt_name in known_marks: + must_warn.append(f"{opt_name}=True") + opts[opt_name] = True + else: + opts[opt_name] = False + if must_warn: + hook_opts = ", ".join(must_warn) + message = _pytest.deprecated.HOOK_LEGACY_MARKING.format( + type=hook_type, + fullname=method.__qualname__, + hook_opts=hook_opts, + ) + warn_explicit_for(cast(FunctionType, method), message) + return opts + + +@final +class PytestPluginManager(PluginManager): + """A :py:class:`pluggy.PluginManager <pluggy.PluginManager>` with + additional pytest-specific functionality: + + * Loading plugins from the command line, ``PYTEST_PLUGINS`` env variable and + ``pytest_plugins`` global variables found in plugins being loaded. + * ``conftest.py`` loading during start-up. + """ + + def __init__(self) -> None: + from _pytest.assertion import DummyRewriteHook + from _pytest.assertion import RewriteHook + + super().__init__("pytest") + + # -- State related to local conftest plugins. + # All loaded conftest modules. + self._conftest_plugins: set[types.ModuleType] = set() + # All conftest modules applicable for a directory. + # This includes the directory's own conftest modules as well + # as those of its parent directories. + self._dirpath2confmods: dict[pathlib.Path, list[types.ModuleType]] = {} + # Cutoff directory above which conftests are no longer discovered. + self._confcutdir: pathlib.Path | None = None + # If set, conftest loading is skipped.
+ self._noconftest = False + + # _getconftestmodules()'s call to _get_directory() causes a stat + # storm when it's called potentially thousands of times in a test + # session (#9478), often with the same path, so cache it. + self._get_directory = lru_cache(256)(_get_directory) + + # plugins that were explicitly skipped with pytest.skip + # list of (module name, skip reason) + # previously we would issue a warning when a plugin was skipped, but + # since we refactored warnings as first-class citizens of Config, they are + # just stored here to be used later. + self.skipped_plugins: list[tuple[str, str]] = [] + + self.add_hookspecs(_pytest.hookspec) + self.register(self) + if os.environ.get("PYTEST_DEBUG"): + err: IO[str] = sys.stderr + encoding: str = getattr(err, "encoding", "utf8") + try: + err = open( + os.dup(err.fileno()), + mode=err.mode, + buffering=1, + encoding=encoding, + ) + except Exception: + pass + self.trace.root.setwriter(err.write) + self.enable_tracing() + + # Config._consider_importhook will set a real object if required. + self.rewrite_hook: RewriteHook = DummyRewriteHook() + # Used to know when we are importing conftests after the pytest_configure stage. + self._configured = False + + def parse_hookimpl_opts( + self, plugin: _PluggyPlugin, name: str + ) -> HookimplOpts | None: + """:meta private:""" + # pytest hooks are always prefixed with "pytest_", + # so we avoid accessing possibly non-readable attributes + # (see issue #1073). + if not name.startswith("pytest_"): + return None + # Ignore names which cannot be hooks. + if name == "pytest_plugins": + return None + + opts = super().parse_hookimpl_opts(plugin, name) + if opts is not None: + return opts + + method = getattr(plugin, name) + # Consider only actual functions for hooks (#3775). + if not inspect.isroutine(method): + return None + # Collect unmarked hooks as long as they have the `pytest_` prefix. + legacy = _get_legacy_hook_marks( + method, "impl", ("tryfirst", "trylast", "optionalhook", "hookwrapper") + ) + return cast(HookimplOpts, legacy) + + def parse_hookspec_opts(self, module_or_class, name: str) -> HookspecOpts | None: + """:meta private:""" + opts = super().parse_hookspec_opts(module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + legacy = _get_legacy_hook_marks( + method, "spec", ("firstresult", "historic") + ) + opts = cast(HookspecOpts, legacy) + return opts + + def register(self, plugin: _PluggyPlugin, name: str | None = None) -> str | None: + if name in _pytest.deprecated.DEPRECATED_EXTERNAL_PLUGINS: + warnings.warn( + PytestConfigWarning( + "{} plugin has been merged into the core, " + "please remove it from your requirements.".format( + name.replace("_", "-") + ) + ) + ) + return None + plugin_name = super().register(plugin, name) + if plugin_name is not None: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict( + plugin=plugin, + plugin_name=plugin_name, + manager=self, + ) + ) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return plugin_name + + def getplugin(self, name: str): + # Support deprecated naming because plugins (xdist e.g.) use it.
+ plugin: _PluggyPlugin | None = self.get_plugin(name) + return plugin + + def hasplugin(self, name: str) -> bool: + """Return whether a plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config: Config) -> None: + """:meta private:""" + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers. + config.addinivalue_line( + "markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible. " + "DEPRECATED, use @pytest.hookimpl(tryfirst=True) instead.", + ) + config.addinivalue_line( + "markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible. " + "DEPRECATED, use @pytest.hookimpl(trylast=True) instead.", + ) + self._configured = True + + # + # Internal API for local conftest plugin handling. + # + def _set_initial_conftests( + self, + args: Sequence[str | pathlib.Path], + pyargs: bool, + noconftest: bool, + rootpath: pathlib.Path, + confcutdir: pathlib.Path | None, + invocation_dir: pathlib.Path, + importmode: ImportMode | str, + *, + consider_namespace_packages: bool, + ) -> None: + """Load initial conftest files given a preparsed "namespace". + + As conftest files may add their own command line options which have + arguments ('--my-opt somepath') we might get some false positives. + All builtin and 3rd party plugins will have been loaded, however, so + common options will not confuse our logic here. + """ + self._confcutdir = ( + absolutepath(invocation_dir / confcutdir) if confcutdir else None + ) + self._noconftest = noconftest + self._using_pyargs = pyargs + foundanchor = False + for initial_path in args: + path = str(initial_path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = absolutepath(invocation_dir / path) + + # Ensure we do not break if what appears to be an anchor + # is in fact a very long option (#10169, #11394). + if safe_exists(anchor): + self._try_load_conftest( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + foundanchor = True + if not foundanchor: + self._try_load_conftest( + invocation_dir, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _is_in_confcutdir(self, path: pathlib.Path) -> bool: + """Whether to consider the given path to load conftests from.""" + if self._confcutdir is None: + return True + # The semantics here are literally: + # Do not load a conftest if it is found upwards from confcut dir. + # But this is *not* the same as: + # Load only conftests from confcutdir or below. + # At first glance they might seem the same thing, however we do support use cases where + # we want to load conftests that are not found in confcutdir or below, but are found + # in completely different directory hierarchies like packages installed + # in out-of-source trees. + # (see #9767 for a regression where the logic was inverted). 
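+ # Worked example (editor's note): with confcutdir=/repo/tests, the paths + # /repo and / are rejected (both are parents of confcutdir), while + # /repo/tests itself, /repo/tests/unit, and an unrelated tree such as + # /opt/site-packages/pkg are all accepted by the check below.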
+ return path not in self._confcutdir.parents + + def _try_load_conftest( + self, + anchor: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + self._loadconftestmodules( + anchor, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + # let's also consider test* subdirs + if anchor.is_dir(): + for x in anchor.glob("test*"): + if x.is_dir(): + self._loadconftestmodules( + x, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + + def _loadconftestmodules( + self, + path: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> None: + if self._noconftest: + return + + directory = self._get_directory(path) + + # Optimization: avoid repeated searches in the same directory. + # Assumes always called with same importmode and rootpath. + if directory in self._dirpath2confmods: + return + + clist = [] + for parent in reversed((directory, *directory.parents)): + if self._is_in_confcutdir(parent): + conftestpath = parent / "conftest.py" + if conftestpath.is_file(): + mod = self._importconftest( + conftestpath, + importmode, + rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + clist.append(mod) + self._dirpath2confmods[directory] = clist + + def _getconftestmodules(self, path: pathlib.Path) -> Sequence[types.ModuleType]: + directory = self._get_directory(path) + return self._dirpath2confmods.get(directory, ()) + + def _rget_with_confmod( + self, + name: str, + path: pathlib.Path, + ) -> tuple[types.ModuleType, Any]: + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest( + self, + conftestpath: pathlib.Path, + importmode: str | ImportMode, + rootpath: pathlib.Path, + *, + consider_namespace_packages: bool, + ) -> types.ModuleType: + conftestpath_plugin_name = str(conftestpath) + existing = self.get_plugin(conftestpath_plugin_name) + if existing is not None: + return cast(types.ModuleType, existing) + + # conftest.py files that are not in a Python package all have module + # name "conftest", and thus conflict with each other. Clear the existing + # entry before loading the new one, otherwise the existing one will be + # returned from the module cache. + pkgpath = resolve_package_path(conftestpath) + if pkgpath is None: + try: + del sys.modules[conftestpath.stem] + except KeyError: + pass + + try: + mod = import_path( + conftestpath, + mode=importmode, + root=rootpath, + consider_namespace_packages=consider_namespace_packages, + ) + except Exception as e: + assert e.__traceback__ is not None + raise ConftestImportFailure(conftestpath, cause=e) from e + + self._check_non_top_pytest_plugins(mod, conftestpath) + + self._conftest_plugins.add(mod) + dirpath = conftestpath.parent + if dirpath in self._dirpath2confmods: + for path, mods in self._dirpath2confmods.items(): + if dirpath in path.parents or path == dirpath: + if mod in mods: + raise AssertionError( + f"While trying to load conftest path {conftestpath!s}, " + f"found that the module {mod} is already loaded with path {mod.__file__}. " + "This is not supposed to happen. Please report this issue to pytest."
+ ) + mods.append(mod) + self.trace(f"loading conftestmodule {mod!r}") + self.consider_conftest(mod, registration_name=conftestpath_plugin_name) + return mod + + def _check_non_top_pytest_plugins( + self, + mod: types.ModuleType, + conftestpath: pathlib.Path, + ) -> None: + if ( + hasattr(mod, "pytest_plugins") + and self._configured + and not self._using_pyargs + ): + msg = ( + "Defining 'pytest_plugins' in a non-top-level conftest is no longer supported:\n" + "It affects the entire test suite instead of just below the conftest as expected.\n" + " {}\n" + "Please move it to a top level conftest file at the rootdir:\n" + " {}\n" + "For more information, visit:\n" + " https://docs.pytest.org/en/stable/deprecations.html#pytest-plugins-in-non-top-level-conftest-files" + ) + fail(msg.format(conftestpath, self._confcutdir), pytrace=False) + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse( + self, args: Sequence[str], *, exclude_only: bool = False + ) -> None: + """:meta private:""" + i = 0 + n = len(args) + while i < n: + opt = args[i] + i += 1 + if isinstance(opt, str): + if opt == "-p": + try: + parg = args[i] + except IndexError: + return + i += 1 + elif opt.startswith("-p"): + parg = opt[2:] + else: + continue + parg = parg.strip() + if exclude_only and not parg.startswith("no:"): + continue + self.consider_pluginarg(parg) + + def consider_pluginarg(self, arg: str) -> None: + """:meta private:""" + if arg.startswith("no:"): + name = arg[3:] + if name in essential_plugins: + raise UsageError(f"plugin {name} cannot be disabled") + + # PR #4304: remove stepwise if cacheprovider is blocked. + if name == "cacheprovider": + self.set_blocked("stepwise") + self.set_blocked("pytest_stepwise") + + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + name = arg + # Unblock the plugin. + self.unblock(name) + if not name.startswith("pytest_"): + self.unblock("pytest_" + name) + self.import_plugin(arg, consider_entry_points=True) + + def consider_conftest( + self, conftestmodule: types.ModuleType, registration_name: str + ) -> None: + """:meta private:""" + self.register(conftestmodule, name=registration_name) + + def consider_env(self) -> None: + """:meta private:""" + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod: types.ModuleType) -> None: + """:meta private:""" + self._import_plugin_specs(getattr(mod, "pytest_plugins", [])) + + def _import_plugin_specs( + self, spec: None | types.ModuleType | str | Sequence[str] + ) -> None: + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname: str, consider_entry_points: bool = False) -> None: + """Import a plugin with ``modname``. + + If ``consider_entry_points`` is True, entry point names are also + considered to find a plugin. + """ + # Most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". Those plugins are registered under their + # basename for historic purposes but must be imported with the + # _pytest prefix. + assert isinstance(modname, str), ( + f"module name as text required, got {modname!r}" + ) + if self.is_blocked(modname) or self.get_plugin(modname) is not None: + return + + importspec = "_pytest." 
+ modname if modname in builtin_plugins else modname + self.rewrite_hook.mark_rewrite(importspec) + + if consider_entry_points: + loaded = self.load_setuptools_entrypoints("pytest11", name=modname) + if loaded: + return + + try: + __import__(importspec) + except ImportError as e: + raise ImportError( + f'Error importing plugin "{modname}": {e.args[0]}' + ).with_traceback(e.__traceback__) from e + + except Skipped as e: + self.skipped_plugins.append((modname, e.msg or "")) + else: + mod = sys.modules[importspec] + self.register(mod, modname) + + +def _get_plugin_specs_as_list( + specs: None | types.ModuleType | str | Sequence[str], +) -> list[str]: + """Parse a plugins specification into a list of plugin names.""" + # None means empty. + if specs is None: + return [] + # Workaround for #3899 - a submodule which happens to be called "pytest_plugins". + if isinstance(specs, types.ModuleType): + return [] + # Comma-separated list. + if isinstance(specs, str): + return specs.split(",") if specs else [] + # Direct specification. + if isinstance(specs, collections.abc.Sequence): + return list(specs) + raise UsageError( + f"Plugins may be specified as a sequence or a ','-separated string of plugin names. Got: {specs!r}" + ) + + +class Notset: + def __repr__(self): + return "<NOTSET>" + + +notset = Notset() + + +def _iter_rewritable_modules(package_files: Iterable[str]) -> Iterator[str]: + """Given an iterable of file names in a source distribution, return the "names" that should + be marked for assertion rewrite. + + For example, the package "pytest_mock/__init__.py" should be added as "pytest_mock" in + the assertion rewrite mechanism. + + This function has to deal with dist-info based distributions and egg based distributions + (which are still very much in use for "editable" installs). + + Here are the file names as seen in a dist-info based distribution: + + pytest_mock/__init__.py + pytest_mock/_version.py + pytest_mock/plugin.py + pytest_mock.egg-info/PKG-INFO + + Here are the file names as seen in an egg based distribution: + + src/pytest_mock/__init__.py + src/pytest_mock/_version.py + src/pytest_mock/plugin.py + src/pytest_mock.egg-info/PKG-INFO + LICENSE + setup.py + + We have to take into account those two distribution flavors in order to determine which + names should be considered for assertion rewriting. + + More information: + https://github.com/pytest-dev/pytest-mock/issues/167 + """ + package_files = list(package_files) + seen_some = False + for fn in package_files: + is_simple_module = "/" not in fn and fn.endswith(".py") + is_package = fn.count("/") == 1 and fn.endswith("__init__.py") + if is_simple_module: + module_name, _ = os.path.splitext(fn) + # we ignore "setup.py" at the root of the distribution + # as well as editable installation finder modules made by setuptools + if module_name != "setup" and not module_name.startswith("__editable__"): + seen_some = True + yield module_name + elif is_package: + package_name = os.path.dirname(fn) + seen_some = True + yield package_name + + if not seen_some: + # At this point we did not find any packages or modules suitable for assertion + # rewriting, so we try again by stripping the first path component (to account for + # "src" based source trees for example). + # This approach lets us have the common case continue to be fast, as egg-distributions + # are rarer.
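+ # For example (editor's note): "src/pytest_mock/__init__.py" becomes + # "pytest_mock/__init__.py" on the recursive pass below, which then + # yields "pytest_mock".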
+ new_package_files = [] + for fn in package_files: + parts = fn.split("/") + new_fn = "/".join(parts[1:]) + if new_fn: + new_package_files.append(new_fn) + if new_package_files: + yield from _iter_rewritable_modules(new_package_files) + + +@final +class Config: + """Access to configuration values, pluginmanager and plugin hooks. + + :param PytestPluginManager pluginmanager: + A pytest PluginManager. + + :param InvocationParams invocation_params: + Object containing parameters regarding the :func:`pytest.main` + invocation. + """ + + @final + @dataclasses.dataclass(frozen=True) + class InvocationParams: + """Holds parameters passed during :func:`pytest.main`. + + The object attributes are read-only. + + .. versionadded:: 5.1 + + .. note:: + + Note that the environment variable ``PYTEST_ADDOPTS`` and the ``addopts`` + ini option are handled by pytest and are not included in the ``args`` attribute. + + Plugins accessing ``InvocationParams`` must be aware of that. + """ + + args: tuple[str, ...] + """The command-line arguments as passed to :func:`pytest.main`.""" + plugins: Sequence[str | _PluggyPlugin] | None + """Extra plugins, might be `None`.""" + dir: pathlib.Path + """The directory from which :func:`pytest.main` was invoked. :type: pathlib.Path""" + + def __init__( + self, + *, + args: Iterable[str], + plugins: Sequence[str | _PluggyPlugin] | None, + dir: pathlib.Path, + ) -> None: + object.__setattr__(self, "args", tuple(args)) + object.__setattr__(self, "plugins", plugins) + object.__setattr__(self, "dir", dir) + + class ArgsSource(enum.Enum): + """Indicates the source of the test arguments. + + .. versionadded:: 7.2 + """ + + #: Command line arguments. + ARGS = enum.auto() + #: Invocation directory. + INVOCATION_DIR = enum.auto() + INCOVATION_DIR = INVOCATION_DIR # backwards compatibility alias + #: 'testpaths' configuration value. + TESTPATHS = enum.auto() + + # Set by cacheprovider plugin. + cache: Cache + + def __init__( + self, + pluginmanager: PytestPluginManager, + *, + invocation_params: InvocationParams | None = None, + ) -> None: + from .argparsing import FILE_OR_DIR + from .argparsing import Parser + + if invocation_params is None: + invocation_params = self.InvocationParams( + args=(), plugins=None, dir=pathlib.Path.cwd() + ) + + self.option = argparse.Namespace() + """Access to command line options as attributes. + + :type: argparse.Namespace + """ + + self.invocation_params = invocation_params + """The parameters with which pytest was invoked. + + :type: InvocationParams + """ + + _a = FILE_OR_DIR + self._parser = Parser( + usage=f"%(prog)s [options] [{_a}] [{_a}] [...]", + processopt=self._processopt, + _ispytest=True, + ) + self.pluginmanager = pluginmanager + """The plugin manager handles plugin registration and hook invocation. + + :type: PytestPluginManager + """ + + self.stash = Stash() + """A place where plugins can store information on the config for their + own use. + + :type: Stash + """ + # Deprecated alias. Was never public. Can be removed in a few releases.
+ self._store = self.stash + + self.trace = self.pluginmanager.trace.root.get("config") + self.hook: pluggy.HookRelay = PathAwareHookProxy(self.pluginmanager.hook) # type: ignore[assignment] + self._inicache: dict[str, Any] = {} + self._override_ini: Sequence[str] = () + self._opt2dest: dict[str, str] = {} + self._cleanup_stack = contextlib.ExitStack() + self.pluginmanager.register(self, "pytestconfig") + self._configured = False + self.hook.pytest_addoption.call_historic( + kwargs=dict(parser=self._parser, pluginmanager=self.pluginmanager) + ) + self.args_source = Config.ArgsSource.ARGS + self.args: list[str] = [] + + @property + def rootpath(self) -> pathlib.Path: + """The path to the :ref:`rootdir <rootdir>`. + + :type: pathlib.Path + + .. versionadded:: 6.1 + """ + return self._rootpath + + @property + def inipath(self) -> pathlib.Path | None: + """The path to the :ref:`configfile <configfiles>`. + + .. versionadded:: 6.1 + """ + return self._inipath + + def add_cleanup(self, func: Callable[[], None]) -> None: + """Add a function to be called when the config object gets out of + use (usually coinciding with pytest_unconfigure). + """ + self._cleanup_stack.callback(func) + + def _do_configure(self) -> None: + assert not self._configured + self._configured = True + self.hook.pytest_configure.call_historic(kwargs=dict(config=self)) + + def _ensure_unconfigure(self) -> None: + try: + if self._configured: + self._configured = False + try: + self.hook.pytest_unconfigure(config=self) + finally: + self.hook.pytest_configure._call_history = [] + finally: + try: + self._cleanup_stack.close() + finally: + self._cleanup_stack = contextlib.ExitStack() + + def get_terminal_writer(self) -> TerminalWriter: + terminalreporter: TerminalReporter | None = self.pluginmanager.get_plugin( + "terminalreporter" + ) + assert terminalreporter is not None + return terminalreporter._tw + + def pytest_cmdline_parse( + self, pluginmanager: PytestPluginManager, args: list[str] + ) -> Config: + try: + self.parse(args) + except UsageError: + # Handle --version and --help here in a minimal fashion. + # This gets done via helpconfig normally, but its + # pytest_cmdline_main is not called in case of errors. + if getattr(self.option, "version", False) or "--version" in args: + from _pytest.helpconfig import showversion + + showversion(self) + elif ( + getattr(self.option, "help", False) or "--help" in args or "-h" in args + ): + self._parser._getparser().print_help() + sys.stdout.write( + "\nNOTE: displaying only minimal help due to UsageError.\n\n" + ) + + raise + + return self + + def notify_exception( + self, + excinfo: ExceptionInfo[BaseException], + option: argparse.Namespace | None = None, + ) -> None: + if option and getattr(option, "fulltrace", False): + style: TracebackStyle = "long" + else: + style = "native" + excrepr = excinfo.getrepr( + funcargs=True, showlocals=getattr(option, "showlocals", False), style=style + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, excinfo=excinfo) + if not any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write(f"INTERNALERROR> {line}\n") + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid: str) -> str: + # nodeids are relative to the rootpath, compute relative to cwd.
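+ # Example (editor's note): with rootpath /repo and an invocation from + # /repo/tests, the nodeid "tests/test_x.py::test_a" is rewritten to + # "test_x.py::test_a".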
+ if self.invocation_params.dir != self.rootpath: + base_path_part, *nodeid_part = nodeid.split("::") + # Only process path part + fullpath = self.rootpath / base_path_part + relative_path = bestrelpath(self.invocation_params.dir, fullpath) + + nodeid = "::".join([relative_path, *nodeid_part]) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict, args) -> Config: + """Constructor usable for subprocesses.""" + config = get_config(args) + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt: Argument) -> None: + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, "default"): + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config: Config) -> None: + # We haven't fully parsed the command line arguments yet, so + # early_config.args is not set yet. But we need it for + # discovering the initial conftests. So "pre-run" the logic here. + # It will be done for real in `parse()`. + args, args_source = early_config._decide_args( + args=early_config.known_args_namespace.file_or_dir, + pyargs=early_config.known_args_namespace.pyargs, + testpaths=early_config.getini("testpaths"), + invocation_dir=early_config.invocation_params.dir, + rootpath=early_config.rootpath, + warn=False, + ) + self.pluginmanager._set_initial_conftests( + args=args, + pyargs=early_config.known_args_namespace.pyargs, + noconftest=early_config.known_args_namespace.noconftest, + rootpath=early_config.rootpath, + confcutdir=early_config.known_args_namespace.confcutdir, + invocation_dir=early_config.invocation_params.dir, + importmode=early_config.known_args_namespace.importmode, + consider_namespace_packages=early_config.getini( + "consider_namespace_packages" + ), + ) + + def _initini(self, args: Sequence[str]) -> None: + ns, unknown_args = self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + rootpath, inipath, inicfg = determine_setup( + inifile=ns.inifilename, + args=ns.file_or_dir + unknown_args, + rootdir_cmd_arg=ns.rootdir or None, + invocation_dir=self.invocation_params.dir, + ) + self._rootpath = rootpath + self._inipath = inipath + self.inicfg = inicfg + self._parser.extra_info["rootdir"] = str(self.rootpath) + self._parser.extra_info["inifile"] = str(self.inipath) + self._parser.addini("addopts", "Extra command line options", "args") + self._parser.addini("minversion", "Minimally required pytest version") + self._parser.addini( + "pythonpath", type="paths", help="Add paths to sys.path", default=[] + ) + self._parser.addini( + "required_plugins", + "Plugins that must be present for pytest to run", + type="args", + default=[], + ) + self._override_ini = ns.override_ini or () + + def _consider_importhook(self, args: Sequence[str]) -> None: + """Install the PEP 302 import hook if using assertion rewriting. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for rewriting + by the importhook.
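+ + For example, ``--assert=rewrite`` installs the import hook, while + ``--assert=plain`` leaves assert statements unmodified.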
+ """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = getattr(ns, "assertmode", "plain") + + disable_autoload = getattr(ns, "disable_plugin_autoload", False) or bool( + os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD") + ) + if mode == "rewrite": + import _pytest.assertion + + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = "plain" + else: + self._mark_plugins_for_rewrite(hook, disable_autoload) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite( + self, hook: AssertionRewritingHook, disable_autoload: bool + ) -> None: + """Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins.""" + self.pluginmanager.rewrite_hook = hook + + if disable_autoload: + # We don't autoload from distribution package entry points, + # no need to continue. + return + + package_files = ( + str(file) + for dist in importlib.metadata.distributions() + if any(ep.group == "pytest11" for ep in dist.entry_points) + for file in dist.files or [] + ) + + for name in _iter_rewritable_modules(package_files): + hook.mark_rewrite(name) + + def _configure_python_path(self) -> None: + # `pythonpath = a b` will set `sys.path` to `[a, b, x, y, z, ...]` + for path in reversed(self.getini("pythonpath")): + sys.path.insert(0, str(path)) + self.add_cleanup(self._unconfigure_python_path) + + def _unconfigure_python_path(self) -> None: + for path in self.getini("pythonpath"): + path_str = str(path) + if path_str in sys.path: + sys.path.remove(path_str) + + def _validate_args(self, args: list[str], via: str) -> list[str]: + """Validate known args.""" + self._parser._config_source_hint = via # type: ignore + try: + self._parser.parse_known_and_unknown_args( + args, namespace=copy.copy(self.option) + ) + finally: + del self._parser._config_source_hint # type: ignore + + return args + + def _decide_args( + self, + *, + args: list[str], + pyargs: bool, + testpaths: list[str], + invocation_dir: pathlib.Path, + rootpath: pathlib.Path, + warn: bool, + ) -> tuple[list[str], ArgsSource]: + """Decide the args (initial paths/nodeids) to use given the relevant inputs. + + :param warn: Whether can issue warnings. + + :returns: The args and the args source. Guaranteed to be non-empty. + """ + if args: + source = Config.ArgsSource.ARGS + result = args + else: + if invocation_dir == rootpath: + source = Config.ArgsSource.TESTPATHS + if pyargs: + result = testpaths + else: + result = [] + for path in testpaths: + result.extend(sorted(glob.iglob(path, recursive=True))) + if testpaths and not result: + if warn: + warning_text = ( + "No files were found in testpaths; " + "consider removing or adjusting your testpaths configuration. " + "Searching recursively from the current directory instead." 
+ ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), stacklevel=3 + ) + else: + result = [] + if not result: + source = Config.ArgsSource.INVOCATION_DIR + result = [str(invocation_dir)] + return result, source + + def _preparse(self, args: list[str], addopts: bool = True) -> None: + if addopts: + env_addopts = os.environ.get("PYTEST_ADDOPTS", "") + if len(env_addopts): + args[:] = ( + self._validate_args(shlex.split(env_addopts), "via PYTEST_ADDOPTS") + + args + ) + self._initini(args) + if addopts: + args[:] = ( + self._validate_args(self.getini("addopts"), "via addopts config") + args + ) + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.option) + ) + self._checkversion() + self._consider_importhook(args) + self._configure_python_path() + self.pluginmanager.consider_preparse(args, exclude_only=False) + if ( + not os.environ.get("PYTEST_DISABLE_PLUGIN_AUTOLOAD") + and not self.known_args_namespace.disable_plugin_autoload + ): + # Autoloading from distribution package entry point has + # not been disabled. + self.pluginmanager.load_setuptools_entrypoints("pytest11") + # Otherwise only plugins explicitly specified in PYTEST_PLUGINS + # are going to be loaded. + self.pluginmanager.consider_env() + + self.known_args_namespace = self._parser.parse_known_args( + args, namespace=copy.copy(self.known_args_namespace) + ) + + self._validate_plugins() + self._warn_about_skipped_plugins() + + if self.known_args_namespace.confcutdir is None: + if self.inipath is not None: + confcutdir = str(self.inipath.parent) + else: + confcutdir = str(self.rootpath) + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests( + early_config=self, args=args, parser=self._parser + ) + except ConftestImportFailure as e: + if self.known_args_namespace.help or self.known_args_namespace.version: + # we don't want to prevent --help/--version from working, + # so just let it pass and print a warning at the end + self.issue_config_time_warning( + PytestConfigWarning(f"could not load initial conftests: {e.path}"), + stacklevel=2, + ) + else: + raise + + @hookimpl(wrapper=True) + def pytest_collection(self) -> Generator[None, object, object]: + # Validate ini keys after collection is done, so we take into account + # options added by late-loading conftest files. + try: + return (yield) + finally: + self._validate_config_options() + + def _checkversion(self) -> None: + import pytest + + minver = self.inicfg.get("minversion", None) + if minver: + # Imported lazily to improve start-up time. + from packaging.version import Version + + if not isinstance(minver, str): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' must be a single value" + ) + + if Version(minver) > Version(pytest.__version__): + raise pytest.UsageError( + f"{self.inipath}: 'minversion' requires pytest-{minver}, actual pytest-{pytest.__version__}" + ) + + def _validate_config_options(self) -> None: + for key in sorted(self._get_unknown_ini_keys()): + self._warn_or_fail_if_strict(f"Unknown config option: {key}\n") + + def _validate_plugins(self) -> None: + required_plugins = sorted(self.getini("required_plugins")) + if not required_plugins: + return + + # Imported lazily to improve start-up time.
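+ # (Editor's note) each required_plugins entry is parsed below as a + # PEP 508-style requirement, e.g. "pytest-xdist" or "pytest-xdist>=2.5" + # (example values are illustrative); version specifiers are checked + # against the installed distribution.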
+ from packaging.requirements import InvalidRequirement + from packaging.requirements import Requirement + from packaging.version import Version + + plugin_info = self.pluginmanager.list_plugin_distinfo() + plugin_dist_info = {dist.project_name: dist.version for _, dist in plugin_info} + + missing_plugins = [] + for required_plugin in required_plugins: + try: + req = Requirement(required_plugin) + except InvalidRequirement: + missing_plugins.append(required_plugin) + continue + + if req.name not in plugin_dist_info: + missing_plugins.append(required_plugin) + elif not req.specifier.contains( + Version(plugin_dist_info[req.name]), prereleases=True + ): + missing_plugins.append(required_plugin) + + if missing_plugins: + raise UsageError( + "Missing required plugins: {}".format(", ".join(missing_plugins)), + ) + + def _warn_or_fail_if_strict(self, message: str) -> None: + if self.known_args_namespace.strict_config: + raise UsageError(message) + + self.issue_config_time_warning(PytestConfigWarning(message), stacklevel=3) + + def _get_unknown_ini_keys(self) -> list[str]: + parser_inicfg = self._parser._inidict + return [name for name in self.inicfg if name not in parser_inicfg] + + def parse(self, args: list[str], addopts: bool = True) -> None: + # Parse given cmdline arguments into this config object. + assert self.args == [], ( + "can only parse cmdline args at most once per Config object" + ) + self.hook.pytest_addhooks.call_historic( + kwargs=dict(pluginmanager=self.pluginmanager) + ) + self._preparse(args, addopts=addopts) + self._parser.after_preparse = True # type: ignore + try: + args = self._parser.parse_setoption( + args, self.option, namespace=self.option + ) + self.args, self.args_source = self._decide_args( + args=args, + pyargs=self.known_args_namespace.pyargs, + testpaths=self.getini("testpaths"), + invocation_dir=self.invocation_params.dir, + rootpath=self.rootpath, + warn=True, + ) + except PrintHelp: + pass + + def issue_config_time_warning(self, warning: Warning, stacklevel: int) -> None: + """Issue and handle a warning during the "configure" stage. + + During ``pytest_configure`` we can't capture warnings using the ``catch_warnings_for_item`` + function because it is not possible to have hook wrappers around ``pytest_configure``. + + This function is mainly intended for plugins that need to issue warnings during + ``pytest_configure`` (or similar stages). + + :param warning: The warning instance. + :param stacklevel: stacklevel forwarded to warnings.warn. + """ + if self.pluginmanager.is_blocked("warnings"): + return + + cmdline_filters = self.known_args_namespace.pythonwarnings or [] + config_filters = self.getini("filterwarnings") + + with warnings.catch_warnings(record=True) as records: + warnings.simplefilter("always", type(warning)) + apply_warning_filters(config_filters, cmdline_filters) + warnings.warn(warning, stacklevel=stacklevel) + + if records: + frame = sys._getframe(stacklevel - 1) + location = frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name + self.hook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=records[0], + when="config", + nodeid="", + location=location, + ) + ) + + def addinivalue_line(self, name: str, line: str) -> None: + """Add a line to an ini-file option. 
The option must have been + declared but might not yet be set, in which case the line becomes + the first line in its value.""" + x = self.getini(name) + assert isinstance(x, list) + x.append(line) # modifies the cached list inline + + def getini(self, name: str) -> Any: + """Return configuration value from an :ref:`ini file <configfiles>`. + + If a configuration value is not defined in an + :ref:`ini file <configfiles>`, then the ``default`` value provided while + registering the configuration through + :func:`parser.addini <pytest.Parser.addini>` will be returned. + Please note that you can even provide ``None`` as a valid + default value. + + If ``default`` is not provided while registering using + :func:`parser.addini <pytest.Parser.addini>`, then a default value + based on the ``type`` parameter passed to + :func:`parser.addini <pytest.Parser.addini>` will be returned. + The default values based on ``type`` are: + ``paths``, ``pathlist``, ``args`` and ``linelist`` : empty list ``[]`` + ``bool`` : ``False`` + ``string`` : empty string ``""`` + ``int`` : ``0`` + ``float`` : ``0.0`` + + If neither the ``default`` nor the ``type`` parameter is passed + while registering the configuration through + :func:`parser.addini <pytest.Parser.addini>`, then the configuration + is treated as a string and a default empty string '' is returned. + + If the specified name hasn't been registered through a prior + :func:`parser.addini <pytest.Parser.addini>` call (usually from a + plugin), a ValueError is raised. + """ + try: + return self._inicache[name] + except KeyError: + self._inicache[name] = val = self._getini(name) + return val + + # Meant for easy monkeypatching by legacypath plugin. + # Can be inlined back (with no cover removed) once legacypath is gone. + def _getini_unknown_type(self, name: str, type: str, value: object): + msg = ( + f"Option {name} has unknown configuration type {type} with value {value!r}" + ) + raise ValueError(msg) # pragma: no cover + + def _getini(self, name: str): + try: + description, type, default = self._parser._inidict[name] + except KeyError as e: + raise ValueError(f"unknown configuration value: {name!r}") from e + override_value = self._get_override_ini_value(name) + if override_value is None: + try: + value = self.inicfg[name] + except KeyError: + return default + else: + value = override_value + # Coerce the values based on types. + # + # Note: some coercions are only required if we are reading from .ini files, because + # the file format doesn't contain type information, but when reading from toml we will + # get either str or list of str values (see _parse_ini_config_from_pyproject_toml). + # For example: + # + # ini: + # a_line_list = "tests acceptance" + # in this case, we need to split the string to obtain a list of strings. + # + # toml: + # a_line_list = ["tests", "acceptance"] + # in this case, we already have a list ready to use.
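+ # (Editor's note) as implemented just below, "paths" values are + # additionally resolved relative to the ini file's directory, falling + # back to the invocation directory when no ini file was found.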
+ # + if type == "paths": + dp = ( + self.inipath.parent + if self.inipath is not None + else self.invocation_params.dir + ) + input_values = shlex.split(value) if isinstance(value, str) else value + return [dp / x for x in input_values] + elif type == "args": + return shlex.split(value) if isinstance(value, str) else value + elif type == "linelist": + if isinstance(value, str): + return [t for t in map(lambda x: x.strip(), value.split("\n")) if t] + else: + return value + elif type == "bool": + return _strtobool(str(value).strip()) + elif type == "string": + return value + elif type == "int": + if not isinstance(value, str): + raise TypeError( + f"Expected an int string for option {name} of type integer, but got: {value!r}" + ) from None + return int(value) + elif type == "float": + if not isinstance(value, str): + raise TypeError( + f"Expected a float string for option {name} of type float, but got: {value!r}" + ) from None + return float(value) + elif type is None: + return value + else: + return self._getini_unknown_type(name, type, value) + + def _getconftest_pathlist( + self, name: str, path: pathlib.Path + ) -> list[pathlib.Path] | None: + try: + mod, relroots = self.pluginmanager._rget_with_confmod(name, path) + except KeyError: + return None + assert mod.__file__ is not None + modpath = pathlib.Path(mod.__file__).parent + values: list[pathlib.Path] = [] + for relroot in relroots: + if isinstance(relroot, os.PathLike): + relroot = pathlib.Path(relroot) + else: + relroot = relroot.replace("/", os.sep) + relroot = absolutepath(modpath / relroot) + values.append(relroot) + return values + + def _get_override_ini_value(self, name: str) -> str | None: + value = None + # override_ini is a list of "ini=value" options. + # Always use the last item if multiple values are set for same ini-name, + # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2. + for ini_config in self._override_ini: + try: + key, user_ini_value = ini_config.split("=", 1) + except ValueError as e: + raise UsageError( + f"-o/--override-ini expects option=value style (got: {ini_config!r})." + ) from e + else: + if key == name: + value = user_ini_value + return value + + def getoption(self, name: str, default: Any = notset, skip: bool = False): + """Return command line option value. + + :param name: Name of the option. You may also specify + the literal ``--OPT`` option instead of the "dest" option name. + :param default: Fallback value if no option of that name is **declared** via :hook:`pytest_addoption`. + Note this parameter will be ignored when the option is **declared** even if the option's value is ``None``. + :param skip: If ``True``, raise :func:`pytest.skip` if option is undeclared or has a ``None`` value. + Note that even if ``True``, if a default was specified it will be returned instead of a skip. + """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError as e: + if default is not notset: + return default + if skip: + import pytest + + pytest.skip(f"no {name!r} option found") + raise ValueError(f"no option named {name!r}") from e + + def getvalue(self, name: str, path=None): + """Deprecated, use getoption() instead.""" + return self.getoption(name) + + def getvalueorskip(self, name: str, path=None): + """Deprecated, use getoption(skip=True) instead.""" + return self.getoption(name, skip=True) + + #: Verbosity type for failed assertions (see :confval:`verbosity_assertions`). 
+ VERBOSITY_ASSERTIONS: Final = "assertions" + #: Verbosity type for test case execution (see :confval:`verbosity_test_cases`). + VERBOSITY_TEST_CASES: Final = "test_cases" + _VERBOSITY_INI_DEFAULT: Final = "auto" + + def get_verbosity(self, verbosity_type: str | None = None) -> int: + r"""Retrieve the verbosity level for a fine-grained verbosity type. + + :param verbosity_type: Verbosity type to get level for. If a level is + configured for the given type, that value will be returned. If the + given type is not a known verbosity type, the global verbosity + level will be returned. If the given type is None (default), the + global verbosity level will be returned. + + To configure a level for a fine-grained verbosity type, the + configuration file should have a setting for the configuration name + and a numeric value for the verbosity level. A special value of "auto" + can be used to explicitly use the global verbosity level. + + Example: + + .. code-block:: ini + + # content of pytest.ini + [pytest] + verbosity_assertions = 2 + + .. code-block:: console + + pytest -v + + .. code-block:: python + + print(config.get_verbosity()) # 1 + print(config.get_verbosity(Config.VERBOSITY_ASSERTIONS)) # 2 + """ + global_level = self.getoption("verbose", default=0) + assert isinstance(global_level, int) + if verbosity_type is None: + return global_level + + ini_name = Config._verbosity_ini_name(verbosity_type) + if ini_name not in self._parser._inidict: + return global_level + + level = self.getini(ini_name) + if level == Config._VERBOSITY_INI_DEFAULT: + return global_level + + return int(level) + + @staticmethod + def _verbosity_ini_name(verbosity_type: str) -> str: + return f"verbosity_{verbosity_type}" + + @staticmethod + def _add_verbosity_ini(parser: Parser, verbosity_type: str, help: str) -> None: + """Add an output verbosity configuration option for the given output type. + + :param parser: Parser for command line arguments and ini-file values. + :param verbosity_type: Fine-grained verbosity category. + :param help: Description of the output this type controls. + + The value should be retrieved via a call to + :py:func:`config.get_verbosity(type) <pytest.Config.get_verbosity>`. + """ + parser.addini( + Config._verbosity_ini_name(verbosity_type), + help=help, + type="string", + default=Config._VERBOSITY_INI_DEFAULT, + ) + + def _warn_about_missing_assertion(self, mode: str) -> None: + if not _assertion_supported(): + if mode == "plain": + warning_text = ( + "ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?" + ) + else: + warning_text = ( + "assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n" + ) + self.issue_config_time_warning( + PytestConfigWarning(warning_text), + stacklevel=3, + ) + + def _warn_about_skipped_plugins(self) -> None: + for module_name, msg in self.pluginmanager.skipped_plugins: + self.issue_config_time_warning( + PytestConfigWarning(f"skipped plugin {module_name!r}: {msg}"), + stacklevel=2, + ) + + +def _assertion_supported() -> bool: + try: + assert False + except AssertionError: + return True + else: + return False # type: ignore[unreachable] + + +def create_terminal_writer( + config: Config, file: TextIO | None = None +) -> TerminalWriter: + """Create a TerminalWriter instance configured according to the options + in the config object.
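+ + Example usage (an editor's sketch): + + .. code-block:: python + + tw = create_terminal_writer(config) + tw.line("hello, world", bold=True) # markup honors the color option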
+ + Every code which requires a TerminalWriter object and has access to a + config object should use this function. + """ + tw = TerminalWriter(file=file) + + if config.option.color == "yes": + tw.hasmarkup = True + elif config.option.color == "no": + tw.hasmarkup = False + + if config.option.code_highlight == "yes": + tw.code_highlight = True + elif config.option.code_highlight == "no": + tw.code_highlight = False + + return tw + + +def _strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: Copied from distutils.util. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False + else: + raise ValueError(f"invalid truth value {val!r}") + + +@lru_cache(maxsize=50) +def parse_warning_filter( + arg: str, *, escape: bool +) -> tuple[warnings._ActionKind, str, type[Warning], str, int]: + """Parse a warnings filter string. + + This is copied from warnings._setoption with the following changes: + + * Does not apply the filter. + * Escaping is optional. + * Raises UsageError so we get nice error messages on failure. + """ + __tracebackhide__ = True + error_template = dedent( + f"""\ + while parsing the following warning configuration: + + {arg} + + This error occurred: + + {{error}} + """ + ) + + parts = arg.split(":") + if len(parts) > 5: + doc_url = ( + "https://docs.python.org/3/library/warnings.html#describing-warning-filters" + ) + error = dedent( + f"""\ + Too many fields ({len(parts)}), expected at most 5 separated by colons: + + action:message:category:module:line + + For more information please consult: {doc_url} + """ + ) + raise UsageError(error_template.format(error=error)) + + while len(parts) < 5: + parts.append("") + action_, message, category_, module, lineno_ = (s.strip() for s in parts) + try: + action: warnings._ActionKind = warnings._getaction(action_) # type: ignore[attr-defined] + except warnings._OptionError as e: + raise UsageError(error_template.format(error=str(e))) from None + try: + category: type[Warning] = _resolve_warning_category(category_) + except Exception: + exc_info = ExceptionInfo.from_current() + exception_text = exc_info.getrepr(style="native") + raise UsageError(error_template.format(error=exception_text)) from None + if message and escape: + message = re.escape(message) + if module and escape: + module = re.escape(module) + r"\Z" + if lineno_: + try: + lineno = int(lineno_) + if lineno < 0: + raise ValueError("number is negative") + except ValueError as e: + raise UsageError( + error_template.format(error=f"invalid lineno {lineno_!r}: {e}") + ) from None + else: + lineno = 0 + try: + re.compile(message) + re.compile(module) + except re.error as e: + raise UsageError( + error_template.format(error=f"Invalid regex {e.pattern!r}: {e}") + ) from None + return action, message, category, module, lineno + + +def _resolve_warning_category(category: str) -> type[Warning]: + """ + Copied from warnings._getcategory, but changed so it lets exceptions (specially ImportErrors) + propagate so we can get access to their tracebacks (#9218). + """ + __tracebackhide__ = True + if not category: + return Warning + + if "." 
not in category: + import builtins as m + + klass = category + else: + module, _, klass = category.rpartition(".") + m = __import__(module, None, None, [klass]) + cat = getattr(m, klass) + if not issubclass(cat, Warning): + raise UsageError(f"{cat} is not a Warning subclass") + return cast(type[Warning], cat) + + +def apply_warning_filters( + config_filters: Iterable[str], cmdline_filters: Iterable[str] +) -> None: + """Applies pytest-configured filters to the warnings module""" + # Filters should have this precedence: cmdline options, config. + # Filters should be applied in the inverse order of precedence. + for arg in config_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + for arg in cmdline_filters: + warnings.filterwarnings(*parse_warning_filter(arg, escape=True)) diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25249466a5c34c6759284bfc89466a0475b16e38 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/argparsing.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/argparsing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d672b5f2ecfe63e2fc12eba4e501a47cb31bff2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/argparsing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..63a136da0a9de65be22b2799ca2d6c1e03e14673 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7e224973f801ab2921c7a1c82e9299fe12f569e Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/findpaths.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/findpaths.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af56be1d295b294271c19c2a6fd196116002684a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/config/__pycache__/findpaths.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/config/argparsing.py b/venv/lib/python3.10/site-packages/_pytest/config/argparsing.py new file mode 100644 index 0000000000000000000000000000000000000000..8d4ed8233255c23879d73717a057055d07493893 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/config/argparsing.py @@ -0,0 +1,535 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Mapping +from collections.abc import Sequence +import os +from typing import Any +from typing import cast +from typing import final 
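# --- Illustration: why apply_warning_filters() above applies config-file
# filters before command-line filters. warnings.filterwarnings() prepends to
# the active filter list, so the *later* call matches first; applying the
# command line last gives it higher precedence. A self-contained demo:
import warnings

with warnings.catch_warnings():
    warnings.filterwarnings("error", category=UserWarning)   # config level
    warnings.filterwarnings("ignore", category=UserWarning)  # cmdline level
    warnings.warn("suppressed", UserWarning)  # ignored: cmdline filter wins
# (argparsing.py's import block continues below.)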
+from typing import Literal +from typing import NoReturn + +import _pytest._io +from _pytest.config.exceptions import UsageError +from _pytest.deprecated import check_ispytest + + +FILE_OR_DIR = "file_or_dir" + + +class NotSet: + def __repr__(self) -> str: + return "" + + +NOT_SET = NotSet() + + +@final +class Parser: + """Parser for command line arguments and ini-file values. + + :ivar extra_info: Dict of generic param -> value to display in case + there's an error processing the command line arguments. + """ + + prog: str | None = None + + def __init__( + self, + usage: str | None = None, + processopt: Callable[[Argument], None] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._anonymous = OptionGroup("Custom options", parser=self, _ispytest=True) + self._groups: list[OptionGroup] = [] + self._processopt = processopt + self._usage = usage + self._inidict: dict[str, tuple[str, str | None, Any]] = {} + self._ininames: list[str] = [] + self.extra_info: dict[str, Any] = {} + + def processoption(self, option: Argument) -> None: + if self._processopt: + if option.dest: + self._processopt(option) + + def getgroup( + self, name: str, description: str = "", after: str | None = None + ) -> OptionGroup: + """Get (or create) a named option Group. + + :param name: Name of the option group. + :param description: Long description for --help output. + :param after: Name of another group, used for ordering --help output. + :returns: The option group. + + The returned group object has an ``addoption`` method with the same + signature as :func:`parser.addoption ` but + will be shown in the respective group in the output of + ``pytest --help``. + """ + for group in self._groups: + if group.name == name: + return group + group = OptionGroup(name, description, parser=self, _ispytest=True) + i = 0 + for i, grp in enumerate(self._groups): + if grp.name == after: + break + self._groups.insert(i + 1, group) + return group + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Register a command line option. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + + After command line parsing, options are available on the pytest config + object via ``config.option.NAME`` where ``NAME`` is usually set + by passing a ``dest`` attribute, for example + ``addoption("--long", dest="NAME", ...)``. + """ + self._anonymous.addoption(*opts, **attrs) + + def parse( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + from _pytest._argcomplete import try_argcomplete + + self.optparser = self._getparser() + try_argcomplete(self.optparser) + strargs = [os.fspath(x) for x in args] + return self.optparser.parse_args(strargs, namespace=namespace) + + def _getparser(self) -> MyOptionParser: + from _pytest._argcomplete import filescompleter + + optparser = MyOptionParser(self, self.extra_info, prog=self.prog) + groups = [*self._groups, self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + file_or_dir_arg = optparser.add_argument(FILE_OR_DIR, nargs="*") + # bash like autocompletion for dirs (appending '/') + # Type ignored because typeshed doesn't know about argcomplete. 
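# --- Usage sketch for Parser.getgroup()/addoption() as defined above. The
# hook names are real pytest hooks; "demo" and "--demo-env" are hypothetical:
def pytest_addoption(parser):
    group = parser.getgroup("demo", description="demo plugin options")
    group.addoption(
        "--demo-env",
        dest="demo_env",
        default="dev",
        help="environment name used by the demo fixtures",
    )

def pytest_configure(config):
    env = config.getoption("demo_env")  # the "dest" name; "--demo-env" works too
# (The FILE_OR_DIR positional argument gets its completer just below.)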
+ file_or_dir_arg.completer = filescompleter # type: ignore + return optparser + + def parse_setoption( + self, + args: Sequence[str | os.PathLike[str]], + option: argparse.Namespace, + namespace: argparse.Namespace | None = None, + ) -> list[str]: + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return cast(list[str], getattr(parsedoption, FILE_OR_DIR)) + + def parse_known_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Parse the known arguments at this point. + + :returns: An argparse namespace object. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args( + self, + args: Sequence[str | os.PathLike[str]], + namespace: argparse.Namespace | None = None, + ) -> tuple[argparse.Namespace, list[str]]: + """Parse the known arguments at this point, and also return the + remaining unknown arguments. + + :returns: + A tuple containing an argparse namespace object for the known + arguments, and a list of the unknown arguments. + """ + optparser = self._getparser() + strargs = [os.fspath(x) for x in args] + return optparser.parse_known_args(strargs, namespace=namespace) + + def addini( + self, + name: str, + help: str, + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ] + | None = None, + default: Any = NOT_SET, + ) -> None: + """Register an ini-file option. + + :param name: + Name of the ini-variable. + :param type: + Type of the variable. Can be: + + * ``string``: a string + * ``bool``: a boolean + * ``args``: a list of strings, separated as in a shell + * ``linelist``: a list of strings, separated by line breaks + * ``paths``: a list of :class:`pathlib.Path`, separated as in a shell + * ``pathlist``: a list of ``py.path``, separated as in a shell + * ``int``: an integer + * ``float``: a floating-point number + + .. versionadded:: 8.4 + + The ``float`` and ``int`` types. + + For ``paths`` and ``pathlist`` types, they are considered relative to the ini-file. + In case the execution is happening without an ini-file defined, + they will be considered relative to the current working directory (for example with ``--override-ini``). + + .. versionadded:: 7.0 + The ``paths`` variable type. + + .. versionadded:: 8.1 + Use the current working directory to resolve ``paths`` and ``pathlist`` in the absence of an ini-file. + + Defaults to ``string`` if ``None`` or not passed. + :param default: + Default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) `. + """ + assert type in ( + None, + "string", + "paths", + "pathlist", + "args", + "linelist", + "bool", + "int", + "float", + ) + if default is NOT_SET: + default = get_ini_default_for_type(type) + + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +def get_ini_default_for_type( + type: Literal[ + "string", "paths", "pathlist", "args", "linelist", "bool", "int", "float" + ] + | None, +) -> Any: + """ + Used by addini to get the default value for a given ini-option type, when + default is not supplied. 
+ """ + if type is None: + return "" + elif type in ("paths", "pathlist", "args", "linelist"): + return [] + elif type == "bool": + return False + elif type == "int": + return 0 + elif type == "float": + return 0.0 + else: + return "" + + +class ArgumentError(Exception): + """Raised if an Argument instance is created with invalid or + inconsistent arguments.""" + + def __init__(self, msg: str, option: Argument | str) -> None: + self.msg = msg + self.option_id = str(option) + + def __str__(self) -> str: + if self.option_id: + return f"option {self.option_id}: {self.msg}" + else: + return self.msg + + +class Argument: + """Class that mimics the necessary behaviour of optparse.Option. + + It's currently a least effort implementation and ignoring choices + and integer prefixes. + + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + + def __init__(self, *names: str, **attrs: Any) -> None: + """Store params in private vars for use in add_argument.""" + self._attrs = attrs + self._short_opts: list[str] = [] + self._long_opts: list[str] = [] + try: + self.type = attrs["type"] + except KeyError: + pass + try: + # Attribute existence is tested in Config._processopt. + self.default = attrs["default"] + except KeyError: + pass + self._set_opt_strings(names) + dest: str | None = attrs.get("dest") + if dest: + self.dest = dest + elif self._long_opts: + self.dest = self._long_opts[0][2:].replace("-", "_") + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError as e: + self.dest = "???" # Needed for the error repr. + raise ArgumentError("need a long or short option", self) from e + + def names(self) -> list[str]: + return self._short_opts + self._long_opts + + def attrs(self) -> Mapping[str, Any]: + # Update any attributes set by processopt. + attrs = "default dest help".split() + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + return self._attrs + + def _set_opt_strings(self, opts: Sequence[str]) -> None: + """Directly from optparse. + + Might not be necessary as this is passed to argparse later on. 
+ """ + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + f"invalid option string {opt!r}: " + "must be at least two characters long", + self, + ) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + f"invalid short option string {opt!r}: " + "must be of the form -x, (x any non-dash char)", + self, + ) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + f"invalid long option string {opt!r}: " + "must start with --, followed by non-dash", + self, + ) + self._long_opts.append(opt) + + def __repr__(self) -> str: + args: list[str] = [] + if self._short_opts: + args += ["_short_opts: " + repr(self._short_opts)] + if self._long_opts: + args += ["_long_opts: " + repr(self._long_opts)] + args += ["dest: " + repr(self.dest)] + if hasattr(self, "type"): + args += ["type: " + repr(self.type)] + if hasattr(self, "default"): + args += ["default: " + repr(self.default)] + return "Argument({})".format(", ".join(args)) + + +class OptionGroup: + """A group of options shown in its own section.""" + + def __init__( + self, + name: str, + description: str = "", + parser: Parser | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = name + self.description = description + self.options: list[Argument] = [] + self.parser = parser + + def addoption(self, *opts: str, **attrs: Any) -> None: + """Add an option to this group. + + If a shortened version of a long option is specified, it will + be suppressed in the help. ``addoption('--twowords', '--two-words')`` + results in help showing ``--two-words`` only, but ``--twowords`` gets + accepted **and** the automatic destination is in ``args.twowords``. + + :param opts: + Option names, can be short or long options. + :param attrs: + Same attributes as the argparse library's :meth:`add_argument() + ` function accepts. + """ + conflict = set(opts).intersection( + name for opt in self.options for name in opt.names() + ) + if conflict: + raise ValueError(f"option names {conflict} already added") + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=False) + + def _addoption(self, *opts: str, **attrs: Any) -> None: + option = Argument(*opts, **attrs) + self._addoption_instance(option, shortupper=True) + + def _addoption_instance(self, option: Argument, shortupper: bool = False) -> None: + if not shortupper: + for opt in option._short_opts: + if opt[0] == "-" and opt[1].islower(): + raise ValueError("lowercase shortoptions reserved") + if self.parser: + self.parser.processoption(option) + self.options.append(option) + + +class MyOptionParser(argparse.ArgumentParser): + def __init__( + self, + parser: Parser, + extra_info: dict[str, Any] | None = None, + prog: str | None = None, + ) -> None: + self._parser = parser + super().__init__( + prog=prog, + usage=parser._usage, + add_help=False, + formatter_class=DropShorterLongHelpFormatter, + allow_abbrev=False, + fromfile_prefix_chars="@", + ) + # extra_info is a dict of (param -> value) to display if there's + # an usage error to provide more contextual information to the user. 
+ self.extra_info = extra_info if extra_info else {} + + def error(self, message: str) -> NoReturn: + """Transform argparse error message into UsageError.""" + msg = f"{self.prog}: error: {message}" + + if hasattr(self._parser, "_config_source_hint"): + msg = f"{msg} ({self._parser._config_source_hint})" + + raise UsageError(self.format_usage() + msg) + + # Type ignored because typeshed has a very complex type in the superclass. + def parse_args( # type: ignore + self, + args: Sequence[str] | None = None, + namespace: argparse.Namespace | None = None, + ) -> argparse.Namespace: + """Allow splitting of positional arguments.""" + parsed, unrecognized = self.parse_known_args(args, namespace) + if unrecognized: + for arg in unrecognized: + if arg and arg[0] == "-": + lines = [ + "unrecognized arguments: {}".format(" ".join(unrecognized)) + ] + for k, v in sorted(self.extra_info.items()): + lines.append(f" {k}: {v}") + self.error("\n".join(lines)) + getattr(parsed, FILE_OR_DIR).extend(unrecognized) + return parsed + + +class DropShorterLongHelpFormatter(argparse.HelpFormatter): + """Shorten help for long options that differ only in extra hyphens. + + - Collapse **long** options that are the same except for extra hyphens. + - Shortcut if there are only two options and one of them is a short one. + - Cache result on the action object as this is called at least 2 times. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + # Use more accurate terminal width. + if "width" not in kwargs: + kwargs["width"] = _pytest._io.get_terminal_width() + super().__init__(*args, **kwargs) + + def _format_action_invocation(self, action: argparse.Action) -> str: + orgstr = super()._format_action_invocation(action) + if orgstr and orgstr[0] != "-": # only optional arguments + return orgstr + res: str | None = getattr(action, "_formatted_action_invocation", None) + if res: + return res + options = orgstr.split(", ") + if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2): + # a shortcut for '-h, --help' or '--abc', '-a' + action._formatted_action_invocation = orgstr # type: ignore + return orgstr + return_list = [] + short_long: dict[str, str] = {} + for option in options: + if len(option) == 2 or option[2] == " ": + continue + if not option.startswith("--"): + raise ArgumentError( + f'long optional argument without "--": [{option}]', option + ) + xxoption = option[2:] + shortened = xxoption.replace("-", "") + if shortened not in short_long or len(short_long[shortened]) < len( + xxoption + ): + short_long[shortened] = xxoption + # now short_long has been filled out to the longest with dashes + # **and** we keep the right option ordering from add_argument + for option in options: + if len(option) == 2 or option[2] == " ": + return_list.append(option) + if option[2:] == short_long.get(option.replace("-", "")): + return_list.append(option.replace(" ", "=", 1)) + formatted_action_invocation = ", ".join(return_list) + action._formatted_action_invocation = formatted_action_invocation # type: ignore + return formatted_action_invocation + + def _split_lines(self, text, width): + """Wrap lines after splitting on original newlines. + + This allows to have explicit line breaks in the help text. 
+ """ + import textwrap + + lines = [] + for line in text.splitlines(): + lines.extend(textwrap.wrap(line.strip(), width)) + return lines diff --git a/venv/lib/python3.10/site-packages/_pytest/config/compat.py b/venv/lib/python3.10/site-packages/_pytest/config/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..21eab4c7e47aa2af1690887716055943c0f99fdc --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/config/compat.py @@ -0,0 +1,85 @@ +from __future__ import annotations + +from collections.abc import Mapping +import functools +from pathlib import Path +from typing import Any +import warnings + +import pluggy + +from ..compat import LEGACY_PATH +from ..compat import legacy_path +from ..deprecated import HOOK_LEGACY_PATH_ARG + + +# hookname: (Path, LEGACY_PATH) +imply_paths_hooks: Mapping[str, tuple[str, str]] = { + "pytest_ignore_collect": ("collection_path", "path"), + "pytest_collect_file": ("file_path", "path"), + "pytest_pycollect_makemodule": ("module_path", "path"), + "pytest_report_header": ("start_path", "startdir"), + "pytest_report_collectionfinish": ("start_path", "startdir"), +} + + +def _check_path(path: Path, fspath: LEGACY_PATH) -> None: + if Path(fspath) != path: + raise ValueError( + f"Path({fspath!r}) != {path!r}\n" + "if both path and fspath are given they need to be equal" + ) + + +class PathAwareHookProxy: + """ + this helper wraps around hook callers + until pluggy supports fixingcalls, this one will do + + it currently doesn't return full hook caller proxies for fixed hooks, + this may have to be changed later depending on bugs + """ + + def __init__(self, hook_relay: pluggy.HookRelay) -> None: + self._hook_relay = hook_relay + + def __dir__(self) -> list[str]: + return dir(self._hook_relay) + + def __getattr__(self, key: str) -> pluggy.HookCaller: + hook: pluggy.HookCaller = getattr(self._hook_relay, key) + if key not in imply_paths_hooks: + self.__dict__[key] = hook + return hook + else: + path_var, fspath_var = imply_paths_hooks[key] + + @functools.wraps(hook) + def fixed_hook(**kw: Any) -> Any: + path_value: Path | None = kw.pop(path_var, None) + fspath_value: LEGACY_PATH | None = kw.pop(fspath_var, None) + if fspath_value is not None: + warnings.warn( + HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg=fspath_var, pathlib_path_arg=path_var + ), + stacklevel=2, + ) + if path_value is not None: + if fspath_value is not None: + _check_path(path_value, fspath_value) + else: + fspath_value = legacy_path(path_value) + else: + assert fspath_value is not None + path_value = Path(fspath_value) + + kw[path_var] = path_value + kw[fspath_var] = fspath_value + return hook(**kw) + + fixed_hook.name = hook.name # type: ignore[attr-defined] + fixed_hook.spec = hook.spec # type: ignore[attr-defined] + fixed_hook.__name__ = key + self.__dict__[key] = fixed_hook + return fixed_hook # type: ignore[return-value] diff --git a/venv/lib/python3.10/site-packages/_pytest/config/exceptions.py b/venv/lib/python3.10/site-packages/_pytest/config/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..90108eca9047baa1b7122ff00d77be942940a6a6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/config/exceptions.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from typing import final + + +@final +class UsageError(Exception): + """Error in pytest usage or invocation.""" + + +class PrintHelp(Exception): + """Raised when pytest should print its help to skip the rest of the + argument parsing and validation.""" diff 
--git a/venv/lib/python3.10/site-packages/_pytest/config/findpaths.py b/venv/lib/python3.10/site-packages/_pytest/config/findpaths.py new file mode 100644 index 0000000000000000000000000000000000000000..15bfbb0613e82c9b62df65f19e56256b563e88c8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/config/findpaths.py @@ -0,0 +1,239 @@ +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Sequence +import os +from pathlib import Path +import sys +from typing import TYPE_CHECKING + +import iniconfig + +from .exceptions import UsageError +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.pathlib import commonpath +from _pytest.pathlib import safe_exists + + +if TYPE_CHECKING: + from typing import Union + + from typing_extensions import TypeAlias + + # Even though TOML supports richer data types, all values are converted to str/list[str] during + # parsing to maintain compatibility with the rest of the configuration system. + ConfigDict: TypeAlias = dict[str, Union[str, list[str]]] + + +def _parse_ini_config(path: Path) -> iniconfig.IniConfig: + """Parse the given generic '.ini' file using legacy IniConfig parser, returning + the parsed object. + + Raise UsageError if the file cannot be parsed. + """ + try: + return iniconfig.IniConfig(str(path)) + except iniconfig.ParseError as exc: + raise UsageError(str(exc)) from exc + + +def load_config_dict_from_file( + filepath: Path, +) -> ConfigDict | None: + """Load pytest configuration from the given file path, if supported. + + Return None if the file does not contain valid pytest configuration. + """ + # Configuration from ini files are obtained from the [pytest] section, if present. + if filepath.suffix == ".ini": + iniconfig = _parse_ini_config(filepath) + + if "pytest" in iniconfig: + return dict(iniconfig["pytest"].items()) + else: + # "pytest.ini" files are always the source of configuration, even if empty. + if filepath.name == "pytest.ini": + return {} + + # '.cfg' files are considered if they contain a "[tool:pytest]" section. + elif filepath.suffix == ".cfg": + iniconfig = _parse_ini_config(filepath) + + if "tool:pytest" in iniconfig.sections: + return dict(iniconfig["tool:pytest"].items()) + elif "pytest" in iniconfig.sections: + # If a setup.cfg contains a "[pytest]" section, we raise a failure to indicate users that + # plain "[pytest]" sections in setup.cfg files is no longer supported (#3086). + fail(CFG_PYTEST_SECTION.format(filename="setup.cfg"), pytrace=False) + + # '.toml' files are considered if they contain a [tool.pytest.ini_options] table. + elif filepath.suffix == ".toml": + if sys.version_info >= (3, 11): + import tomllib + else: + import tomli as tomllib + + toml_text = filepath.read_text(encoding="utf-8") + try: + config = tomllib.loads(toml_text) + except tomllib.TOMLDecodeError as exc: + raise UsageError(f"{filepath}: {exc}") from exc + + result = config.get("tool", {}).get("pytest", {}).get("ini_options", None) + if result is not None: + # TOML supports richer data types than ini files (strings, arrays, floats, ints, etc), + # however we need to convert all scalar values to str for compatibility with the rest + # of the configuration system, which expects strings only. 
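# --- Demo of the TOML branch above: only the [tool.pytest.ini_options]
# table is consulted, and scalar values become str for the ini-based system.
import sys

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib  # third-party backport, mirroring the code above

doc = tomllib.loads(
    '[tool.pytest.ini_options]\nminversion = "6.0"\ntestpaths = ["tests"]\n'
)
options = doc["tool"]["pytest"]["ini_options"]
assert options == {"minversion": "6.0", "testpaths": ["tests"]}
# (make_scalar(), defined just below, performs the str coercion.)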
+ def make_scalar(v: object) -> str | list[str]: + return v if isinstance(v, list) else str(v) + + return {k: make_scalar(v) for k, v in result.items()} + + return None + + +def locate_config( + invocation_dir: Path, + args: Iterable[Path], +) -> tuple[Path | None, Path | None, ConfigDict]: + """Search in the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict).""" + config_names = [ + "pytest.ini", + ".pytest.ini", + "pyproject.toml", + "tox.ini", + "setup.cfg", + ] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [invocation_dir] + found_pyproject_toml: Path | None = None + for arg in args: + argpath = absolutepath(arg) + for base in (argpath, *argpath.parents): + for config_name in config_names: + p = base / config_name + if p.is_file(): + if p.name == "pyproject.toml" and found_pyproject_toml is None: + found_pyproject_toml = p + ini_config = load_config_dict_from_file(p) + if ini_config is not None: + return base, p, ini_config + if found_pyproject_toml is not None: + return found_pyproject_toml.parent, found_pyproject_toml, {} + return None, None, {} + + +def get_common_ancestor( + invocation_dir: Path, + paths: Iterable[Path], +) -> Path: + common_ancestor: Path | None = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if common_ancestor in path.parents or path == common_ancestor: + continue + elif path in common_ancestor.parents: + common_ancestor = path + else: + shared = commonpath(path, common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = invocation_dir + elif common_ancestor.is_file(): + common_ancestor = common_ancestor.parent + return common_ancestor + + +def get_dirs_from_args(args: Iterable[str]) -> list[Path]: + def is_option(x: str) -> bool: + return x.startswith("-") + + def get_file_part_from_node_id(x: str) -> str: + return x.split("::")[0] + + def get_dir_from_path(path: Path) -> Path: + if path.is_dir(): + return path + return path.parent + + # These look like paths but may not exist + possible_paths = ( + absolutepath(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [get_dir_from_path(path) for path in possible_paths if safe_exists(path)] + + +CFG_PYTEST_SECTION = "[pytest] section in {filename} files is no longer supported, change to [tool:pytest] instead." + + +def determine_setup( + *, + inifile: str | None, + args: Sequence[str], + rootdir_cmd_arg: str | None, + invocation_dir: Path, +) -> tuple[Path, Path | None, ConfigDict]: + """Determine the rootdir, inifile and ini configuration values from the + command line arguments. + + :param inifile: + The `--inifile` command line argument, if given. + :param args: + The free command line arguments. + :param rootdir_cmd_arg: + The `--rootdir` command line argument, if given. + :param invocation_dir: + The working directory when pytest was invoked. 
+ """ + rootdir = None + dirs = get_dirs_from_args(args) + if inifile: + inipath_ = absolutepath(inifile) + inipath: Path | None = inipath_ + inicfg = load_config_dict_from_file(inipath_) or {} + if rootdir_cmd_arg is None: + rootdir = inipath_.parent + else: + ancestor = get_common_ancestor(invocation_dir, dirs) + rootdir, inipath, inicfg = locate_config(invocation_dir, [ancestor]) + if rootdir is None and rootdir_cmd_arg is None: + for possible_rootdir in (ancestor, *ancestor.parents): + if (possible_rootdir / "setup.py").is_file(): + rootdir = possible_rootdir + break + else: + if dirs != [ancestor]: + rootdir, inipath, inicfg = locate_config(invocation_dir, dirs) + if rootdir is None: + rootdir = get_common_ancestor( + invocation_dir, [invocation_dir, ancestor] + ) + if is_fs_root(rootdir): + rootdir = ancestor + if rootdir_cmd_arg: + rootdir = absolutepath(os.path.expandvars(rootdir_cmd_arg)) + if not rootdir.is_dir(): + raise UsageError( + f"Directory '{rootdir}' not found. Check your '--rootdir' option." + ) + assert rootdir is not None + return rootdir, inipath, inicfg or {} + + +def is_fs_root(p: Path) -> bool: + r""" + Return True if the given path is pointing to the root of the + file system ("/" on Unix and "C:\\" on Windows for example). + """ + return os.path.splitdrive(str(p))[1] == os.sep diff --git a/venv/lib/python3.10/site-packages/_pytest/debugging.py b/venv/lib/python3.10/site-packages/_pytest/debugging.py new file mode 100644 index 0000000000000000000000000000000000000000..de1b2688f765a2982dbe10d86bffd9762f2f6512 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/debugging.py @@ -0,0 +1,407 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Interactive debugging with PDB, the Python Debugger.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Generator +import functools +import sys +import types +from typing import Any +import unittest + +from _pytest import outcomes +from _pytest._code import ExceptionInfo +from _pytest.capture import CaptureManager +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.config.exceptions import UsageError +from _pytest.nodes import Node +from _pytest.reports import BaseReport +from _pytest.runner import CallInfo + + +def _validate_usepdb_cls(value: str) -> tuple[str, str]: + """Validate syntax of --pdbcls option.""" + try: + modname, classname = value.split(":") + except ValueError as e: + raise argparse.ArgumentTypeError( + f"{value!r} is not in the format 'modname:classname'" + ) from e + return (modname, classname) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--pdb", + dest="usepdb", + action="store_true", + help="Start the interactive Python debugger on errors or KeyboardInterrupt", + ) + group.addoption( + "--pdbcls", + dest="usepdb_cls", + metavar="modulename:classname", + type=_validate_usepdb_cls, + help="Specify a custom interactive Python debugger for use with --pdb." 
+ "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb", + ) + group.addoption( + "--trace", + dest="trace", + action="store_true", + help="Immediately break when running each test", + ) + + +def pytest_configure(config: Config) -> None: + import pdb + + if config.getvalue("trace"): + config.pluginmanager.register(PdbTrace(), "pdbtrace") + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), "pdbinvoke") + + pytestPDB._saved.append( + (pdb.set_trace, pytestPDB._pluginmanager, pytestPDB._config) + ) + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + + # NOTE: not using pytest_unconfigure, since it might get called although + # pytest_configure was not (if another plugin raises UsageError). + def fin() -> None: + ( + pdb.set_trace, + pytestPDB._pluginmanager, + pytestPDB._config, + ) = pytestPDB._saved.pop() + + config.add_cleanup(fin) + + +class pytestPDB: + """Pseudo PDB that defers to the real pdb.""" + + _pluginmanager: PytestPluginManager | None = None + _config: Config | None = None + _saved: list[ + tuple[Callable[..., None], PytestPluginManager | None, Config | None] + ] = [] + _recursive_debug = 0 + _wrapped_pdb_cls: tuple[type[Any], type[Any]] | None = None + + @classmethod + def _is_capturing(cls, capman: CaptureManager | None) -> str | bool: + if capman: + return capman.is_capturing() + return False + + @classmethod + def _import_pdb_cls(cls, capman: CaptureManager | None): + if not cls._config: + import pdb + + # Happens when using pytest.set_trace outside of a test. + return pdb.Pdb + + usepdb_cls = cls._config.getvalue("usepdb_cls") + + if cls._wrapped_pdb_cls and cls._wrapped_pdb_cls[0] == usepdb_cls: + return cls._wrapped_pdb_cls[1] + + if usepdb_cls: + modname, classname = usepdb_cls + + try: + __import__(modname) + mod = sys.modules[modname] + + # Handle --pdbcls=pdb:pdb.Pdb (useful e.g. with pdbpp). 
+ parts = classname.split(".") + pdb_cls = getattr(mod, parts[0]) + for part in parts[1:]: + pdb_cls = getattr(pdb_cls, part) + except Exception as exc: + value = ":".join((modname, classname)) + raise UsageError( + f"--pdbcls: could not import {value!r}: {exc}" + ) from exc + else: + import pdb + + pdb_cls = pdb.Pdb + + wrapped_cls = cls._get_pdb_wrapper_class(pdb_cls, capman) + cls._wrapped_pdb_cls = (usepdb_cls, wrapped_cls) + return wrapped_cls + + @classmethod + def _get_pdb_wrapper_class(cls, pdb_cls, capman: CaptureManager | None): + import _pytest.config + + class PytestPdbWrapper(pdb_cls): + _pytest_capman = capman + _continued = False + + def do_debug(self, arg): + cls._recursive_debug += 1 + ret = super().do_debug(arg) + cls._recursive_debug -= 1 + return ret + + if hasattr(pdb_cls, "do_debug"): + do_debug.__doc__ = pdb_cls.do_debug.__doc__ + + def do_continue(self, arg): + ret = super().do_continue(arg) + if cls._recursive_debug == 0: + assert cls._config is not None + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + capman = self._pytest_capman + capturing = pytestPDB._is_capturing(capman) + if capturing: + if capturing == "global": + tw.sep(">", "PDB continue (IO-capturing resumed)") + else: + tw.sep( + ">", + f"PDB continue (IO-capturing resumed for {capturing})", + ) + assert capman is not None + capman.resume() + else: + tw.sep(">", "PDB continue") + assert cls._pluginmanager is not None + cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self) + self._continued = True + return ret + + if hasattr(pdb_cls, "do_continue"): + do_continue.__doc__ = pdb_cls.do_continue.__doc__ + + do_c = do_cont = do_continue + + def do_quit(self, arg): + # Raise Exit outcome when quit command is used in pdb. + # + # This is a bit of a hack - it would be better if BdbQuit + # could be handled, but this would require to wrap the + # whole pytest run, and adjust the report etc. + ret = super().do_quit(arg) + + if cls._recursive_debug == 0: + outcomes.exit("Quitting debugger") + + return ret + + if hasattr(pdb_cls, "do_quit"): + do_quit.__doc__ = pdb_cls.do_quit.__doc__ + + do_q = do_quit + do_exit = do_quit + + def setup(self, f, tb): + """Suspend on setup(). + + Needed after do_continue resumed, and entering another + breakpoint again. + """ + ret = super().setup(f, tb) + if not ret and self._continued: + # pdb.setup() returns True if the command wants to exit + # from the interaction: do not suspend capturing then. + if self._pytest_capman: + self._pytest_capman.suspend_global_capture(in_=True) + return ret + + def get_stack(self, f, t): + stack, i = super().get_stack(f, t) + if f is None: + # Find last non-hidden frame. + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return stack, i + + return PytestPdbWrapper + + @classmethod + def _init_pdb(cls, method, *args, **kwargs): + """Initialize PDB debugging, dropping any IO capturing.""" + import _pytest.config + + if cls._pluginmanager is None: + capman: CaptureManager | None = None + else: + capman = cls._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend(in_=True) + + if cls._config: + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + + if cls._recursive_debug == 0: + # Handle header similar to pdb.set_trace in py37+. 
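# --- The wrapper-class pattern used by _get_pdb_wrapper_class above: build
# a subclass of an arbitrary Pdb-like class at runtime so its commands can
# be decorated. A minimal sketch with hypothetical names:
import pdb

def make_wrapper(pdb_cls: type) -> type:
    class Wrapper(pdb_cls):  # type: ignore[misc, valid-type]
        def do_continue(self, arg):
            print(">>> resuming")  # stand-in for resuming IO capture
            return super().do_continue(arg)

        do_c = do_cont = do_continue  # keep pdb's command aliases working

    return Wrapper

assert issubclass(make_wrapper(pdb.Pdb), pdb.Pdb)
# (The "header" handling mentioned in the comment above follows below.)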
+ header = kwargs.pop("header", None) + if header is not None: + tw.sep(">", header) + else: + capturing = cls._is_capturing(capman) + if capturing == "global": + tw.sep(">", f"PDB {method} (IO-capturing turned off)") + elif capturing: + tw.sep( + ">", + f"PDB {method} (IO-capturing turned off for {capturing})", + ) + else: + tw.sep(">", f"PDB {method}") + + _pdb = cls._import_pdb_cls(capman)(**kwargs) + + if cls._pluginmanager: + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config, pdb=_pdb) + return _pdb + + @classmethod + def set_trace(cls, *args, **kwargs) -> None: + """Invoke debugging via ``Pdb.set_trace``, dropping any IO capturing.""" + frame = sys._getframe().f_back + _pdb = cls._init_pdb("set_trace", *args, **kwargs) + _pdb.set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact( + self, node: Node, call: CallInfo[Any], report: BaseReport + ) -> None: + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stdout.write(err) + assert call.excinfo is not None + + if not isinstance(call.excinfo.value, unittest.SkipTest): + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excinfo: ExceptionInfo[BaseException]) -> None: + exc_or_tb = _postmortem_exc_or_tb(excinfo) + post_mortem(exc_or_tb) + + +class PdbTrace: + @hookimpl(wrapper=True) + def pytest_pyfunc_call(self, pyfuncitem) -> Generator[None, object, object]: + wrap_pytest_function_for_tracing(pyfuncitem) + return (yield) + + +def wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Change the Python function object of the given Function item by a + wrapper which actually enters pdb before calling the python function + itself, effectively leaving the user in the pdb prompt in the first + statement of the function.""" + _pdb = pytestPDB._init_pdb("runcall") + testfunction = pyfuncitem.obj + + # we can't just return `partial(pdb.runcall, testfunction)` because (on + # python < 3.7.4) runcall's first param is `func`, which means we'd get + # an exception if one of the kwargs to testfunction was called `func`. + @functools.wraps(testfunction) + def wrapper(*args, **kwargs) -> None: + func = functools.partial(testfunction, *args, **kwargs) + _pdb.runcall(func) + + pyfuncitem.obj = wrapper + + +def maybe_wrap_pytest_function_for_tracing(pyfuncitem) -> None: + """Wrap the given pytestfunct item for tracing support if --trace was given in + the command line.""" + if pyfuncitem.config.getvalue("trace"): + wrap_pytest_function_for_tracing(pyfuncitem) + + +def _enter_pdb( + node: Node, excinfo: ExceptionInfo[BaseException], rep: BaseReport +) -> BaseReport: + # XXX we reuse the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. 
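# --- Sketch of the partial-binding trick used by
# wrap_pytest_function_for_tracing() above: bind the test function and its
# arguments first, then hand a zero-argument callable to Pdb.runcall so none
# of the test's kwargs can collide with runcall's own parameters.
import functools
import pdb

def trace_call(func, /, *args, **kwargs):
    bound = functools.partial(func, *args, **kwargs)
    return pdb.Pdb().runcall(bound)  # breaks at the first line of func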
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + + showcapture = node.config.option.showcapture + + for sectionname, content in ( + ("stdout", rep.capstdout), + ("stderr", rep.capstderr), + ("log", rep.caplog), + ): + if showcapture in (sectionname, "all") and content: + tw.sep(">", "captured " + sectionname) + if content[-1:] == "\n": + content = content[:-1] + tw.line(content) + + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb_or_exc = _postmortem_exc_or_tb(excinfo) + rep._pdbshown = True # type: ignore[attr-defined] + post_mortem(tb_or_exc) + return rep + + +def _postmortem_exc_or_tb( + excinfo: ExceptionInfo[BaseException], +) -> types.TracebackType | BaseException: + from doctest import UnexpectedException + + get_exc = sys.version_info >= (3, 13) + if isinstance(excinfo.value, UnexpectedException): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + underlying_exc = excinfo.value + if get_exc: + return underlying_exc.exc_info[1] + + return underlying_exc.exc_info[2] + elif isinstance(excinfo.value, ConftestImportFailure): + # A config.ConftestImportFailure is not useful for post_mortem. + # Use the underlying exception instead: + cause = excinfo.value.cause + if get_exc: + return cause + + assert cause.__traceback__ is not None + return cause.__traceback__ + else: + assert excinfo._excinfo is not None + if get_exc: + return excinfo._excinfo[1] + + return excinfo._excinfo[2] + + +def post_mortem(tb_or_exc: types.TracebackType | BaseException) -> None: + p = pytestPDB._init_pdb("post_mortem") + p.reset() + p.interaction(None, tb_or_exc) + if p.quitting: + outcomes.exit("Quitting debugger") diff --git a/venv/lib/python3.10/site-packages/_pytest/deprecated.py b/venv/lib/python3.10/site-packages/_pytest/deprecated.py new file mode 100644 index 0000000000000000000000000000000000000000..a605c24e58f44175a53b0ac7aec349d57d665ead --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/deprecated.py @@ -0,0 +1,91 @@ +"""Deprecation messages and bits of code used elsewhere in the codebase that +is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. + +All constants defined in this module should be either instances of +:class:`PytestWarning`, or :class:`UnformattedWarning` +in case of warnings which need to format their messages. +""" + +from __future__ import annotations + +from warnings import warn + +from _pytest.warning_types import PytestDeprecationWarning +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import UnformattedWarning + + +# set of plugins which have been integrated into the core; we use this list to ignore +# them during registration to avoid conflicts +DEPRECATED_EXTERNAL_PLUGINS = { + "pytest_catchlog", + "pytest_capturelog", + "pytest_faulthandler", +} + + +# This can be* removed pytest 8, but it's harmless and common, so no rush to remove. +# * If you're in the future: "could have been". +YIELD_FIXTURE = PytestDeprecationWarning( + "@pytest.yield_fixture is deprecated.\n" + "Use @pytest.fixture instead; they are the same." +) + +# This deprecation is never really meant to be removed. 
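# --- The version switch in _postmortem_exc_or_tb() above, in isolation:
# since Python 3.13, pdb's post-mortem machinery accepts the exception
# object itself; older versions need a traceback. A hypothetical helper:
import sys
import types

def postmortem_arg(exc: BaseException) -> BaseException | types.TracebackType:
    if sys.version_info >= (3, 13):
        return exc
    assert exc.__traceback__ is not None
    return exc.__traceback__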
+PRIVATE = PytestDeprecationWarning("A private pytest class or function was used.") + + +HOOK_LEGACY_PATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The ({pylib_path_arg}: py.path.local) argument is deprecated, please use ({pathlib_path_arg}: pathlib.Path)\n" + "see https://docs.pytest.org/en/latest/deprecations.html" + "#py-path-local-arguments-for-hooks-replaced-with-pathlib-path", +) + +NODE_CTOR_FSPATH_ARG = UnformattedWarning( + PytestRemovedIn9Warning, + "The (fspath: py.path.local) argument to {node_type_name} is deprecated. " + "Please use the (path: pathlib.Path) argument instead.\n" + "See https://docs.pytest.org/en/latest/deprecations.html" + "#fspath-argument-for-node-constructors-replaced-with-pathlib-path", +) + +HOOK_LEGACY_MARKING = UnformattedWarning( + PytestDeprecationWarning, + "The hook{type} {fullname} uses old-style configuration options (marks or attributes).\n" + "Please use the pytest.hook{type}({hook_opts}) decorator instead\n" + " to configure the hooks.\n" + " See https://docs.pytest.org/en/latest/deprecations.html" + "#configuring-hook-specs-impls-using-markers", +) + +MARKED_FIXTURE = PytestRemovedIn9Warning( + "Marks applied to fixtures have no effect\n" + "See docs: https://docs.pytest.org/en/stable/deprecations.html#applying-a-mark-to-a-fixture-function" +) + +# You want to make some `__init__` or function "private". +# +# def my_private_function(some, args): +# ... +# +# Do this: +# +# def my_private_function(some, args, *, _ispytest: bool = False): +# check_ispytest(_ispytest) +# ... +# +# Change all internal/allowed calls to +# +# my_private_function(some, args, _ispytest=True) +# +# All other calls will get the default _ispytest=False and trigger +# the warning (possibly error in the future). + + +def check_ispytest(ispytest: bool) -> None: + if not ispytest: + warn(PRIVATE, stacklevel=3) diff --git a/venv/lib/python3.10/site-packages/_pytest/doctest.py b/venv/lib/python3.10/site-packages/_pytest/doctest.py new file mode 100644 index 0000000000000000000000000000000000000000..0dbef6056d7e5bbe2c843ec55f6082a69c4b2873 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/doctest.py @@ -0,0 +1,754 @@ +# mypy: allow-untyped-defs +"""Discover and run doctests in modules and test files.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +from contextlib import contextmanager +import functools +import inspect +import os +from pathlib import Path +import platform +import re +import sys +import traceback +import types +from typing import Any +from typing import TYPE_CHECKING +import warnings + +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import fixture +from _pytest.fixtures import TopRequest +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.python import Module +from _pytest.python_api import approx +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + import doctest + + from typing_extensions import Self + 
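# --- Self-contained version of the "_ispytest" guard documented above:
# internal callers pass _ispytest=True, everyone else gets the warning.
import warnings

def check_ispytest(ispytest: bool) -> None:
    if not ispytest:
        warnings.warn(
            "A private pytest class or function was used.",
            DeprecationWarning,
            stacklevel=3,
        )

def my_private_function(some, args, *, _ispytest: bool = False):
    check_ispytest(_ispytest)
    return some, args

my_private_function(1, 2, _ispytest=True)  # internal call: no warning emitted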
+DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: type[doctest.OutputChecker] | None = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "doctest_optionflags", + "Option flags for doctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "doctest_encoding", "Encoding used for doctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--doctest-modules", + action="store_true", + default=False, + help="Run doctests in all .py modules", + dest="doctestmodules", + ) + group.addoption( + "--doctest-report", + type=str.lower, + default="udiff", + help="Choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport", + ) + group.addoption( + "--doctest-glob", + action="append", + default=[], + metavar="pat", + help="Doctests file matching pattern, default: test*.txt", + dest="doctestglob", + ) + group.addoption( + "--doctest-ignore-import-errors", + action="store_true", + default=False, + help="Ignore doctest collection errors", + dest="doctest_ignore_import_errors", + ) + group.addoption( + "--doctest-continue-on-failure", + action="store_true", + default=False, + help="For a given doctest, continue to run after the first failure", + dest="doctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: + global RUNNER_CLASS + + RUNNER_CLASS = None + + +def pytest_collect_file( + file_path: Path, + parent: Collector, +) -> DoctestModule | DoctestTextfile | None: + config = parent.config + if file_path.suffix == ".py": + if config.option.doctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + return DoctestModule.from_parent(parent, path=file_path) + elif _is_doctest(config, file_path, parent): + return DoctestTextfile.from_parent(parent, path=file_path) + return None + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_doctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence[doctest.DocTestFailure]) -> None: + super().__init__() + self.failures = failures + + +def _init_runner_class() -> type[doctest.DocTestRunner]: + import doctest + + class PytestDoctestRunner(doctest.DebugRunner): + 
"""Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. + """ + + def __init__( + self, + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: doctest.DocTest, + example: doctest.Example, + exc_info: tuple[type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: doctest.OutputChecker | None = None, + verbose: bool | None = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> doctest.DocTestRunner: + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. + return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class DoctestItem(Item): + def __init__( + self, + name: str, + parent: DoctestTextfile | DoctestModule, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + + # Stuff needed for fixture support. + self.obj = None + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(node=self, func=None, cls=None) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: DoctestTextfile | DoctestModule, + *, + name: str, + runner: doctest.DocTestRunner, + dtest: doctest.DocTest, + ) -> Self: + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = TopRequest(self, _ispytest=True) # type: ignore[arg-type] + + def setup(self) -> None: + self._request._fillfixtures() + globs = dict(getfixture=self._request.getfixturevalue) + for name, value in self._request.getfixturevalue("doctest_namespace").items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self) -> None: + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: list[doctest.DocTestFailure] = [] + # Type ignored because we change the type of `out` from what + # doctest expects. 
+ self.runner.run(self.dtest, out=failures) # type: ignore[arg-type] + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. Otherwise, stdout is lost to doctest (#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + import doctest + + failures: ( + Sequence[doctest.DocTestFailure | doctest.UnexpectedException] | None + ) = None + if isinstance( + excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException) + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. + reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + f"{i + test.lineno + 1:03d} {x}" for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? {indent} {line}") + indent = "..." 
+ if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += [f"UNEXPECTED EXCEPTION: {inner_excinfo.value!r}"] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + return self.path, self.dtest.lineno, f"[doctest] {self.name}" + + +def _get_flag_lookup() -> dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(config: Config) -> int: + optionflags_str = config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config: Config) -> bool: + continue_on_failure: bool = config.getvalue("doctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. + if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class DoctestTextfile(Module): + obj = None + + def collect(self) -> Iterable[DoctestItem]: + import doctest + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. 
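+        # [Editor's note: illustrative usage sketch, not upstream pytest code.]
+        # The `doctest_optionflags` ini value consumed by get_optionflags()
+        # above is a space-separated list of flag names, e.g. in pytest.ini:
+        #
+        #     [pytest]
+        #     doctest_optionflags = NUMBER NORMALIZE_WHITESPACE
+        #
+        # With NUMBER enabled, a doctest may state fewer digits than the real
+        # output and still pass:
+        #
+        #     >>> 0.1 + 0.2
+        #     0.3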
+ encoding = self.config.getini("doctest_encoding") + text = self.path.read_text(encoding) + filename = str(self.path) + name = self.path.name + globs = {"__name__": "__main__"} + + optionflags = get_optionflags(self.config) + + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + +def _check_all_skipped(test: doctest.DocTest) -> None: + """Raise pytest.skip() if all examples in the given DocTest have the SKIP + option set.""" + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + skip("all tests skipped by +SKIP option") + + +def _is_mocked(obj: object) -> bool: + """Return if an object is possibly a mock object by checking the + existence of a highly improbable attribute.""" + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware() -> Generator[None]: + """Context manager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse into them.""" + real_unwrap = inspect.unwrap + + def _mock_aware_unwrap( + func: Callable[..., Any], *, stop: Callable[[Any], Any] | None = None + ) -> Any: + try: + if stop is None or stop is _is_mocked: + return real_unwrap(func, stop=_is_mocked) + _stop = stop + return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func)) + except Exception as e: + warnings.warn( + f"Got {e!r} when unwrapping {func!r}. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080", + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class DoctestModule(Module): + def collect(self) -> Iterable[DoctestItem]: + import doctest + + class MockAwareDocTestFinder(doctest.DocTestFinder): + py_ver_info_minor = sys.version_info[:2] + is_find_lineno_broken = ( + py_ver_info_minor < (3, 11) + or (py_ver_info_minor == (3, 11) and sys.version_info.micro < 9) + or (py_ver_info_minor == (3, 12) and sys.version_info.micro < 3) + ) + if is_find_lineno_broken: + + def _find_lineno(self, obj, source_lines): + """On older Pythons, doctest code does not take into account + `@property`. https://github.com/python/cpython/issues/61648 + + Moreover, wrapped Doctests need to be unwrapped so the correct + line number is returned. #8796 + """ + if isinstance(obj, property): + obj = getattr(obj, "fget", obj) + + if hasattr(obj, "__wrapped__"): + # Get the main obj in case of it being wrapped + obj = inspect.unwrap(obj) + + # Type ignored because this is a private function. + return super()._find_lineno( # type:ignore[misc] + obj, + source_lines, + ) + + if sys.version_info < (3, 10): + + def _find( + self, tests, obj, name, module, source_lines, globs, seen + ) -> None: + """Override _find to work around issue in stdlib. + + https://github.com/pytest-dev/pytest/issues/3456 + https://github.com/python/cpython/issues/69718 + """ + if _is_mocked(obj): + return # pragma: no cover + with _patch_unwrap_mock_aware(): + # Type ignored because this is a private function. 
+                        super()._find(  # type:ignore[misc]
+                            tests, obj, name, module, source_lines, globs, seen
+                        )
+
+            if sys.version_info < (3, 13):
+
+                def _from_module(self, module, object):
+                    """`cached_property` objects are never considered a part
+                    of the 'current module'. As such they are skipped by doctest.
+                    Here we override `_from_module` to check the underlying
+                    function instead. https://github.com/python/cpython/issues/107995
+                    """
+                    if isinstance(object, functools.cached_property):
+                        object = object.func
+
+                    # Type ignored because this is a private function.
+                    return super()._from_module(module, object)  # type: ignore[misc]
+
+        try:
+            module = self.obj
+        except Collector.CollectError:
+            if self.config.getvalue("doctest_ignore_import_errors"):
+                skip(f"unable to import module {self.path!r}")
+            else:
+                raise
+
+        # While doctests currently don't support fixtures directly, we still
+        # need to pick up autouse fixtures.
+        self.session._fixturemanager.parsefactories(self)
+
+        # Uses internal doctest module parsing mechanism.
+        finder = MockAwareDocTestFinder()
+        optionflags = get_optionflags(self.config)
+        runner = _get_runner(
+            verbose=False,
+            optionflags=optionflags,
+            checker=_get_checker(),
+            continue_on_failure=_get_continue_on_failure(self.config),
+        )
+
+        for test in finder.find(module, module.__name__):
+            if test.examples:  # skip empty doctests
+                yield DoctestItem.from_parent(
+                    self, name=test.name, runner=runner, dtest=test
+                )
+
+
+def _init_checker_class() -> type[doctest.OutputChecker]:
+    import doctest
+
+    class LiteralsOutputChecker(doctest.OutputChecker):
+        # Based on doctest_nose_plugin.py from the nltk project
+        # (https://github.com/nltk/nltk) and on the "numtest" doctest extension
+        # by Sebastien Boisgerault (https://github.com/boisgera/numtest).
+
+        _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+        _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+        _number_re = re.compile(
+            r"""
+            (?P<number>
+              (?P<mantissa>
+                (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+)
+                |
+                (?P<integer2> [+-]?\d+)\.
+              )
+              (?:
+                [Ee]
+                (?P<exponent1> [+-]?\d+)
+              )?
+              |
+              (?P<integer3> [+-]?\d+)
+              (?:
+                [Ee]
+                (?P<exponent2> [+-]?\d+)
+              )
+            )
+            """,
+            re.VERBOSE,
+        )
+
+        def check_output(self, want: str, got: str, optionflags: int) -> bool:
+            if super().check_output(want, got, optionflags):
+                return True
+
+            allow_unicode = optionflags & _get_allow_unicode_flag()
+            allow_bytes = optionflags & _get_allow_bytes_flag()
+            allow_number = optionflags & _get_number_flag()
+
+            if not allow_unicode and not allow_bytes and not allow_number:
+                return False
+
+            def remove_prefixes(regex: re.Pattern[str], txt: str) -> str:
+                return re.sub(regex, r"\1\2", txt)
+
+            if allow_unicode:
+                want = remove_prefixes(self._unicode_literal_re, want)
+                got = remove_prefixes(self._unicode_literal_re, got)
+
+            if allow_bytes:
+                want = remove_prefixes(self._bytes_literal_re, want)
+                got = remove_prefixes(self._bytes_literal_re, got)
+
+            if allow_number:
+                got = self._remove_unwanted_precision(want, got)
+
+            return super().check_output(want, got, optionflags)
+
+        def _remove_unwanted_precision(self, want: str, got: str) -> str:
+            wants = list(self._number_re.finditer(want))
+            gots = list(self._number_re.finditer(got))
+            if len(wants) != len(gots):
+                return got
+            offset = 0
+            for w, g in zip(wants, gots):
+                fraction: str | None = w.group("fraction")
+                exponent: str | None = w.group("exponent1")
+                if exponent is None:
+                    exponent = w.group("exponent2")
+                precision = 0 if fraction is None else len(fraction)
+                if exponent is not None:
+                    precision -= int(exponent)
+                if float(w.group()) == approx(float(g.group()), abs=10**-precision):
+                    # They're close enough. Replace the text we actually
+                    # got with the text we want, so that it will match when we
+                    # check the string literally.
+                    got = (
+                        got[: g.start() + offset] + w.group() + got[g.end() + offset :]
+                    )
+                    offset += w.end() - w.start() - (g.end() - g.start())
+            return got
+
+    return LiteralsOutputChecker
+
+
+def _get_checker() -> doctest.OutputChecker:
+    """Return a doctest.OutputChecker subclass that supports some
+    additional options:
+
+    * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b''
+      prefixes (respectively) in string literals. Useful when the same
+      doctest should run in Python 2 and Python 3.
+
+    * NUMBER to ignore floating-point differences smaller than the
+      precision of the literal number in the doctest.
+
+    An inner class is used to avoid importing "doctest" at the module
+    level.
+    """
+    global CHECKER_CLASS
+    if CHECKER_CLASS is None:
+        CHECKER_CLASS = _init_checker_class()
+    return CHECKER_CLASS()
+
+
+def _get_allow_unicode_flag() -> int:
+    """Register and return the ALLOW_UNICODE flag."""
+    import doctest
+
+    return doctest.register_optionflag("ALLOW_UNICODE")
+
+
+def _get_allow_bytes_flag() -> int:
+    """Register and return the ALLOW_BYTES flag."""
+    import doctest
+
+    return doctest.register_optionflag("ALLOW_BYTES")
+
+
+def _get_number_flag() -> int:
+    """Register and return the NUMBER flag."""
+    import doctest
+
+    return doctest.register_optionflag("NUMBER")
+
+
+def _get_report_choice(key: str) -> int:
+    """Return the actual `doctest` module flag value.
+
+    We want to do it as late as possible to avoid importing `doctest` and all
+    its dependencies when parsing options, as it adds overhead and breaks tests.
+ """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@fixture(scope="session") +def doctest_namespace() -> dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of doctests. + + Usually this fixture is used in conjunction with another ``autouse`` fixture: + + .. code-block:: python + + @pytest.fixture(autouse=True) + def add_np(doctest_namespace): + doctest_namespace["np"] = numpy + + For more details: :ref:`doctest_namespace`. + """ + return dict() diff --git a/venv/lib/python3.10/site-packages/_pytest/faulthandler.py b/venv/lib/python3.10/site-packages/_pytest/faulthandler.py new file mode 100644 index 0000000000000000000000000000000000000000..79efc1d1704e556603c19869d3728ffbca48c0a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/faulthandler.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from collections.abc import Generator +import os +import sys + +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.stash import StashKey +import pytest + + +fault_handler_original_stderr_fd_key = StashKey[int]() +fault_handler_stderr_fd_key = StashKey[int]() + + +def pytest_addoption(parser: Parser) -> None: + help = ( + "Dump the traceback of all threads if a test takes " + "more than TIMEOUT seconds to finish" + ) + parser.addini("faulthandler_timeout", help, default=0.0) + + +def pytest_configure(config: Config) -> None: + import faulthandler + + # at teardown we want to restore the original faulthandler fileno + # but faulthandler has no api to return the original fileno + # so here we stash the stderr fileno to be used at teardown + # sys.stderr and sys.__stderr__ may be closed or patched during the session + # so we can't rely on their values being good at that point (#11572). + stderr_fileno = get_stderr_fileno() + if faulthandler.is_enabled(): + config.stash[fault_handler_original_stderr_fd_key] = stderr_fileno + config.stash[fault_handler_stderr_fd_key] = os.dup(stderr_fileno) + faulthandler.enable(file=config.stash[fault_handler_stderr_fd_key]) + + +def pytest_unconfigure(config: Config) -> None: + import faulthandler + + faulthandler.disable() + # Close the dup file installed during pytest_configure. + if fault_handler_stderr_fd_key in config.stash: + os.close(config.stash[fault_handler_stderr_fd_key]) + del config.stash[fault_handler_stderr_fd_key] + # Re-enable the faulthandler if it was originally enabled. + if fault_handler_original_stderr_fd_key in config.stash: + faulthandler.enable(config.stash[fault_handler_original_stderr_fd_key]) + del config.stash[fault_handler_original_stderr_fd_key] + + +def get_stderr_fileno() -> int: + try: + fileno = sys.stderr.fileno() + # The Twisted Logger will return an invalid file descriptor since it is not backed + # by an FD. So, let's also forward this to the same code path as with pytest-xdist. + if fileno == -1: + raise AttributeError() + return fileno + except (AttributeError, ValueError): + # pytest-xdist monkeypatches sys.stderr with an object that is not an actual file. + # https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors + # This is potentially dangerous, but the best we can do. 
+ assert sys.__stderr__ is not None + return sys.__stderr__.fileno() + + +def get_timeout_config_value(config: Config) -> float: + return float(config.getini("faulthandler_timeout") or 0.0) + + +@pytest.hookimpl(wrapper=True, trylast=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + timeout = get_timeout_config_value(item.config) + if timeout > 0: + import faulthandler + + stderr = item.config.stash[fault_handler_stderr_fd_key] + faulthandler.dump_traceback_later(timeout, file=stderr) + try: + return (yield) + finally: + faulthandler.cancel_dump_traceback_later() + else: + return (yield) + + +@pytest.hookimpl(tryfirst=True) +def pytest_enter_pdb() -> None: + """Cancel any traceback dumping due to timeout before entering pdb.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() + + +@pytest.hookimpl(tryfirst=True) +def pytest_exception_interact() -> None: + """Cancel any traceback dumping due to an interactive exception being + raised.""" + import faulthandler + + faulthandler.cancel_dump_traceback_later() diff --git a/venv/lib/python3.10/site-packages/_pytest/fixtures.py b/venv/lib/python3.10/site-packages/_pytest/fixtures.py new file mode 100644 index 0000000000000000000000000000000000000000..421237e35a0bd5b4b9184b13e388e74216f6b96c --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/fixtures.py @@ -0,0 +1,2017 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections import defaultdict +from collections import deque +from collections import OrderedDict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import functools +import inspect +import os +from pathlib import Path +import sys +import types +from typing import Any +from typing import cast +from typing import Final +from typing import final +from typing import Generic +from typing import NoReturn +from typing import Optional +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import warnings + +import _pytest +from _pytest import nodes +from _pytest._code import getfslineno +from _pytest._code import Source +from _pytest._code.code import FormattedExcinfo +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import assert_never +from _pytest.compat import get_real_func +from _pytest.compat import getfuncargnames +from _pytest.compat import getimfunc +from _pytest.compat import getlocation +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.compat import signature +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.deprecated import YIELD_FIXTURE +from _pytest.main import Session +from _pytest.mark import Mark +from _pytest.mark import ParameterSet +from _pytest.mark.structures import MarkDecorator +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import 
TEST_OUTCOME +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.scope import _ScopeName +from _pytest.scope import HIGH_SCOPES +from _pytest.scope import Scope +from _pytest.warning_types import PytestRemovedIn9Warning +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +if TYPE_CHECKING: + from _pytest.python import CallSpec2 + from _pytest.python import Function + from _pytest.python import Metafunc + + +# The value of the fixture -- return/yield of the fixture function (type variable). +FixtureValue = TypeVar("FixtureValue") +# The type of the fixture function (type variable). +FixtureFunction = TypeVar("FixtureFunction", bound=Callable[..., object]) +# The type of a fixture function (type alias generic in fixture value). +_FixtureFunc = Union[ + Callable[..., FixtureValue], Callable[..., Generator[FixtureValue]] +] +# The type of FixtureDef.cached_result (type alias generic in fixture value). +_FixtureCachedResult = Union[ + tuple[ + # The result. + FixtureValue, + # Cache key. + object, + None, + ], + tuple[ + None, + # Cache key. + object, + # The exception and the original traceback. + tuple[BaseException, Optional[types.TracebackType]], + ], +] + + +@dataclasses.dataclass(frozen=True) +class PseudoFixtureDef(Generic[FixtureValue]): + cached_result: _FixtureCachedResult[FixtureValue] + _scope: Scope + + +def pytest_sessionstart(session: Session) -> None: + session._fixturemanager = FixtureManager(session) + + +def get_scope_package( + node: nodes.Item, + fixturedef: FixtureDef[object], +) -> nodes.Node | None: + from _pytest.python import Package + + for parent in node.iter_parents(): + if isinstance(parent, Package) and parent.nodeid == fixturedef.baseid: + return parent + return node.session + + +def get_scope_node(node: nodes.Node, scope: Scope) -> nodes.Node | None: + """Get the closest parent node (including self) which matches the given + scope. + + If there is no parent node for the scope (e.g. asking for class scope on a + Module, or on a Function when not defined in a class), returns None. + """ + import _pytest.python + + if scope is Scope.Function: + # Type ignored because this is actually safe, see: + # https://github.com/python/mypy/issues/4717 + return node.getparent(nodes.Item) # type: ignore[type-abstract] + elif scope is Scope.Class: + return node.getparent(_pytest.python.Class) + elif scope is Scope.Module: + return node.getparent(_pytest.python.Module) + elif scope is Scope.Package: + return node.getparent(_pytest.python.Package) + elif scope is Scope.Session: + return node.getparent(_pytest.main.Session) + else: + assert_never(scope) + + +# TODO: Try to use FixtureFunctionDefinition instead of the marker +def getfixturemarker(obj: object) -> FixtureFunctionMarker | None: + """Return fixturemarker or None if it doesn't exist""" + if isinstance(obj, FixtureFunctionDefinition): + return obj._fixture_function_marker + return None + + +# Algorithm for sorting on a per-parametrized resource setup basis. +# It is called for Session scope first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns. + + +@dataclasses.dataclass(frozen=True) +class ParamArgKey: + """A key for a high-scoped parameter used by an item. + + For use as a hashable key in `reorder_items`. 
The combination of fields + is meant to uniquely identify a particular "instance" of a param, + potentially shared by multiple items in a scope. + """ + + #: The param name. + argname: str + param_index: int + #: For scopes Package, Module, Class, the path to the file (directory in + #: Package's case) of the package/module/class where the item is defined. + scoped_item_path: Path | None + #: For Class scope, the class where the item is defined. + item_cls: type | None + + +_V = TypeVar("_V") +OrderedSet = dict[_V, None] + + +def get_param_argkeys(item: nodes.Item, scope: Scope) -> Iterator[ParamArgKey]: + """Return all ParamArgKeys for item matching the specified high scope.""" + assert scope is not Scope.Function + + try: + callspec: CallSpec2 = item.callspec # type: ignore[attr-defined] + except AttributeError: + return + + item_cls = None + if scope is Scope.Session: + scoped_item_path = None + elif scope is Scope.Package: + # Package key = module's directory. + scoped_item_path = item.path.parent + elif scope is Scope.Module: + scoped_item_path = item.path + elif scope is Scope.Class: + scoped_item_path = item.path + item_cls = item.cls # type: ignore[attr-defined] + else: + assert_never(scope) + + for argname in callspec.indices: + if callspec._arg2scope[argname] != scope: + continue + param_index = callspec.indices[argname] + yield ParamArgKey(argname, param_index, scoped_item_path, item_cls) + + +def reorder_items(items: Sequence[nodes.Item]) -> list[nodes.Item]: + argkeys_by_item: dict[Scope, dict[nodes.Item, OrderedSet[ParamArgKey]]] = {} + items_by_argkey: dict[Scope, dict[ParamArgKey, OrderedDict[nodes.Item, None]]] = {} + for scope in HIGH_SCOPES: + scoped_argkeys_by_item = argkeys_by_item[scope] = {} + scoped_items_by_argkey = items_by_argkey[scope] = defaultdict(OrderedDict) + for item in items: + argkeys = dict.fromkeys(get_param_argkeys(item, scope)) + if argkeys: + scoped_argkeys_by_item[item] = argkeys + for argkey in argkeys: + scoped_items_by_argkey[argkey][item] = None + + items_set = dict.fromkeys(items) + return list( + reorder_items_atscope( + items_set, argkeys_by_item, items_by_argkey, Scope.Session + ) + ) + + +def reorder_items_atscope( + items: OrderedSet[nodes.Item], + argkeys_by_item: Mapping[Scope, Mapping[nodes.Item, OrderedSet[ParamArgKey]]], + items_by_argkey: Mapping[ + Scope, Mapping[ParamArgKey, OrderedDict[nodes.Item, None]] + ], + scope: Scope, +) -> OrderedSet[nodes.Item]: + if scope is Scope.Function or len(items) < 3: + return items + + scoped_items_by_argkey = items_by_argkey[scope] + scoped_argkeys_by_item = argkeys_by_item[scope] + + ignore: set[ParamArgKey] = set() + items_deque = deque(items) + items_done: OrderedSet[nodes.Item] = {} + while items_deque: + no_argkey_items: OrderedSet[nodes.Item] = {} + slicing_argkey = None + while items_deque: + item = items_deque.popleft() + if item in items_done or item in no_argkey_items: + continue + argkeys = dict.fromkeys( + k for k in scoped_argkeys_by_item.get(item, ()) if k not in ignore + ) + if not argkeys: + no_argkey_items[item] = None + else: + slicing_argkey, _ = argkeys.popitem() + # We don't have to remove relevant items from later in the + # deque because they'll just be ignored. + matching_items = [ + i for i in scoped_items_by_argkey[slicing_argkey] if i in items + ] + for i in reversed(matching_items): + items_deque.appendleft(i) + # Fix items_by_argkey order. 
+                        for other_scope in HIGH_SCOPES:
+                            other_scoped_items_by_argkey = items_by_argkey[other_scope]
+                            for argkey in argkeys_by_item[other_scope].get(i, ()):
+                                argkey_dict = other_scoped_items_by_argkey[argkey]
+                                if not hasattr(sys, "pypy_version_info"):
+                                    argkey_dict[i] = None
+                                    argkey_dict.move_to_end(i, last=False)
+                                else:
+                                    # Work around a bug in PyPy:
+                                    # https://github.com/pypy/pypy/issues/5257
+                                    # https://github.com/pytest-dev/pytest/issues/13312
+                                    bkp = argkey_dict.copy()
+                                    argkey_dict.clear()
+                                    argkey_dict[i] = None
+                                    argkey_dict.update(bkp)
+                break
+        if no_argkey_items:
+            reordered_no_argkey_items = reorder_items_atscope(
+                no_argkey_items, argkeys_by_item, items_by_argkey, scope.next_lower()
+            )
+            items_done.update(reordered_no_argkey_items)
+        if slicing_argkey is not None:
+            ignore.add(slicing_argkey)
+    return items_done
+
+
+@dataclasses.dataclass(frozen=True)
+class FuncFixtureInfo:
+    """Fixture-related information for a fixture-requesting item (e.g. test
+    function).
+
+    This is used to examine the fixtures which an item requests statically
+    (known during collection). This includes autouse fixtures, fixtures
+    requested by the `usefixtures` marker, fixtures requested in the function
+    parameters, and the transitive closure of these.
+
+    An item may also request fixtures dynamically (using `request.getfixturevalue`);
+    these are not reflected here.
+    """
+
+    __slots__ = ("argnames", "initialnames", "name2fixturedefs", "names_closure")
+
+    # Fixture names that the item requests directly by function parameters.
+    argnames: tuple[str, ...]
+    # Fixture names that the item immediately requires. These include
+    # argnames + fixture names specified via usefixtures and via autouse=True in
+    # fixture definitions.
+    initialnames: tuple[str, ...]
+    # The transitive closure of the fixture names that the item requires.
+    # Note: can't include dynamic dependencies (`request.getfixturevalue` calls).
+    names_closure: list[str]
+    # A map from a fixture name in the transitive closure to the FixtureDefs
+    # matching the name which are applicable to this function.
+    # There may be multiple overriding fixtures with the same name. The
+    # sequence is ordered from furthest to closest to the function.
+    name2fixturedefs: dict[str, Sequence[FixtureDef[Any]]]
+
+    def prune_dependency_tree(self) -> None:
+        """Recompute names_closure from initialnames and name2fixturedefs.
+
+        Can only reduce names_closure, which means that the new closure will
+        always be a subset of the old one. The order is preserved.
+
+        This method is needed because direct parametrization may shadow some
+        of the fixtures that were included in the originally built dependency
+        tree. In this way the dependency tree can get pruned, and the closure
+        of argnames may get reduced.
+        """
+        closure: set[str] = set()
+        working_set = set(self.initialnames)
+        while working_set:
+            argname = working_set.pop()
+            # Argname may be something not included in the original names_closure,
+            # in which case we ignore it. This currently happens with pseudo
+            # FixtureDefs which wrap 'get_direct_param_fixture_func(request)'.
+            # So they introduce the new dependency 'request' which might have
+            # been missing in the original tree (closure).
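+            # [Editor's note: illustrative sketch, not upstream pytest code.]
+            # Example of the shadowing described above: direct parametrization
+            # replaces a fixture, so the fixture's own dependencies drop out of
+            # the closure (hypothetical names):
+            #
+            #     @pytest.fixture
+            #     def value(tmp_path):  # depends on tmp_path
+            #         ...
+            #
+            #     @pytest.mark.parametrize("value", [1, 2])  # shadows `value`,
+            #     def test_it(value):                        # pruning tmp_path
+            #         ...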
+ if argname not in closure and argname in self.names_closure: + closure.add(argname) + if argname in self.name2fixturedefs: + working_set.update(self.name2fixturedefs[argname][-1].argnames) + + self.names_closure[:] = sorted(closure, key=self.names_closure.index) + + +class FixtureRequest(abc.ABC): + """The type of the ``request`` fixture. + + A request object gives access to the requesting test context and has a + ``param`` attribute in case the fixture is parametrized. + """ + + def __init__( + self, + pyfuncitem: Function, + fixturename: str | None, + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]], + fixture_defs: dict[str, FixtureDef[Any]], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + #: Fixture for which this request is being performed. + self.fixturename: Final = fixturename + self._pyfuncitem: Final = pyfuncitem + # The FixtureDefs for each fixture name requested by this item. + # Starts from the statically-known fixturedefs resolved during + # collection. Dynamically requested fixtures (using + # `request.getfixturevalue("foo")`) are added dynamically. + self._arg2fixturedefs: Final = arg2fixturedefs + # The evaluated argnames so far, mapping to the FixtureDef they resolved + # to. + self._fixture_defs: Final = fixture_defs + # Notes on the type of `param`: + # -`request.param` is only defined in parametrized fixtures, and will raise + # AttributeError otherwise. Python typing has no notion of "undefined", so + # this cannot be reflected in the type. + # - Technically `param` is only (possibly) defined on SubRequest, not + # FixtureRequest, but the typing of that is still in flux so this cheats. + # - In the future we might consider using a generic for the param type, but + # for now just using Any. + self.param: Any + + @property + def _fixturemanager(self) -> FixtureManager: + return self._pyfuncitem.session._fixturemanager + + @property + @abc.abstractmethod + def _scope(self) -> Scope: + raise NotImplementedError() + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + @abc.abstractmethod + def _check_scope( + self, + requested_fixturedef: FixtureDef[object] | PseudoFixtureDef[object], + requested_scope: Scope, + ) -> None: + raise NotImplementedError() + + @property + def fixturenames(self) -> list[str]: + """Names of all active fixtures in this request.""" + result = list(self._pyfuncitem.fixturenames) + result.extend(set(self._fixture_defs).difference(result)) + return result + + @property + @abc.abstractmethod + def node(self): + """Underlying collection node (depends on current request scope).""" + raise NotImplementedError() + + @property + def config(self) -> Config: + """The pytest config object associated with this request.""" + return self._pyfuncitem.config + + @property + def function(self): + """Test function object if the request has a per-function scope.""" + if self.scope != "function": + raise AttributeError( + f"function not available in {self.scope}-scoped context" + ) + return self._pyfuncitem.obj + + @property + def cls(self): + """Class (can be None) where the test function was collected.""" + if self.scope not in ("class", "function"): + raise AttributeError(f"cls not available in {self.scope}-scoped context") + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """Instance (can be None) on which test function was collected.""" + if 
self.scope != "function": + return None + return getattr(self._pyfuncitem, "instance", None) + + @property + def module(self): + """Python module object where the test function was collected.""" + if self.scope not in ("function", "class", "module"): + raise AttributeError(f"module not available in {self.scope}-scoped context") + mod = self._pyfuncitem.getparent(_pytest.python.Module) + assert mod is not None + return mod.obj + + @property + def path(self) -> Path: + """Path where the test function was collected.""" + if self.scope not in ("function", "class", "module", "package"): + raise AttributeError(f"path not available in {self.scope}-scoped context") + return self._pyfuncitem.path + + @property + def keywords(self) -> MutableMapping[str, Any]: + """Keywords/markers dictionary for the underlying node.""" + node: nodes.Node = self.node + return node.keywords + + @property + def session(self) -> Session: + """Pytest session object.""" + return self._pyfuncitem.session + + @abc.abstractmethod + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + """Add finalizer/teardown function to be called without arguments after + the last test within the requesting test context finished execution.""" + raise NotImplementedError() + + def applymarker(self, marker: str | MarkDecorator) -> None: + """Apply a marker to a single test function invocation. + + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :param marker: + An object created by a call to ``pytest.mark.NAME(...)``. + """ + self.node.add_marker(marker) + + def raiseerror(self, msg: str | None) -> NoReturn: + """Raise a FixtureLookupError exception. + + :param msg: + An optional custom error message. + """ + raise FixtureLookupError(None, self, msg) + + def getfixturevalue(self, argname: str) -> Any: + """Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + + This method can be used during the test setup phase or the test run + phase, but during the test teardown phase a fixture's value may not + be available. + + :param argname: + The fixture name. + :raises pytest.FixtureLookupError: + If the given fixture could not be found. + """ + # Note that in addition to the use case described in the docstring, + # getfixturevalue() is also called by pytest itself during item and fixture + # setup to evaluate the fixtures that are requested statically + # (using function parameters, autouse, etc). + + fixturedef = self._get_active_fixturedef(argname) + assert fixturedef.cached_result is not None, ( + f'The fixture value for "{argname}" is not available. ' + "This can happen when the fixture has already been torn down." + ) + return fixturedef.cached_result[0] + + def _iter_chain(self) -> Iterator[SubRequest]: + """Yield all SubRequests in the chain, from self up. + + Note: does *not* yield the TopRequest. + """ + current = self + while isinstance(current, SubRequest): + yield current + current = current._parent_request + + def _get_active_fixturedef( + self, argname: str + ) -> FixtureDef[object] | PseudoFixtureDef[object]: + if argname == "request": + cached_result = (self, [0], None) + return PseudoFixtureDef(cached_result, Scope.Function) + + # If we already finished computing a fixture by this name in this item, + # return it. 
+        fixturedef = self._fixture_defs.get(argname)
+        if fixturedef is not None:
+            self._check_scope(fixturedef, fixturedef._scope)
+            return fixturedef
+
+        # Find the appropriate fixturedef.
+        fixturedefs = self._arg2fixturedefs.get(argname, None)
+        if fixturedefs is None:
+            # We arrive here because of a dynamic call to
+            # getfixturevalue(argname) which was naturally
+            # not known at parsing/collection time.
+            fixturedefs = self._fixturemanager.getfixturedefs(argname, self._pyfuncitem)
+            if fixturedefs is not None:
+                self._arg2fixturedefs[argname] = fixturedefs
+        # No fixtures defined with this name.
+        if fixturedefs is None:
+            raise FixtureLookupError(argname, self)
+        # There are no fixtures with this name applicable for the function.
+        if not fixturedefs:
+            raise FixtureLookupError(argname, self)
+
+        # A fixture may override another fixture with the same name, e.g. a
+        # fixture in a module can override a fixture in a conftest, a fixture in
+        # a class can override a fixture in the module, and so on.
+        # An overriding fixture can request its own name (possibly indirectly);
+        # in this case it gets the value of the fixture it overrides, one level
+        # up.
+        # Check how many `argname`s deep we are, and take the next one.
+        # `fixturedefs` is sorted from furthest to closest, so use negative
+        # indexing to go in reverse.
+        index = -1
+        for request in self._iter_chain():
+            if request.fixturename == argname:
+                index -= 1
+        # If already consumed all of the available levels, fail.
+        if -index > len(fixturedefs):
+            raise FixtureLookupError(argname, self)
+        fixturedef = fixturedefs[index]
+
+        # Prepare a SubRequest object for calling the fixture.
+        try:
+            callspec = self._pyfuncitem.callspec
+        except AttributeError:
+            callspec = None
+        if callspec is not None and argname in callspec.params:
+            param = callspec.params[argname]
+            param_index = callspec.indices[argname]
+            # The parametrize invocation scope overrides the fixture's scope.
+            scope = callspec._arg2scope[argname]
+        else:
+            param = NOTSET
+            param_index = 0
+            scope = fixturedef._scope
+            self._check_fixturedef_without_param(fixturedef)
+        # The parametrize invocation scope only controls caching behavior while
+        # allowing wider-scoped fixtures to keep depending on the parametrized
+        # fixture. Scope control is enforced for parametrized fixtures
+        # by recreating the whole fixture tree on parameter change.
+        # Hence `fixturedef._scope`, not `scope`.
+ self._check_scope(fixturedef, fixturedef._scope) + subrequest = SubRequest( + self, scope, param, param_index, fixturedef, _ispytest=True + ) + + # Make sure the fixture value is cached, running it if it isn't + fixturedef.execute(request=subrequest) + + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _check_fixturedef_without_param(self, fixturedef: FixtureDef[object]) -> None: + """Check that this request is allowed to execute this fixturedef without + a param.""" + funcitem = self._pyfuncitem + has_params = fixturedef.params is not None + fixtures_not_supported = getattr(funcitem, "nofuncargs", False) + if has_params and fixtures_not_supported: + msg = ( + f"{funcitem.name} does not support fixtures, maybe unittest.TestCase subclass?\n" + f"Node id: {funcitem.nodeid}\n" + f"Function type: {type(funcitem).__name__}" + ) + fail(msg, pytrace=False) + if has_params: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = absolutepath(frameinfo.filename) + source_lineno = frameinfo.lineno + try: + source_path_str = str(source_path.relative_to(funcitem.config.rootpath)) + except ValueError: + source_path_str = str(source_path) + location = getlocation(fixturedef.func, funcitem.config.rootpath) + msg = ( + "The requested fixture has no parameter defined for test:\n" + f" {funcitem.nodeid}\n\n" + f"Requested fixture '{fixturedef.argname}' defined in:\n" + f"{location}\n\n" + f"Requested here:\n" + f"{source_path_str}:{source_lineno}" + ) + fail(msg, pytrace=False) + + def _get_fixturestack(self) -> list[FixtureDef[Any]]: + values = [request._fixturedef for request in self._iter_chain()] + values.reverse() + return values + + +@final +class TopRequest(FixtureRequest): + """The type of the ``request`` fixture in a test function.""" + + def __init__(self, pyfuncitem: Function, *, _ispytest: bool = False) -> None: + super().__init__( + fixturename=None, + pyfuncitem=pyfuncitem, + arg2fixturedefs=pyfuncitem._fixtureinfo.name2fixturedefs.copy(), + fixture_defs={}, + _ispytest=_ispytest, + ) + + @property + def _scope(self) -> Scope: + return Scope.Function + + def _check_scope( + self, + requested_fixturedef: FixtureDef[object] | PseudoFixtureDef[object], + requested_scope: Scope, + ) -> None: + # TopRequest always has function scope so always valid. 
+        pass
+
+    @property
+    def node(self):
+        return self._pyfuncitem
+
+    def __repr__(self) -> str:
+        return f"<FixtureRequest for {self.node!r}>"
+
+    def _fillfixtures(self) -> None:
+        item = self._pyfuncitem
+        for argname in item.fixturenames:
+            if argname not in item.funcargs:
+                item.funcargs[argname] = self.getfixturevalue(argname)
+
+    def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+        self.node.addfinalizer(finalizer)
+
+
+@final
+class SubRequest(FixtureRequest):
+    """The type of the ``request`` fixture in a fixture function requested
+    (transitively) by a test function."""
+
+    def __init__(
+        self,
+        request: FixtureRequest,
+        scope: Scope,
+        param: Any,
+        param_index: int,
+        fixturedef: FixtureDef[object],
+        *,
+        _ispytest: bool = False,
+    ) -> None:
+        super().__init__(
+            pyfuncitem=request._pyfuncitem,
+            fixturename=fixturedef.argname,
+            fixture_defs=request._fixture_defs,
+            arg2fixturedefs=request._arg2fixturedefs,
+            _ispytest=_ispytest,
+        )
+        self._parent_request: Final[FixtureRequest] = request
+        self._scope_field: Final = scope
+        self._fixturedef: Final[FixtureDef[object]] = fixturedef
+        if param is not NOTSET:
+            self.param = param
+        self.param_index: Final = param_index
+
+    def __repr__(self) -> str:
+        return f"<SubRequest {self.fixturename!r} for {self._pyfuncitem!r}>"
+
+    @property
+    def _scope(self) -> Scope:
+        return self._scope_field
+
+    @property
+    def node(self):
+        scope = self._scope
+        if scope is Scope.Function:
+            # This might also be a non-function Item despite its attribute name.
+            node: nodes.Node | None = self._pyfuncitem
+        elif scope is Scope.Package:
+            node = get_scope_package(self._pyfuncitem, self._fixturedef)
+        else:
+            node = get_scope_node(self._pyfuncitem, scope)
+            if node is None and scope is Scope.Class:
+                # Fallback to function item itself.
+                node = self._pyfuncitem
+        assert node, (
+            f'Could not obtain a node for scope "{scope}" for function {self._pyfuncitem!r}'
+        )
+        return node
+
+    def _check_scope(
+        self,
+        requested_fixturedef: FixtureDef[object] | PseudoFixtureDef[object],
+        requested_scope: Scope,
+    ) -> None:
+        if isinstance(requested_fixturedef, PseudoFixtureDef):
+            return
+        if self._scope > requested_scope:
+            # Try to report something helpful.
+            argname = requested_fixturedef.argname
+            fixture_stack = "\n".join(
+                self._format_fixturedef_line(fixturedef)
+                for fixturedef in self._get_fixturestack()
+            )
+            requested_fixture = self._format_fixturedef_line(requested_fixturedef)
+            fail(
+                f"ScopeMismatch: You tried to access the {requested_scope.value} scoped "
+                f"fixture {argname} with a {self._scope.value} scoped request object. "
+                f"Requesting fixture stack:\n{fixture_stack}\n"
+                f"Requested fixture:\n{requested_fixture}",
+                pytrace=False,
+            )
+
+    def _format_fixturedef_line(self, fixturedef: FixtureDef[object]) -> str:
+        factory = fixturedef.func
+        path, lineno = getfslineno(factory)
+        if isinstance(path, Path):
+            path = bestrelpath(self._pyfuncitem.session.path, path)
+        sig = signature(factory)
+        return f"{path}:{lineno + 1}: def {factory.__name__}{sig}"
+
+    def addfinalizer(self, finalizer: Callable[[], object]) -> None:
+        self._fixturedef.addfinalizer(finalizer)
+
+
+@final
+class FixtureLookupError(LookupError):
+    """Could not return a requested fixture (missing or invalid)."""
+
+    def __init__(
+        self, argname: str | None, request: FixtureRequest, msg: str | None = None
+    ) -> None:
+        self.argname = argname
+        self.request = request
+        self.fixturestack = request._get_fixturestack()
+        self.msg = msg
+
+    def formatrepr(self) -> FixtureLookupErrorRepr:
+        tblines: list[str] = []
+        addline = tblines.append
+        stack = [self.request._pyfuncitem.obj]
+        stack.extend(map(lambda x: x.func, self.fixturestack))
+        msg = self.msg
+        # This function currently makes an assumption that a non-None msg means we
+        # have a non-empty `self.fixturestack`. This is currently true, but if
+        # somebody at some point wants to extend the use of FixtureLookupError to
+        # new cases it might break.
+        # Add the assert to make it clearer to developers that this will fail,
+        # otherwise it crashes because `fspath` does not get set due to `stack`
+        # being empty.
+        assert self.msg is None or self.fixturestack, (
+            "formatrepr assumptions broken, rewrite it to handle it"
+        )
+        if msg is not None:
+            # The last fixture raised an error, let's present
+            # it at the requesting side.
+            stack = stack[:-1]
+        for function in stack:
+            fspath, lineno = getfslineno(function)
+            try:
+                lines, _ = inspect.getsourcelines(get_real_func(function))
+            except (OSError, IndexError, TypeError):
+                error_msg = "file %s, line %s: source code not available"
+                addline(error_msg % (fspath, lineno + 1))
+            else:
+                addline(f"file {fspath}, line {lineno + 1}")
+                for i, line in enumerate(lines):
+                    line = line.rstrip()
+                    addline("  " + line)
+                    if line.lstrip().startswith("def"):
+                        break
+
+        if msg is None:
+            fm = self.request._fixturemanager
+            available = set()
+            parent = self.request._pyfuncitem.parent
+            assert parent is not None
+            for name, fixturedefs in fm._arg2fixturedefs.items():
+                faclist = list(fm._matchfactories(fixturedefs, parent))
+                if faclist:
+                    available.add(name)
+            if self.argname in available:
+                msg = (
+                    f" recursive dependency involving fixture '{self.argname}' detected"
+                )
+            else:
+                msg = f"fixture '{self.argname}' not found"
+            msg += "\n available fixtures: {}".format(", ".join(sorted(available)))
+            msg += "\n use 'pytest --fixtures [testpath]' for help on them."
+ + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__( + self, + filename: str | os.PathLike[str], + firstlineno: int, + tblines: Sequence[str], + errorstring: str, + argname: str | None, + ) -> None: + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw: TerminalWriter) -> None: + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line( + f"{FormattedExcinfo.fail_marker} {lines[0].strip()}", + red=True, + ) + for line in lines[1:]: + tw.line( + f"{FormattedExcinfo.flow_marker} {line.strip()}", + red=True, + ) + tw.line() + tw.line(f"{os.fspath(self.filename)}:{self.firstlineno + 1}") + + +def call_fixture_func( + fixturefunc: _FixtureFunc[FixtureValue], request: FixtureRequest, kwargs +) -> FixtureValue: + if inspect.isgeneratorfunction(fixturefunc): + fixturefunc = cast(Callable[..., Generator[FixtureValue]], fixturefunc) + generator = fixturefunc(**kwargs) + try: + fixture_result = next(generator) + except StopIteration: + raise ValueError(f"{request.fixturename} did not yield a value") from None + finalizer = functools.partial(_teardown_yield_fixture, fixturefunc, generator) + request.addfinalizer(finalizer) + else: + fixturefunc = cast(Callable[..., FixtureValue], fixturefunc) + fixture_result = fixturefunc(**kwargs) + return fixture_result + + +def _teardown_yield_fixture(fixturefunc, it) -> None: + """Execute the teardown of a fixture function by advancing the iterator + after the yield and ensure the iteration ends (if not it means there is + more than one yield in the function).""" + try: + next(it) + except StopIteration: + pass + else: + fs, lineno = getfslineno(fixturefunc) + fail( + f"fixture function has more than one 'yield':\n\n" + f"{Source(fixturefunc).indent()}\n" + f"{fs}:{lineno + 1}", + pytrace=False, + ) + + +def _eval_scope_callable( + scope_callable: Callable[[str, Config], _ScopeName], + fixture_name: str, + config: Config, +) -> _ScopeName: + try: + # Type ignored because there is no typing mechanism to specify + # keyword arguments, currently. + result = scope_callable(fixture_name=fixture_name, config=config) # type: ignore[call-arg] + except Exception as e: + raise TypeError( + f"Error evaluating {scope_callable} while defining fixture '{fixture_name}'.\n" + "Expected a function with the signature (*, fixture_name, config)" + ) from e + if not isinstance(result, str): + fail( + f"Expected {scope_callable} to return a 'str' while defining fixture '{fixture_name}', but it returned:\n" + f"{result!r}", + pytrace=False, + ) + return result + + +@final +class FixtureDef(Generic[FixtureValue]): + """A container for a fixture definition. + + Note: At this time, only explicitly documented fields and methods are + considered public stable API. + """ + + def __init__( + self, + config: Config, + baseid: str | None, + argname: str, + func: _FixtureFunc[FixtureValue], + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] | None, + params: Sequence[object] | None, + ids: tuple[object | None, ...] 
| Callable[[Any], object | None] | None = None, + *, + _ispytest: bool = False, + # only used in a deprecationwarning msg, can be removed in pytest9 + _autouse: bool = False, + ) -> None: + check_ispytest(_ispytest) + # The "base" node ID for the fixture. + # + # This is a node ID prefix. A fixture is only available to a node (e.g. + # a `Function` item) if the fixture's baseid is a nodeid of a parent of + # node. + # + # For a fixture found in a Collector's object (e.g. a `Module`s module, + # a `Class`'s class), the baseid is the Collector's nodeid. + # + # For a fixture found in a conftest plugin, the baseid is the conftest's + # directory path relative to the rootdir. + # + # For other plugins, the baseid is the empty string (always matches). + self.baseid: Final = baseid or "" + # Whether the fixture was found from a node or a conftest in the + # collection tree. Will be false for fixtures defined in non-conftest + # plugins. + self.has_location: Final = baseid is not None + # The fixture factory function. + self.func: Final = func + # The name by which the fixture may be requested. + self.argname: Final = argname + if scope is None: + scope = Scope.Function + elif callable(scope): + scope = _eval_scope_callable(scope, argname, config) + if isinstance(scope, str): + scope = Scope.from_user( + scope, descr=f"Fixture '{func.__name__}'", where=baseid + ) + self._scope: Final = scope + # If the fixture is directly parametrized, the parameter values. + self.params: Final = params + # If the fixture is directly parametrized, a tuple of explicit IDs to + # assign to the parameter values, or a callable to generate an ID given + # a parameter value. + self.ids: Final = ids + # The names requested by the fixtures. + self.argnames: Final = getfuncargnames(func, name=argname) + # If the fixture was executed, the current value of the fixture. + # Can change if the fixture is executed with different parameters. + self.cached_result: _FixtureCachedResult[FixtureValue] | None = None + self._finalizers: Final[list[Callable[[], object]]] = [] + + # only used to emit a deprecationwarning, can be removed in pytest9 + self._autouse = _autouse + + @property + def scope(self) -> _ScopeName: + """Scope string, one of "function", "class", "module", "package", "session".""" + return self._scope.value + + def addfinalizer(self, finalizer: Callable[[], object]) -> None: + self._finalizers.append(finalizer) + + def finish(self, request: SubRequest) -> None: + exceptions: list[BaseException] = [] + while self._finalizers: + fin = self._finalizers.pop() + try: + fin() + except BaseException as e: + exceptions.append(e) + node = request.node + node.ihook.pytest_fixture_post_finalizer(fixturedef=self, request=request) + # Even if finalization fails, we invalidate the cached fixture + # value and remove all finalizers because they may be bound methods + # which will keep instances alive. + self.cached_result = None + self._finalizers.clear() + if len(exceptions) == 1: + raise exceptions[0] + elif len(exceptions) > 1: + msg = f'errors while tearing down fixture "{self.argname}" of {node}' + raise BaseExceptionGroup(msg, exceptions[::-1]) + + def execute(self, request: SubRequest) -> FixtureValue: + """Return the value of this fixture, executing it if not cached.""" + # Ensure that the dependent fixtures requested by this fixture are loaded. + # This needs to be done before checking if we have a cached value, since + # if a dependent fixture has their cache invalidated, e.g. 
due to
+        # parametrization, they finalize themselves and fixtures depending on it
+        # (which will likely include this fixture) setting `self.cached_result = None`.
+        # See #4871
+        requested_fixtures_that_should_finalize_us = []
+        for argname in self.argnames:
+            fixturedef = request._get_active_fixturedef(argname)
+            # Saves requested fixtures in a list so we later can add our finalizer
+            # to them, ensuring that if a requested fixture gets torn down we get torn
+            # down first. This is generally handled by SetupState, but still currently
+            # needed when this fixture is not parametrized but depends on a parametrized
+            # fixture.
+            if not isinstance(fixturedef, PseudoFixtureDef):
+                requested_fixtures_that_should_finalize_us.append(fixturedef)
+
+        # Check for (and return) cached value/exception.
+        if self.cached_result is not None:
+            request_cache_key = self.cache_key(request)
+            cache_key = self.cached_result[1]
+            try:
+                # Attempt to make a normal == check: this might fail for objects
+                # which do not implement the standard comparison (like numpy arrays -- #6497).
+                cache_hit = bool(request_cache_key == cache_key)
+            except (ValueError, RuntimeError):
+                # If the comparison raises, use 'is' as fallback.
+                cache_hit = request_cache_key is cache_key
+
+            if cache_hit:
+                if self.cached_result[2] is not None:
+                    exc, exc_tb = self.cached_result[2]
+                    raise exc.with_traceback(exc_tb)
+                else:
+                    result = self.cached_result[0]
+                    return result
+            # We have a previous but differently parametrized fixture instance
+            # so we need to tear it down before creating a new one.
+            self.finish(request)
+            assert self.cached_result is None
+
+        # Add finalizer to requested fixtures we saved previously.
+        # We make sure to do this after checking for cached value to avoid
+        # adding our finalizer multiple times. (#12135)
+        finalizer = functools.partial(self.finish, request=request)
+        for parent_fixture in requested_fixtures_that_should_finalize_us:
+            parent_fixture.addfinalizer(finalizer)
+
+        ihook = request.node.ihook
+        try:
+            # Setup the fixture, run the code in it, and cache the value
+            # in self.cached_result
+            result = ihook.pytest_fixture_setup(fixturedef=self, request=request)
+        finally:
+            # schedule our finalizer, even if the setup failed
+            request.node.addfinalizer(finalizer)
+
+        return result
+
+    def cache_key(self, request: SubRequest) -> object:
+        return getattr(request, "param", None)
+
+    def __repr__(self) -> str:
+        return f"<FixtureDef argname={self.argname!r} scope={self.scope!r} baseid={self.baseid!r}>"
+
+
+def resolve_fixture_function(
+    fixturedef: FixtureDef[FixtureValue], request: FixtureRequest
+) -> _FixtureFunc[FixtureValue]:
+    """Get the actual callable that can be called to obtain the fixture
+    value."""
+    fixturefunc = fixturedef.func
+    # The fixture function needs to be bound to the actual
+    # request.instance so that code working with "fixturedef" behaves
+    # as expected.
+    instance = request.instance
+    if instance is not None:
+        # Handle the case where fixture is defined not in a test class, but some other class
+        # (for example a plugin class with a fixture), see #2270.
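+        # [Editor's note: illustrative sketch, not upstream pytest code.]
+        # The #2270 case handled below: a fixture defined as a method on a
+        # (non-test) plugin class; it must stay bound to the plugin instance
+        # rather than being re-bound to `request.instance`:
+        #
+        #     class MyPlugin:
+        #         @pytest.fixture
+        #         def resource(self):
+        #             return ...
+        #
+        #     # registered e.g. with config.pluginmanager.register(MyPlugin())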
+ if hasattr(fixturefunc, "__self__") and not isinstance( + instance, + fixturefunc.__self__.__class__, + ): + return fixturefunc + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(instance) + return fixturefunc + + +def pytest_fixture_setup( + fixturedef: FixtureDef[FixtureValue], request: SubRequest +) -> FixtureValue: + """Execution of fixture setup.""" + kwargs = {} + for argname in fixturedef.argnames: + kwargs[argname] = request.getfixturevalue(argname) + + fixturefunc = resolve_fixture_function(fixturedef, request) + my_cache_key = fixturedef.cache_key(request) + + if inspect.isasyncgenfunction(fixturefunc) or inspect.iscoroutinefunction( + fixturefunc + ): + auto_str = " with autouse=True" if fixturedef._autouse else "" + + warnings.warn( + PytestRemovedIn9Warning( + f"{request.node.name!r} requested an async fixture " + f"{request.fixturename!r}{auto_str}, with no plugin or hook that " + "handled it. This is usually an error, as pytest does not natively " + "support it. " + "This will turn into an error in pytest 9.\n" + "See: https://docs.pytest.org/en/stable/deprecations.html#sync-test-depending-on-async-fixture" + ), + # no stacklevel will point at users code, so we just point here + stacklevel=1, + ) + + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except TEST_OUTCOME as e: + if isinstance(e, skip.Exception): + # The test requested a fixture which caused a skip. + # Don't show the fixture as the skip location, as then the user + # wouldn't know which test skipped. + e._use_item_location = True + fixturedef.cached_result = (None, my_cache_key, (e, e.__traceback__)) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +@final +@dataclasses.dataclass(frozen=True) +class FixtureFunctionMarker: + scope: _ScopeName | Callable[[str, Config], _ScopeName] + params: tuple[object, ...] | None + autouse: bool = False + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None + name: str | None = None + + _ispytest: dataclasses.InitVar[bool] = False + + def __post_init__(self, _ispytest: bool) -> None: + check_ispytest(_ispytest) + + def __call__(self, function: FixtureFunction) -> FixtureFunctionDefinition: + if inspect.isclass(function): + raise ValueError("class fixtures not supported (maybe in the future)") + + if isinstance(function, FixtureFunctionDefinition): + raise ValueError( + f"@pytest.fixture is being applied more than once to the same function {function.__name__!r}" + ) + + if hasattr(function, "pytestmark"): + warnings.warn(MARKED_FIXTURE, stacklevel=2) + + fixture_definition = FixtureFunctionDefinition( + function=function, fixture_function_marker=self, _ispytest=True + ) + + name = self.name or function.__name__ + if name == "request": + location = getlocation(function) + fail( + f"'request' is a reserved word for fixtures, use another name:\n {location}", + pytrace=False, + ) + + return fixture_definition + + +# TODO: paramspec/return type annotation tracking and storing +class FixtureFunctionDefinition: + def __init__( + self, + *, + function: Callable[..., Any], + fixture_function_marker: FixtureFunctionMarker, + instance: object | None = None, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self.name = fixture_function_marker.name or function.__name__ + # In order to show the function that this fixture contains in messages. + # Set the __name__ to be same as the function __name__ or the given fixture name. 
+ self.__name__ = self.name + self._fixture_function_marker = fixture_function_marker + if instance is not None: + self._fixture_function = cast( + Callable[..., Any], function.__get__(instance) + ) + else: + self._fixture_function = function + functools.update_wrapper(self, function) + + def __repr__(self) -> str: + return f"<pytest_fixture({self._fixture_function})>" + + def __get__(self, instance, owner=None): + """Behave like a method if the function it was applied to was a method.""" + return FixtureFunctionDefinition( + function=self._fixture_function, + fixture_function_marker=self._fixture_function_marker, + instance=instance, + _ispytest=True, + ) + + def __call__(self, *args: Any, **kwds: Any) -> Any: + message = ( + f'Fixture "{self.name}" called directly. Fixtures are not meant to be called directly,\n' + "but are created automatically when test functions request them as parameters.\n" + "See https://docs.pytest.org/en/stable/explanation/fixtures.html for more information about fixtures, and\n" + "https://docs.pytest.org/en/stable/deprecations.html#calling-fixtures-directly" + ) + fail(message, pytrace=False) + + def _get_wrapped_function(self) -> Callable[..., Any]: + return self._fixture_function + + +@overload +def fixture( + fixture_function: Callable[..., object], + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = ..., +) -> FixtureFunctionDefinition: ... + + +@overload +def fixture( + fixture_function: None = ..., + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = ..., + params: Iterable[object] | None = ..., + autouse: bool = ..., + ids: Sequence[object | None] | Callable[[Any], object | None] | None = ..., + name: str | None = None, +) -> FixtureFunctionMarker: ... + + +def fixture( + fixture_function: FixtureFunction | None = None, + *, + scope: _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Iterable[object] | None = None, + autouse: bool = False, + ids: Sequence[object | None] | Callable[[Any], object | None] | None = None, + name: str | None = None, +) -> FixtureFunctionMarker | FixtureFunctionDefinition: + """Decorator to mark a fixture factory function. + + This decorator can be used, with or without parameters, to define a + fixture function. + + The name of the fixture function can later be referenced to cause its + invocation ahead of running tests: test modules or classes can use the + ``pytest.mark.usefixtures(fixturename)`` marker. + + Test functions can directly use fixture names as input arguments in which + case the fixture instance returned from the fixture function will be + injected. + + Fixtures can provide their values to test functions using ``return`` or + ``yield`` statements. When using ``yield`` the code block after the + ``yield`` statement is executed as teardown code regardless of the test + outcome, and must yield exactly once. + + :param scope: + The scope for which this fixture is shared; one of ``"function"`` + (default), ``"class"``, ``"module"``, ``"package"`` or ``"session"``. + + This parameter may also be a callable which receives ``(fixture_name, config)`` + as parameters, and must return a ``str`` with one of the values mentioned above. + + See :ref:`dynamic scope` in the docs for more information. + + :param params: + An optional list of parameters which will cause multiple invocations + of the fixture function and all of the tests using it.
The current + parameter is available in ``request.param``. + + :param autouse: + If True, the fixture func is activated for all tests that can see it. + If False (the default), an explicit reference is needed to activate + the fixture. + + :param ids: + Sequence of ids each corresponding to the params so that they are + part of the test id. If no ids are provided they will be generated + automatically from the params. + + :param name: + The name of the fixture. This defaults to the name of the decorated + function. If a fixture is used in the same module in which it is + defined, the function name of the fixture will be shadowed by the + function arg that requests the fixture; one way to resolve this is to + name the decorated function ``fixture_<fixturename>`` and then use + ``@pytest.fixture(name='<fixturename>')``. + """ + fixture_marker = FixtureFunctionMarker( + scope=scope, + params=tuple(params) if params is not None else None, + autouse=autouse, + ids=None if ids is None else ids if callable(ids) else tuple(ids), + name=name, + _ispytest=True, + ) + + # Direct decoration. + if fixture_function: + return fixture_marker(fixture_function) + + return fixture_marker + + +def yield_fixture( + fixture_function=None, + *args, + scope="function", + params=None, + autouse=False, + ids=None, + name=None, +): + """(Return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. + """ + warnings.warn(YIELD_FIXTURE, stacklevel=2) + return fixture( + fixture_function, + *args, + scope=scope, + params=params, + autouse=autouse, + ids=ids, + name=name, + ) + + +@fixture(scope="session") +def pytestconfig(request: FixtureRequest) -> Config: + """Session-scoped fixture that returns the session's :class:`pytest.Config` + object. + + Example:: + + def test_foo(pytestconfig): + if pytestconfig.get_verbosity() > 0: + ... + + """ + return request.config + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "usefixtures", + type="args", + default=[], + help="List of default fixtures to be used with this project", + ) + group = parser.getgroup("general") + group.addoption( + "--fixtures", + "--funcargs", + action="store_true", + dest="showfixtures", + default=False, + help="Show available fixtures, sorted by plugin appearance " + "(fixtures with leading '_' are only shown with '-v')", + ) + group.addoption( + "--fixtures-per-test", + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="Show fixtures per test", + ) + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + return None + + +def _get_direct_parametrize_args(node: nodes.Node) -> set[str]: + """Return all direct parametrization arguments of a node, so we don't + mistake them for fixtures. + + Check https://github.com/pytest-dev/pytest/issues/5036. + + These things are done later as well when dealing with parametrization + so this could be improved.
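+ + Illustrative example (hypothetical test):: + + @pytest.mark.parametrize("x", [0, 1]) + @pytest.mark.parametrize("y", [2], indirect=True) + def test_foo(x, y): ... + + Here the helper returns ``{"x"}``: ``y`` is parametrized with + ``indirect=True``, so it is still resolved as a fixture rather than + shadowed by the mark.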
+ """ + parametrize_argnames: set[str] = set() + for marker in node.iter_markers(name="parametrize"): + if not marker.kwargs.get("indirect", False): + p_argnames, _ = ParameterSet._parse_parametrize_args( + *marker.args, **marker.kwargs + ) + parametrize_argnames.update(p_argnames) + return parametrize_argnames + + +def deduplicate_names(*seqs: Iterable[str]) -> tuple[str, ...]: + """De-duplicate the sequence of names while keeping the original order.""" + # Ideally we would use a set, but it does not preserve insertion order. + return tuple(dict.fromkeys(name for seq in seqs for name in seq)) + + +class FixtureManager: + """pytest fixture definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i.e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + def __init__(self, session: Session) -> None: + self.session = session + self.config: Config = session.config + # Maps a fixture name (argname) to all of the FixtureDefs in the test + # suite/plugins defined with this name. Populated by parsefactories(). + # TODO: The order of the FixtureDefs list of each arg is significant, + # explain. + self._arg2fixturedefs: Final[dict[str, list[FixtureDef[Any]]]] = {} + self._holderobjseen: Final[set[object]] = set() + # A mapping from a nodeid to a list of autouse fixtures it defines. + self._nodeid_autousenames: Final[dict[str, list[str]]] = { + "": self.config.getini("usefixtures"), + } + session.config.pluginmanager.register(self, "funcmanage") + + def getfixtureinfo( + self, + node: nodes.Item, + func: Callable[..., object] | None, + cls: type | None, + ) -> FuncFixtureInfo: + """Calculate the :class:`FuncFixtureInfo` for an item. + + If ``func`` is None, or if the item sets an attribute + ``nofuncargs = True``, then ``func`` is not examined at all. + + :param node: + The item requesting the fixtures. + :param func: + The item's function. + :param cls: + If the function is a method, the method's class. 
+ """ + if func is not None and not getattr(node, "nofuncargs", False): + argnames = getfuncargnames(func, name=node.name, cls=cls) + else: + argnames = () + usefixturesnames = self._getusefixturesnames(node) + autousenames = self._getautousenames(node) + initialnames = deduplicate_names(autousenames, usefixturesnames, argnames) + + direct_parametrize_args = _get_direct_parametrize_args(node) + + names_closure, arg2fixturedefs = self.getfixtureclosure( + parentnode=node, + initialnames=initialnames, + ignore_args=direct_parametrize_args, + ) + + return FuncFixtureInfo(argnames, initialnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin, plugin_name: str) -> None: + # Fixtures defined in conftest plugins are only visible to within the + # conftest's directory. This is unlike fixtures in non-conftest plugins + # which have global visibility. So for conftests, construct the base + # nodeid from the plugin name (which is the conftest path). + if plugin_name and plugin_name.endswith("conftest.py"): + # Note: we explicitly do *not* use `plugin.__file__` here -- The + # difference is that plugin_name has the correct capitalization on + # case-insensitive systems (Windows) and other normalization issues + # (issue #11816). + conftestpath = absolutepath(plugin_name) + try: + nodeid = str(conftestpath.parent.relative_to(self.config.rootpath)) + except ValueError: + nodeid = "" + if nodeid == ".": + nodeid = "" + if os.sep != nodes.SEP: + nodeid = nodeid.replace(os.sep, nodes.SEP) + else: + nodeid = None + + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, node: nodes.Node) -> Iterator[str]: + """Return the names of autouse fixtures applicable to node.""" + for parentnode in node.listchain(): + basenames = self._nodeid_autousenames.get(parentnode.nodeid) + if basenames: + yield from basenames + + def _getusefixturesnames(self, node: nodes.Item) -> Iterator[str]: + """Return the names of usefixtures fixtures applicable to node.""" + for marker_node, mark in node.iter_markers_with_node(name="usefixtures"): + if not mark.args: + marker_node.warn( + PytestWarning( + f"usefixtures() in {node.nodeid} without arguments has no effect" + ) + ) + yield from mark.args + + def getfixtureclosure( + self, + parentnode: nodes.Node, + initialnames: tuple[str, ...], + ignore_args: AbstractSet[str], + ) -> tuple[list[str], dict[str, Sequence[FixtureDef[Any]]]]: + # Collect the closure of all fixtures, starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return an arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive). 
+ + fixturenames_closure = list(initialnames) + + arg2fixturedefs: dict[str, Sequence[FixtureDef[Any]]] = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in ignore_args: + continue + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentnode) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + for arg in fixturedefs[-1].argnames: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + def sort_by_scope(arg_name: str) -> Scope: + try: + fixturedefs = arg2fixturedefs[arg_name] + except KeyError: + return Scope.Function + else: + return fixturedefs[-1]._scope + + fixturenames_closure.sort(key=sort_by_scope, reverse=True) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc: Metafunc) -> None: + """Generate new tests based on parametrized fixtures used by the given metafunc""" + + def get_parametrize_mark_argnames(mark: Mark) -> Sequence[str]: + args, _ = ParameterSet._parse_parametrize_args(*mark.args, **mark.kwargs) + return args + + for argname in metafunc.fixturenames: + # Get the FixtureDefs for the argname. + fixture_defs = metafunc._arg2fixturedefs.get(argname) + if not fixture_defs: + # Will raise FixtureLookupError at setup time if not parametrized somewhere + # else (e.g @pytest.mark.parametrize) + continue + + # If the test itself parametrizes using this argname, give it + # precedence. + if any( + argname in get_parametrize_mark_argnames(mark) + for mark in metafunc.definition.iter_markers("parametrize") + ): + continue + + # In the common case we only look at the fixture def with the + # closest scope (last in the list). But if the fixture overrides + # another fixture, while requesting the super fixture, keep going + # in case the super fixture is parametrized (#1953). + for fixturedef in reversed(fixture_defs): + # Fixture is parametrized, apply it and stop. + if fixturedef.params is not None: + metafunc.parametrize( + argname, + fixturedef.params, + indirect=True, + scope=fixturedef.scope, + ids=fixturedef.ids, + ) + break + + # Not requesting the overridden super fixture, stop. + if argname not in fixturedef.argnames: + break + + # Try next super fixture, if any. + + def pytest_collection_modifyitems(self, items: list[nodes.Item]) -> None: + # Separate parametrized setups. + items[:] = reorder_items(items) + + def _register_fixture( + self, + *, + name: str, + func: _FixtureFunc[object], + nodeid: str | None, + scope: Scope | _ScopeName | Callable[[str, Config], _ScopeName] = "function", + params: Sequence[object] | None = None, + ids: tuple[object | None, ...] | Callable[[Any], object | None] | None = None, + autouse: bool = False, + ) -> None: + """Register a fixture + + :param name: + The fixture's name. + :param func: + The fixture's implementation function. + :param nodeid: + The visibility of the fixture. The fixture will be available to the + node with this nodeid and its children in the collection tree. + None means that the fixture is visible to the entire collection tree, + e.g. a fixture defined for general use in a plugin. + :param scope: + The fixture's scope. + :param params: + The fixture's parametrization params. + :param ids: + The fixture's IDs. + :param autouse: + Whether this is an autouse fixture. 
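+ + For illustration, a plugin-defined fixture such as:: + + @pytest.fixture(scope="session", autouse=True) + def record_env(): ... + + ends up registered (roughly) as ``name="record_env"``, ``nodeid=None``, + ``scope="session"`` and ``autouse=True``, i.e. visible to the entire + collection tree.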
+ """ + fixture_def = FixtureDef( + config=self.config, + baseid=nodeid, + argname=name, + func=func, + scope=scope, + params=params, + ids=ids, + _ispytest=True, + _autouse=autouse, + ) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if autouse: + self._nodeid_autousenames.setdefault(nodeid or "", []).append(name) + + @overload + def parsefactories( + self, + node_or_obj: nodes.Node, + ) -> None: + raise NotImplementedError() + + @overload + def parsefactories( + self, + node_or_obj: object, + nodeid: str | None, + ) -> None: + raise NotImplementedError() + + def parsefactories( + self, + node_or_obj: nodes.Node | object, + nodeid: str | NotSetType | None = NOTSET, + ) -> None: + """Collect fixtures from a collection node or object. + + Found fixtures are parsed into `FixtureDef`s and saved. + + If `node_or_object` is a collection node (with an underlying Python + object), the node's object is traversed and the node's nodeid is used to + determine the fixtures' visibility. `nodeid` must not be specified in + this case. + + If `node_or_object` is an object (e.g. a plugin), the object is + traversed and the given `nodeid` is used to determine the fixtures' + visibility. `nodeid` must be specified in this case; None and "" mean + total visibility. + """ + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + assert isinstance(node_or_obj, nodes.Node) + holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] + assert isinstance(node_or_obj.nodeid, str) + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + + # Avoid accessing `@property` (and other descriptors) when iterating fixtures. + if not safe_isclass(holderobj) and not isinstance(holderobj, types.ModuleType): + holderobj_tp: object = type(holderobj) + else: + holderobj_tp = holderobj + + self._holderobjseen.add(holderobj) + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getattr() ignores such exceptions. + obj_ub = safe_getattr(holderobj_tp, name, None) + if type(obj_ub) is FixtureFunctionDefinition: + marker = obj_ub._fixture_function_marker + if marker.name: + fixture_name = marker.name + else: + fixture_name = name + + # OK we know it is a fixture -- now safe to look up on the _instance_. + try: + obj = getattr(holderobj, name) + # if the fixture is named in the decorator we cannot find it in the module + except AttributeError: + obj = obj_ub + + func = obj._get_wrapped_function() + + self._register_fixture( + name=fixture_name, + nodeid=nodeid, + func=func, + scope=marker.scope, + params=marker.params, + ids=marker.ids, + autouse=marker.autouse, + ) + + def getfixturedefs( + self, argname: str, node: nodes.Node + ) -> Sequence[FixtureDef[Any]] | None: + """Get FixtureDefs for a fixture name which are applicable + to a given node. + + Returns None if there are no fixtures at all defined with the given + name. (This is different from the case in which there are fixtures + with the given name, but none applicable to the node. In this case, + an empty result is returned). + + :param argname: Name of the fixture to search for. 
+ :param node: The requesting Node. + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + return tuple(self._matchfactories(fixturedefs, node)) + + def _matchfactories( + self, fixturedefs: Iterable[FixtureDef[Any]], node: nodes.Node + ) -> Iterator[FixtureDef[Any]]: + parentnodeids = {n.nodeid for n in node.iter_parents()} + for fixturedef in fixturedefs: + if fixturedef.baseid in parentnodeids: + yield fixturedef + + +def show_fixtures_per_test(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _show_fixtures_per_test) + + +_PYTEST_DIR = Path(_pytest.__file__).parent + + +def _pretty_fixture_path(invocation_dir: Path, func) -> str: + loc = Path(getlocation(func, invocation_dir)) + prefix = Path("...", "_pytest") + try: + return str(prefix / loc.relative_to(_PYTEST_DIR)) + except ValueError: + return bestrelpath(invocation_dir, loc) + + +def _show_fixtures_per_test(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + def get_best_relpath(func) -> str: + loc = getlocation(func, invocation_dir) + return bestrelpath(invocation_dir, Path(loc)) + + def write_fixture(fixture_def: FixtureDef[object]) -> None: + argname = fixture_def.argname + if verbose <= 0 and argname.startswith("_"): + return + prettypath = _pretty_fixture_path(invocation_dir, fixture_def.func) + tw.write(f"{argname}", green=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + fixture_doc = inspect.getdoc(fixture_def.func) + if fixture_doc: + write_docstring( + tw, + fixture_doc.split("\n\n", maxsplit=1)[0] + if verbose <= 0 + else fixture_doc, + ) + else: + tw.line(" no docstring available", red=True) + + def write_item(item: nodes.Item) -> None: + # Not all items have _fixtureinfo attribute. + info: FuncFixtureInfo | None = getattr(item, "_fixtureinfo", None) + if info is None or not info.name2fixturedefs: + # This test item does not use any fixtures. + return + tw.line() + tw.sep("-", f"fixtures used by {item.name}") + # TODO: Fix this type ignore. + tw.sep("-", f"({get_best_relpath(item.function)})") # type: ignore[attr-defined] + # dict key not used in loop but needed for sorting. + for _, fixturedefs in sorted(info.name2fixturedefs.items()): + assert fixturedefs is not None + if not fixturedefs: + continue + # Last item is expected to be the one used by the test item. 
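+ # (Earlier entries, if any, are same-named fixtures that this one + # overrides, e.g. a plugin fixture shadowed by a conftest fixture.)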
+ write_fixture(fixturedefs[-1]) + + for session_item in session.items: + write_item(session_item) + + +def showfixtures(config: Config) -> int | ExitCode: + from _pytest.main import wrap_session + + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config: Config, session: Session) -> None: + import _pytest.config + + session.perform_collect() + invocation_dir = config.invocation_params.dir + tw = _pytest.config.create_terminal_writer(config) + verbose = config.get_verbosity() + + fm = session._fixturemanager + + available = [] + seen: set[tuple[str, str]] = set() + + for argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, invocation_dir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append( + ( + len(fixturedef.baseid), + fixturedef.func.__module__, + _pretty_fixture_path(invocation_dir, fixturedef.func), + fixturedef.argname, + fixturedef, + ) + ) + + available.sort() + currentmodule = None + for baseid, module, prettypath, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", f"fixtures defined from {module}") + currentmodule = module + if verbose <= 0 and argname.startswith("_"): + continue + tw.write(f"{argname}", green=True) + if fixturedef.scope != "function": + tw.write(f" [{fixturedef.scope} scope]", cyan=True) + tw.write(f" -- {prettypath}", yellow=True) + tw.write("\n") + doc = inspect.getdoc(fixturedef.func) + if doc: + write_docstring( + tw, doc.split("\n\n", maxsplit=1)[0] if verbose <= 0 else doc + ) + else: + tw.line(" no docstring available", red=True) + tw.line() + + +def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None: + for line in doc.split("\n"): + tw.line(indent + line) diff --git a/venv/lib/python3.10/site-packages/_pytest/freeze_support.py b/venv/lib/python3.10/site-packages/_pytest/freeze_support.py new file mode 100644 index 0000000000000000000000000000000000000000..959ff071d86be285aecf76a1c49c1e5de27c5cd2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/freeze_support.py @@ -0,0 +1,45 @@ +"""Provides a function to report all internal modules for using freezing +tools.""" + +from __future__ import annotations + +from collections.abc import Iterator +import types + + +def freeze_includes() -> list[str]: + """Return a list of module names used by pytest that should be + included by cx_freeze.""" + import _pytest + + result = list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules( + package: str | types.ModuleType, + prefix: str = "", +) -> Iterator[str]: + """Iterate over the names of all modules that can be found in the given + package, recursively. + + >>> import _pytest + >>> list(_iter_all_modules(_pytest)) + ['_pytest._argcomplete', '_pytest._code.code', ...] + """ + import os + import pkgutil + + if isinstance(package, str): + path = package + else: + # Type ignored because typeshed doesn't define ModuleType.__path__ + # (only defined on packages). + package_path = package.__path__ + path, prefix = package_path[0], package.__name__ + "." 
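+ # Walk the package's immediate modules, recursing into subpackages so + # that e.g. the "_pytest._code" package contributes "_pytest._code.code" + # with the parent prefix prepended.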
+ for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + "."): + yield prefix + m + else: + yield prefix + name diff --git a/venv/lib/python3.10/site-packages/_pytest/helpconfig.py b/venv/lib/python3.10/site-packages/_pytest/helpconfig.py new file mode 100644 index 0000000000000000000000000000000000000000..b5ac0e6a50ca3dd72743366c6e694f7eada20918 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/helpconfig.py @@ -0,0 +1,283 @@ +# mypy: allow-untyped-defs +"""Version info, help messages, tracing configuration.""" + +from __future__ import annotations + +from argparse import Action +from collections.abc import Generator +import os +import sys + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import PrintHelp +from _pytest.config.argparsing import Parser +from _pytest.terminal import TerminalReporter +import pytest + + +class HelpAction(Action): + """An argparse Action that will raise an exception in order to skip the + rest of the argument parsing when --help is passed. + + This prevents argparse from quitting due to missing required arguments + when any are defined, for example by ``pytest_addoption``. + This is similar to the way that the builtin argparse --help option is + implemented by raising SystemExit. + """ + + def __init__(self, option_strings, dest=None, default=False, help=None): + super().__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + nargs=0, + help=help, + ) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + # We should only skip the rest of the parsing after preparse is done. + if getattr(parser._parser, "after_preparse", False): + raise PrintHelp + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--version", + "-V", + action="count", + default=0, + dest="version", + help="Display pytest version and information about plugins. " + "When given twice, also display information about plugins.", + ) + group._addoption( # private to use reserved lower-case short option + "-h", + "--help", + action=HelpAction, + dest="help", + help="Show help message and configuration info", + ) + group._addoption( # private to use reserved lower-case short option + "-p", + action="append", + dest="plugins", + default=[], + metavar="name", + help="Early-load given plugin module name or entry point (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`. See also --disable-plugin-autoload.", + ) + group.addoption( + "--disable-plugin-autoload", + action="store_true", + default=False, + help="Disable plugin auto-loading through entry point packaging metadata. " + "Only plugins explicitly specified in -p or env var PYTEST_PLUGINS will be loaded.", + ) + group.addoption( + "--traceconfig", + "--trace-config", + action="store_true", + default=False, + help="Trace considerations of conftest.py files", + ) + group.addoption( + "--debug", + action="store", + nargs="?", + const="pytestdebug.log", + dest="debug", + metavar="DEBUG_FILE_NAME", + help="Store internal tracing debug information in this log file. " + "This file is opened with 'w' and truncated as a result, care advised. 
" + "Default: pytestdebug.log.", + ) + group._addoption( # private to use reserved lower-case short option + "-o", + "--override-ini", + dest="override_ini", + action="append", + help='Override ini option with "option=value" style, ' + "e.g. `-o xfail_strict=True -o cache_dir=cache`.", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_cmdline_parse() -> Generator[None, Config, Config]: + config = yield + + if config.option.debug: + # --debug | --debug was provided. + path = config.option.debug + debugfile = open(path, "w", encoding="utf-8") + debugfile.write( + "versions pytest-{}, " + "python-{}\ninvocation_dir={}\ncwd={}\nargs={}\n\n".format( + pytest.__version__, + ".".join(map(str, sys.version_info)), + config.invocation_params.dir, + os.getcwd(), + config.invocation_params.args, + ) + ) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write(f"writing pytest debug information to {path}\n") + + def unset_tracing() -> None: + debugfile.close() + sys.stderr.write(f"wrote pytest debug information to {debugfile.name}\n") + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + + return config + + +def showversion(config: Config) -> None: + if config.option.version > 1: + sys.stdout.write( + f"This is pytest version {pytest.__version__}, imported from {pytest.__file__}\n" + ) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stdout.write(line + "\n") + else: + sys.stdout.write(f"pytest {pytest.__version__}\n") + + +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.version > 0: + showversion(config) + return 0 + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return 0 + return None + + +def showhelp(config: Config) -> None: + import textwrap + + reporter: TerminalReporter | None = config.pluginmanager.get_plugin( + "terminalreporter" + ) + assert reporter is not None + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line( + "[pytest] ini-options in the first " + "pytest.ini|tox.ini|setup.cfg|pyproject.toml file found:" + ) + tw.line() + + columns = tw.fullwidth # costly call + indent_len = 24 # based on argparse's max_help_position=24 + indent = " " * indent_len + for name in config._parser._ininames: + help, type, default = config._parser._inidict[name] + if type is None: + type = "string" + if help is None: + raise TypeError(f"help argument cannot be None for {name}") + spec = f"{name} ({type}):" + tw.write(f" {spec}") + spec_len = len(spec) + if spec_len > (indent_len - 3): + # Display help starting at a new line. + tw.line() + helplines = textwrap.wrap( + help, + columns, + initial_indent=indent, + subsequent_indent=indent, + break_on_hyphens=False, + ) + + for line in helplines: + tw.line(line) + else: + # Display help starting after the spec, following lines indented. 
+ tw.write(" " * (indent_len - spec_len - 2)) + wrapped = textwrap.wrap(help, columns - indent_len, break_on_hyphens=False) + + if wrapped: + tw.line(wrapped[0]) + for line in wrapped[1:]: + tw.line(indent + line) + + tw.line() + tw.line("Environment variables:") + vars = [ + ( + "CI", + "When set (regardless of value), pytest knows it is running in a " + "CI process and does not truncate summary info", + ), + ("BUILD_NUMBER", "Equivalent to CI"), + ("PYTEST_ADDOPTS", "Extra command line options"), + ("PYTEST_PLUGINS", "Comma-separated plugins to load during startup"), + ("PYTEST_DISABLE_PLUGIN_AUTOLOAD", "Set to disable plugin auto-loading"), + ("PYTEST_DEBUG", "Set to enable debug tracing of pytest's internals"), + ] + for name, help in vars: + tw.line(f" {name:<24} {help}") + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line( + "(shown according to specified file_or_dir or current dir " + "if not specified; fixtures with leading '_' are only shown " + "with the '-v' option" + ) + + for warningreport in reporter.stats.get("warnings", []): + tw.line("warning : " + warningreport.message, red=True) + + +conftest_options = [("pytest_plugins", "list of plugin names to load")] + + +def getpluginversioninfo(config: Config) -> list[str]: + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("registered third-party plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, "__file__", repr(plugin)) + content = f"{dist.project_name}-{dist.version} at {loc}" + lines.append(" " + content) + return lines + + +def pytest_report_header(config: Config) -> list[str]: + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append(f"using: pytest-{pytest.__version__}") + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, "__file__"): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(f" {name:<20}: {r}") + return lines diff --git a/venv/lib/python3.10/site-packages/_pytest/hookspec.py b/venv/lib/python3.10/site-packages/_pytest/hookspec.py new file mode 100644 index 0000000000000000000000000000000000000000..12653ea11fed64a9d83a8e0a71bd1d2435a8788d --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/hookspec.py @@ -0,0 +1,1333 @@ +# mypy: allow-untyped-defs +# ruff: noqa: T100 +"""Hook specifications for pytest plugins which are invoked by pytest itself +and by builtin plugins.""" + +from __future__ import annotations + +from collections.abc import Mapping +from collections.abc import Sequence +from pathlib import Path +from typing import Any +from typing import TYPE_CHECKING + +from pluggy import HookspecMarker + +from .deprecated import HOOK_LEGACY_PATH_ARG + + +if TYPE_CHECKING: + import pdb + from typing import Literal + import warnings + + from _pytest._code.code import ExceptionInfo + from _pytest._code.code import ExceptionRepr + from _pytest.compat import LEGACY_PATH + from _pytest.config import _PluggyPlugin + from _pytest.config import Config + from _pytest.config import ExitCode + from _pytest.config import PytestPluginManager + from _pytest.config.argparsing import Parser + from _pytest.fixtures import FixtureDef + from _pytest.fixtures import SubRequest + from _pytest.main import Session + from 
_pytest.nodes import Collector + from _pytest.nodes import Item + from _pytest.outcomes import Exit + from _pytest.python import Class + from _pytest.python import Function + from _pytest.python import Metafunc + from _pytest.python import Module + from _pytest.reports import CollectReport + from _pytest.reports import TestReport + from _pytest.runner import CallInfo + from _pytest.terminal import TerminalReporter + from _pytest.terminal import TestShortLogReport + + +hookspec = HookspecMarker("pytest") + +# ------------------------------------------------------------------------- +# Initialization hooks called for every plugin +# ------------------------------------------------------------------------- + + +@hookspec(historic=True) +def pytest_addhooks(pluginmanager: PytestPluginManager) -> None: + """Called at plugin registration time to allow adding new hooks via a call to + :func:`pluginmanager.add_hookspecs(module_or_class, prefix) <pytest.PytestPluginManager.add_hookspecs>`. + + :param pluginmanager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + """ + + +@hookspec(historic=True) +def pytest_plugin_registered( + plugin: _PluggyPlugin, + plugin_name: str, + manager: PytestPluginManager, +) -> None: + """A new pytest plugin got registered. + + :param plugin: The plugin module or instance. + :param plugin_name: The name by which the plugin is registered. + :param manager: The pytest plugin manager. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered, once for each plugin registered thus far + (including itself!), and for all plugins thereafter when they are + registered. + """ + + +@hookspec(historic=True) +def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None: + """Register argparse-style options and ini-style config values, + called once at the beginning of a test run. + + :param parser: + To add command line options, call + :py:func:`parser.addoption(...) <pytest.Parser.addoption>`. + To add ini-file values call :py:func:`parser.addini(...) + <pytest.Parser.addini>`. + + :param pluginmanager: + The pytest plugin manager, which can be used to install :py:func:`~pytest.hookspec`'s + or :py:func:`~pytest.hookimpl`'s and allow one plugin to call another plugin's hooks + to change how command line options are added. + + Options can later be accessed through the + :py:class:`config <pytest.Config>` object, respectively: + + - :py:func:`config.getoption(name) <pytest.Config.getoption>` to + retrieve the value of a command line option. + + - :py:func:`config.getini(name) <pytest.Config.getini>` to retrieve + a value read from an ini-style file. + + The config object is passed around on many internal objects via the ``.config`` + attribute or can be retrieved as the ``pytestconfig`` fixture. + + .. note:: + This hook is incompatible with hook wrappers. + + Use in conftest plugins + ======================= + + If a conftest plugin implements this hook, it will be called immediately + when the conftest is registered. + + This hook is only called for :ref:`initial conftests <pluginorder>`. + """ + + +@hookspec(historic=True) +def pytest_configure(config: Config) -> None: + """Allow plugins and conftest files to perform initial configuration. + + .. note:: + This hook is incompatible with hook wrappers. + + :param config: The pytest config object.
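+ + Example (a common sketch; the marker name is illustrative):: + + def pytest_configure(config): + config.addinivalue_line( + "markers", "slow: mark test as slow to run" + )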
+ + Use in conftest plugins + ======================= + + This hook is called for every :ref:`initial conftest <pluginorder>` file + after command line options have been parsed. After that, the hook is called + for other conftest files as they are registered. + """ + + +# ------------------------------------------------------------------------- +# Bootstrapping hooks called for plugins registered early enough: +# internal and 3rd party plugins. +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_cmdline_parse( + pluginmanager: PytestPluginManager, args: list[str] +) -> Config | None: + """Return an initialized :class:`~pytest.Config`, parsing the specified args. + + Stops at first non-None result, see :ref:`firstresult`. + + .. note:: + This hook is only called for plugin classes passed to the + ``plugins`` arg when using `pytest.main`_ to perform an in-process + test run. + + :param pluginmanager: The pytest plugin manager. + :param args: List of arguments passed on the command line. + :returns: A pytest config object. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +def pytest_load_initial_conftests( + early_config: Config, parser: Parser, args: list[str] +) -> None: + """Called to implement the loading of :ref:`initial conftest files + <pluginorder>` ahead of command line option parsing. + + :param early_config: The pytest config object. + :param args: Arguments passed on the command line. + :param parser: To add command line options. + + Use in conftest plugins + ======================= + + This hook is not called for conftest files. + """ + + +@hookspec(firstresult=True) +def pytest_cmdline_main(config: Config) -> ExitCode | int | None: + """Called for performing the main command line action. + + The default implementation will invoke the configure hooks and + :hook:`pytest_runtestloop`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :returns: The exit code. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests <pluginorder>`. + """ + + +# ------------------------------------------------------------------------- +# collection hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_collection(session: Session) -> object | None: + """Perform the collection phase for the given session. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + The default collection phase is this (see individual hooks for full details): + + 1. Starting from ``session`` as the initial collector: + + 1. ``pytest_collectstart(collector)`` + 2. ``report = pytest_make_collect_report(collector)`` + 3. ``pytest_exception_interact(collector, call, report)`` if an interactive exception occurred + 4. For each collected node: + + 1. If an item, ``pytest_itemcollected(item)`` + 2. If a collector, recurse into it. + + 5. ``pytest_collectreport(report)`` + + 2. ``pytest_collection_modifyitems(session, config, items)`` + + 1. ``pytest_deselected(items)`` for any deselected items (may be called multiple times) + + 3. ``pytest_collection_finish(session)`` + 4. Set ``session.items`` to the list of collected items + 5.
Set ``session.testscollected`` to the number of collected items + + You can implement this hook to only perform some action before collection, + for example the terminal plugin uses it to start displaying the collection + counter (and returns `None`). + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + This hook is only called for :ref:`initial conftests <pluginorder>`. + """ + + +def pytest_collection_modifyitems( + session: Session, config: Config, items: list[Item] +) -> None: + """Called after collection has been performed. May filter or re-order + the items in-place. + + When items are deselected (filtered out from ``items``), + the hook :hook:`pytest_deselected` must be called explicitly + with the deselected items to properly notify other plugins, + e.g. with ``config.hook.pytest_deselected(items=deselected_items)``. + + :param session: The pytest session object. + :param config: The pytest config object. + :param items: List of item objects. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +def pytest_collection_finish(session: Session) -> None: + """Called after collection has been performed and modified. + + :param session: The pytest session object. + + Use in conftest plugins + ======================= + + Any conftest plugin can implement this hook. + """ + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="collection_path" + ), + }, +) +def pytest_ignore_collect( + collection_path: Path, path: LEGACY_PATH, config: Config +) -> bool | None: + """Return ``True`` to ignore this path for collection. + + Return ``None`` to let other plugins ignore the path for collection. + + Returning ``False`` will forcefully *not* ignore this path for collection, + without giving a chance for other plugins to ignore this path. + + This hook is consulted for all files and directories prior to calling + more specific hooks. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collection_path: The path to analyze. + :type collection_path: pathlib.Path + :param path: The path to analyze (deprecated). + :param config: The pytest config object. + + .. versionchanged:: 7.0.0 + The ``collection_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot ignore itself!). + """ + + +@hookspec(firstresult=True) +def pytest_collect_directory(path: Path, parent: Collector) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given directory, or None if + not relevant. + + .. versionadded:: 8.0 + + For best results, the returned collector should be a subclass of + :class:`~pytest.Directory`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + Stops at first non-None result, see :ref:`firstresult`. + + :param path: The path to analyze. + :type path: pathlib.Path + + See :ref:`custom directory collectors` for a simple example of use of this + hook. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook.
For a given collection path, only + conftest files in parent directories of the collection path are consulted + (if the path is a directory, its own conftest file is *not* consulted - a + directory cannot collect itself!). + """ + + +@hookspec( + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="file_path" + ), + }, +) +def pytest_collect_file( + file_path: Path, path: LEGACY_PATH, parent: Collector +) -> Collector | None: + """Create a :class:`~pytest.Collector` for the given path, or None if not relevant. + + For best results, the returned collector should be a subclass of + :class:`~pytest.File`, but this is not required. + + The new node needs to have the specified ``parent`` as a parent. + + :param file_path: The path to analyze. + :type file_path: pathlib.Path + :param path: The path to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``file_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. The ``path`` parameter + has been deprecated. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given file path, only + conftest files in parent directories of the file path are consulted. + """ + + +# logging hooks for collection + + +def pytest_collectstart(collector: Collector) -> None: + """Collector starts collecting. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_itemcollected(item: Item) -> None: + """We just collected a test item. + + :param item: + The item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_collectreport(report: CollectReport) -> None: + """Collector finished collecting. + + :param report: + The collect report. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted. + """ + + +def pytest_deselected(items: Sequence[Item]) -> None: + """Called for deselected test items, e.g. by keyword. + + Note that this hook has two integration aspects for plugins: + + - it can be *implemented* to be notified of deselected items + - it must be *called* from :hook:`pytest_collection_modifyitems` + implementations when items are deselected (to properly notify other plugins). + + May be called multiple times. + + :param items: + The items. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_make_collect_report(collector: Collector) -> CollectReport | None: + """Perform :func:`collector.collect() <pytest.Collector.collect>` and return + a :class:`~pytest.CollectReport`. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The collector. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories are + consulted.
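+ + For illustration, a hook wrapper sketch that post-processes the default + report (new-style wrapper semantics from pluggy):: + + @pytest.hookimpl(wrapper=True) + def pytest_make_collect_report(collector): + report = yield + # e.g. inspect report.result or report.outcome here + return report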
+ """ + + +# ------------------------------------------------------------------------- +# Python test function related hooks +# ------------------------------------------------------------------------- + + +@hookspec( + firstresult=True, + warn_on_impl_args={ + "path": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="path", pathlib_path_arg="module_path" + ), + }, +) +def pytest_pycollect_makemodule( + module_path: Path, path: LEGACY_PATH, parent +) -> Module | None: + """Return a :class:`pytest.Module` collector or None for the given path. + + This hook will be called for each matching test module path. + The :hook:`pytest_collect_file` hook needs to be used if you want to + create test modules for files that do not match as a test module. + + Stops at first non-None result, see :ref:`firstresult`. + + :param module_path: The path of the module to collect. + :type module_path: pathlib.Path + :param path: The path of the module to collect (deprecated). + + .. versionchanged:: 7.0.0 + The ``module_path`` parameter was added as a :class:`pathlib.Path` + equivalent of the ``path`` parameter. + + The ``path`` parameter has been deprecated in favor of ``fspath``. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given parent collector, + only conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | Item | Collector | list[Item | Collector]: + """Return a custom item/collector for a Python object in a module, or None. + + Stops at first non-None result, see :ref:`firstresult`. + + :param collector: + The module/class collector. + :param name: + The name of the object in the module/class. + :param obj: + The object. + :returns: + The created items/collectors. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given collector, only + conftest files in the collector's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + """Call underlying test function. + + Stops at first non-None result, see :ref:`firstresult`. + + :param pyfuncitem: + The function item. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only + conftest files in the item's directory and its parent directories + are consulted. + """ + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + """Generate (multiple) parametrized calls to a test function. + + :param metafunc: + The :class:`~pytest.Metafunc` helper for the test function. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given function definition, + only conftest files in the functions's directory and its parent directories + are consulted. + """ + + +@hookspec(firstresult=True) +def pytest_make_parametrize_id(config: Config, val: object, argname: str) -> str | None: + """Return a user-friendly string representation of the given ``val`` + that will be used by @pytest.mark.parametrize calls, or None if the hook + doesn't know about ``val``. + + The parameter name is available as ``argname``, if required. + + Stops at first non-None result, see :ref:`firstresult`. + + :param config: The pytest config object. + :param val: The parametrized value. 
+ :param argname: The automatic parameter name produced by pytest. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# runtest related hooks +# ------------------------------------------------------------------------- + + +@hookspec(firstresult=True) +def pytest_runtestloop(session: Session) -> object | None: + """Perform the main runtest loop (after collection finished). + + The default hook implementation performs the runtest protocol for all items + collected in the session (``session.items``), unless the collection failed + or the ``collectonly`` pytest option is set. + + If at any point :py:func:`pytest.exit` is called, the loop is + terminated immediately. + + If at any point ``session.shouldfail`` or ``session.shouldstop`` are set, the + loop is terminated after the runtest protocol for the current item is finished. + + :param session: The pytest session object. + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +@hookspec(firstresult=True) +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> object | None: + """Perform the runtest protocol for a single test item. + + The default runtest protocol is this (see individual hooks for full details): + + - ``pytest_runtest_logstart(nodeid, location)`` + + - Setup phase: + - ``call = pytest_runtest_setup(item)`` (wrapped in ``CallInfo(when="setup")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Call phase, if the setup passed and the ``setuponly`` pytest option is not set: + - ``call = pytest_runtest_call(item)`` (wrapped in ``CallInfo(when="call")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - Teardown phase: + - ``call = pytest_runtest_teardown(item, nextitem)`` (wrapped in ``CallInfo(when="teardown")``) + - ``report = pytest_runtest_makereport(item, call)`` + - ``pytest_runtest_logreport(report)`` + - ``pytest_exception_interact(call, report)`` if an interactive exception occurred + + - ``pytest_runtest_logfinish(nodeid, location)`` + + :param item: Test item for which the runtest protocol is performed. + :param nextitem: The scheduled-to-be-next test item (or None if this is the end my friend). + + Stops at first non-None result, see :ref:`firstresult`. + The return value is not used, but only stops further processing. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_runtest_logstart(nodeid: str, location: tuple[str, int | None, str]) -> None: + """Called at the start of running the runtest protocol for a single item. + + See :hook:`pytest_runtest_protocol` for a description of the runtest protocol. + + :param nodeid: Full node ID of the item. + :param location: A tuple of ``(filename, lineno, testname)`` + where ``filename`` is a file path relative to ``config.rootpath`` + and ``lineno`` is 0-based. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. 
For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+def pytest_runtest_logfinish(
+    nodeid: str, location: tuple[str, int | None, str]
+) -> None:
+    """Called at the end of running the runtest protocol for a single item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    :param nodeid: Full node ID of the item.
+    :param location: A tuple of ``(filename, lineno, testname)``
+        where ``filename`` is a file path relative to ``config.rootpath``
+        and ``lineno`` is 0-based.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+def pytest_runtest_setup(item: Item) -> None:
+    """Called to perform the setup phase for a test item.
+
+    The default implementation runs ``setup()`` on ``item`` and all of its
+    parents (which haven't been set up yet). This includes obtaining the
+    values of fixtures required by the item (which haven't been obtained
+    yet).
+
+    :param item:
+        The item.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+def pytest_runtest_call(item: Item) -> None:
+    """Called to run the test for a test item (the call phase).
+
+    The default implementation calls ``item.runtest()``.
+
+    :param item:
+        The item.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None:
+    """Called to perform the teardown phase for a test item.
+
+    The default implementation runs the finalizers and calls ``teardown()``
+    on ``item`` and all of its parents (which need to be torn down). This
+    includes running the teardown phase of fixtures required by the item (if
+    they go out of scope).
+
+    :param item:
+        The item.
+    :param nextitem:
+        The scheduled-to-be-next test item (None if no further test item is
+        scheduled). This argument is used to perform exact teardowns, i.e.
+        calling just enough finalizers so that nextitem only needs to call
+        setup functions.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport | None:
+    """Called to create a :class:`~pytest.TestReport` for each of
+    the setup, call and teardown runtest phases of a test item.
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    :param item: The item.
+    :param call: The :class:`~pytest.CallInfo` for the phase.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+def pytest_runtest_logreport(report: TestReport) -> None:
+    """Process the :class:`~pytest.TestReport` produced for each
+    of the setup, call and teardown runtest phases of an item.
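+
+    For example, a conftest plugin could tally outcomes per phase (a minimal
+    sketch; the ``PHASE_OUTCOMES`` dict is illustrative, not part of pytest):
+
+    .. code-block:: python
+
+        PHASE_OUTCOMES = {}
+
+
+        def pytest_runtest_logreport(report):
+            # report.when is "setup", "call" or "teardown";
+            # report.outcome is "passed", "failed" or "skipped".
+            key = (report.when, report.outcome)
+            PHASE_OUTCOMES[key] = PHASE_OUTCOMES.get(key, 0) + 1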
+
+    See :hook:`pytest_runtest_protocol` for a description of the runtest protocol.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in the item's directory and its parent directories are consulted.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_to_serializable(
+    config: Config,
+    report: CollectReport | TestReport,
+) -> dict[str, Any] | None:
+    """Serialize the given report object into a data structure suitable for
+    sending over the wire, e.g. converted to JSON.
+
+    :param config: The pytest config object.
+    :param report: The report.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. The exact details may depend
+    on the plugin which calls the hook.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_from_serializable(
+    config: Config,
+    data: dict[str, Any],
+) -> CollectReport | TestReport | None:
+    """Restore a report object previously serialized with
+    :hook:`pytest_report_to_serializable`.
+
+    :param config: The pytest config object.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. The exact details may depend
+    on the plugin which calls the hook.
+    """
+
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(
+    fixturedef: FixtureDef[Any], request: SubRequest
+) -> object | None:
+    """Perform fixture setup execution.
+
+    :param fixturedef:
+        The fixture definition object.
+    :param request:
+        The fixture request object.
+    :returns:
+        The return value of the call to the fixture function.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+
+    .. note::
+        If the fixture function returns None, other implementations of
+        this hook function will continue to be called, according to the
+        behavior of the :ref:`firstresult` option.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given fixture, only
+    conftest files in the fixture scope's directory and its parent directories
+    are consulted.
+    """
+
+
+def pytest_fixture_post_finalizer(
+    fixturedef: FixtureDef[Any], request: SubRequest
+) -> None:
+    """Called after fixture teardown, but before the cache is cleared, so
+    the fixture result ``fixturedef.cached_result`` is still available (not
+    ``None``).
+
+    :param fixturedef:
+        The fixture definition object.
+    :param request:
+        The fixture request object.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given fixture, only
+    conftest files in the fixture scope's directory and its parent directories
+    are consulted.
+    """
+
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_sessionstart(session: Session) -> None:
+    """Called after the ``Session`` object has been created and before performing collection
+    and entering the run test loop.
+
+    :param session: The pytest session object.
+
+    Use in conftest plugins
+    =======================
+
+    This hook is only called for :ref:`initial conftests <pluginorder>`.
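+
+    For example, an initial conftest might record when the session began (a
+    minimal sketch; the ``_session_started_at`` attribute is illustrative,
+    not part of pytest):
+
+    .. code-block:: python
+
+        import time
+
+
+        def pytest_sessionstart(session):
+            # Stash a wall-clock timestamp for later reporting.
+            session.config._session_started_at = time.time()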
+ """ + + +def pytest_sessionfinish( + session: Session, + exitstatus: int | ExitCode, +) -> None: + """Called after whole test run finished, right before returning the exit status to the system. + + :param session: The pytest session object. + :param exitstatus: The status which pytest will return to the system. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +def pytest_unconfigure(config: Config) -> None: + """Called before test process is exited. + + :param config: The pytest config object. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. + """ + + +# ------------------------------------------------------------------------- +# hooks for customizing the assert methods +# ------------------------------------------------------------------------- + + +def pytest_assertrepr_compare( + config: Config, op: str, left: object, right: object +) -> list[str] | None: + """Return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented slightly, the intention is for the first line to be a summary. + + :param config: The pytest config object. + :param op: The operator, e.g. `"=="`, `"!="`, `"not in"`. + :param left: The left operand. + :param right: The right operand. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +def pytest_assertion_pass(item: Item, lineno: int, orig: str, expl: str) -> None: + """Called whenever an assertion passes. + + .. versionadded:: 5.0 + + Use this hook to do some processing after a passing assertion. + The original assertion information is available in the `orig` string + and the pytest introspected assertion information is available in the + `expl` string. + + This hook must be explicitly enabled by the ``enable_assertion_pass_hook`` + ini-file option: + + .. code-block:: ini + + [pytest] + enable_assertion_pass_hook=true + + You need to **clean the .pyc** files in your project directory and interpreter libraries + when enabling this option, as assertions will require to be re-written. + + :param item: pytest item object of current test. + :param lineno: Line number of the assert statement. + :param orig: String with the original assertion. + :param expl: String with the assert explanation. + + Use in conftest plugins + ======================= + + Any conftest file can implement this hook. For a given item, only conftest + files in the item's directory and its parent directories are consulted. + """ + + +# ------------------------------------------------------------------------- +# Hooks for influencing reporting (invoked from _pytest_terminal). +# ------------------------------------------------------------------------- + + +@hookspec( + warn_on_impl_args={ + "startdir": HOOK_LEGACY_PATH_ARG.format( + pylib_path_arg="startdir", pathlib_path_arg="start_path" + ), + }, +) +def pytest_report_header( # type:ignore[empty-body] + config: Config, start_path: Path, startdir: LEGACY_PATH +) -> str | list[str]: + """Return a string or list of strings to be displayed as header info for terminal reporting. + + :param config: The pytest config object. 
+    :param start_path: The starting dir.
+    :type start_path: pathlib.Path
+    :param startdir: The starting dir (deprecated).
+
+    .. note::
+
+        Lines returned by a plugin are displayed before those of plugins which
+        ran before it.
+        If you want to have your line(s) displayed first, use
+        :ref:`trylast=True <plugin-hookorder>`.
+
+    .. versionchanged:: 7.0.0
+        The ``start_path`` parameter was added as a :class:`pathlib.Path`
+        equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+        has been deprecated.
+
+    Use in conftest plugins
+    =======================
+
+    This hook is only called for :ref:`initial conftests <pluginorder>`.
+    """
+
+
+@hookspec(
+    warn_on_impl_args={
+        "startdir": HOOK_LEGACY_PATH_ARG.format(
+            pylib_path_arg="startdir", pathlib_path_arg="start_path"
+        ),
+    },
+)
+def pytest_report_collectionfinish(  # type:ignore[empty-body]
+    config: Config,
+    start_path: Path,
+    startdir: LEGACY_PATH,
+    items: Sequence[Item],
+) -> str | list[str]:
+    """Return a string or list of strings to be displayed after collection
+    has finished successfully.
+
+    These strings will be displayed after the standard "collected X items" message.
+
+    .. versionadded:: 3.2
+
+    :param config: The pytest config object.
+    :param start_path: The starting dir.
+    :type start_path: pathlib.Path
+    :param startdir: The starting dir (deprecated).
+    :param items: List of pytest items that are going to be executed; this list should not be modified.
+
+    .. note::
+
+        Lines returned by a plugin are displayed before those of plugins which
+        ran before it.
+        If you want to have your line(s) displayed first, use
+        :ref:`trylast=True <plugin-hookorder>`.
+
+    .. versionchanged:: 7.0.0
+        The ``start_path`` parameter was added as a :class:`pathlib.Path`
+        equivalent of the ``startdir`` parameter. The ``startdir`` parameter
+        has been deprecated.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(  # type:ignore[empty-body]
+    report: CollectReport | TestReport, config: Config
+) -> TestShortLogReport | tuple[str, str, str | tuple[str, Mapping[str, bool]]]:
+    """Return result-category, shortletter and verbose word for status
+    reporting.
+
+    The result-category is a category in which to count the result, for
+    example "passed", "skipped", "error" or the empty string.
+
+    The shortletter is shown as testing progresses, for example ".", "s",
+    "E" or the empty string.
+
+    The verbose word is shown as testing progresses in verbose mode, for
+    example "PASSED", "SKIPPED", "ERROR" or the empty string.
+
+    pytest may style these implicitly according to the report outcome.
+    To provide explicit styling, return a tuple for the verbose word,
+    for example ``"rerun", "R", ("RERUN", {"yellow": True})``.
+
+    :param report: The report object whose status is to be returned.
+    :param config: The pytest config object.
+    :returns: The test status.
+
+    Stops at first non-None result, see :ref:`firstresult`.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+def pytest_terminal_summary(
+    terminalreporter: TerminalReporter,
+    exitstatus: ExitCode,
+    config: Config,
+) -> None:
+    """Add a section to terminal summary reporting.
+
+    :param terminalreporter: The internal terminal reporter object.
+    :param exitstatus: The exit status that will be reported back to the OS.
+    :param config: The pytest config object.
+
+    .. versionadded:: 4.2
+        The ``config`` parameter.
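+
+    For example, a conftest plugin can append its own section using the
+    reporter's write helpers (a minimal sketch; the section title is
+    illustrative):
+
+    .. code-block:: python
+
+        def pytest_terminal_summary(terminalreporter, exitstatus, config):
+            terminalreporter.write_sep("-", "my summary")
+            terminalreporter.write_line(f"finished with exit status {exitstatus!r}")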
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+@hookspec(historic=True)
+def pytest_warning_recorded(
+    warning_message: warnings.WarningMessage,
+    when: Literal["config", "collect", "runtest"],
+    nodeid: str,
+    location: tuple[str, int, str] | None,
+) -> None:
+    """Process a warning captured by the internal pytest warnings plugin.
+
+    :param warning_message:
+        The captured warning. This is the same object produced by :class:`warnings.catch_warnings`,
+        and contains the same attributes as the parameters of :py:func:`warnings.showwarning`.
+
+    :param when:
+        Indicates when the warning was captured. Possible values:
+
+        * ``"config"``: during pytest configuration/initialization stage.
+        * ``"collect"``: during test collection.
+        * ``"runtest"``: during test execution.
+
+    :param nodeid:
+        Full id of the item. Empty string for warnings that are not specific to
+        a particular node.
+
+    :param location:
+        When available, holds information about the execution context of the captured
+        warning (filename, linenumber, function). ``function`` evaluates to
+        ``<module>`` when the execution context is at the module level.
+
+    .. versionadded:: 6.0
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. If the warning is specific to a
+    particular node, only conftest files in parent directories of the node are
+    consulted.
+    """
+
+
+# -------------------------------------------------------------------------
+# Hooks for influencing skipping
+# -------------------------------------------------------------------------
+
+
+def pytest_markeval_namespace(  # type:ignore[empty-body]
+    config: Config,
+) -> dict[str, Any]:
+    """Called when constructing the globals dictionary used for
+    evaluating string conditions in xfail/skipif markers.
+
+    This is useful when the condition for a marker requires
+    objects that are expensive or impossible to obtain during
+    collection time, which is required by normal boolean
+    conditions.
+
+    .. versionadded:: 6.2
+
+    :param config: The pytest config object.
+    :returns: A dictionary of additional globals to add.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given item, only conftest
+    files in parent directories of the item are consulted.
+    """
+
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+
+def pytest_internalerror(
+    excrepr: ExceptionRepr,
+    excinfo: ExceptionInfo[BaseException],
+) -> bool | None:
+    """Called for internal errors.
+
+    Return True to suppress the fallback handling of printing an
+    INTERNALERROR message directly to sys.stderr.
+
+    :param excrepr: The exception repr object.
+    :param excinfo: The exception info.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+def pytest_keyboard_interrupt(
+    excinfo: ExceptionInfo[KeyboardInterrupt | Exit],
+) -> None:
+    """Called for keyboard interrupt.
+
+    :param excinfo: The exception info.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+def pytest_exception_interact(
+    node: Item | Collector,
+    call: CallInfo[Any],
+    report: CollectReport | TestReport,
+) -> None:
+    """Called when an exception was raised which can potentially be
+    interactively handled.
+
+    May be called during collection (see :hook:`pytest_make_collect_report`),
+    in which case ``report`` is a :class:`~pytest.CollectReport`.
+
+    May be called during runtest of an item (see :hook:`pytest_runtest_protocol`),
+    in which case ``report`` is a :class:`~pytest.TestReport`.
+
+    This hook is not called if the exception that was raised is an internal
+    exception like ``skip.Exception``.
+
+    :param node:
+        The item or collector.
+    :param call:
+        The call information. Contains the exception.
+    :param report:
+        The collection or test report.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest file can implement this hook. For a given node, only conftest
+    files in parent directories of the node are consulted.
+    """
+
+
+def pytest_enter_pdb(config: Config, pdb: pdb.Pdb) -> None:
+    """Called upon pdb.set_trace().
+
+    Can be used by plugins to take special action just before the python
+    debugger enters interactive mode.
+
+    :param config: The pytest config object.
+    :param pdb: The Pdb instance.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """
+
+
+def pytest_leave_pdb(config: Config, pdb: pdb.Pdb) -> None:
+    """Called when leaving pdb (e.g. with continue after pdb.set_trace()).
+
+    Can be used by plugins to take special action just after the python
+    debugger leaves interactive mode.
+
+    :param config: The pytest config object.
+    :param pdb: The Pdb instance.
+
+    Use in conftest plugins
+    =======================
+
+    Any conftest plugin can implement this hook.
+    """ diff --git a/venv/lib/python3.10/site-packages/_pytest/junitxml.py b/venv/lib/python3.10/site-packages/_pytest/junitxml.py new file mode 100644 index 0000000000000000000000000000000000000000..dc35e3aac15768cf9b7aa48a272d50c054bf561c --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/junitxml.py @@ -0,0 +1,692 @@
+# mypy: allow-untyped-defs
+"""Report test results in JUnit-XML format, for use with Jenkins and build
+integration servers.
+
+Based on initial code from Ross Lawley.
+
+Output conforms to
+https://github.com/jenkinsci/xunit-plugin/blob/master/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+"""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+import functools
+import os
+import platform
+import re
+import xml.etree.ElementTree as ET
+
+from _pytest import nodes
+from _pytest import timing
+from _pytest._code.code import ExceptionRepr
+from _pytest._code.code import ReprFileLocation
+from _pytest.config import Config
+from _pytest.config import filename_arg
+from _pytest.config.argparsing import Parser
+from _pytest.fixtures import FixtureRequest
+from _pytest.reports import TestReport
+from _pytest.stash import StashKey
+from _pytest.terminal import TerminalReporter
+import pytest
+
+
+xml_key = StashKey["LogXML"]()
+
+
+def bin_xml_escape(arg: object) -> str:
+    r"""Visually escape invalid XML characters.
+
+    For example, transforms
+        'hello\aworld\b'
+    into
+        'hello#x07world#x08'
+    Note that the #xABs are *not* XML escapes - they are missing the leading ampersand (``&``).
+    The idea is to escape visually for the user rather than for XML itself.
+    """
+
+    def repl(matchobj: re.Match[str]) -> str:
+        i = ord(matchobj.group())
+        if i <= 0xFF:
+            return f"#x{i:02X}"
+        else:
+            return f"#x{i:04X}"
+
+    # The spec range of valid chars is:
+    # Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+    # For an unknown(?) reason, we disallow #x7F (DEL) as well.
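+    # For example, bin_xml_escape("hello\aworld") returns "hello#x07world":
+    # the BEL control character (0x07) is rendered as a visible "#x07" marker.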
+    illegal_xml_re = (
+        "[^\u0009\u000a\u000d\u0020-\u007e\u0080-\ud7ff\ue000-\ufffd\U00010000-\U0010ffff]"
+    )
+    return re.sub(illegal_xml_re, repl, str(arg))
+
+
+def merge_family(left, right) -> None:
+    result = {}
+    for kl, vl in left.items():
+        for kr, vr in right.items():
+            if not isinstance(vl, list):
+                raise TypeError(type(vl))
+            result[kl] = vl + vr
+    left.update(result)
+
+
+families = {  # pylint: disable=dict-init-mutate
+    "_base": {"testcase": ["classname", "name"]},
+    "_base_legacy": {"testcase": ["file", "line", "url"]},
+}
+# xUnit 1.x inherits legacy attributes.
+families["xunit1"] = families["_base"].copy()
+merge_family(families["xunit1"], families["_base_legacy"])
+
+# xUnit 2.x uses strict base attributes.
+families["xunit2"] = families["_base"]
+
+
+class _NodeReporter:
+    def __init__(self, nodeid: str | TestReport, xml: LogXML) -> None:
+        self.id = nodeid
+        self.xml = xml
+        self.add_stats = self.xml.add_stats
+        self.family = self.xml.family
+        self.duration = 0.0
+        self.properties: list[tuple[str, str]] = []
+        self.nodes: list[ET.Element] = []
+        self.attrs: dict[str, str] = {}
+
+    def append(self, node: ET.Element) -> None:
+        self.xml.add_stats(node.tag)
+        self.nodes.append(node)
+
+    def add_property(self, name: str, value: object) -> None:
+        self.properties.append((str(name), bin_xml_escape(value)))
+
+    def add_attribute(self, name: str, value: object) -> None:
+        self.attrs[str(name)] = bin_xml_escape(value)
+
+    def make_properties_node(self) -> ET.Element | None:
+        """Return a Junit node containing custom properties, if any."""
+        if self.properties:
+            properties = ET.Element("properties")
+            for name, value in self.properties:
+                properties.append(ET.Element("property", name=name, value=value))
+            return properties
+        return None
+
+    def record_testreport(self, testreport: TestReport) -> None:
+        names = mangle_test_address(testreport.nodeid)
+        existing_attrs = self.attrs
+        classnames = names[:-1]
+        if self.xml.prefix:
+            classnames.insert(0, self.xml.prefix)
+        attrs: dict[str, str] = {
+            "classname": ".".join(classnames),
+            "name": bin_xml_escape(names[-1]),
+            "file": testreport.location[0],
+        }
+        if testreport.location[1] is not None:
+            attrs["line"] = str(testreport.location[1])
+        if hasattr(testreport, "url"):
+            attrs["url"] = testreport.url
+        self.attrs = attrs
+        self.attrs.update(existing_attrs)  # Restore any user-defined attributes.
+
+        # Preserve legacy testcase behavior.
+        if self.family == "xunit1":
+            return
+
+        # Filter out attributes not permitted by this test family.
+        # Including custom attributes because they are not valid here.
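+        # With the default "xunit2" family only "classname" and "name" survive
+        # this filter; "file", "line" and "url" are xunit1/legacy-only attributes.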
+ temp_attrs = {} + for key in self.attrs: + if key in families[self.family]["testcase"]: + temp_attrs[key] = self.attrs[key] + self.attrs = temp_attrs + + def to_xml(self) -> ET.Element: + testcase = ET.Element("testcase", self.attrs, time=f"{self.duration:.3f}") + properties = self.make_properties_node() + if properties is not None: + testcase.append(properties) + testcase.extend(self.nodes) + return testcase + + def _add_simple(self, tag: str, message: str, data: str | None = None) -> None: + node = ET.Element(tag, message=message) + node.text = bin_xml_escape(data) + self.append(node) + + def write_captured_output(self, report: TestReport) -> None: + if not self.xml.log_passing_tests and report.passed: + return + + content_out = report.capstdout + content_log = report.caplog + content_err = report.capstderr + if self.xml.logging == "no": + return + content_all = "" + if self.xml.logging in ["log", "all"]: + content_all = self._prepare_content(content_log, " Captured Log ") + if self.xml.logging in ["system-out", "out-err", "all"]: + content_all += self._prepare_content(content_out, " Captured Out ") + self._write_content(report, content_all, "system-out") + content_all = "" + if self.xml.logging in ["system-err", "out-err", "all"]: + content_all += self._prepare_content(content_err, " Captured Err ") + self._write_content(report, content_all, "system-err") + content_all = "" + if content_all: + self._write_content(report, content_all, "system-out") + + def _prepare_content(self, content: str, header: str) -> str: + return "\n".join([header.center(80, "-"), content, ""]) + + def _write_content(self, report: TestReport, content: str, jheader: str) -> None: + tag = ET.Element(jheader) + tag.text = bin_xml_escape(content) + self.append(tag) + + def append_pass(self, report: TestReport) -> None: + self.add_stats("passed") + + def append_failure(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple("skipped", "xfail-marked test passes unexpectedly") + else: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr( + report.longrepr, "reprcrash", None + ) + if reprcrash is not None: + message = reprcrash.message + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + self._add_simple("failure", message, str(report.longrepr)) + + def append_collect_error(self, report: TestReport) -> None: + # msg = str(report.longrepr.reprtraceback.extraline) + assert report.longrepr is not None + self._add_simple("error", "collection failure", str(report.longrepr)) + + def append_collect_skipped(self, report: TestReport) -> None: + self._add_simple("skipped", "collection skipped", str(report.longrepr)) + + def append_error(self, report: TestReport) -> None: + assert report.longrepr is not None + reprcrash: ReprFileLocation | None = getattr(report.longrepr, "reprcrash", None) + if reprcrash is not None: + reason = reprcrash.message + else: + reason = str(report.longrepr) + + if report.when == "teardown": + msg = f'failed on teardown with "{reason}"' + else: + msg = f'failed on setup with "{reason}"' + self._add_simple("error", bin_xml_escape(msg), str(report.longrepr)) + + def append_skipped(self, report: TestReport) -> None: + if hasattr(report, "wasxfail"): + xfailreason = report.wasxfail + if xfailreason.startswith("reason: "): + xfailreason = xfailreason[8:] + xfailreason = bin_xml_escape(xfailreason) + skipped = ET.Element("skipped", type="pytest.xfail", 
message=xfailreason)
+            self.append(skipped)
+        else:
+            assert isinstance(report.longrepr, tuple)
+            filename, lineno, skipreason = report.longrepr
+            if skipreason.startswith("Skipped: "):
+                skipreason = skipreason[9:]
+            details = f"{filename}:{lineno}: {skipreason}"
+
+            skipped = ET.Element(
+                "skipped", type="pytest.skip", message=bin_xml_escape(skipreason)
+            )
+            skipped.text = bin_xml_escape(details)
+            self.append(skipped)
+            self.write_captured_output(report)
+
+    def finalize(self) -> None:
+        data = self.to_xml()
+        self.__dict__.clear()
+        # Type ignored because mypy doesn't like overriding a method.
+        # Also the return value doesn't match...
+        self.to_xml = lambda: data  # type: ignore[method-assign]
+
+
+def _warn_incompatibility_with_xunit2(
+    request: FixtureRequest, fixture_name: str
+) -> None:
+    """Emit a PytestWarning about the given fixture being incompatible with newer xunit revisions."""
+    from _pytest.warning_types import PytestWarning
+
+    xml = request.config.stash.get(xml_key, None)
+    if xml is not None and xml.family not in ("xunit1", "legacy"):
+        request.node.warn(
+            PytestWarning(
+                f"{fixture_name} is incompatible with junit_family '{xml.family}' (use 'legacy' or 'xunit1')"
+            )
+        )
+
+
+@pytest.fixture
+def record_property(request: FixtureRequest) -> Callable[[str, object], None]:
+    """Add extra properties to the calling test.
+
+    User properties become part of the test report and are available to the
+    configured reporters, like JUnit XML.
+
+    The fixture is callable with ``name, value``. The value is automatically
+    XML-encoded.
+
+    Example::
+
+        def test_function(record_property):
+            record_property("example_key", 1)
+    """
+    _warn_incompatibility_with_xunit2(request, "record_property")
+
+    def append_property(name: str, value: object) -> None:
+        request.node.user_properties.append((name, value))
+
+    return append_property
+
+
+@pytest.fixture
+def record_xml_attribute(request: FixtureRequest) -> Callable[[str, object], None]:
+    """Add extra xml attributes to the tag for the calling test.
+
+    The fixture is callable with ``name, value``. The value is
+    automatically XML-encoded.
+    """
+    from _pytest.warning_types import PytestExperimentalApiWarning
+
+    request.node.warn(
+        PytestExperimentalApiWarning("record_xml_attribute is an experimental feature")
+    )
+
+    _warn_incompatibility_with_xunit2(request, "record_xml_attribute")
+
+    # Declare noop
+    def add_attr_noop(name: str, value: object) -> None:
+        pass
+
+    attr_func = add_attr_noop
+
+    xml = request.config.stash.get(xml_key, None)
+    if xml is not None:
+        node_reporter = xml.node_reporter(request.node.nodeid)
+        attr_func = node_reporter.add_attribute
+
+    return attr_func
+
+
+def _check_record_param_type(param: str, v: str) -> None:
+    """Used by record_testsuite_property to check that the given parameter name is of the proper
+    type."""
+    __tracebackhide__ = True
+    if not isinstance(v, str):
+        msg = "{param} parameter needs to be a string, but {g} given"  # type: ignore[unreachable]
+        raise TypeError(msg.format(param=param, g=type(v).__name__))
+
+
+@pytest.fixture(scope="session")
+def record_testsuite_property(request: FixtureRequest) -> Callable[[str, object], None]:
+    """Record a new ``<property>`` tag as child of the root ``<testsuite>``.
+
+    This is suitable for writing global information regarding the entire test
+    suite, and is compatible with the ``xunit2`` JUnit family.
+
+    This is a ``session``-scoped fixture which is called with ``(name, value)``. Example:
+
+    ..
code-block:: python
+
+        def test_foo(record_testsuite_property):
+            record_testsuite_property("ARCH", "PPC")
+            record_testsuite_property("STORAGE_TYPE", "CEPH")
+
+    :param name:
+        The property name.
+    :param value:
+        The property value. Will be converted to a string.
+
+    .. warning::
+
+        Currently this fixture **does not work** with the
+        `pytest-xdist <https://github.com/pytest-dev/pytest-xdist>`__ plugin. See
+        :issue:`7767` for details.
+    """
+    __tracebackhide__ = True
+
+    def record_func(name: str, value: object) -> None:
+        """No-op function in case --junit-xml was not passed on the command line."""
+        __tracebackhide__ = True
+        _check_record_param_type("name", name)
+
+    xml = request.config.stash.get(xml_key, None)
+    if xml is not None:
+        record_func = xml.add_global_property
+    return record_func
+
+
+def pytest_addoption(parser: Parser) -> None:
+    group = parser.getgroup("terminal reporting")
+    group.addoption(
+        "--junitxml",
+        "--junit-xml",
+        action="store",
+        dest="xmlpath",
+        metavar="path",
+        type=functools.partial(filename_arg, optname="--junitxml"),
+        default=None,
+        help="Create junit-xml style report file at given path",
+    )
+    group.addoption(
+        "--junitprefix",
+        "--junit-prefix",
+        action="store",
+        metavar="str",
+        default=None,
+        help="Prepend prefix to classnames in junit-xml output",
+    )
+    parser.addini(
+        "junit_suite_name", "Test suite name for JUnit report", default="pytest"
+    )
+    parser.addini(
+        "junit_logging",
+        "Write captured log messages to JUnit report: "
+        "one of no|log|system-out|system-err|out-err|all",
+        default="no",
+    )
+    parser.addini(
+        "junit_log_passing_tests",
+        "Capture log information for passing tests to JUnit report",
+        type="bool",
+        default=True,
+    )
+    parser.addini(
+        "junit_duration_report",
+        "Duration time to report: one of total|call",
+        default="total",
+    )  # choices=['total', 'call'])
+    parser.addini(
+        "junit_family",
+        "Emit XML for schema: one of legacy|xunit1|xunit2",
+        default="xunit2",
+    )
+
+
+def pytest_configure(config: Config) -> None:
+    xmlpath = config.option.xmlpath
+    # Prevent opening xmllog on worker nodes (xdist).
+    if xmlpath and not hasattr(config, "workerinput"):
+        junit_family = config.getini("junit_family")
+        config.stash[xml_key] = LogXML(
+            xmlpath,
+            config.option.junitprefix,
+            config.getini("junit_suite_name"),
+            config.getini("junit_logging"),
+            config.getini("junit_duration_report"),
+            junit_family,
+            config.getini("junit_log_passing_tests"),
+        )
+        config.pluginmanager.register(config.stash[xml_key])
+
+
+def pytest_unconfigure(config: Config) -> None:
+    xml = config.stash.get(xml_key, None)
+    if xml:
+        del config.stash[xml_key]
+        config.pluginmanager.unregister(xml)
+
+
+def mangle_test_address(address: str) -> list[str]:
+    path, possible_open_bracket, params = address.partition("[")
+    names = path.split("::")
+    # Convert file path to dotted path.
+    names[0] = names[0].replace(nodes.SEP, ".")
+    names[0] = re.sub(r"\.py$", "", names[0])
+    # Put any params back.
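+    # Overall, "tests/test_foo.py::TestBar::test_baz[1]" is mangled into
+    # ["tests.test_foo", "TestBar", "test_baz[1]"].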
+ names[-1] += possible_open_bracket + params + return names + + +class LogXML: + def __init__( + self, + logfile, + prefix: str | None, + suite_name: str = "pytest", + logging: str = "no", + report_duration: str = "total", + family="xunit1", + log_passing_tests: bool = True, + ) -> None: + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.logging = logging + self.log_passing_tests = log_passing_tests + self.report_duration = report_duration + self.family = family + self.stats: dict[str, int] = dict.fromkeys( + ["error", "passed", "failure", "skipped"], 0 + ) + self.node_reporters: dict[tuple[str | TestReport, object], _NodeReporter] = {} + self.node_reporters_ordered: list[_NodeReporter] = [] + self.global_properties: list[tuple[str, str]] = [] + + # List of reports that failed on call but teardown is pending. + self.open_reports: list[TestReport] = [] + self.cnt_double_fail_tests = 0 + + # Replaces convenience family with real family. + if self.family == "legacy": + self.family = "xunit1" + + def finalize(self, report: TestReport) -> None: + nodeid = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + reporter = self.node_reporters.pop((nodeid, workernode)) + + for propname, propvalue in report.user_properties: + reporter.add_property(propname, str(propvalue)) + + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report: TestReport | str) -> _NodeReporter: + nodeid: str | TestReport = getattr(report, "nodeid", report) + # Local hack to handle xdist report order. + workernode = getattr(report, "node", None) + + key = nodeid, workernode + + if key in self.node_reporters: + # TODO: breaks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key: str) -> None: + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report: TestReport) -> _NodeReporter: + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report: TestReport) -> None: + """Handle a setup/call/teardown report, generating the appropriate + XML tags as necessary. + + Note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. For example: + + Usual call order: + -> setup node1 + -> call node1 + -> teardown node1 + -> setup node2 + -> call node2 + -> teardown node2 + + Possible call order in xdist: + -> setup node1 + -> call node1 + -> setup node2 + -> call node2 + -> teardown node2 + -> teardown node1 + """ + close_report = None + if report.passed: + if report.when == "call": # ignore setup/teardown + reporter = self._opentestcase(report) + reporter.append_pass(report) + elif report.failed: + if report.when == "teardown": + # The following vars are needed when xdist plugin is used. 
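+                # worker_id and item_index are attached to reports by the
+                # pytest-xdist plugin; in non-distributed runs getattr
+                # falls back to None.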
+                report_wid = getattr(report, "worker_id", None)
+                report_ii = getattr(report, "item_index", None)
+                close_report = next(
+                    (
+                        rep
+                        for rep in self.open_reports
+                        if (
+                            rep.nodeid == report.nodeid
+                            and getattr(rep, "item_index", None) == report_ii
+                            and getattr(rep, "worker_id", None) == report_wid
+                        )
+                    ),
+                    None,
+                )
+                if close_report:
+                    # We need to open new testcase in case we have failure in
+                    # call and error in teardown in order to follow junit
+                    # schema.
+                    self.finalize(close_report)
+                    self.cnt_double_fail_tests += 1
+            reporter = self._opentestcase(report)
+            if report.when == "call":
+                reporter.append_failure(report)
+                self.open_reports.append(report)
+                if not self.log_passing_tests:
+                    reporter.write_captured_output(report)
+            else:
+                reporter.append_error(report)
+        elif report.skipped:
+            reporter = self._opentestcase(report)
+            reporter.append_skipped(report)
+        self.update_testcase_duration(report)
+        if report.when == "teardown":
+            reporter = self._opentestcase(report)
+            reporter.write_captured_output(report)
+
+            self.finalize(report)
+            report_wid = getattr(report, "worker_id", None)
+            report_ii = getattr(report, "item_index", None)
+            close_report = next(
+                (
+                    rep
+                    for rep in self.open_reports
+                    if (
+                        rep.nodeid == report.nodeid
+                        and getattr(rep, "item_index", None) == report_ii
+                        and getattr(rep, "worker_id", None) == report_wid
+                    )
+                ),
+                None,
+            )
+            if close_report:
+                self.open_reports.remove(close_report)
+
+    def update_testcase_duration(self, report: TestReport) -> None:
+        """Accumulate total duration for nodeid from given report and update
+        the Junit.testcase with the new total if already created."""
+        if self.report_duration in {"total", report.when}:
+            reporter = self.node_reporter(report)
+            reporter.duration += getattr(report, "duration", 0.0)
+
+    def pytest_collectreport(self, report: TestReport) -> None:
+        if not report.passed:
+            reporter = self._opentestcase(report)
+            if report.failed:
+                reporter.append_collect_error(report)
+            else:
+                reporter.append_collect_skipped(report)
+
+    def pytest_internalerror(self, excrepr: ExceptionRepr) -> None:
+        reporter = self.node_reporter("internal")
+        reporter.attrs.update(classname="pytest", name="internal")
+        reporter._add_simple("error", "internal error", str(excrepr))
+
+    def pytest_sessionstart(self) -> None:
+        self.suite_start = timing.Instant()
+
+    def pytest_sessionfinish(self) -> None:
+        dirname = os.path.dirname(os.path.abspath(self.logfile))
+        # exist_ok avoids filesystem race conditions between checking path existence and requesting creation
+        os.makedirs(dirname, exist_ok=True)
+
+        with open(self.logfile, "w", encoding="utf-8") as logfile:
+            duration = self.suite_start.elapsed()
+
+            numtests = (
+                self.stats["passed"]
+                + self.stats["failure"]
+                + self.stats["skipped"]
+                + self.stats["error"]
+                - self.cnt_double_fail_tests
+            )
+            logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+            suite_node = ET.Element(
+                "testsuite",
+                name=self.suite_name,
+                errors=str(self.stats["error"]),
+                failures=str(self.stats["failure"]),
+                skipped=str(self.stats["skipped"]),
+                tests=str(numtests),
+                time=f"{duration.seconds:.3f}",
+                timestamp=self.suite_start.as_utc().astimezone().isoformat(),
+                hostname=platform.node(),
+            )
+            global_properties = self._get_global_properties_node()
+            if global_properties is not None:
+                suite_node.append(global_properties)
+            for node_reporter in self.node_reporters_ordered:
+                suite_node.append(node_reporter.to_xml())
+            testsuites = ET.Element("testsuites")
+            testsuites.set("name", "pytest tests")
+            testsuites.append(suite_node)
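+            # ET.tostring(..., encoding="unicode") returns str rather than
+            # bytes, so the tree can be written to the text-mode file directly.
+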
logfile.write(ET.tostring(testsuites, encoding="unicode")) + + def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None: + terminalreporter.write_sep("-", f"generated xml file: {self.logfile}") + + def add_global_property(self, name: str, value: object) -> None: + __tracebackhide__ = True + _check_record_param_type("name", name) + self.global_properties.append((name, bin_xml_escape(value))) + + def _get_global_properties_node(self) -> ET.Element | None: + """Return a Junit node containing custom properties, if any.""" + if self.global_properties: + properties = ET.Element("properties") + for name, value in self.global_properties: + properties.append(ET.Element("property", name=name, value=value)) + return properties + return None diff --git a/venv/lib/python3.10/site-packages/_pytest/legacypath.py b/venv/lib/python3.10/site-packages/_pytest/legacypath.py new file mode 100644 index 0000000000000000000000000000000000000000..59e8ef6e7427ed2f548c1da72c9acceb82ef31fa --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/legacypath.py @@ -0,0 +1,468 @@ +# mypy: allow-untyped-defs +"""Add backward compatibility support for the legacy py path type.""" + +from __future__ import annotations + +import dataclasses +from pathlib import Path +import shlex +import subprocess +from typing import Final +from typing import final +from typing import TYPE_CHECKING + +from iniconfig import SectionWrapper + +from _pytest.cacheprovider import Cache +from _pytest.compat import LEGACY_PATH +from _pytest.compat import legacy_path +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pytester import HookRecorder +from _pytest.pytester import Pytester +from _pytest.pytester import RunResult +from _pytest.terminal import TerminalReporter +from _pytest.tmpdir import TempPathFactory + + +if TYPE_CHECKING: + import pexpect + + +@final +class Testdir: + """ + Similar to :class:`Pytester`, but this class works with legacy legacy_path objects instead. + + All methods just forward to an internal :class:`Pytester` instance, converting results + to `legacy_path` objects as necessary. 
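+
+    A minimal usage sketch (the test body is illustrative):
+
+    .. code-block:: python
+
+        def test_something(testdir):
+            testdir.makepyfile("def test_ok(): pass")
+            result = testdir.runpytest()
+            result.assert_outcomes(passed=1)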
+ """ + + __test__ = False + + CLOSE_STDIN: Final = Pytester.CLOSE_STDIN + TimeoutExpired: Final = Pytester.TimeoutExpired + + def __init__(self, pytester: Pytester, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._pytester = pytester + + @property + def tmpdir(self) -> LEGACY_PATH: + """Temporary directory where tests are executed.""" + return legacy_path(self._pytester.path) + + @property + def test_tmproot(self) -> LEGACY_PATH: + return legacy_path(self._pytester._test_tmproot) + + @property + def request(self): + return self._pytester._request + + @property + def plugins(self): + return self._pytester.plugins + + @plugins.setter + def plugins(self, plugins): + self._pytester.plugins = plugins + + @property + def monkeypatch(self) -> MonkeyPatch: + return self._pytester._monkeypatch + + def make_hook_recorder(self, pluginmanager) -> HookRecorder: + """See :meth:`Pytester.make_hook_recorder`.""" + return self._pytester.make_hook_recorder(pluginmanager) + + def chdir(self) -> None: + """See :meth:`Pytester.chdir`.""" + return self._pytester.chdir() + + def finalize(self) -> None: + return self._pytester._finalize() + + def makefile(self, ext, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makefile`.""" + if ext and not ext.startswith("."): + # pytester.makefile is going to throw a ValueError in a way that + # testdir.makefile did not, because + # pathlib.Path is stricter suffixes than py.path + # This ext arguments is likely user error, but since testdir has + # allowed this, we will prepend "." as a workaround to avoid breaking + # testdir usage that worked before + ext = "." + ext + return legacy_path(self._pytester.makefile(ext, *args, **kwargs)) + + def makeconftest(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeconftest`.""" + return legacy_path(self._pytester.makeconftest(source)) + + def makeini(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makeini`.""" + return legacy_path(self._pytester.makeini(source)) + + def getinicfg(self, source: str) -> SectionWrapper: + """See :meth:`Pytester.getinicfg`.""" + return self._pytester.getinicfg(source) + + def makepyprojecttoml(self, source) -> LEGACY_PATH: + """See :meth:`Pytester.makepyprojecttoml`.""" + return legacy_path(self._pytester.makepyprojecttoml(source)) + + def makepyfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.makepyfile`.""" + return legacy_path(self._pytester.makepyfile(*args, **kwargs)) + + def maketxtfile(self, *args, **kwargs) -> LEGACY_PATH: + """See :meth:`Pytester.maketxtfile`.""" + return legacy_path(self._pytester.maketxtfile(*args, **kwargs)) + + def syspathinsert(self, path=None) -> None: + """See :meth:`Pytester.syspathinsert`.""" + return self._pytester.syspathinsert(path) + + def mkdir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkdir`.""" + return legacy_path(self._pytester.mkdir(name)) + + def mkpydir(self, name) -> LEGACY_PATH: + """See :meth:`Pytester.mkpydir`.""" + return legacy_path(self._pytester.mkpydir(name)) + + def copy_example(self, name=None) -> LEGACY_PATH: + """See :meth:`Pytester.copy_example`.""" + return legacy_path(self._pytester.copy_example(name)) + + def getnode(self, config: Config, arg) -> Item | Collector | None: + """See :meth:`Pytester.getnode`.""" + return self._pytester.getnode(config, arg) + + def getpathnode(self, path): + """See :meth:`Pytester.getpathnode`.""" + return self._pytester.getpathnode(path) + + def genitems(self, colitems: list[Item | Collector]) -> list[Item]: + """See 
:meth:`Pytester.genitems`."""
+        return self._pytester.genitems(colitems)
+
+    def runitem(self, source):
+        """See :meth:`Pytester.runitem`."""
+        return self._pytester.runitem(source)
+
+    def inline_runsource(self, source, *cmdlineargs):
+        """See :meth:`Pytester.inline_runsource`."""
+        return self._pytester.inline_runsource(source, *cmdlineargs)
+
+    def inline_genitems(self, *args):
+        """See :meth:`Pytester.inline_genitems`."""
+        return self._pytester.inline_genitems(*args)
+
+    def inline_run(self, *args, plugins=(), no_reraise_ctrlc: bool = False):
+        """See :meth:`Pytester.inline_run`."""
+        return self._pytester.inline_run(
+            *args, plugins=plugins, no_reraise_ctrlc=no_reraise_ctrlc
+        )
+
+    def runpytest_inprocess(self, *args, **kwargs) -> RunResult:
+        """See :meth:`Pytester.runpytest_inprocess`."""
+        return self._pytester.runpytest_inprocess(*args, **kwargs)
+
+    def runpytest(self, *args, **kwargs) -> RunResult:
+        """See :meth:`Pytester.runpytest`."""
+        return self._pytester.runpytest(*args, **kwargs)
+
+    def parseconfig(self, *args) -> Config:
+        """See :meth:`Pytester.parseconfig`."""
+        return self._pytester.parseconfig(*args)
+
+    def parseconfigure(self, *args) -> Config:
+        """See :meth:`Pytester.parseconfigure`."""
+        return self._pytester.parseconfigure(*args)
+
+    def getitem(self, source, funcname="test_func"):
+        """See :meth:`Pytester.getitem`."""
+        return self._pytester.getitem(source, funcname)
+
+    def getitems(self, source):
+        """See :meth:`Pytester.getitems`."""
+        return self._pytester.getitems(source)
+
+    def getmodulecol(self, source, configargs=(), withinit=False):
+        """See :meth:`Pytester.getmodulecol`."""
+        return self._pytester.getmodulecol(
+            source, configargs=configargs, withinit=withinit
+        )
+
+    def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None:
+        """See :meth:`Pytester.collect_by_name`."""
+        return self._pytester.collect_by_name(modcol, name)
+
+    def popen(
+        self,
+        cmdargs,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        stdin=CLOSE_STDIN,
+        **kw,
+    ):
+        """See :meth:`Pytester.popen`."""
+        return self._pytester.popen(cmdargs, stdout, stderr, stdin, **kw)
+
+    def run(self, *cmdargs, timeout=None, stdin=CLOSE_STDIN) -> RunResult:
+        """See :meth:`Pytester.run`."""
+        return self._pytester.run(*cmdargs, timeout=timeout, stdin=stdin)
+
+    def runpython(self, script) -> RunResult:
+        """See :meth:`Pytester.runpython`."""
+        return self._pytester.runpython(script)
+
+    def runpython_c(self, command):
+        """See :meth:`Pytester.runpython_c`."""
+        return self._pytester.runpython_c(command)
+
+    def runpytest_subprocess(self, *args, timeout=None) -> RunResult:
+        """See :meth:`Pytester.runpytest_subprocess`."""
+        return self._pytester.runpytest_subprocess(*args, timeout=timeout)
+
+    def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn:
+        """See :meth:`Pytester.spawn_pytest`."""
+        return self._pytester.spawn_pytest(string, expect_timeout=expect_timeout)
+
+    def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn:
+        """See :meth:`Pytester.spawn`."""
+        return self._pytester.spawn(cmd, expect_timeout=expect_timeout)
+
+    def __repr__(self) -> str:
+        return f"<Testdir {self.tmpdir!r}>"
+
+    def __str__(self) -> str:
+        return str(self.tmpdir)
+
+
+class LegacyTestdirPlugin:
+    @staticmethod
+    @fixture
+    def testdir(pytester: Pytester) -> Testdir:
+        """
+        Identical to :fixture:`pytester`, and provides an instance whose methods return
+        legacy ``LEGACY_PATH`` objects instead when applicable.
+ + New code should avoid using :fixture:`testdir` in favor of :fixture:`pytester`. + """ + return Testdir(pytester, _ispytest=True) + + +@final +@dataclasses.dataclass +class TempdirFactory: + """Backward compatibility wrapper that implements ``py.path.local`` + for :class:`TempPathFactory`. + + .. note:: + These days, it is preferred to use ``tmp_path_factory``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + """ + + _tmppath_factory: TempPathFactory + + def __init__( + self, tmppath_factory: TempPathFactory, *, _ispytest: bool = False + ) -> None: + check_ispytest(_ispytest) + self._tmppath_factory = tmppath_factory + + def mktemp(self, basename: str, numbered: bool = True) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.mktemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.mktemp(basename, numbered).resolve()) + + def getbasetemp(self) -> LEGACY_PATH: + """Same as :meth:`TempPathFactory.getbasetemp`, but returns a ``py.path.local`` object.""" + return legacy_path(self._tmppath_factory.getbasetemp().resolve()) + + +class LegacyTmpdirPlugin: + @staticmethod + @fixture(scope="session") + def tmpdir_factory(request: FixtureRequest) -> TempdirFactory: + """Return a :class:`pytest.TempdirFactory` instance for the test session.""" + # Set dynamically by pytest_configure(). + return request.config._tmpdirhandler # type: ignore + + @staticmethod + @fixture + def tmpdir(tmp_path: Path) -> LEGACY_PATH: + """Return a temporary directory (as `legacy_path`_ object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + + .. note:: + These days, it is preferred to use ``tmp_path``. + + :ref:`About the tmpdir and tmpdir_factory fixtures`. + + .. _legacy_path: https://py.readthedocs.io/en/latest/path.html + """ + return legacy_path(tmp_path) + + +def Cache_makedir(self: Cache, name: str) -> LEGACY_PATH: + """Return a directory path object with the given name. + + Same as :func:`mkdir`, but returns a legacy py path instance. + """ + return legacy_path(self.mkdir(name)) + + +def FixtureRequest_fspath(self: FixtureRequest) -> LEGACY_PATH: + """(deprecated) The file system path of the test module which collected this test.""" + return legacy_path(self.path) + + +def TerminalReporter_startdir(self: TerminalReporter) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config_invocation_dir(self: Config) -> LEGACY_PATH: + """The directory from which pytest was invoked. + + Prefer to use :attr:`invocation_params.dir `, + which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.invocation_params.dir)) + + +def Config_rootdir(self: Config) -> LEGACY_PATH: + """The path to the :ref:`rootdir `. + + Prefer to use :attr:`rootpath`, which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(str(self.rootpath)) + + +def Config_inifile(self: Config) -> LEGACY_PATH | None: + """The path to the :ref:`configfile `. + + Prefer to use :attr:`inipath`, which is a :class:`pathlib.Path`. 
+ + :type: Optional[LEGACY_PATH] + """ + return legacy_path(str(self.inipath)) if self.inipath else None + + +def Session_startdir(self: Session) -> LEGACY_PATH: + """The path from which pytest was invoked. + + Prefer to use ``startpath`` which is a :class:`pathlib.Path`. + + :type: LEGACY_PATH + """ + return legacy_path(self.startpath) + + +def Config__getini_unknown_type(self, name: str, type: str, value: str | list[str]): + if type == "pathlist": + # TODO: This assert is probably not valid in all cases. + assert self.inipath is not None + dp = self.inipath.parent + input_values = shlex.split(value) if isinstance(value, str) else value + return [legacy_path(str(dp / x)) for x in input_values] + else: + raise ValueError(f"unknown configuration type: {type}", value) + + +def Node_fspath(self: Node) -> LEGACY_PATH: + """(deprecated) returns a legacy_path copy of self.path""" + return legacy_path(self.path) + + +def Node_fspath_set(self: Node, value: LEGACY_PATH) -> None: + self.path = Path(value) + + +@hookimpl(tryfirst=True) +def pytest_load_initial_conftests(early_config: Config) -> None: + """Monkeypatch legacy path attributes in several classes, as early as possible.""" + mp = MonkeyPatch() + early_config.add_cleanup(mp.undo) + + # Add Cache.makedir(). + mp.setattr(Cache, "makedir", Cache_makedir, raising=False) + + # Add FixtureRequest.fspath property. + mp.setattr(FixtureRequest, "fspath", property(FixtureRequest_fspath), raising=False) + + # Add TerminalReporter.startdir property. + mp.setattr( + TerminalReporter, "startdir", property(TerminalReporter_startdir), raising=False + ) + + # Add Config.{invocation_dir,rootdir,inifile} properties. + mp.setattr(Config, "invocation_dir", property(Config_invocation_dir), raising=False) + mp.setattr(Config, "rootdir", property(Config_rootdir), raising=False) + mp.setattr(Config, "inifile", property(Config_inifile), raising=False) + + # Add Session.startdir property. + mp.setattr(Session, "startdir", property(Session_startdir), raising=False) + + # Add pathlist configuration type. + mp.setattr(Config, "_getini_unknown_type", Config__getini_unknown_type) + + # Add Node.fspath property. + mp.setattr(Node, "fspath", property(Node_fspath, Node_fspath_set), raising=False) + + +@hookimpl +def pytest_configure(config: Config) -> None: + """Installs the LegacyTmpdirPlugin if the ``tmpdir`` plugin is also installed.""" + if config.pluginmanager.has_plugin("tmpdir"): + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + # Create TmpdirFactory and attach it to the config object. + # + # This is to comply with existing plugins which expect the handler to be + # available at pytest_configure time, but ideally should be moved entirely + # to the tmpdir_factory session fixture. + try: + tmp_path_factory = config._tmp_path_factory # type: ignore[attr-defined] + except AttributeError: + # tmpdir plugin is blocked. + pass + else: + _tmpdirhandler = TempdirFactory(tmp_path_factory, _ispytest=True) + mp.setattr(config, "_tmpdirhandler", _tmpdirhandler, raising=False) + + config.pluginmanager.register(LegacyTmpdirPlugin, "legacypath-tmpdir") + + +@hookimpl +def pytest_plugin_registered(plugin: object, manager: PytestPluginManager) -> None: + # pytester is not loaded by default and is commonly loaded from a conftest, + # so checking for it in `pytest_configure` is not enough. 
+ is_pytester = plugin is manager.get_plugin("pytester") + if is_pytester and not manager.is_registered(LegacyTestdirPlugin): + manager.register(LegacyTestdirPlugin, "legacypath-pytester") diff --git a/venv/lib/python3.10/site-packages/_pytest/logging.py b/venv/lib/python3.10/site-packages/_pytest/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..e4fed579d2161da981e1ad2f502577a7a1fe596e --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/logging.py @@ -0,0 +1,960 @@ +# mypy: allow-untyped-defs +"""Access and control log capturing.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Set as AbstractSet +from contextlib import contextmanager +from contextlib import nullcontext +from datetime import datetime +from datetime import timedelta +from datetime import timezone +import io +from io import StringIO +import logging +from logging import LogRecord +import os +from pathlib import Path +import re +from types import TracebackType +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from _pytest import nodes +from _pytest._io import TerminalWriter +from _pytest.capture import CaptureManager +from _pytest.config import _strtobool +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter + + +if TYPE_CHECKING: + logging_StreamHandler = logging.StreamHandler[StringIO] +else: + logging_StreamHandler = logging.StreamHandler + +DEFAULT_LOG_FORMAT = "%(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" +DEFAULT_LOG_DATE_FORMAT = "%H:%M:%S" +_ANSI_ESCAPE_SEQ = re.compile(r"\x1b\[[\d;]+m") +caplog_handler_key = StashKey["LogCaptureHandler"]() +caplog_records_key = StashKey[dict[str, list[logging.LogRecord]]]() + + +def _remove_ansi_escape_sequences(text: str) -> str: + return _ANSI_ESCAPE_SEQ.sub("", text) + + +class DatetimeFormatter(logging.Formatter): + """A logging formatter which formats record with + :func:`datetime.datetime.strftime` formatter instead of + :func:`time.strftime` in case of microseconds in format string. + """ + + def formatTime(self, record: LogRecord, datefmt: str | None = None) -> str: + if datefmt and "%f" in datefmt: + ct = self.converter(record.created) + tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) + # Construct `datetime.datetime` object from `struct_time` + # and msecs information from `record` + # Using int() instead of round() to avoid it exceeding 1_000_000 and causing a ValueError (#11861). 
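+            # time.strftime has no "%f" directive, hence this datetime-based
+            # path for date formats that request microseconds.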
+ dt = datetime(*ct[0:6], microsecond=int(record.msecs * 1000), tzinfo=tz) + return dt.strftime(datefmt) + # Use `logging.Formatter` for non-microsecond formats + return super().formatTime(record, datefmt) + + +class ColoredLevelFormatter(DatetimeFormatter): + """A logging formatter which colorizes the %(levelname)..s part of the + log format passed to __init__.""" + + LOGLEVEL_COLOROPTS: Mapping[int, AbstractSet[str]] = { + logging.CRITICAL: {"red"}, + logging.ERROR: {"red", "bold"}, + logging.WARNING: {"yellow"}, + logging.WARN: {"yellow"}, + logging.INFO: {"green"}, + logging.DEBUG: {"purple"}, + logging.NOTSET: set(), + } + LEVELNAME_FMT_REGEX = re.compile(r"%\(levelname\)([+-.]?\d*(?:\.\d+)?s)") + + def __init__(self, terminalwriter: TerminalWriter, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._terminalwriter = terminalwriter + self._original_fmt = self._style._fmt + self._level_to_fmt_mapping: dict[int, str] = {} + + for level, color_opts in self.LOGLEVEL_COLOROPTS.items(): + self.add_color_level(level, *color_opts) + + def add_color_level(self, level: int, *color_opts: str) -> None: + """Add or update color opts for a log level. + + :param level: + Log level to apply a style to, e.g. ``logging.INFO``. + :param color_opts: + ANSI escape sequence color options. Capitalized colors indicates + background color, i.e. ``'green', 'Yellow', 'bold'`` will give bold + green text on yellow background. + + .. warning:: + This is an experimental API. + """ + assert self._fmt is not None + levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt) + if not levelname_fmt_match: + return + levelname_fmt = levelname_fmt_match.group() + + formatted_levelname = levelname_fmt % {"levelname": logging.getLevelName(level)} + + # add ANSI escape sequences around the formatted levelname + color_kwargs = {name: True for name in color_opts} + colorized_formatted_levelname = self._terminalwriter.markup( + formatted_levelname, **color_kwargs + ) + self._level_to_fmt_mapping[level] = self.LEVELNAME_FMT_REGEX.sub( + colorized_formatted_levelname, self._fmt + ) + + def format(self, record: logging.LogRecord) -> str: + fmt = self._level_to_fmt_mapping.get(record.levelno, self._original_fmt) + self._style._fmt = fmt + return super().format(record) + + +class PercentStyleMultiline(logging.PercentStyle): + """A logging style with special support for multiline messages. + + If the message of a record consists of multiple lines, this style + formats the message as if each line were logged separately. + """ + + def __init__(self, fmt: str, auto_indent: int | str | bool | None) -> None: + super().__init__(fmt) + self._auto_indent = self._get_auto_indent(auto_indent) + + @staticmethod + def _get_auto_indent(auto_indent_option: int | str | bool | None) -> int: + """Determine the current auto indentation setting. + + Specify auto indent behavior (on/off/fixed) by passing in + extra={"auto_indent": [value]} to the call to logging.log() or + using a --log-auto-indent [value] command line or the + log_auto_indent [value] config option. + + Default behavior is auto-indent off. + + Using the string "True" or "on" or the boolean True as the value + turns auto indent on, using the string "False" or "off" or the + boolean False or the int 0 turns it off, and specifying a + positive integer fixes the indentation position to the value + specified. + + Any other values for the option are invalid, and will silently be + converted to the default. 
+ + :param None|bool|int|str auto_indent_option: + User specified option for indentation from command line, config + or extra kwarg. Accepts int, bool or str. str option accepts the + same range of values as boolean config options, as well as + positive integers represented in str form. + + :returns: + Indentation value, which can be + -1 (automatically determine indentation) or + 0 (auto-indent turned off) or + >0 (explicitly set indentation position). + """ + if auto_indent_option is None: + return 0 + elif isinstance(auto_indent_option, bool): + if auto_indent_option: + return -1 + else: + return 0 + elif isinstance(auto_indent_option, int): + return int(auto_indent_option) + elif isinstance(auto_indent_option, str): + try: + return int(auto_indent_option) + except ValueError: + pass + try: + if _strtobool(auto_indent_option): + return -1 + except ValueError: + return 0 + + return 0 + + def format(self, record: logging.LogRecord) -> str: + if "\n" in record.message: + if hasattr(record, "auto_indent"): + # Passed in from the "extra={}" kwarg on the call to logging.log(). + auto_indent = self._get_auto_indent(record.auto_indent) + else: + auto_indent = self._auto_indent + + if auto_indent: + lines = record.message.splitlines() + formatted = self._fmt % {**record.__dict__, "message": lines[0]} + + if auto_indent < 0: + indentation = _remove_ansi_escape_sequences(formatted).find( + lines[0] + ) + else: + # Optimizes logging by allowing a fixed indentation. + indentation = auto_indent + lines[0] = formatted + return ("\n" + " " * indentation).join(lines) + return self._fmt % record.__dict__ + + +def get_option_ini(config: Config, *names: str): + for name in names: + ret = config.getoption(name) # 'default' arg won't work as expected + if ret is None: + ret = config.getini(name) + if ret: + return ret + + +def pytest_addoption(parser: Parser) -> None: + """Add options to control log capturing.""" + group = parser.getgroup("logging") + + def add_option_ini(option, dest, default=None, type=None, **kwargs): + parser.addini( + dest, default=default, type=type, help="Default value for " + option + ) + group.addoption(option, dest=dest, **kwargs) + + add_option_ini( + "--log-level", + dest="log_level", + default=None, + metavar="LEVEL", + help=( + "Level of messages to catch/display." + " Not set by default, so it depends on the root/parent log handler's" + ' effective level, where it is "WARNING" by default.' 
+ ), + add_option_ini( + "--log-format", + dest="log_format", + default=DEFAULT_LOG_FORMAT, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-date-format", + dest="log_date_format", + default=DEFAULT_LOG_DATE_FORMAT, + help="Log date format used by the logging module", + ) + parser.addini( + "log_cli", + default=False, + type="bool", + help='Enable log display during test run (also known as "live logging")', + ) + add_option_ini( + "--log-cli-level", dest="log_cli_level", default=None, help="CLI logging level" + ) + add_option_ini( + "--log-cli-format", + dest="log_cli_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-cli-date-format", + dest="log_cli_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-file", + dest="log_file", + default=None, + help="Path to a file to which logging will be written", + ) + add_option_ini( + "--log-file-mode", + dest="log_file_mode", + default="w", + choices=["w", "a"], + help="Log file open mode", + ) + add_option_ini( + "--log-file-level", + dest="log_file_level", + default=None, + help="Log file logging level", + ) + add_option_ini( + "--log-file-format", + dest="log_file_format", + default=None, + help="Log format used by the logging module", + ) + add_option_ini( + "--log-file-date-format", + dest="log_file_date_format", + default=None, + help="Log date format used by the logging module", + ) + add_option_ini( + "--log-auto-indent", + dest="log_auto_indent", + default=None, + help="Auto-indent multiline messages passed to the logging module. Accepts true|on, false|off or an integer.", + ) + group.addoption( + "--log-disable", + action="append", + default=[], + dest="logger_disable", + help="Disable a logger by name. Can be passed multiple times.", + ) + + +_HandlerType = TypeVar("_HandlerType", bound=logging.Handler) + + +# Not using @contextmanager for performance reasons.
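A minimal usage sketch of the per-call auto-indent override documented in the ``PercentStyleMultiline`` docstring above (the logger name "app" is hypothetical; the values mirror the ``--log-auto-indent`` option just registered):

    import logging

    logger = logging.getLogger("app")

    # True / -1: align continuation lines under the first line of the message;
    # a positive int fixes the indent column; 0 / False turns indenting off.
    logger.warning("first line\nsecond line", extra={"auto_indent": True})
    logger.warning("first line\nsecond line", extra={"auto_indent": 24})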
+class catching_logs(Generic[_HandlerType]): + """Context manager that prepares the whole logging machinery properly.""" + + __slots__ = ("handler", "level", "orig_level") + + def __init__(self, handler: _HandlerType, level: int | None = None) -> None: + self.handler = handler + self.level = level + + def __enter__(self) -> _HandlerType: + root_logger = logging.getLogger() + if self.level is not None: + self.handler.setLevel(self.level) + root_logger.addHandler(self.handler) + if self.level is not None: + self.orig_level = root_logger.level + root_logger.setLevel(min(self.orig_level, self.level)) + return self.handler + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + root_logger = logging.getLogger() + if self.level is not None: + root_logger.setLevel(self.orig_level) + root_logger.removeHandler(self.handler) + + +class LogCaptureHandler(logging_StreamHandler): + """A logging handler that stores log records and the log text.""" + + def __init__(self) -> None: + """Create a new log handler.""" + super().__init__(StringIO()) + self.records: list[logging.LogRecord] = [] + + def emit(self, record: logging.LogRecord) -> None: + """Keep the log records in a list in addition to the log text.""" + self.records.append(record) + super().emit(record) + + def reset(self) -> None: + self.records = [] + self.stream = StringIO() + + def clear(self) -> None: + self.records.clear() + self.stream = StringIO() + + def handleError(self, record: logging.LogRecord) -> None: + if logging.raiseExceptions: + # Fail the test if the log message is bad (emit failed). + # The default behavior of logging is to print "Logging error" + # to stderr with the call stack and some extra details. + # pytest wants to make such mistakes visible during testing. + raise # noqa: PLE0704 + + +@final +class LogCaptureFixture: + """Provides access and control of log capturing.""" + + def __init__(self, item: nodes.Node, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._item = item + self._initial_handler_level: int | None = None + # Dict of log name -> log level. + self._initial_logger_levels: dict[str | None, int] = {} + self._initial_disabled_logging_level: int | None = None + + def _finalize(self) -> None: + """Finalize the fixture. + + This restores the log levels and the disabled logging levels changed by :meth:`set_level`. + """ + # Restore log levels. + if self._initial_handler_level is not None: + self.handler.setLevel(self._initial_handler_level) + for logger_name, level in self._initial_logger_levels.items(): + logger = logging.getLogger(logger_name) + logger.setLevel(level) + # Disable logging at the original disabled logging level. + if self._initial_disabled_logging_level is not None: + logging.disable(self._initial_disabled_logging_level) + self._initial_disabled_logging_level = None + + @property + def handler(self) -> LogCaptureHandler: + """Get the logging handler used by the fixture.""" + return self._item.stash[caplog_handler_key] + + def get_records( + self, when: Literal["setup", "call", "teardown"] + ) -> list[logging.LogRecord]: + """Get the logging records for one of the possible test phases. + + :param when: + Which test phase to obtain the records from. + Valid values are: "setup", "call" and "teardown". + + :returns: The list of captured records at the given stage. + + .. 
versionadded:: 3.4 + """ + return self._item.stash[caplog_records_key].get(when, []) + + @property + def text(self) -> str: + """The formatted log text.""" + return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) + + @property + def records(self) -> list[logging.LogRecord]: + """The list of log records.""" + return self.handler.records + + @property + def record_tuples(self) -> list[tuple[str, int, str]]: + """A list of a stripped-down version of log records intended + for use in assertion comparison. + + The format of the tuple is: + + (logger_name, log_level, message) + """ + return [(r.name, r.levelno, r.getMessage()) for r in self.records] + + @property + def messages(self) -> list[str]: + """A list of format-interpolated log messages. + + Unlike 'records', which contains the format string and parameters for + interpolation, log messages in this list are all interpolated. + + Unlike 'text', which contains the output from the handler, log + messages in this list are unadorned with levels, timestamps, etc., + making exact comparisons more reliable. + + Note that traceback or stack info (from :func:`logging.exception` or + the `exc_info` or `stack_info` arguments to the logging functions) is + not included, as this is added by the formatter in the handler. + + .. versionadded:: 3.7 + """ + return [r.getMessage() for r in self.records] + + def clear(self) -> None: + """Reset the list of log records and the captured log text.""" + self.handler.clear() + + def _force_enable_logging( + self, level: int | str, logger_obj: logging.Logger + ) -> int: + """Enable the desired logging level if the global level was disabled via ``logging.disable()``. + + Only enables logging levels greater than or equal to the requested ``level``. + + Does nothing if the desired ``level`` wasn't disabled. + + :param level: + The logger level caplog should capture. + All logging is enabled if a non-standard logging level string is supplied. + Valid level strings are in :data:`logging._nameToLevel`. + :param logger_obj: The logger object to check. + + :return: The original disabled logging level. + """ + original_disable_level: int = logger_obj.manager.disable + + if isinstance(level, str): + # Try to translate the level string to an int for `logging.disable()` + level = logging.getLevelName(level) + + if not isinstance(level, int): + # The level provided was not valid, so just un-disable all logging. + logging.disable(logging.NOTSET) + elif not logger_obj.isEnabledFor(level): + # Each level is `10` away from other levels. + # https://docs.python.org/3/library/logging.html#logging-levels + disable_level = max(level - 10, logging.NOTSET) + logging.disable(disable_level) + + return original_disable_level + + def set_level(self, level: int | str, logger: str | None = None) -> None: + """Set the threshold level of a logger for the duration of a test. + + Logging messages which are less severe than this level will not be captured. + + .. versionchanged:: 3.4 + The levels of the loggers changed by this function will be + restored to their initial values at the end of the test. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + # Save the original log-level to restore it during teardown.
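# Note on setdefault() below: only the level in effect at the *first*
# set_level() call for a given logger is recorded, so repeated calls within
# one test still restore the true pre-test level when _finalize() runs.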
+ self._initial_logger_levels.setdefault(logger, logger_obj.level) + logger_obj.setLevel(level) + if self._initial_handler_level is None: + self._initial_handler_level = self.handler.level + self.handler.setLevel(level) + initial_disabled_logging_level = self._force_enable_logging(level, logger_obj) + if self._initial_disabled_logging_level is None: + self._initial_disabled_logging_level = initial_disabled_logging_level + + @contextmanager + def at_level(self, level: int | str, logger: str | None = None) -> Generator[None]: + """Context manager that sets the level for capturing of logs. After + the end of the 'with' statement the level is restored to its original + value. + + Will enable the requested logging level if it was disabled via :func:`logging.disable`. + + :param level: The level. + :param logger: The logger to update. If not given, the root logger. + """ + logger_obj = logging.getLogger(logger) + orig_level = logger_obj.level + logger_obj.setLevel(level) + handler_orig_level = self.handler.level + self.handler.setLevel(level) + original_disable_level = self._force_enable_logging(level, logger_obj) + try: + yield + finally: + logger_obj.setLevel(orig_level) + self.handler.setLevel(handler_orig_level) + logging.disable(original_disable_level) + + @contextmanager + def filtering(self, filter_: logging.Filter) -> Generator[None]: + """Context manager that temporarily adds the given filter to the caplog's + :meth:`handler` for the 'with' statement block, and removes that filter at the + end of the block. + + :param filter_: A custom :class:`logging.Filter` object. + + .. versionadded:: 7.5 + """ + self.handler.addFilter(filter_) + try: + yield + finally: + self.handler.removeFilter(filter_) + + +@fixture +def caplog(request: FixtureRequest) -> Generator[LogCaptureFixture]: + """Access and control log capturing. + + Captured logs are available through the following properties/methods:: + + * caplog.messages -> list of format-interpolated log messages + * caplog.text -> string containing formatted log output + * caplog.records -> list of logging.LogRecord instances + * caplog.record_tuples -> list of (logger_name, level, message) tuples + * caplog.clear() -> clear captured records and formatted log output string + """ + result = LogCaptureFixture(request.node, _ispytest=True) + yield result + result._finalize() + + +def get_log_level_for_setting(config: Config, *setting_names: str) -> int | None: + for setting_name in setting_names: + log_level = config.getoption(setting_name) + if log_level is None: + log_level = config.getini(setting_name) + if log_level: + break + else: + return None + + if isinstance(log_level, str): + log_level = log_level.upper() + try: + return int(getattr(logging, log_level, log_level)) + except ValueError as e: + # Python logging does not recognise this as a logging level + raise UsageError( + f"'{log_level}' is not recognized as a logging level name for " + f"'{setting_name}'. Please consider passing the " + "logging level num instead." + ) from e + + +# run after terminalreporter/capturemanager are configured +@hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + config.pluginmanager.register(LoggingPlugin(config), "logging-plugin") + + +class LoggingPlugin: + """Attaches to the logging module and captures log messages for each test.""" + + def __init__(self, config: Config) -> None: + """Create a new plugin to capture log messages. 
+ + The formatter can be safely shared across all handlers so + create a single one for the entire test session here. + """ + self._config = config + + # Report logging. + self.formatter = self._create_formatter( + get_option_ini(config, "log_format"), + get_option_ini(config, "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_level = get_log_level_for_setting(config, "log_level") + self.caplog_handler = LogCaptureHandler() + self.caplog_handler.setFormatter(self.formatter) + self.report_handler = LogCaptureHandler() + self.report_handler.setFormatter(self.formatter) + + # File logging. + self.log_file_level = get_log_level_for_setting( + config, "log_file_level", "log_level" + ) + log_file = get_option_ini(config, "log_file") or os.devnull + if log_file != os.devnull: + directory = os.path.dirname(os.path.abspath(log_file)) + if not os.path.isdir(directory): + os.makedirs(directory) + + self.log_file_mode = get_option_ini(config, "log_file_mode") or "w" + self.log_file_handler = _FileHandler( + log_file, mode=self.log_file_mode, encoding="UTF-8" + ) + log_file_format = get_option_ini(config, "log_file_format", "log_format") + log_file_date_format = get_option_ini( + config, "log_file_date_format", "log_date_format" + ) + + log_file_formatter = DatetimeFormatter( + log_file_format, datefmt=log_file_date_format + ) + self.log_file_handler.setFormatter(log_file_formatter) + + # CLI/live logging. + self.log_cli_level = get_log_level_for_setting( + config, "log_cli_level", "log_level" + ) + if self._log_cli_enabled(): + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + # Guaranteed by `_log_cli_enabled()`. + assert terminal_reporter is not None + capture_manager = config.pluginmanager.get_plugin("capturemanager") + # if capturemanager plugin is disabled, live logging still works. + self.log_cli_handler: ( + _LiveLoggingStreamHandler | _LiveLoggingNullHandler + ) = _LiveLoggingStreamHandler(terminal_reporter, capture_manager) + else: + self.log_cli_handler = _LiveLoggingNullHandler() + log_cli_formatter = self._create_formatter( + get_option_ini(config, "log_cli_format", "log_format"), + get_option_ini(config, "log_cli_date_format", "log_date_format"), + get_option_ini(config, "log_auto_indent"), + ) + self.log_cli_handler.setFormatter(log_cli_formatter) + self._disable_loggers(loggers_to_disable=config.option.logger_disable) + + def _disable_loggers(self, loggers_to_disable: list[str]) -> None: + if not loggers_to_disable: + return + + for name in loggers_to_disable: + logger = logging.getLogger(name) + logger.disabled = True + + def _create_formatter(self, log_format, log_date_format, auto_indent): + # Color option doesn't exist if terminal plugin is disabled. + color = getattr(self._config.option, "color", "no") + if color != "no" and ColoredLevelFormatter.LEVELNAME_FMT_REGEX.search( + log_format + ): + formatter: logging.Formatter = ColoredLevelFormatter( + create_terminal_writer(self._config), log_format, log_date_format + ) + else: + formatter = DatetimeFormatter(log_format, log_date_format) + + formatter._style = PercentStyleMultiline( + formatter._style._fmt, auto_indent=auto_indent + ) + + return formatter + + def set_log_path(self, fname: str) -> None: + """Set the filename parameter for Logging.FileHandler(). + + Creates parent directory if it does not exist. + + .. warning:: + This is an experimental API. 
+ """ + fpath = Path(fname) + + if not fpath.is_absolute(): + fpath = self._config.rootpath / fpath + + if not fpath.parent.exists(): + fpath.parent.mkdir(exist_ok=True, parents=True) + + # https://github.com/python/mypy/issues/11193 + stream: io.TextIOWrapper = fpath.open(mode=self.log_file_mode, encoding="UTF-8") # type: ignore[assignment] + old_stream = self.log_file_handler.setStream(stream) + if old_stream: + old_stream.close() + + def _log_cli_enabled(self) -> bool: + """Return whether live logging is enabled.""" + enabled = self._config.getoption( + "--log-cli-level" + ) is not None or self._config.getini("log_cli") + if not enabled: + return False + + terminal_reporter = self._config.pluginmanager.get_plugin("terminalreporter") + if terminal_reporter is None: + # terminal reporter is disabled e.g. by pytest-xdist. + return False + + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionstart(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionstart") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_collection(self) -> Generator[None]: + self.log_cli_handler.set_when("collection") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl(wrapper=True) + def pytest_runtestloop(self, session: Session) -> Generator[None, object, object]: + if session.config.option.collectonly: + return (yield) + + if self._log_cli_enabled() and self._config.get_verbosity() < 1: + # The verbose flag is needed to avoid messy test progress output. + self._config.option.verbose = 1 + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) # Run all the tests. 
+ + @hookimpl + def pytest_runtest_logstart(self) -> None: + self.log_cli_handler.reset() + self.log_cli_handler.set_when("start") + + @hookimpl + def pytest_runtest_logreport(self) -> None: + self.log_cli_handler.set_when("logreport") + + @contextmanager + def _runtest_for(self, item: nodes.Item, when: str) -> Generator[None]: + """Implement the internals of the pytest_runtest_xxx() hooks.""" + with ( + catching_logs( + self.caplog_handler, + level=self.log_level, + ) as caplog_handler, + catching_logs( + self.report_handler, + level=self.log_level, + ) as report_handler, + ): + caplog_handler.reset() + report_handler.reset() + item.stash[caplog_records_key][when] = caplog_handler.records + item.stash[caplog_handler_key] = caplog_handler + + try: + yield + finally: + log = report_handler.stream.getvalue().strip() + item.add_report_section(when, "log", log) + + @hookimpl(wrapper=True) + def pytest_runtest_setup(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("setup") + + empty: dict[str, list[logging.LogRecord]] = {} + item.stash[caplog_records_key] = empty + with self._runtest_for(item, "setup"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_call(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("call") + + with self._runtest_for(item, "call"): + yield + + @hookimpl(wrapper=True) + def pytest_runtest_teardown(self, item: nodes.Item) -> Generator[None]: + self.log_cli_handler.set_when("teardown") + + try: + with self._runtest_for(item, "teardown"): + yield + finally: + del item.stash[caplog_records_key] + del item.stash[caplog_handler_key] + + @hookimpl + def pytest_runtest_logfinish(self) -> None: + self.log_cli_handler.set_when("finish") + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_sessionfinish(self) -> Generator[None]: + self.log_cli_handler.set_when("sessionfinish") + + with catching_logs(self.log_cli_handler, level=self.log_cli_level): + with catching_logs(self.log_file_handler, level=self.log_file_level): + return (yield) + + @hookimpl + def pytest_unconfigure(self) -> None: + # Close the FileHandler explicitly. + # (logging.shutdown might have lost the weakref?!) + self.log_file_handler.close() + + +class _FileHandler(logging.FileHandler): + """A logging FileHandler with pytest tweaks.""" + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingStreamHandler(logging_StreamHandler): + """A logging StreamHandler used by the live logging feature: it will + write a newline before the first log message in each test. + + During live logging we must also explicitly disable stdout/stderr + capturing otherwise it will get captured and won't appear in the + terminal. + """ + + # Officially stream needs to be a IO[str], but TerminalReporter + # isn't. So force it. 
+ stream: TerminalReporter = None # type: ignore + + def __init__( + self, + terminal_reporter: TerminalReporter, + capture_manager: CaptureManager | None, + ) -> None: + super().__init__(stream=terminal_reporter) # type: ignore[arg-type] + self.capture_manager = capture_manager + self.reset() + self.set_when(None) + self._test_outcome_written = False + + def reset(self) -> None: + """Reset the handler; should be called before the start of each test.""" + self._first_record_emitted = False + + def set_when(self, when: str | None) -> None: + """Prepare for the given test phase (setup/call/teardown).""" + self._when = when + self._section_name_shown = False + if when == "start": + self._test_outcome_written = False + + def emit(self, record: logging.LogRecord) -> None: + ctx_manager = ( + self.capture_manager.global_and_fixture_disabled() + if self.capture_manager + else nullcontext() + ) + with ctx_manager: + if not self._first_record_emitted: + self.stream.write("\n") + self._first_record_emitted = True + elif self._when in ("teardown", "finish"): + if not self._test_outcome_written: + self._test_outcome_written = True + self.stream.write("\n") + if not self._section_name_shown and self._when: + self.stream.section("live log " + self._when, sep="-", bold=True) + self._section_name_shown = True + super().emit(record) + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass + + +class _LiveLoggingNullHandler(logging.NullHandler): + """A logging handler used when live logging is disabled.""" + + def reset(self) -> None: + pass + + def set_when(self, when: str) -> None: + pass + + def handleError(self, record: logging.LogRecord) -> None: + # Handled by LogCaptureHandler. + pass diff --git a/venv/lib/python3.10/site-packages/_pytest/main.py b/venv/lib/python3.10/site-packages/_pytest/main.py new file mode 100644 index 0000000000000000000000000000000000000000..dac084b553a5c5cff8e5a88d0451965710c4c541 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/main.py @@ -0,0 +1,1076 @@ +"""Core implementation of the testing process: init, session, runtest loop.""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Sequence +from collections.abc import Set as AbstractSet +import dataclasses +import fnmatch +import functools +import importlib +import importlib.util +import os +from pathlib import Path +import sys +from typing import final +from typing import Literal +from typing import overload +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import nodes +import _pytest._code +from _pytest.config import Config +from _pytest.config import directory_arg +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import PytestPluginManager +from _pytest.config import UsageError +from _pytest.config.argparsing import Parser +from _pytest.config.compat import PathAwareHookProxy +from _pytest.outcomes import exit +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import safe_exists +from _pytest.pathlib import scandir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.runner import collect_one_node +from _pytest.runner import SetupState +from _pytest.warning_types import PytestWarning + + +if 
TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.fixtures import FixtureManager + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general", "Running and selection options") + group._addoption( # private to use reserved lower-case short option + "-x", + "--exitfirst", + action="store_const", + dest="maxfail", + const=1, + help="Exit instantly on first error or failed test", + ) + group.addoption( + "--maxfail", + metavar="num", + action="store", + type=int, + dest="maxfail", + default=0, + help="Exit after first num failures or errors", + ) + group.addoption( + "--strict-config", + action="store_true", + help="Any warnings encountered while parsing the `pytest` section of the " + "configuration file raise errors", + ) + group.addoption( + "--strict-markers", + action="store_true", + help="Markers not registered in the `markers` section of the configuration " + "file raise errors", + ) + group.addoption( + "--strict", + action="store_true", + help="(Deprecated) alias to --strict-markers", + ) + + group = parser.getgroup("pytest-warnings") + group.addoption( + "-W", + "--pythonwarnings", + action="append", + help="Set which warnings to report, see -W option of Python itself", + ) + parser.addini( + "filterwarnings", + type="linelist", + help="Each line specifies a pattern for " + "warnings.filterwarnings. " + "Processed after -W/--pythonwarnings.", + ) + + group = parser.getgroup("collect", "collection") + group.addoption( + "--collectonly", + "--collect-only", + "--co", + action="store_true", + help="Only collect tests, don't execute them", + ) + group.addoption( + "--pyargs", + action="store_true", + help="Try to interpret all arguments as Python packages", + ) + group.addoption( + "--ignore", + action="append", + metavar="path", + help="Ignore path during collection (multi-allowed)", + ) + group.addoption( + "--ignore-glob", + action="append", + metavar="path", + help="Ignore path pattern during collection (multi-allowed)", + ) + group.addoption( + "--deselect", + action="append", + metavar="nodeid_prefix", + help="Deselect item (via node id prefix) during collection (multi-allowed)", + ) + group.addoption( + "--confcutdir", + dest="confcutdir", + default=None, + metavar="dir", + type=functools.partial(directory_arg, optname="--confcutdir"), + help="Only load conftest.py's relative to specified dir", + ) + group.addoption( + "--noconftest", + action="store_true", + dest="noconftest", + default=False, + help="Don't load any conftest.py files", + ) + group.addoption( + "--keepduplicates", + "--keep-duplicates", + action="store_true", + dest="keepduplicates", + default=False, + help="Keep duplicate tests", + ) + group.addoption( + "--collect-in-virtualenv", + action="store_true", + dest="collect_in_virtualenv", + default=False, + help="Don't ignore tests in a local virtualenv directory", + ) + group.addoption( + "--continue-on-collection-errors", + action="store_true", + default=False, + dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur", + ) + group.addoption( + "--import-mode", + default="prepend", + choices=["prepend", "append", "importlib"], + dest="importmode", + help="Prepend/append to sys.path when importing test modules and conftest " + "files. 
Default: prepend.", + ) + parser.addini( + "norecursedirs", + "Directory patterns to avoid for recursion", + type="args", + default=[ + "*.egg", + ".*", + "_darcs", + "build", + "CVS", + "dist", + "node_modules", + "venv", + "{arch}", + ], + ) + parser.addini( + "testpaths", + "Directories to search for tests when no files or directories are given on the " + "command line", + type="args", + default=[], + ) + parser.addini( + "collect_imported_tests", + "Whether to collect tests in imported modules outside `testpaths`", + type="bool", + default=True, + ) + parser.addini( + "consider_namespace_packages", + type="bool", + default=False, + help="Consider namespace packages when resolving module names during import", + ) + + group = parser.getgroup("debugconfig", "test session debugging and configuration") + group._addoption( # private to use reserved lower-case short option + "-c", + "--config-file", + metavar="FILE", + type=str, + dest="inifilename", + help="Load configuration from `FILE` instead of trying to locate one of the " + "implicit configuration files.", + ) + group.addoption( + "--rootdir", + action="store", + dest="rootdir", + help="Define root directory for tests. Can be relative path: 'root_dir', './root_dir', " + "'root_dir/another_dir/'; absolute path: '/home/user/root_dir'; path with variables: " + "'$HOME/root_dir'.", + ) + group.addoption( + "--basetemp", + dest="basetemp", + default=None, + type=validate_basetemp, + metavar="dir", + help=( + "Base temporary directory for this test run. " + "(Warning: this directory is removed if it exists.)" + ), + ) + + +def validate_basetemp(path: str) -> str: + # GH 7119 + msg = "basetemp must not be empty, the current working directory or any parent directory of it" + + # empty path + if not path: + raise argparse.ArgumentTypeError(msg) + + def is_ancestor(base: Path, query: Path) -> bool: + """Return whether query is an ancestor of base.""" + if base == query: + return True + return query in base.parents + + # check if path is an ancestor of cwd + if is_ancestor(Path.cwd(), Path(path).absolute()): + raise argparse.ArgumentTypeError(msg) + + # check symlinks for ancestors + if is_ancestor(Path.cwd().resolve(), Path(path).resolve()): + raise argparse.ArgumentTypeError(msg) + + return path + + +def wrap_session( + config: Config, doit: Callable[[Config, Session], int | ExitCode | None] +) -> int | ExitCode: + """Skeleton command line program.""" + session = Session.from_config(config) + session.exitstatus = ExitCode.OK + initstate = 0 + try: + try: + config._do_configure() + initstate = 1 + config.hook.pytest_sessionstart(session=session) + initstate = 2 + session.exitstatus = doit(config, session) or 0 + except UsageError: + session.exitstatus = ExitCode.USAGE_ERROR + raise + except Failed: + session.exitstatus = ExitCode.TESTS_FAILED + except (KeyboardInterrupt, exit.Exception): + excinfo = _pytest._code.ExceptionInfo.from_current() + exitstatus: int | ExitCode = ExitCode.INTERRUPTED + if isinstance(excinfo.value, exit.Exception): + if excinfo.value.returncode is not None: + exitstatus = excinfo.value.returncode + if initstate < 2: + sys.stderr.write(f"{excinfo.typename}: {excinfo.value.msg}\n") + config.hook.pytest_keyboard_interrupt(excinfo=excinfo) + session.exitstatus = exitstatus + except BaseException: + session.exitstatus = ExitCode.INTERNAL_ERROR + excinfo = _pytest._code.ExceptionInfo.from_current() + try: + config.notify_exception(excinfo, config.option) + except exit.Exception as exc: + if exc.returncode is not None: + 
session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + else: + if isinstance(excinfo.value, SystemExit): + sys.stderr.write("mainloop: caught unexpected SystemExit!\n") + + finally: + # Explicitly break reference cycle. + excinfo = None # type: ignore + os.chdir(session.startpath) + if initstate >= 2: + try: + config.hook.pytest_sessionfinish( + session=session, exitstatus=session.exitstatus + ) + except exit.Exception as exc: + if exc.returncode is not None: + session.exitstatus = exc.returncode + sys.stderr.write(f"{type(exc).__name__}: {exc}\n") + config._ensure_unconfigure() + return session.exitstatus + + +def pytest_cmdline_main(config: Config) -> int | ExitCode: + return wrap_session(config, _main) + + +def _main(config: Config, session: Session) -> int | ExitCode | None: + """Default command line protocol for initialization, session, + running tests and reporting.""" + config.hook.pytest_collection(session=session) + config.hook.pytest_runtestloop(session=session) + + if session.testsfailed: + return ExitCode.TESTS_FAILED + elif session.testscollected == 0: + return ExitCode.NO_TESTS_COLLECTED + return None + + +def pytest_collection(session: Session) -> None: + session.perform_collect() + + +def pytest_runtestloop(session: Session) -> bool: + if session.testsfailed and not session.config.option.continue_on_collection_errors: + raise session.Interrupted( + f"{session.testsfailed} error{'s' if session.testsfailed != 1 else ''} during collection" + ) + + if session.config.option.collectonly: + return True + + for i, item in enumerate(session.items): + nextitem = session.items[i + 1] if i + 1 < len(session.items) else None + item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) + if session.shouldfail: + raise session.Failed(session.shouldfail) + if session.shouldstop: + raise session.Interrupted(session.shouldstop) + return True + + +def _in_venv(path: Path) -> bool: + """Attempt to detect if ``path`` is the root of a Virtual Environment by + checking for the existence of the pyvenv.cfg file. + + [https://peps.python.org/pep-0405/] + + For regression protection we also check for conda environments that do not include pyvenv.cfg yet -- + https://github.com/conda/conda/issues/13337 is the conda issue tracking adding pyvenv.cfg. + + Checking for the `conda-meta/history` file per https://github.com/pytest-dev/pytest/issues/12652#issuecomment-2246336902.
+ + """ + try: + return ( + path.joinpath("pyvenv.cfg").is_file() + or path.joinpath("conda-meta", "history").is_file() + ) + except OSError: + return False + + +def pytest_ignore_collect(collection_path: Path, config: Config) -> bool | None: + if collection_path.name == "__pycache__": + return True + + ignore_paths = config._getconftest_pathlist( + "collect_ignore", path=collection_path.parent + ) + ignore_paths = ignore_paths or [] + excludeopt = config.getoption("ignore") + if excludeopt: + ignore_paths.extend(absolutepath(x) for x in excludeopt) + + if collection_path in ignore_paths: + return True + + ignore_globs = config._getconftest_pathlist( + "collect_ignore_glob", path=collection_path.parent + ) + ignore_globs = ignore_globs or [] + excludeglobopt = config.getoption("ignore_glob") + if excludeglobopt: + ignore_globs.extend(absolutepath(x) for x in excludeglobopt) + + if any(fnmatch.fnmatch(str(collection_path), str(glob)) for glob in ignore_globs): + return True + + allow_in_venv = config.getoption("collect_in_virtualenv") + if not allow_in_venv and _in_venv(collection_path): + return True + + if collection_path.is_dir(): + norecursepatterns = config.getini("norecursedirs") + if any(fnmatch_ex(pat, collection_path) for pat in norecursepatterns): + return True + + return None + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + return Dir.from_parent(parent, path=path) + + +def pytest_collection_modifyitems(items: list[nodes.Item], config: Config) -> None: + deselect_prefixes = tuple(config.getoption("deselect") or []) + if not deselect_prefixes: + return + + remaining = [] + deselected = [] + for colitem in items: + if colitem.nodeid.startswith(deselect_prefixes): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class FSHookProxy: + def __init__( + self, + pm: PytestPluginManager, + remove_mods: AbstractSet[object], + ) -> None: + self.pm = pm + self.remove_mods = remove_mods + + def __getattr__(self, name: str) -> pluggy.HookCaller: + x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) + self.__dict__[name] = x + return x + + +class Interrupted(KeyboardInterrupt): + """Signals that the test run was interrupted.""" + + __module__ = "builtins" # For py3. + + +class Failed(Exception): + """Signals a stop as failed test run.""" + + +@dataclasses.dataclass +class _bestrelpath_cache(dict[Path, str]): + __slots__ = ("path",) + + path: Path + + def __missing__(self, path: Path) -> str: + r = bestrelpath(self.path, path) + self[path] = r + return r + + +@final +class Dir(nodes.Directory): + """Collector of files in a file system directory. + + .. versionadded:: 8.0 + + .. note:: + + Python directories with an `__init__.py` file are instead collected by + :class:`~pytest.Package` by default. Both are :class:`~pytest.Directory` + collectors. + """ + + @classmethod + def from_parent( # type: ignore[override] + cls, + parent: nodes.Collector, + *, + path: Path, + ) -> Self: + """The public constructor. + + :param parent: The parent collector of this Dir. + :param path: The directory's path. 
+ :type path: pathlib.Path + """ + return super().from_parent(parent=parent, path=path) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +@final +class Session(nodes.Collector): + """The root of the collection tree. + + ``Session`` collects the initial paths given as arguments to pytest. + """ + + Interrupted = Interrupted + Failed = Failed + # Set on the session by runner.pytest_sessionstart. + _setupstate: SetupState + # Set on the session by fixtures.pytest_sessionstart. + _fixturemanager: FixtureManager + exitstatus: int | ExitCode + + def __init__(self, config: Config) -> None: + super().__init__( + name="", + path=config.rootpath, + fspath=None, + parent=None, + config=config, + session=self, + nodeid="", + ) + self.testsfailed = 0 + self.testscollected = 0 + self._shouldstop: bool | str = False + self._shouldfail: bool | str = False + self.trace = config.trace.root.get("collection") + self._initialpaths: frozenset[Path] = frozenset() + self._initialpaths_with_parents: frozenset[Path] = frozenset() + self._notfound: list[tuple[str, Sequence[nodes.Collector]]] = [] + self._initial_parts: list[CollectionArgument] = [] + self._collection_cache: dict[nodes.Collector, CollectReport] = {} + self.items: list[nodes.Item] = [] + + self._bestrelpathcache: dict[Path, str] = _bestrelpath_cache(config.rootpath) + + self.config.pluginmanager.register(self, name="session") + + @classmethod + def from_config(cls, config: Config) -> Session: + session: Session = cls._create(config=config) + return session + + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} {self.name} " + f"exitstatus=%r " + f"testsfailed={self.testsfailed} " + f"testscollected={self.testscollected}>" + ) % getattr(self, "exitstatus", "") + + @property + def shouldstop(self) -> bool | str: + return self._shouldstop + + @shouldstop.setter + def shouldstop(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldstop: + warnings.warn( + PytestWarning( + "session.shouldstop cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldstop = value + + @property + def shouldfail(self) -> bool | str: + return self._shouldfail + + @shouldfail.setter + def shouldfail(self, value: bool | str) -> None: + # The runner checks shouldfail and assumes that if it is set we are + # definitely stopping, so prevent unsetting it. + if value is False and self._shouldfail: + warnings.warn( + PytestWarning( + "session.shouldfail cannot be unset after it has been set; ignoring." + ), + stacklevel=2, + ) + return + self._shouldfail = value + + @property + def startpath(self) -> Path: + """The path from which pytest was invoked. + + .. 
versionadded:: 7.0.0 + """ + return self.config.invocation_params.dir + + def _node_location_to_relpath(self, node_path: Path) -> str: + # bestrelpath is a quite slow function. + return self._bestrelpathcache[node_path] + + @hookimpl(tryfirst=True) + def pytest_collectstart(self) -> None: + if self.shouldfail: + raise self.Failed(self.shouldfail) + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport(self, report: TestReport | CollectReport) -> None: + if report.failed and not hasattr(report, "wasxfail"): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldfail = f"stopping after {self.testsfailed} failures" + + pytest_collectreport = pytest_runtest_logreport + + def isinitpath( + self, + path: str | os.PathLike[str], + *, + with_parents: bool = False, + ) -> bool: + """Is path an initial path? + + An initial path is a path explicitly given to pytest on the command + line. + + :param with_parents: + If set, also return True if the path is a parent of an initial path. + + .. versionchanged:: 8.0 + Added the ``with_parents`` parameter. + """ + # Optimization: Path(Path(...)) is much slower than isinstance. + path_ = path if isinstance(path, Path) else Path(path) + if with_parents: + return path_ in self._initialpaths_with_parents + else: + return path_ in self._initialpaths + + def gethookproxy(self, fspath: os.PathLike[str]) -> pluggy.HookRelay: + # Optimization: Path(Path(...)) is much slower than isinstance. + path = fspath if isinstance(fspath, Path) else Path(fspath) + pm = self.config.pluginmanager + # Check if we have the common case of running + # hooks with all conftest.py files. + my_conftestmodules = pm._getconftestmodules(path) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + proxy: pluggy.HookRelay + if remove_mods: + # One or more conftests are not in use at this path. + proxy = PathAwareHookProxy(FSHookProxy(pm, remove_mods)) # type: ignore[arg-type,assignment] + else: + # All plugins are active for this fspath. + proxy = self.config.hook + return proxy + + def _collect_path( + self, + path: Path, + path_cache: dict[Path, Sequence[nodes.Collector]], + ) -> Sequence[nodes.Collector]: + """Create a Collector for the given path. + + `path_cache` makes it so the same Collectors are returned for the same + path. + """ + if path in path_cache: + return path_cache[path] + + if path.is_dir(): + ihook = self.gethookproxy(path.parent) + col: nodes.Collector | None = ihook.pytest_collect_directory( + path=path, parent=self + ) + cols: Sequence[nodes.Collector] = (col,) if col is not None else () + + elif path.is_file(): + ihook = self.gethookproxy(path) + cols = ihook.pytest_collect_file(file_path=path, parent=self) + + else: + # Broken symlink or invalid/missing file. + cols = () + + path_cache[path] = cols + return cols + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: Literal[True] = ... + ) -> Sequence[nodes.Item]: ... + + @overload + def perform_collect( + self, args: Sequence[str] | None = ..., genitems: bool = ... + ) -> Sequence[nodes.Item | nodes.Collector]: ... + + def perform_collect( + self, args: Sequence[str] | None = None, genitems: bool = True + ) -> Sequence[nodes.Item | nodes.Collector]: + """Perform the collection phase for this session. 
+ + This is called by the default :hook:`pytest_collection` hook + implementation; see the documentation of this hook for more details. + For testing purposes, it may also be called directly on a fresh + ``Session``. + + This function normally recursively expands any collectors collected + from the session to their items, and only items are returned. For + testing purposes, this may be suppressed by passing ``genitems=False``, + in which case the return value contains these collectors unexpanded, + and ``session.items`` is empty. + """ + if args is None: + args = self.config.args + + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + + hook = self.config.hook + + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + self.items = [] + items: Sequence[nodes.Item | nodes.Collector] = self.items + try: + initialpaths: list[Path] = [] + initialpaths_with_parents: list[Path] = [] + for arg in args: + collection_argument = resolve_collection_argument( + self.config.invocation_params.dir, + arg, + as_pypath=self.config.option.pyargs, + ) + self._initial_parts.append(collection_argument) + initialpaths.append(collection_argument.path) + initialpaths_with_parents.append(collection_argument.path) + initialpaths_with_parents.extend(collection_argument.path.parents) + self._initialpaths = frozenset(initialpaths) + self._initialpaths_with_parents = frozenset(initialpaths_with_parents) + + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, collectors in self._notfound: + if collectors: + errors.append( + f"not found: {arg}\n(no match in any of {collectors!r})" + ) + else: + errors.append(f"found no collectors for {arg}") + + raise UsageError(*errors) + + if not genitems: + items = rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems( + session=self, config=self.config, items=items + ) + finally: + self._notfound = [] + self._initial_parts = [] + self._collection_cache = {} + hook.pytest_collection_finish(session=self) + + if genitems: + self.testscollected = len(items) + + return items + + def _collect_one_node( + self, + node: nodes.Collector, + handle_dupes: bool = True, + ) -> tuple[CollectReport, bool]: + if node in self._collection_cache and handle_dupes: + rep = self._collection_cache[node] + return rep, True + else: + rep = collect_one_node(node) + self._collection_cache[node] = rep + return rep, False + + def collect(self) -> Iterator[nodes.Item | nodes.Collector]: + # This is a cache for the root directories of the initial paths. + # We can't use collection_cache for Session because of its special + # role as the bootstrapping collector. + path_cache: dict[Path, Sequence[nodes.Collector]] = {} + + pm = self.config.pluginmanager + + for collection_argument in self._initial_parts: + self.trace("processing argument", collection_argument) + self.trace.root.indent += 1 + + argpath = collection_argument.path + names = collection_argument.parts + module_name = collection_argument.module_name + + # resolve_collection_argument() ensures this. + if argpath.is_dir(): + assert not names, f"invalid arg {(argpath, names)!r}" + + paths = [argpath] + # Add relevant parents of the path, from the root, e.g. + # /a/b/c.py -> [/, /a, /a/b, /a/b/c.py] + if module_name is None: + # Paths outside of the confcutdir should not be considered. 
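# Worked example: with confcutdir == /repo and argpath == /repo/tests/test_x.py,
# the loop below prepends /repo/tests and then /repo, so paths becomes
# [/repo, /repo/tests, /repo/tests/test_x.py].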
+ for path in argpath.parents: + if not pm._is_in_confcutdir(path): + break + paths.insert(0, path) + else: + # For --pyargs arguments, only consider paths matching the module + # name. Paths beyond the package hierarchy are not included. + module_name_parts = module_name.split(".") + for i, path in enumerate(argpath.parents, 2): + if i > len(module_name_parts) or path.stem != module_name_parts[-i]: + break + paths.insert(0, path) + + # Start going over the parts from the root, collecting each level + # and discarding all nodes which don't match the level's part. + any_matched_in_initial_part = False + notfound_collectors = [] + work: list[tuple[nodes.Collector | nodes.Item, list[Path | str]]] = [ + (self, [*paths, *names]) + ] + while work: + matchnode, matchparts = work.pop() + + # Pop'd all of the parts, this is a match. + if not matchparts: + yield matchnode + any_matched_in_initial_part = True + continue + + # Should have been matched by now, discard. + if not isinstance(matchnode, nodes.Collector): + continue + + # Collect this level of matching. + # Collecting Session (self) is done directly to avoid endless + # recursion to this function. + subnodes: Sequence[nodes.Collector | nodes.Item] + if isinstance(matchnode, Session): + assert isinstance(matchparts[0], Path) + subnodes = matchnode._collect_path(matchparts[0], path_cache) + else: + # For backward compat, files given directly multiple + # times on the command line should not be deduplicated. + handle_dupes = not ( + len(matchparts) == 1 + and isinstance(matchparts[0], Path) + and matchparts[0].is_file() + ) + rep, duplicate = self._collect_one_node(matchnode, handle_dupes) + if not duplicate and not rep.passed: + # Report collection failures here to avoid failing to + # run some test specified in the command line because + # the module could not be imported (#134). + matchnode.ihook.pytest_collectreport(report=rep) + if not rep.passed: + continue + subnodes = rep.result + + # Prune this level. + any_matched_in_collector = False + for node in reversed(subnodes): + # Path part e.g. `/a/b/` in `/a/b/test_file.py::TestIt::test_it`. + if isinstance(matchparts[0], Path): + is_match = node.path == matchparts[0] + if sys.platform == "win32" and not is_match: + # In case the file paths do not match, fallback to samefile() to + # account for short-paths on Windows (#11895). + same_file = os.path.samefile(node.path, matchparts[0]) + # We don't want to match links to the current node, + # otherwise we would match the same file more than once (#12039). + is_match = same_file and ( + os.path.islink(node.path) + == os.path.islink(matchparts[0]) + ) + + # Name part e.g. `TestIt` in `/a/b/test_file.py::TestIt::test_it`. + else: + # TODO: Remove parametrized workaround once collection structure contains + # parametrization. 
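# e.g. a node named "test_it[2-3]" still matches the name part "test_it"
# through the split("[")[0] fallback below, because parametrized ids are not
# yet represented in the collection structure.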
+ is_match = ( + node.name == matchparts[0] + or node.name.split("[")[0] == matchparts[0] + ) + if is_match: + work.append((node, matchparts[1:])) + any_matched_in_collector = True + + if not any_matched_in_collector: + notfound_collectors.append(matchnode) + + if not any_matched_in_initial_part: + report_arg = "::".join((str(argpath), *names)) + self._notfound.append((report_arg, notfound_collectors)) + + self.trace.root.indent -= 1 + + def genitems(self, node: nodes.Item | nodes.Collector) -> Iterator[nodes.Item]: + self.trace("genitems", node) + if isinstance(node, nodes.Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, nodes.Collector) + keepduplicates = self.config.getoption("keepduplicates") + # For backward compat, dedup only applies to files. + handle_dupes = not (keepduplicates and isinstance(node, nodes.File)) + rep, duplicate = self._collect_one_node(node, handle_dupes) + if duplicate and not keepduplicates: + return + if rep.passed: + for subnode in rep.result: + yield from self.genitems(subnode) + if not duplicate: + node.ihook.pytest_collectreport(report=rep) + + +def search_pypath(module_name: str) -> str | None: + """Search sys.path for the given dotted module name, and return its file + system path if found.""" + try: + spec = importlib.util.find_spec(module_name) + # AttributeError: looks like package module, but actually filename + # ImportError: module does not exist + # ValueError: not a module name + except (AttributeError, ImportError, ValueError): + return None + if spec is None or spec.origin is None or spec.origin == "namespace": + return None + elif spec.submodule_search_locations: + return os.path.dirname(spec.origin) + else: + return spec.origin + + +@dataclasses.dataclass(frozen=True) +class CollectionArgument: + """A resolved collection argument.""" + + path: Path + parts: Sequence[str] + module_name: str | None + + +def resolve_collection_argument( + invocation_path: Path, arg: str, *, as_pypath: bool = False +) -> CollectionArgument: + """Parse path arguments optionally containing selection parts and return a resolved :class:`CollectionArgument`. + + Command-line arguments can point to files and/or directories, and optionally contain + parts for specific tests selection, for example: + + "pkg/tests/test_foo.py::TestClass::test_foo" + + This function ensures the path exists, and returns a resolved `CollectionArgument`: + + CollectionArgument( + path=Path("/full/path/to/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name=None, + ) + + When as_pypath is True, expects that the command-line argument actually contains + module paths instead of file-system paths: + + "pkg.tests.test_foo::TestClass::test_foo" + + In which case we search sys.path for a matching module, and then return the *path* to the + found module, which may look like this: + + CollectionArgument( + path=Path("/home/u/myvenv/lib/site-packages/pkg/tests/test_foo.py"), + parts=["TestClass", "test_foo"], + module_name="pkg.tests.test_foo", + ) + + If the path doesn't exist, raise UsageError. + If the path is a directory and selection parts are present, raise UsageError.
+ """ + base, squacket, rest = str(arg).partition("[") + strpath, *parts = base.split("::") + if parts: + parts[-1] = f"{parts[-1]}{squacket}{rest}" + module_name = None + if as_pypath: + pyarg_strpath = search_pypath(strpath) + if pyarg_strpath is not None: + module_name = strpath + strpath = pyarg_strpath + fspath = invocation_path / strpath + fspath = absolutepath(fspath) + if not safe_exists(fspath): + msg = ( + "module or package not found: {arg} (missing __init__.py?)" + if as_pypath + else "file or directory not found: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + if parts and fspath.is_dir(): + msg = ( + "package argument cannot contain :: selection parts: {arg}" + if as_pypath + else "directory argument cannot contain :: selection parts: {arg}" + ) + raise UsageError(msg.format(arg=arg)) + return CollectionArgument( + path=fspath, + parts=parts, + module_name=module_name, + ) diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/__init__.py b/venv/lib/python3.10/site-packages/_pytest/mark/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..068c7410a46548e3a62761a4ac9fe70898215ceb --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/mark/__init__.py @@ -0,0 +1,301 @@ +"""Generic mechanism for marking and selecting python functions.""" + +from __future__ import annotations + +import collections +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Set as AbstractSet +import dataclasses +from typing import Optional +from typing import TYPE_CHECKING + +from .expression import Expression +from .expression import ParseError +from .structures import _HiddenParam +from .structures import EMPTY_PARAMETERSET_OPTION +from .structures import get_empty_parameterset_mark +from .structures import HIDDEN_PARAM +from .structures import Mark +from .structures import MARK_GEN +from .structures import MarkDecorator +from .structures import MarkGenerator +from .structures import ParameterSet +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import UsageError +from _pytest.config.argparsing import NOT_SET +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey + + +if TYPE_CHECKING: + from _pytest.nodes import Item + + +__all__ = [ + "HIDDEN_PARAM", + "MARK_GEN", + "Mark", + "MarkDecorator", + "MarkGenerator", + "ParameterSet", + "get_empty_parameterset_mark", +] + + +old_mark_config_key = StashKey[Optional[Config]]() + + +def param( + *values: object, + marks: MarkDecorator | Collection[MarkDecorator | Mark] = (), + id: str | _HiddenParam | None = None, +) -> ParameterSet: + """Specify a parameter in `pytest.mark.parametrize`_ calls or + :ref:`parametrized fixtures `. + + .. code-block:: python + + @pytest.mark.parametrize( + "test_input,expected", + [ + ("3+5", 8), + pytest.param("6*9", 42, marks=pytest.mark.xfail), + ], + ) + def test_eval(test_input, expected): + assert eval(test_input) == expected + + :param values: Variable args of the values of the parameter set, in order. + + :param marks: + A single mark or a list of marks to be applied to this parameter set. + + :ref:`pytest.mark.usefixtures ` cannot be added via this parameter. + + :type id: str | Literal[pytest.HIDDEN_PARAM] | None + :param id: + The id to attribute to this parameter set. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. 
Can only be used at most 1 time, as + test names need to be unique. + """ + return ParameterSet.param(*values, marks=marks, id=id) + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group._addoption( # private to use reserved lower-case short option + "-k", + action="store", + dest="keyword", + default="", + metavar="EXPRESSION", + help="Only run tests which match the given substring expression. " + "An expression is a Python evaluable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other', while -k 'not test_method' " + "matches those that don't contain 'test_method' in their names. " + "-k 'not test_method and not test_other' will eliminate the matches. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them. " + "The matching is case-insensitive.", + ) + + group._addoption( # private to use reserved lower-case short option + "-m", + action="store", + dest="markexpr", + default="", + metavar="MARKEXPR", + help="Only run tests matching given mark expression. " + "For example: -m 'mark1 and not mark2'.", + ) + + group.addoption( + "--markers", + action="store_true", + help="show markers (builtin, plugin and per-project ones).", + ) + + parser.addini("markers", "Register new markers for test functions", "linelist") + parser.addini(EMPTY_PARAMETERSET_OPTION, "Default marker for empty parametersets") + + +@hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + import _pytest.config + + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + parts = line.split(":", 1) + name = parts[0] + rest = parts[1] if len(parts) == 2 else "" + tw.write(f"@pytest.mark.{name}:", bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + return None + + +@dataclasses.dataclass +class KeywordMatcher: + """A matcher for keywords. + + Given a list of names, matches any substring of one of these names. The + string inclusion check is case-insensitive. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. + """ + + __slots__ = ("_names",) + + _names: AbstractSet[str] + + @classmethod + def from_item(cls, item: Item) -> KeywordMatcher: + mapped_names = set() + + # Add the names of the current item and any parent items, + # except the Session and root Directory's which are not + # interesting for matching. + import pytest + + for node in item.listchain(): + if isinstance(node, pytest.Session): + continue + if isinstance(node, pytest.Directory) and isinstance( + node.parent, pytest.Session + ): + continue + mapped_names.add(node.name) + + # Add the names added as extra keywords to current or parent items. + mapped_names.update(item.listextrakeywords()) + + # Add the names attached to the current function through direct assignment. 
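+        # A hedged illustration (names are hypothetical): after
+        #
+        #     def test_one(): ...
+        #     test_one.interop = True
+        #
+        # "interop" ends up in __dict__, so `-k interop` selects test_one.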
+ function_obj = getattr(item, "function", None) + if function_obj: + mapped_names.update(function_obj.__dict__) + + # Add the markers to the keywords as we no longer handle them correctly. + mapped_names.update(mark.name for mark in item.iter_markers()) + + return cls(mapped_names) + + def __call__(self, subname: str, /, **kwargs: str | int | bool | None) -> bool: + if kwargs: + raise UsageError("Keyword expressions do not support call parameters.") + subname = subname.lower() + return any(subname in name.lower() for name in self._names) + + +def deselect_by_keyword(items: list[Item], config: Config) -> None: + keywordexpr = config.option.keyword.lstrip() + if not keywordexpr: + return + + expr = _parse_expression(keywordexpr, "Wrong expression passed to '-k'") + + remaining = [] + deselected = [] + for colitem in items: + if not expr.evaluate(KeywordMatcher.from_item(colitem)): + deselected.append(colitem) + else: + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +@dataclasses.dataclass +class MarkMatcher: + """A matcher for markers which are present. + + Tries to match on any marker names, attached to the given colitem. + """ + + __slots__ = ("own_mark_name_mapping",) + + own_mark_name_mapping: dict[str, list[Mark]] + + @classmethod + def from_markers(cls, markers: Iterable[Mark]) -> MarkMatcher: + mark_name_mapping = collections.defaultdict(list) + for mark in markers: + mark_name_mapping[mark.name].append(mark) + return cls(mark_name_mapping) + + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: + if not (matches := self.own_mark_name_mapping.get(name, [])): + return False + + for mark in matches: # pylint: disable=consider-using-any-or-all + if all(mark.kwargs.get(k, NOT_SET) == v for k, v in kwargs.items()): + return True + return False + + +def deselect_by_mark(items: list[Item], config: Config) -> None: + matchexpr = config.option.markexpr + if not matchexpr: + return + + expr = _parse_expression(matchexpr, "Wrong expression passed to '-m'") + remaining: list[Item] = [] + deselected: list[Item] = [] + for item in items: + if expr.evaluate(MarkMatcher.from_markers(item.iter_markers())): + remaining.append(item) + else: + deselected.append(item) + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +def _parse_expression(expr: str, exc_message: str) -> Expression: + try: + return Expression.compile(expr) + except ParseError as e: + raise UsageError(f"{exc_message}: {expr}: {e}") from None + + +def pytest_collection_modifyitems(items: list[Item], config: Config) -> None: + deselect_by_keyword(items, config) + deselect_by_mark(items, config) + + +def pytest_configure(config: Config) -> None: + config.stash[old_mark_config_key] = MARK_GEN._config + MARK_GEN._config = config + + empty_parameterset = config.getini(EMPTY_PARAMETERSET_OPTION) + + if empty_parameterset not in ("skip", "xfail", "fail_at_collect", None, ""): + raise UsageError( + f"{EMPTY_PARAMETERSET_OPTION!s} must be one of skip, xfail or fail_at_collect" + f" but it is {empty_parameterset!r}" + ) + + +def pytest_unconfigure(config: Config) -> None: + MARK_GEN._config = config.stash.get(old_mark_config_key, None) diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..fba8807954519a3a78b3e0996bad5d769b8ba113 Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/expression.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/expression.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..456adea80a5b5c0cf2d202eb9d801bf9b1b9480c Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/expression.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/structures.cpython-310.pyc b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/structures.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..efaf907408c9a6ca34c6519e1ce48edbf504408a Binary files /dev/null and b/venv/lib/python3.10/site-packages/_pytest/mark/__pycache__/structures.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/expression.py b/venv/lib/python3.10/site-packages/_pytest/mark/expression.py new file mode 100644 index 0000000000000000000000000000000000000000..743a46bcc1713e7bb01ca5ed7776c5bcbf61b451 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/mark/expression.py @@ -0,0 +1,331 @@ +r"""Evaluate match expressions, as used by `-k` and `-m`. + +The grammar is: + +expression: expr? EOF +expr: and_expr ('or' and_expr)* +and_expr: not_expr ('and' not_expr)* +not_expr: 'not' not_expr | '(' expr ')' | ident kwargs? + +ident: (\w|:|\+|-|\.|\[|\]|\\|/)+ +kwargs: ('(' name '=' value ( ', ' name '=' value )* ')') +name: a valid ident, but not a reserved keyword +value: (unescaped) string literal | (-)?[0-9]+ | 'False' | 'True' | 'None' + +The semantics are: + +- Empty expression evaluates to False. +- ident evaluates to True or False according to a provided matcher function. +- or/and/not evaluate according to the usual boolean semantics. +- ident with parentheses and keyword arguments evaluates to True or False according to a provided matcher function. +""" + +from __future__ import annotations + +import ast +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import keyword +import re +import types +from typing import Literal +from typing import NoReturn +from typing import overload +from typing import Protocol + + +__all__ = [ + "Expression", + "ParseError", +] + + +class TokenType(enum.Enum): + LPAREN = "left parenthesis" + RPAREN = "right parenthesis" + OR = "or" + AND = "and" + NOT = "not" + IDENT = "identifier" + EOF = "end of input" + EQUAL = "=" + STRING = "string literal" + COMMA = "," + + +@dataclasses.dataclass(frozen=True) +class Token: + __slots__ = ("pos", "type", "value") + type: TokenType + value: str + pos: int + + +class ParseError(Exception): + """The expression contains invalid syntax. + + :param column: The column in the line where the error occurred (1-based). + :param message: A description of the error. 
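+
+    For example, ``ParseError(column=5, message="expected identifier")`` renders
+    via ``str()`` as ``at column 5: expected identifier``.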
+ """ + + def __init__(self, column: int, message: str) -> None: + self.column = column + self.message = message + + def __str__(self) -> str: + return f"at column {self.column}: {self.message}" + + +class Scanner: + __slots__ = ("current", "tokens") + + def __init__(self, input: str) -> None: + self.tokens = self.lex(input) + self.current = next(self.tokens) + + def lex(self, input: str) -> Iterator[Token]: + pos = 0 + while pos < len(input): + if input[pos] in (" ", "\t"): + pos += 1 + elif input[pos] == "(": + yield Token(TokenType.LPAREN, "(", pos) + pos += 1 + elif input[pos] == ")": + yield Token(TokenType.RPAREN, ")", pos) + pos += 1 + elif input[pos] == "=": + yield Token(TokenType.EQUAL, "=", pos) + pos += 1 + elif input[pos] == ",": + yield Token(TokenType.COMMA, ",", pos) + pos += 1 + elif (quote_char := input[pos]) in ("'", '"'): + end_quote_pos = input.find(quote_char, pos + 1) + if end_quote_pos == -1: + raise ParseError( + pos + 1, + f'closing quote "{quote_char}" is missing', + ) + value = input[pos : end_quote_pos + 1] + if (backslash_pos := input.find("\\")) != -1: + raise ParseError( + backslash_pos + 1, + r'escaping with "\" not supported in marker expression', + ) + yield Token(TokenType.STRING, value, pos) + pos += len(value) + else: + match = re.match(r"(:?\w|:|\+|-|\.|\[|\]|\\|/)+", input[pos:]) + if match: + value = match.group(0) + if value == "or": + yield Token(TokenType.OR, value, pos) + elif value == "and": + yield Token(TokenType.AND, value, pos) + elif value == "not": + yield Token(TokenType.NOT, value, pos) + else: + yield Token(TokenType.IDENT, value, pos) + pos += len(value) + else: + raise ParseError( + pos + 1, + f'unexpected character "{input[pos]}"', + ) + yield Token(TokenType.EOF, "", pos) + + @overload + def accept(self, type: TokenType, *, reject: Literal[True]) -> Token: ... + + @overload + def accept( + self, type: TokenType, *, reject: Literal[False] = False + ) -> Token | None: ... + + def accept(self, type: TokenType, *, reject: bool = False) -> Token | None: + if self.current.type is type: + token = self.current + if token.type is not TokenType.EOF: + self.current = next(self.tokens) + return token + if reject: + self.reject((type,)) + return None + + def reject(self, expected: Sequence[TokenType]) -> NoReturn: + raise ParseError( + self.current.pos + 1, + "expected {}; got {}".format( + " OR ".join(type.value for type in expected), + self.current.type.value, + ), + ) + + +# True, False and None are legal match expression identifiers, +# but illegal as Python identifiers. To fix this, this prefix +# is added to identifiers in the conversion to Python AST. 
+IDENT_PREFIX = "$" + + +def expression(s: Scanner) -> ast.Expression: + if s.accept(TokenType.EOF): + ret: ast.expr = ast.Constant(False) + else: + ret = expr(s) + s.accept(TokenType.EOF, reject=True) + return ast.fix_missing_locations(ast.Expression(ret)) + + +def expr(s: Scanner) -> ast.expr: + ret = and_expr(s) + while s.accept(TokenType.OR): + rhs = and_expr(s) + ret = ast.BoolOp(ast.Or(), [ret, rhs]) + return ret + + +def and_expr(s: Scanner) -> ast.expr: + ret = not_expr(s) + while s.accept(TokenType.AND): + rhs = not_expr(s) + ret = ast.BoolOp(ast.And(), [ret, rhs]) + return ret + + +def not_expr(s: Scanner) -> ast.expr: + if s.accept(TokenType.NOT): + return ast.UnaryOp(ast.Not(), not_expr(s)) + if s.accept(TokenType.LPAREN): + ret = expr(s) + s.accept(TokenType.RPAREN, reject=True) + return ret + ident = s.accept(TokenType.IDENT) + if ident: + name = ast.Name(IDENT_PREFIX + ident.value, ast.Load()) + if s.accept(TokenType.LPAREN): + ret = ast.Call(func=name, args=[], keywords=all_kwargs(s)) + s.accept(TokenType.RPAREN, reject=True) + else: + ret = name + return ret + + s.reject((TokenType.NOT, TokenType.LPAREN, TokenType.IDENT)) + + +BUILTIN_MATCHERS = {"True": True, "False": False, "None": None} + + +def single_kwarg(s: Scanner) -> ast.keyword: + keyword_name = s.accept(TokenType.IDENT, reject=True) + if not keyword_name.value.isidentifier(): + raise ParseError( + keyword_name.pos + 1, + f"not a valid python identifier {keyword_name.value}", + ) + if keyword.iskeyword(keyword_name.value): + raise ParseError( + keyword_name.pos + 1, + f"unexpected reserved python keyword `{keyword_name.value}`", + ) + s.accept(TokenType.EQUAL, reject=True) + + if value_token := s.accept(TokenType.STRING): + value: str | int | bool | None = value_token.value[1:-1] # strip quotes + else: + value_token = s.accept(TokenType.IDENT, reject=True) + if (number := value_token.value).isdigit() or ( + number.startswith("-") and number[1:].isdigit() + ): + value = int(number) + elif value_token.value in BUILTIN_MATCHERS: + value = BUILTIN_MATCHERS[value_token.value] + else: + raise ParseError( + value_token.pos + 1, + f'unexpected character/s "{value_token.value}"', + ) + + ret = ast.keyword(keyword_name.value, ast.Constant(value)) + return ret + + +def all_kwargs(s: Scanner) -> list[ast.keyword]: + ret = [single_kwarg(s)] + while s.accept(TokenType.COMMA): + ret.append(single_kwarg(s)) + return ret + + +class MatcherCall(Protocol): + def __call__(self, name: str, /, **kwargs: str | int | bool | None) -> bool: ... + + +@dataclasses.dataclass +class MatcherNameAdapter: + matcher: MatcherCall + name: str + + def __bool__(self) -> bool: + return self.matcher(self.name) + + def __call__(self, **kwargs: str | int | bool | None) -> bool: + return self.matcher(self.name, **kwargs) + + +class MatcherAdapter(Mapping[str, MatcherNameAdapter]): + """Adapts a matcher function to a locals mapping as required by eval().""" + + def __init__(self, matcher: MatcherCall) -> None: + self.matcher = matcher + + def __getitem__(self, key: str) -> MatcherNameAdapter: + return MatcherNameAdapter(matcher=self.matcher, name=key[len(IDENT_PREFIX) :]) + + def __iter__(self) -> Iterator[str]: + raise NotImplementedError() + + def __len__(self) -> int: + raise NotImplementedError() + + +class Expression: + """A compiled match expression as used by -k and -m. + + The expression can be evaluated against different matchers. 
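+
+    A minimal usage sketch (the expression and matcher are illustrative):
+
+    .. code-block:: python
+
+        expr = Expression.compile("slow and not serial")
+        expr.evaluate(lambda name, **kwargs: name == "slow")  # True
+        expr.evaluate(lambda name, **kwargs: name == "serial")  # False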
+ """ + + __slots__ = ("code",) + + def __init__(self, code: types.CodeType) -> None: + self.code = code + + @classmethod + def compile(cls, input: str) -> Expression: + """Compile a match expression. + + :param input: The input expression - one line. + """ + astexpr = expression(Scanner(input)) + code: types.CodeType = compile( + astexpr, + filename="", + mode="eval", + ) + return Expression(code) + + def evaluate(self, matcher: MatcherCall) -> bool: + """Evaluate the match expression. + + :param matcher: + Given an identifier, should return whether it matches or not. + Should be prepared to handle arbitrary strings as input. + + :returns: Whether the expression matches or not. + """ + ret: bool = bool(eval(self.code, {"__builtins__": {}}, MatcherAdapter(matcher))) + return ret diff --git a/venv/lib/python3.10/site-packages/_pytest/mark/structures.py b/venv/lib/python3.10/site-packages/_pytest/mark/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..f9261076ad03f80fdfbf50bc23f9194db7dfa23d --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/mark/structures.py @@ -0,0 +1,662 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Collection +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import MutableMapping +from collections.abc import Sequence +import dataclasses +import enum +import inspect +from typing import Any +from typing import final +from typing import NamedTuple +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union +import warnings + +from .._code import getfslineno +from ..compat import NOTSET +from ..compat import NotSetType +from _pytest.config import Config +from _pytest.deprecated import check_ispytest +from _pytest.deprecated import MARKED_FIXTURE +from _pytest.outcomes import fail +from _pytest.raises import AbstractRaises +from _pytest.scope import _ScopeName +from _pytest.warning_types import PytestUnknownMarkWarning + + +if TYPE_CHECKING: + from ..nodes import Node + + +EMPTY_PARAMETERSET_OPTION = "empty_parameter_set_mark" + + +# Singleton type for HIDDEN_PARAM, as described in: +# https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions +class _HiddenParam(enum.Enum): + token = 0 + + +#: Can be used as a parameter set id to hide it from the test name. +HIDDEN_PARAM = _HiddenParam.token + + +def istestfunc(func) -> bool: + return callable(func) and getattr(func, "__name__", "") != "" + + +def get_empty_parameterset_mark( + config: Config, argnames: Sequence[str], func +) -> MarkDecorator: + from ..nodes import Collector + + argslisting = ", ".join(argnames) + + fs, lineno = getfslineno(func) + reason = f"got empty parameter set for ({argslisting})" + requested_mark = config.getini(EMPTY_PARAMETERSET_OPTION) + if requested_mark in ("", None, "skip"): + mark = MARK_GEN.skip(reason=reason) + elif requested_mark == "xfail": + mark = MARK_GEN.xfail(reason=reason, run=False) + elif requested_mark == "fail_at_collect": + raise Collector.CollectError( + f"Empty parameter set in '{func.__name__}' at line {lineno + 1}" + ) + else: + raise LookupError(requested_mark) + return mark + + +class ParameterSet(NamedTuple): + """A set of values for a set of parameters along with associated marks and + an optional ID for the set. 
+
+    Examples::
+
+        pytest.param(1, 2, 3)
+        # ParameterSet(values=(1, 2, 3), marks=(), id=None)
+
+        pytest.param("hello", id="greeting")
+        # ParameterSet(values=("hello",), marks=(), id="greeting")
+
+        # Parameter set with marks
+        pytest.param(42, marks=pytest.mark.xfail)
+        # ParameterSet(values=(42,), marks=(MarkDecorator(...),), id=None)
+
+        # From parametrize mark (parameter names + list of parameter sets)
+        pytest.mark.parametrize(
+            ("a", "b", "expected"),
+            [
+                (1, 2, 3),
+                pytest.param(40, 2, 42, id="everything"),
+            ],
+        )
+        # ParameterSet(values=(1, 2, 3), marks=(), id=None)
+        # ParameterSet(values=(40, 2, 42), marks=(), id="everything")
+    """
+
+    values: Sequence[object | NotSetType]
+    marks: Collection[MarkDecorator | Mark]
+    id: str | _HiddenParam | None
+
+    @classmethod
+    def param(
+        cls,
+        *values: object,
+        marks: MarkDecorator | Collection[MarkDecorator | Mark] = (),
+        id: str | _HiddenParam | None = None,
+    ) -> ParameterSet:
+        if isinstance(marks, MarkDecorator):
+            marks = (marks,)
+        else:
+            assert isinstance(marks, collections.abc.Collection)
+        if any(i.name == "usefixtures" for i in marks):
+            raise ValueError(
+                "pytest.param cannot add pytest.mark.usefixtures; see "
+                "https://docs.pytest.org/en/stable/reference/reference.html#pytest-param"
+            )
+
+        if id is not None:
+            if not isinstance(id, str) and id is not HIDDEN_PARAM:
+                raise TypeError(
+                    "Expected id to be a string or a `pytest.HIDDEN_PARAM` sentinel, "
+                    f"got {type(id)}: {id!r}",
+                )
+        return cls(values, marks, id)
+
+    @classmethod
+    def extract_from(
+        cls,
+        parameterset: ParameterSet | Sequence[object] | object,
+        force_tuple: bool = False,
+    ) -> ParameterSet:
+        """Extract from an object or objects.
+
+        :param parameterset:
+            A legacy style parameterset that may or may not be a tuple,
+            and may or may not be wrapped into a mess of mark objects.
+
+        :param force_tuple:
+            Enforce tuple wrapping so single argument tuple values
+            don't get decomposed and break tests.
+        """
+        if isinstance(parameterset, cls):
+            return parameterset
+        if force_tuple:
+            return cls.param(parameterset)
+        else:
+            # TODO: Refactor to fix this type-ignore. Currently the following
+            # passes type-checking but crashes:
+            #
+            #   @pytest.mark.parametrize(('x', 'y'), [1, 2])
+            #   def test_foo(x, y): pass
+            return cls(parameterset, marks=[], id=None)  # type: ignore[arg-type]
+
+    @staticmethod
+    def _parse_parametrize_args(
+        argnames: str | Sequence[str],
+        argvalues: Iterable[ParameterSet | Sequence[object] | object],
+        *args,
+        **kwargs,
+    ) -> tuple[Sequence[str], bool]:
+        if isinstance(argnames, str):
+            argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+            force_tuple = len(argnames) == 1
+        else:
+            force_tuple = False
+        return argnames, force_tuple
+
+    @staticmethod
+    def _parse_parametrize_parameters(
+        argvalues: Iterable[ParameterSet | Sequence[object] | object],
+        force_tuple: bool,
+    ) -> list[ParameterSet]:
+        return [
+            ParameterSet.extract_from(x, force_tuple=force_tuple) for x in argvalues
+        ]
+
+    @classmethod
+    def _for_parametrize(
+        cls,
+        argnames: str | Sequence[str],
+        argvalues: Iterable[ParameterSet | Sequence[object] | object],
+        func,
+        config: Config,
+        nodeid: str,
+    ) -> tuple[Sequence[str], list[ParameterSet]]:
+        argnames, force_tuple = cls._parse_parametrize_args(argnames, argvalues)
+        parameters = cls._parse_parametrize_parameters(argvalues, force_tuple)
+        del argvalues
+
+        if parameters:
+            # Check all parameter sets have the correct number of values.
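+            # For example, argnames ("a", "b") with the 3-value set
+            # pytest.param(1, 2, 3) fails collection with the message below.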
+ for param in parameters: + if len(param.values) != len(argnames): + msg = ( + '{nodeid}: in "parametrize" the number of names ({names_len}):\n' + " {names}\n" + "must be equal to the number of values ({values_len}):\n" + " {values}" + ) + fail( + msg.format( + nodeid=nodeid, + values=param.values, + names=argnames, + names_len=len(argnames), + values_len=len(param.values), + ), + pytrace=False, + ) + else: + # Empty parameter set (likely computed at runtime): create a single + # parameter set with NOTSET values, with the "empty parameter set" mark applied to it. + mark = get_empty_parameterset_mark(config, argnames, func) + parameters.append( + ParameterSet( + values=(NOTSET,) * len(argnames), marks=[mark], id="NOTSET" + ) + ) + return argnames, parameters + + +@final +@dataclasses.dataclass(frozen=True) +class Mark: + """A pytest mark.""" + + #: Name of the mark. + name: str + #: Positional arguments of the mark decorator. + args: tuple[Any, ...] + #: Keyword arguments of the mark decorator. + kwargs: Mapping[str, Any] + + #: Source Mark for ids with parametrize Marks. + _param_ids_from: Mark | None = dataclasses.field(default=None, repr=False) + #: Resolved/generated ids with parametrize Marks. + _param_ids_generated: Sequence[str] | None = dataclasses.field( + default=None, repr=False + ) + + def __init__( + self, + name: str, + args: tuple[Any, ...], + kwargs: Mapping[str, Any], + param_ids_from: Mark | None = None, + param_ids_generated: Sequence[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + """:meta private:""" + check_ispytest(_ispytest) + # Weirdness to bypass frozen=True. + object.__setattr__(self, "name", name) + object.__setattr__(self, "args", args) + object.__setattr__(self, "kwargs", kwargs) + object.__setattr__(self, "_param_ids_from", param_ids_from) + object.__setattr__(self, "_param_ids_generated", param_ids_generated) + + def _has_param_ids(self) -> bool: + return "ids" in self.kwargs or len(self.args) >= 4 + + def combined_with(self, other: Mark) -> Mark: + """Return a new Mark which is a combination of this + Mark and another Mark. + + Combines by appending args and merging kwargs. + + :param Mark other: The mark to combine with. + :rtype: Mark + """ + assert self.name == other.name + + # Remember source of ids with parametrize Marks. + param_ids_from: Mark | None = None + if self.name == "parametrize": + if other._has_param_ids(): + param_ids_from = other + elif self._has_param_ids(): + param_ids_from = self + + return Mark( + self.name, + self.args + other.args, + dict(self.kwargs, **other.kwargs), + param_ids_from=param_ids_from, + _ispytest=True, + ) + + +# A generic parameter designating an object to which a Mark may +# be applied -- a test function (callable) or class. +# Note: a lambda is not allowed, but this can't be represented. +Markable = TypeVar("Markable", bound=Union[Callable[..., object], type]) + + +@dataclasses.dataclass +class MarkDecorator: + """A decorator for applying a mark on test functions and classes. + + ``MarkDecorators`` are created with ``pytest.mark``:: + + mark1 = pytest.mark.NAME # Simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # Parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a ``MarkDecorator`` is called, it does the following: + + 1. 
If called with a single class as its only positional argument and no + additional keyword arguments, it attaches the mark to the class so it + gets applied automatically to all test cases found in that class. + + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches the mark to the function, + containing all the arguments already stored internally in the + ``MarkDecorator``. + + 3. When called in any other case, it returns a new ``MarkDecorator`` + instance with the original ``MarkDecorator``'s content updated with + the arguments passed to this call. + + Note: The rules above prevent a ``MarkDecorator`` from storing only a + single function or class reference as its positional argument with no + additional keyword or positional arguments. You can work around this by + using `with_args()`. + """ + + mark: Mark + + def __init__(self, mark: Mark, *, _ispytest: bool = False) -> None: + """:meta private:""" + check_ispytest(_ispytest) + self.mark = mark + + @property + def name(self) -> str: + """Alias for mark.name.""" + return self.mark.name + + @property + def args(self) -> tuple[Any, ...]: + """Alias for mark.args.""" + return self.mark.args + + @property + def kwargs(self) -> Mapping[str, Any]: + """Alias for mark.kwargs.""" + return self.mark.kwargs + + @property + def markname(self) -> str: + """:meta private:""" + return self.name # for backward-compat (2.4.1 had this attr) + + def with_args(self, *args: object, **kwargs: object) -> MarkDecorator: + """Return a MarkDecorator with extra arguments added. + + Unlike calling the MarkDecorator, with_args() can be used even + if the sole argument is a callable/class. + """ + mark = Mark(self.name, args, kwargs, _ispytest=True) + return MarkDecorator(self.mark.combined_with(mark), _ispytest=True) + + # Type ignored because the overloads overlap with an incompatible + # return type. Not much we can do about that. Thankfully mypy picks + # the first match so it works out even if we break the rules. + @overload + def __call__(self, arg: Markable) -> Markable: # type: ignore[overload-overlap] + pass + + @overload + def __call__(self, *args: object, **kwargs: object) -> MarkDecorator: + pass + + def __call__(self, *args: object, **kwargs: object): + """Call the MarkDecorator.""" + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + # For staticmethods/classmethods, the marks are eventually fetched from the + # function object, not the descriptor, so unwrap. + unwrapped_func = func + if isinstance(func, (staticmethod, classmethod)): + unwrapped_func = func.__func__ + if len(args) == 1 and (istestfunc(unwrapped_func) or is_class): + store_mark(unwrapped_func, self.mark, stacklevel=3) + return func + return self.with_args(*args, **kwargs) + + +def get_unpacked_marks( + obj: object | type, + *, + consider_mro: bool = True, +) -> list[Mark]: + """Obtain the unpacked marks that are stored on an object. + + If obj is a class and consider_mro is true, return marks applied to + this class and all of its super-classes in MRO order. If consider_mro + is false, only return marks applied directly to this class. 
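+
+    A sketch (the class names are hypothetical; assumes ``pytest`` is imported):
+
+    .. code-block:: python
+
+        @pytest.mark.base
+        class Base: ...
+
+
+        @pytest.mark.sub
+        class Sub(Base): ...
+
+
+        get_unpacked_marks(Sub)  # marks named "base" and "sub"
+        get_unpacked_marks(Sub, consider_mro=False)  # only the mark named "sub"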
+ """ + if isinstance(obj, type): + if not consider_mro: + mark_lists = [obj.__dict__.get("pytestmark", [])] + else: + mark_lists = [ + x.__dict__.get("pytestmark", []) for x in reversed(obj.__mro__) + ] + mark_list = [] + for item in mark_lists: + if isinstance(item, list): + mark_list.extend(item) + else: + mark_list.append(item) + else: + mark_attribute = getattr(obj, "pytestmark", []) + if isinstance(mark_attribute, list): + mark_list = mark_attribute + else: + mark_list = [mark_attribute] + return list(normalize_mark_list(mark_list)) + + +def normalize_mark_list( + mark_list: Iterable[Mark | MarkDecorator], +) -> Iterable[Mark]: + """ + Normalize an iterable of Mark or MarkDecorator objects into a list of marks + by retrieving the `mark` attribute on MarkDecorator instances. + + :param mark_list: marks to normalize + :returns: A new list of the extracted Mark objects + """ + for mark in mark_list: + mark_obj = getattr(mark, "mark", mark) + if not isinstance(mark_obj, Mark): + raise TypeError(f"got {mark_obj!r} instead of Mark") + yield mark_obj + + +def store_mark(obj, mark: Mark, *, stacklevel: int = 2) -> None: + """Store a Mark on an object. + + This is used to implement the Mark declarations/decorators correctly. + """ + assert isinstance(mark, Mark), mark + + from ..fixtures import getfixturemarker + + if getfixturemarker(obj) is not None: + warnings.warn(MARKED_FIXTURE, stacklevel=stacklevel) + + # Always reassign name to avoid updating pytestmark in a reference that + # was only borrowed. + obj.pytestmark = [*get_unpacked_marks(obj, consider_mro=False), mark] + + +# Typing for builtin pytest marks. This is cheating; it gives builtin marks +# special privilege, and breaks modularity. But practicality beats purity... +if TYPE_CHECKING: + + class _SkipMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__(self, reason: str = ...) -> MarkDecorator: ... + + class _SkipifMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + condition: str | bool = ..., + *conditions: str | bool, + reason: str = ..., + ) -> MarkDecorator: ... + + class _XfailMarkDecorator(MarkDecorator): + @overload # type: ignore[override,no-overload-impl] + def __call__(self, arg: Markable) -> Markable: ... + + @overload + def __call__( + self, + condition: str | bool = False, + *conditions: str | bool, + reason: str = ..., + run: bool = ..., + raises: None + | type[BaseException] + | tuple[type[BaseException], ...] + | AbstractRaises[BaseException] = ..., + strict: bool = ..., + ) -> MarkDecorator: ... + + class _ParametrizeMarkDecorator(MarkDecorator): + def __call__( # type: ignore[override] + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + *, + indirect: bool | Sequence[str] = ..., + ids: Iterable[None | str | float | int | bool] + | Callable[[Any], object | None] + | None = ..., + scope: _ScopeName | None = ..., + ) -> MarkDecorator: ... + + class _UsefixturesMarkDecorator(MarkDecorator): + def __call__(self, *fixtures: str) -> MarkDecorator: # type: ignore[override] + ... + + class _FilterwarningsMarkDecorator(MarkDecorator): + def __call__(self, *filters: str) -> MarkDecorator: # type: ignore[override] + ... + + +@final +class MarkGenerator: + """Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. 
+ + Example:: + + import pytest + + + @pytest.mark.slowtest + def test_function(): + pass + + applies a 'slowtest' :class:`Mark` on ``test_function``. + """ + + # See TYPE_CHECKING above. + if TYPE_CHECKING: + skip: _SkipMarkDecorator + skipif: _SkipifMarkDecorator + xfail: _XfailMarkDecorator + parametrize: _ParametrizeMarkDecorator + usefixtures: _UsefixturesMarkDecorator + filterwarnings: _FilterwarningsMarkDecorator + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + self._config: Config | None = None + self._markers: set[str] = set() + + def __getattr__(self, name: str) -> MarkDecorator: + """Generate a new :class:`MarkDecorator` with the given name.""" + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + + if self._config is not None: + # We store a set of markers as a performance optimisation - if a mark + # name is in the set we definitely know it, but a mark may be known and + # not in the set. We therefore start by updating the set! + if name not in self._markers: + for line in self._config.getini("markers"): + # example lines: "skipif(condition): skip the given test if..." + # or "hypothesis: tests which use Hypothesis", so to get the + # marker name we split on both `:` and `(`. + marker = line.split(":")[0].split("(")[0].strip() + self._markers.add(marker) + + # If the name is not in the set of known marks after updating, + # then it really is time to issue a warning or an error. + if name not in self._markers: + if self._config.option.strict_markers or self._config.option.strict: + fail( + f"{name!r} not found in `markers` configuration option", + pytrace=False, + ) + + # Raise a specific error for common misspellings of "parametrize". + if name in ["parameterize", "parametrise", "parameterise"]: + __tracebackhide__ = True + fail(f"Unknown '{name}' mark, did you mean 'parametrize'?") + + warnings.warn( + f"Unknown pytest.mark.{name} - is this a typo? You can register " + "custom marks to avoid this warning - for details, see " + "https://docs.pytest.org/en/stable/how-to/mark.html", + PytestUnknownMarkWarning, + 2, + ) + + return MarkDecorator(Mark(name, (), {}, _ispytest=True), _ispytest=True) + + +MARK_GEN = MarkGenerator(_ispytest=True) + + +@final +class NodeKeywords(MutableMapping[str, Any]): + __slots__ = ("_markers", "node", "parent") + + def __init__(self, node: Node) -> None: + self.node = node + self.parent = node.parent + self._markers = {node.name: True} + + def __getitem__(self, key: str) -> Any: + try: + return self._markers[key] + except KeyError: + if self.parent is None: + raise + return self.parent.keywords[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._markers[key] = value + + # Note: we could've avoided explicitly implementing some of the methods + # below and use the collections.abc fallback, but that would be slow. + + def __contains__(self, key: object) -> bool: + return key in self._markers or ( + self.parent is not None and key in self.parent.keywords + ) + + def update( # type: ignore[override] + self, + other: Mapping[str, Any] | Iterable[tuple[str, Any]] = (), + **kwds: Any, + ) -> None: + self._markers.update(other) + self._markers.update(kwds) + + def __delitem__(self, key: str) -> None: + raise ValueError("cannot delete key in keywords dict") + + def __iter__(self) -> Iterator[str]: + # Doesn't need to be fast. 
+            yield from self._markers
+            if self.parent is not None:
+                for keyword in self.parent.keywords:
+                    # self._marks and self.parent.keywords can have duplicates.
+                    if keyword not in self._markers:
+                        yield keyword
+
+    def __len__(self) -> int:
+        # Doesn't need to be fast.
+        return sum(1 for keyword in self)
+
+    def __repr__(self) -> str:
+        return f"<NodeKeywords for node {self.node}>"
diff --git a/venv/lib/python3.10/site-packages/_pytest/monkeypatch.py b/venv/lib/python3.10/site-packages/_pytest/monkeypatch.py
new file mode 100644
index 0000000000000000000000000000000000000000..1285e5715514fae7a89f0067664c56ac5ea606bc
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/_pytest/monkeypatch.py
@@ -0,0 +1,415 @@
+# mypy: allow-untyped-defs
+"""Monkeypatching and mocking functionality."""
+
+from __future__ import annotations
+
+from collections.abc import Generator
+from collections.abc import Mapping
+from collections.abc import MutableMapping
+from contextlib import contextmanager
+import os
+import re
+import sys
+from typing import Any
+from typing import final
+from typing import overload
+from typing import TypeVar
+import warnings
+
+from _pytest.fixtures import fixture
+from _pytest.warning_types import PytestWarning
+
+
+RE_IMPORT_ERROR_NAME = re.compile(r"^No module named (.*)$")
+
+
+K = TypeVar("K")
+V = TypeVar("V")
+
+
+@fixture
+def monkeypatch() -> Generator[MonkeyPatch]:
+    """A convenient fixture for monkey-patching.
+
+    The fixture provides these methods to modify objects, dictionaries, or
+    :data:`os.environ`:
+
+    * :meth:`monkeypatch.setattr(obj, name, value, raising=True) <pytest.MonkeyPatch.setattr>`
+    * :meth:`monkeypatch.delattr(obj, name, raising=True) <pytest.MonkeyPatch.delattr>`
+    * :meth:`monkeypatch.setitem(mapping, name, value) <pytest.MonkeyPatch.setitem>`
+    * :meth:`monkeypatch.delitem(obj, name, raising=True) <pytest.MonkeyPatch.delitem>`
+    * :meth:`monkeypatch.setenv(name, value, prepend=None) <pytest.MonkeyPatch.setenv>`
+    * :meth:`monkeypatch.delenv(name, raising=True) <pytest.MonkeyPatch.delenv>`
+    * :meth:`monkeypatch.syspath_prepend(path) <pytest.MonkeyPatch.syspath_prepend>`
+    * :meth:`monkeypatch.chdir(path) <pytest.MonkeyPatch.chdir>`
+    * :meth:`monkeypatch.context() <pytest.MonkeyPatch.context>`
+
+    All modifications will be undone after the requesting test function or
+    fixture has finished. The ``raising`` parameter determines if a :class:`KeyError`
+    or :class:`AttributeError` will be raised if the set/deletion operation does not have the
+    specified target.
+
+    To undo modifications done by the fixture in a contained scope,
+    use :meth:`context() <pytest.MonkeyPatch.context>`.
+    """
+    mpatch = MonkeyPatch()
+    yield mpatch
+    mpatch.undo()
+
+
+def resolve(name: str) -> object:
+    # Simplified from zope.dottedname.
+    parts = name.split(".")
+
+    used = parts.pop(0)
+    found: object = __import__(used)
+    for part in parts:
+        used += "." + part
+        try:
+            found = getattr(found, part)
+        except AttributeError:
+            pass
+        else:
+            continue
+        # We use explicit un-nesting of the handling block in order
+        # to avoid nested exceptions.
+        try:
+            __import__(used)
+        except ImportError as ex:
+            expected = str(ex).split()[-1]
+            if expected == used:
+                raise
+            else:
+                raise ImportError(f"import error in {used}: {ex}") from ex
+        found = annotated_getattr(found, part, used)
+    return found
+
+
+def annotated_getattr(obj: object, name: str, ann: str) -> object:
+    try:
+        obj = getattr(obj, name)
+    except AttributeError as e:
+        raise AttributeError(
+            f"{type(obj).__name__!r} object at {ann} has no attribute {name!r}"
+        ) from e
+    return obj
+
+
+def derive_importpath(import_path: str, raising: bool) -> tuple[str, object]:
+    if not isinstance(import_path, str) or "." not in import_path:
+        raise TypeError(f"must be absolute import path string, not {import_path!r}")
+    module, attr = import_path.rsplit(".", 1)
+    target = resolve(module)
+    if raising:
+        annotated_getattr(target, attr, ann=module)
+    return attr, target
+
+
+class Notset:
+    def __repr__(self) -> str:
+        return "<notset>"
+
+
+notset = Notset()
+
+
+@final
+class MonkeyPatch:
+    """Helper to conveniently monkeypatch attributes/items/environment
+    variables/syspath.
+
+    Returned by the :fixture:`monkeypatch` fixture.
+
+    .. versionchanged:: 6.2
+        Can now also be used directly as `pytest.MonkeyPatch()`, for when
+        the fixture is not available. In this case, use
+        :meth:`with MonkeyPatch.context() as mp: <context>` or remember to call
+        :meth:`undo` explicitly.
+    """
+
+    def __init__(self) -> None:
+        self._setattr: list[tuple[object, str, object]] = []
+        self._setitem: list[tuple[Mapping[Any, Any], object, object]] = []
+        self._cwd: str | None = None
+        self._savesyspath: list[str] | None = None
+
+    @classmethod
+    @contextmanager
+    def context(cls) -> Generator[MonkeyPatch]:
+        """Context manager that returns a new :class:`MonkeyPatch` object
+        which undoes any patching done inside the ``with`` block upon exit.
+
+        Example:
+
+        .. code-block:: python
+
+            import functools
+
+
+            def test_partial(monkeypatch):
+                with monkeypatch.context() as m:
+                    m.setattr(functools, "partial", 3)
+
+        Useful in situations where it is desired to undo some patches before the test ends,
+        such as mocking ``stdlib`` functions that might break pytest itself if mocked (for examples
+        of this see :issue:`3290`).
+        """
+        m = cls()
+        try:
+            yield m
+        finally:
+            m.undo()
+
+    @overload
+    def setattr(
+        self,
+        target: str,
+        name: object,
+        value: Notset = ...,
+        raising: bool = ...,
+    ) -> None: ...
+
+    @overload
+    def setattr(
+        self,
+        target: object,
+        name: str,
+        value: object,
+        raising: bool = ...,
+    ) -> None: ...
+
+    def setattr(
+        self,
+        target: str | object,
+        name: object | str,
+        value: object = notset,
+        raising: bool = True,
+    ) -> None:
+        """
+        Set attribute value on target, memorizing the old value.
+
+        For example:
+
+        .. code-block:: python
+
+            import os
+
+            monkeypatch.setattr(os, "getcwd", lambda: "/")
+
+        The code above replaces the :func:`os.getcwd` function by a ``lambda`` which
+        always returns ``"/"``.
+
+        For convenience, you can specify a string as ``target`` which
+        will be interpreted as a dotted import path, with the last part
+        being the attribute name:
+
+        .. code-block:: python
+
+            monkeypatch.setattr("os.getcwd", lambda: "/")
+
+        Raises :class:`AttributeError` if the attribute does not exist, unless
+        ``raising`` is set to False.
+
+        **Where to patch**
+
+        ``monkeypatch.setattr`` works by (temporarily) changing the object that a name points to with another one.
+        There can be many names pointing to any individual object, so for patching to work you must ensure
+        that you patch the name used by the system under test.
+
+        See the section "Where to patch" in the :mod:`unittest.mock`
+        docs for a complete explanation, which is meant for :func:`unittest.mock.patch` but
+        applies to ``monkeypatch.setattr`` as well.
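+
+        As a sketch of that distinction (the module name is hypothetical): if
+        ``app.py`` does ``from os import getcwd``, patch the name the code under
+        test actually looks up:
+
+        .. code-block:: python
+
+            monkeypatch.setattr("app.getcwd", lambda: "/")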
+ """ + __tracebackhide__ = True + import inspect + + if isinstance(value, Notset): + if not isinstance(target, str): + raise TypeError( + "use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string" + ) + value = name + name, target = derive_importpath(target, raising) + else: + if not isinstance(name, str): + raise TypeError( + "use setattr(target, name, value) with name being a string or " + "setattr(target, value) with target being a dotted " + "import string" + ) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError(f"{target!r} has no attribute {name!r}") + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr( + self, + target: object | str, + name: str | Notset = notset, + raising: bool = True, + ) -> None: + """Delete attribute ``name`` from ``target``. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + Raises AttributeError it the attribute does not exist, unless + ``raising`` is set to False. + """ + __tracebackhide__ = True + import inspect + + if isinstance(name, Notset): + if not isinstance(target, str): + raise TypeError( + "use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string" + ) + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + oldval = getattr(target, name, notset) + # Avoid class descriptors like staticmethod/classmethod. + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + delattr(target, name) + + def setitem(self, dic: Mapping[K, V], name: K, value: V) -> None: + """Set dictionary entry ``name`` to value.""" + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dic[name] = value # type: ignore[index] + + def delitem(self, dic: Mapping[K, V], name: K, raising: bool = True) -> None: + """Delete ``name`` from dict. + + Raises ``KeyError`` if it doesn't exist, unless ``raising`` is set to + False. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dic[name] # type: ignore[attr-defined] + + def setenv(self, name: str, value: str, prepend: str | None = None) -> None: + """Set environment variable ``name`` to ``value``. + + If ``prepend`` is a character, read the current environment variable + value and prepend the ``value`` adjoined with the ``prepend`` + character. + """ + if not isinstance(value, str): + warnings.warn( # type: ignore[unreachable] + PytestWarning( + f"Value of environment variable {name} type should be str, but got " + f"{value!r} (type: {type(value).__name__}); converted to str implicitly" + ), + stacklevel=2, + ) + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name: str, raising: bool = True) -> None: + """Delete ``name`` from the environment. 
+ + Raises ``KeyError`` if it does not exist, unless ``raising`` is set to + False. + """ + environ: MutableMapping[str, str] = os.environ + self.delitem(environ, name, raising=raising) + + def syspath_prepend(self, path) -> None: + """Prepend ``path`` to ``sys.path`` list of import locations.""" + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + # https://github.com/pypa/setuptools/blob/d8b901bc/docs/pkg_resources.txt#L162-L171 + # this is only needed when pkg_resources was already loaded by the namespace package + if "pkg_resources" in sys.modules: + from pkg_resources import fixup_namespace_packages + + fixup_namespace_packages(str(path)) + + # A call to syspathinsert() usually means that the caller wants to + # import some dynamically created files, thus with python3 we + # invalidate its import caches. + # This is especially important when any namespace package is in use, + # since then the mtime based FileFinder cache (that gets created in + # this case already) gets not invalidated when writing the new files + # quickly afterwards. + from importlib import invalidate_caches + + invalidate_caches() + + def chdir(self, path: str | os.PathLike[str]) -> None: + """Change the current working directory to the specified path. + + :param path: + The path to change into. + """ + if self._cwd is None: + self._cwd = os.getcwd() + os.chdir(path) + + def undo(self) -> None: + """Undo previous changes. + + This call consumes the undo stack. Calling it a second time has no + effect unless you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + .. note:: + The same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + + Prefer to use :meth:`context() ` instead. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, key, value in reversed(self._setitem): + if value is notset: + try: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + del dictionary[key] # type: ignore[attr-defined] + except KeyError: + pass # Was already deleted, so we have the desired state. 
+ else: + # Not all Mapping types support indexing, but MutableMapping doesn't support TypedDict + dictionary[key] = value # type: ignore[index] + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/venv/lib/python3.10/site-packages/_pytest/nodes.py b/venv/lib/python3.10/site-packages/_pytest/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..6690f6ab1f814671d3342e5f14d85fbc39d7b083 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/nodes.py @@ -0,0 +1,772 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +import abc +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import MutableMapping +from functools import cached_property +from functools import lru_cache +import os +import pathlib +from pathlib import Path +from typing import Any +from typing import cast +from typing import NoReturn +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar +import warnings + +import pluggy + +import _pytest._code +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._code.code import TracebackStyle +from _pytest.compat import LEGACY_PATH +from _pytest.compat import signature +from _pytest.config import Config +from _pytest.config import ConftestImportFailure +from _pytest.config.compat import _check_path +from _pytest.deprecated import NODE_CTOR_FSPATH_ARG +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import NodeKeywords +from _pytest.outcomes import fail +from _pytest.pathlib import absolutepath +from _pytest.stash import Stash +from _pytest.warning_types import PytestWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + # Imported here due to circular import. + from _pytest.main import Session + + +SEP = "/" + +tracebackcutdir = Path(_pytest.__file__).parent + + +_T = TypeVar("_T") + + +def _imply_path( + node_type: type[Node], + path: Path | None, + fspath: LEGACY_PATH | None, +) -> Path: + if fspath is not None: + warnings.warn( + NODE_CTOR_FSPATH_ARG.format( + node_type_name=node_type.__name__, + ), + stacklevel=6, + ) + if path is not None: + if fspath is not None: + _check_path(path, fspath) + return path + else: + assert fspath is not None + return Path(fspath) + + +_NodeType = TypeVar("_NodeType", bound="Node") + + +class NodeMeta(abc.ABCMeta): + """Metaclass used by :class:`Node` to enforce that direct construction raises + :class:`Failed`. + + This behaviour supports the indirection introduced with :meth:`Node.from_parent`, + the named constructor to be used instead of direct construction. The design + decision to enforce indirection with :class:`NodeMeta` was made as a + temporary aid for refactoring the collection tree, which was diagnosed to + have :class:`Node` objects whose creational patterns were overly entangled. + Once the refactoring is complete, this metaclass can be removed. + + See https://github.com/pytest-dev/pytest/projects/3 for an overview of the + progress on detangling the :class:`Node` classes. 
+ """ + + def __call__(cls, *k, **kw) -> NoReturn: + msg = ( + "Direct construction of {name} has been deprecated, please use {name}.from_parent.\n" + "See " + "https://docs.pytest.org/en/stable/deprecations.html#node-construction-changed-to-node-from-parent" + " for more details." + ).format(name=f"{cls.__module__}.{cls.__name__}") + fail(msg, pytrace=False) + + def _create(cls: type[_T], *k, **kw) -> _T: + try: + return super().__call__(*k, **kw) # type: ignore[no-any-return,misc] + except TypeError: + sig = signature(getattr(cls, "__init__")) + known_kw = {k: v for k, v in kw.items() if k in sig.parameters} + from .warning_types import PytestDeprecationWarning + + warnings.warn( + PytestDeprecationWarning( + f"{cls} is not using a cooperative constructor and only takes {set(known_kw)}.\n" + "See https://docs.pytest.org/en/stable/deprecations.html" + "#constructors-of-custom-pytest-node-subclasses-should-take-kwargs " + "for more details." + ) + ) + + return super().__call__(*k, **known_kw) # type: ignore[no-any-return,misc] + + +class Node(abc.ABC, metaclass=NodeMeta): + r"""Base class of :class:`Collector` and :class:`Item`, the components of + the test collection tree. + + ``Collector``\'s are the internal nodes of the tree, and ``Item``\'s are the + leaf nodes. + """ + + # Implemented in the legacypath plugin. + #: A ``LEGACY_PATH`` copy of the :attr:`path` attribute. Intended for usage + #: for methods not migrated to ``pathlib.Path`` yet, such as + #: :meth:`Item.reportinfo `. Will be deprecated in + #: a future release, prefer using :attr:`path` instead. + fspath: LEGACY_PATH + + # Use __slots__ to make attribute access faster. + # Note that __dict__ is still available. + __slots__ = ( + "__dict__", + "_nodeid", + "_store", + "config", + "name", + "parent", + "path", + "session", + ) + + def __init__( + self, + name: str, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + nodeid: str | None = None, + ) -> None: + #: A unique name within the scope of the parent node. + self.name: str = name + + #: The parent collector node. + self.parent = parent + + if config: + #: The pytest config object. + self.config: Config = config + else: + if not parent: + raise TypeError("config or parent must be provided") + self.config = parent.config + + if session: + #: The pytest session this node is part of. + self.session: Session = session + else: + if not parent: + raise TypeError("session or parent must be provided") + self.session = parent.session + + if path is None and fspath is None: + path = getattr(parent, "path", None) + #: Filesystem path where this node was collected from (can be None). + self.path: pathlib.Path = _imply_path(type(self), path, fspath=fspath) + + # The explicit annotation is to avoid publicly exposing NodeKeywords. + #: Keywords/markers collected from all scopes. + self.keywords: MutableMapping[str, Any] = NodeKeywords(self) + + #: The marker objects belonging to this node. + self.own_markers: list[Mark] = [] + + #: Allow adding of extra keywords to use for matching. + self.extra_keyword_matches: set[str] = set() + + if nodeid is not None: + assert "::()" not in nodeid + self._nodeid = nodeid + else: + if not self.parent: + raise TypeError("nodeid or parent must be provided") + self._nodeid = self.parent.nodeid + "::" + self.name + + #: A place where plugins can store information on the node for their + #: own use. 
+ self.stash: Stash = Stash() + # Deprecated alias. Was never public. Can be removed in a few releases. + self._store = self.stash + + @classmethod + def from_parent(cls, parent: Node, **kw) -> Self: + """Public constructor for Nodes. + + This indirection got introduced in order to enable removing + the fragile logic from the node constructors. + + Subclasses can use ``super().from_parent(...)`` when overriding the + construction. + + :param parent: The parent node of this Node. + """ + if "config" in kw: + raise TypeError("config is not a valid argument for from_parent") + if "session" in kw: + raise TypeError("session is not a valid argument for from_parent") + return cls._create(parent=parent, **kw) + + @property + def ihook(self) -> pluggy.HookRelay: + """fspath-sensitive hook proxy used to call pytest hooks.""" + return self.session.gethookproxy(self.path) + + def __repr__(self) -> str: + return "<{} {}>".format(self.__class__.__name__, getattr(self, "name", None)) + + def warn(self, warning: Warning) -> None: + """Issue a warning for this Node. + + Warnings will be displayed after the test session, unless explicitly suppressed. + + :param Warning warning: + The warning instance to issue. + + :raises ValueError: If ``warning`` instance is not a subclass of Warning. + + Example usage: + + .. code-block:: python + + node.warn(PytestWarning("some message")) + node.warn(UserWarning("some message")) + + .. versionchanged:: 6.2 + Any subclass of :class:`Warning` is now accepted, rather than only + :class:`PytestWarning ` subclasses. + """ + # enforce type checks here to avoid getting a generic type error later otherwise. + if not isinstance(warning, Warning): + raise ValueError( + f"warning must be an instance of Warning or subclass, got {warning!r}" + ) + path, lineno = get_fslocation_from_item(self) + assert lineno is not None + warnings.warn_explicit( + warning, + category=None, + filename=str(path), + lineno=lineno + 1, + ) + + # Methods for ordering nodes. + + @property + def nodeid(self) -> str: + """A ::-separated string denoting its collection tree address.""" + return self._nodeid + + def __hash__(self) -> int: + return hash(self._nodeid) + + def setup(self) -> None: + pass + + def teardown(self) -> None: + pass + + def iter_parents(self) -> Iterator[Node]: + """Iterate over all parent collectors starting from and including self + up to the root of the collection tree. + + .. versionadded:: 8.1 + """ + parent: Node | None = self + while parent is not None: + yield parent + parent = parent.parent + + def listchain(self) -> list[Node]: + """Return a list of all parent collectors starting from the root of the + collection tree down to and including self.""" + chain = [] + item: Node | None = self + while item is not None: + chain.append(item) + item = item.parent + chain.reverse() + return chain + + def add_marker(self, marker: str | MarkDecorator, append: bool = True) -> None: + """Dynamically add a marker object to the node. + + :param marker: + The marker. + :param append: + Whether to append the marker, or prepend it. 
+ """ + from _pytest.mark import MARK_GEN + + if isinstance(marker, MarkDecorator): + marker_ = marker + elif isinstance(marker, str): + marker_ = getattr(MARK_GEN, marker) + else: + raise ValueError("is not a string or pytest.mark.* Marker") + self.keywords[marker_.name] = marker_ + if append: + self.own_markers.append(marker_.mark) + else: + self.own_markers.insert(0, marker_.mark) + + def iter_markers(self, name: str | None = None) -> Iterator[Mark]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of the markers of the node. + """ + return (x[1] for x in self.iter_markers_with_node(name=name)) + + def iter_markers_with_node( + self, name: str | None = None + ) -> Iterator[tuple[Node, Mark]]: + """Iterate over all markers of the node. + + :param name: If given, filter the results by the name attribute. + :returns: An iterator of (node, mark) tuples. + """ + for node in self.iter_parents(): + for mark in node.own_markers: + if name is None or getattr(mark, "name", None) == name: + yield node, mark + + @overload + def get_closest_marker(self, name: str) -> Mark | None: ... + + @overload + def get_closest_marker(self, name: str, default: Mark) -> Mark: ... + + def get_closest_marker(self, name: str, default: Mark | None = None) -> Mark | None: + """Return the first marker matching the name, from closest (for + example function) to farther level (for example module level). + + :param default: Fallback return value if no marker was found. + :param name: Name to filter by. + """ + return next(self.iter_markers(name=name), default) + + def listextrakeywords(self) -> set[str]: + """Return a set of all extra keywords in self and any parents.""" + extra_keywords: set[str] = set() + for item in self.listchain(): + extra_keywords.update(item.extra_keyword_matches) + return extra_keywords + + def listnames(self) -> list[str]: + return [x.name for x in self.listchain()] + + def addfinalizer(self, fin: Callable[[], object]) -> None: + """Register a function to be called without arguments when this node is + finalized. + + This method can only be called when this node is active + in a setup chain, for example during self.setup(). + """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls: type[_NodeType]) -> _NodeType | None: + """Get the closest parent node (including self) which is an instance of + the given class. + + :param cls: The node class to search for. + :returns: The node, if found. + """ + for node in self.iter_parents(): + if isinstance(node, cls): + return node + return None + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + return excinfo.traceback + + def _repr_failure_py( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> TerminalRepr: + from _pytest.fixtures import FixtureLookupError + + if isinstance(excinfo.value, ConftestImportFailure): + excinfo = ExceptionInfo.from_exception(excinfo.value.cause) + if isinstance(excinfo.value, fail.Exception): + if not excinfo.value.pytrace: + style = "value" + if isinstance(excinfo.value, FixtureLookupError): + return excinfo.value.formatrepr() + + tbfilter: bool | Callable[[ExceptionInfo[BaseException]], Traceback] + if self.config.getoption("fulltrace", False): + style = "long" + tbfilter = False + else: + tbfilter = self._traceback_filter + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? 
+ if style is None: + if self.config.getoption("tbstyle", "auto") == "short": + style = "short" + else: + style = "long" + + if self.config.get_verbosity() > 1: + truncate_locals = False + else: + truncate_locals = True + + truncate_args = False if self.config.get_verbosity() > 2 else True + + # excinfo.getrepr() formats paths relative to the CWD if `abspath` is False. + # It is possible for a fixture/test to change the CWD while this code runs, which + # would then result in the user seeing confusing paths in the failure message. + # To fix this, if the CWD changed, always display the full absolute path. + # It will be better to just always display paths relative to invocation_dir, but + # this requires a lot of plumbing (#6428). + try: + abspath = Path(os.getcwd()) != self.config.invocation_params.dir + except OSError: + abspath = True + + return excinfo.getrepr( + funcargs=True, + abspath=abspath, + showlocals=self.config.getoption("showlocals", False), + style=style, + tbfilter=tbfilter, + truncate_locals=truncate_locals, + truncate_args=truncate_args, + ) + + def repr_failure( + self, + excinfo: ExceptionInfo[BaseException], + style: TracebackStyle | None = None, + ) -> str | TerminalRepr: + """Return a representation of a collection or test failure. + + .. seealso:: :ref:`non-python tests` + + :param excinfo: Exception information for the failure. + """ + return self._repr_failure_py(excinfo, style) + + +def get_fslocation_from_item(node: Node) -> tuple[str | Path, int | None]: + """Try to extract the actual location from a node, depending on available attributes: + + * "location": a pair (path, lineno) + * "obj": a Python object that the node wraps. + * "path": just a path + + :rtype: A tuple of (str|Path, int) with filename and 0-based line number. + """ + # See Item.location. + location: tuple[str, int | None, str] | None = getattr(node, "location", None) + if location is not None: + return location[:2] + obj = getattr(node, "obj", None) + if obj is not None: + return getfslineno(obj) + return getattr(node, "path", "unknown location"), -1 + + +class Collector(Node, abc.ABC): + """Base class of all collectors. + + Collectors create children through `collect()` and thus iteratively build + the collection tree. + """ + + class CollectError(Exception): + """An error during collection, contains a custom message.""" + + @abc.abstractmethod + def collect(self) -> Iterable[Item | Collector]: + """Collect children (items and collectors) for this collector.""" + raise NotImplementedError("abstract") + + # TODO: This omits the style= parameter which breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, excinfo: ExceptionInfo[BaseException] + ) -> str | TerminalRepr: + """Return a representation of a collection failure. + + :param excinfo: Exception information for the failure. + """ + if isinstance(excinfo.value, self.CollectError) and not self.config.getoption( + "fulltrace", False + ): + exc = excinfo.value + return str(exc.args[0]) + + # Respect explicit tbstyle option, but default to "short" + # (_repr_failure_py uses "long" with "fulltrace" option always).
+ tbstyle = self.config.getoption("tbstyle", "auto") + if tbstyle == "auto": + tbstyle = "short" + + return self._repr_failure_py(excinfo, style=tbstyle) + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "path"): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.path) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + return ntraceback.filter(excinfo) + return excinfo.traceback + + +@lru_cache(maxsize=1000) +def _check_initialpaths_for_relpath( + initial_paths: frozenset[Path], path: Path +) -> str | None: + if path in initial_paths: + return "" + + for parent in path.parents: + if parent in initial_paths: + return str(path.relative_to(parent)) + + return None + + +class FSCollector(Collector, abc.ABC): + """Base class for filesystem collectors.""" + + def __init__( + self, + fspath: LEGACY_PATH | None = None, + path_or_parent: Path | Node | None = None, + path: Path | None = None, + name: str | None = None, + parent: Node | None = None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + ) -> None: + if path_or_parent: + if isinstance(path_or_parent, Node): + assert parent is None + parent = cast(FSCollector, path_or_parent) + elif isinstance(path_or_parent, Path): + assert path is None + path = path_or_parent + + path = _imply_path(type(self), path, fspath=fspath) + if name is None: + name = path.name + if parent is not None and parent.path != path: + try: + rel = path.relative_to(parent.path) + except ValueError: + pass + else: + name = str(rel) + name = name.replace(os.sep, SEP) + self.path = path + + if session is None: + assert parent is not None + session = parent.session + + if nodeid is None: + try: + nodeid = str(self.path.relative_to(session.config.rootpath)) + except ValueError: + nodeid = _check_initialpaths_for_relpath(session._initialpaths, path) + + if nodeid and os.sep != SEP: + nodeid = nodeid.replace(os.sep, SEP) + + super().__init__( + name=name, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + path=path, + ) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath: LEGACY_PATH | None = None, + path: Path | None = None, + **kw, + ) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, fspath=fspath, path=path, **kw) + + +class File(FSCollector, abc.ABC): + """Base class for collecting tests from a file. + + :ref:`non-python tests`. + """ + + +class Directory(FSCollector, abc.ABC): + """Base class for collecting files from a directory. + + A basic directory collector does the following: goes over the files and + sub-directories in the directory and creates collectors for them by calling + the hooks :hook:`pytest_collect_directory` and :hook:`pytest_collect_file`, + after checking that they are not ignored using + :hook:`pytest_ignore_collect`. + + The default directory collectors are :class:`~pytest.Dir` and + :class:`~pytest.Package`. + + .. versionadded:: 8.0 + + :ref:`custom directory collectors`. + """ + + +class Item(Node, abc.ABC): + """Base class of all test invocation items. + + Note that for a single function there might be multiple test invocation items. 
+ """ + + nextitem = None + + def __init__( + self, + name, + parent=None, + config: Config | None = None, + session: Session | None = None, + nodeid: str | None = None, + **kw, + ) -> None: + # The first two arguments are intentionally passed positionally, + # to keep plugins who define a node type which inherits from + # (pytest.Item, pytest.File) working (see issue #8435). + # They can be made kwargs when the deprecation above is done. + super().__init__( + name, + parent, + config=config, + session=session, + nodeid=nodeid, + **kw, + ) + self._report_sections: list[tuple[str, str, str]] = [] + + #: A list of tuples (name, value) that holds user defined properties + #: for this test. + self.user_properties: list[tuple[str, object]] = [] + + self._check_item_and_collector_diamond_inheritance() + + def _check_item_and_collector_diamond_inheritance(self) -> None: + """ + Check if the current type inherits from both File and Collector + at the same time, emitting a warning accordingly (#8447). + """ + cls = type(self) + + # We inject an attribute in the type to avoid issuing this warning + # for the same class more than once, which is not helpful. + # It is a hack, but was deemed acceptable in order to avoid + # flooding the user in the common case. + attr_name = "_pytest_diamond_inheritance_warning_shown" + if getattr(cls, attr_name, False): + return + setattr(cls, attr_name, True) + + problems = ", ".join( + base.__name__ for base in cls.__bases__ if issubclass(base, Collector) + ) + if problems: + warnings.warn( + f"{cls.__name__} is an Item subclass and should not be a collector, " + f"however its bases {problems} are collectors.\n" + "Please split the Collectors and the Item into separate node types.\n" + "Pytest Doc example: https://docs.pytest.org/en/latest/example/nonpython.html\n" + "example pull request on a plugin: https://github.com/asmeurer/pytest-flakes/pull/40/", + PytestWarning, + ) + + @abc.abstractmethod + def runtest(self) -> None: + """Run the test case for this item. + + Must be implemented by subclasses. + + .. seealso:: :ref:`non-python tests` + """ + raise NotImplementedError("runtest must be implemented by Item subclass") + + def add_report_section(self, when: str, key: str, content: str) -> None: + """Add a new report section, similar to what's done internally to add + stdout and stderr captured output:: + + item.add_report_section("call", "stdout", "report section contents") + + :param str when: + One of the possible capture states, ``"setup"``, ``"call"``, ``"teardown"``. + :param str key: + Name of the section, can be customized at will. Pytest uses ``"stdout"`` and + ``"stderr"`` internally. + :param str content: + The full contents as a string. + """ + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + """Get location information for this item for test reports. + + Returns a tuple with three elements: + + - The path of the test (default ``self.path``) + - The 0-based line number of the test (default ``None``) + - A name of the test to be shown (default ``""``) + + .. seealso:: :ref:`non-python tests` + """ + return self.path, None, "" + + @cached_property + def location(self) -> tuple[str, int | None, str]: + """ + Returns a tuple of ``(relfspath, lineno, testname)`` for this item + where ``relfspath`` is file path relative to ``config.rootpath`` + and lineno is a 0-based line number. 
+ """ + location = self.reportinfo() + path = absolutepath(location[0]) + relfspath = self.session._node_location_to_relpath(path) + assert type(location[2]) is str + return (relfspath, location[1], location[2]) diff --git a/venv/lib/python3.10/site-packages/_pytest/outcomes.py b/venv/lib/python3.10/site-packages/_pytest/outcomes.py new file mode 100644 index 0000000000000000000000000000000000000000..68ba05433658373bfbf544505296970d51a7189a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/outcomes.py @@ -0,0 +1,317 @@ +"""Exception classes and constants handling test outcomes as well as +functions creating them.""" + +from __future__ import annotations + +from collections.abc import Callable +import sys +from typing import Any +from typing import cast +from typing import NoReturn +from typing import Protocol +from typing import TypeVar + +from .warning_types import PytestDeprecationWarning + + +class OutcomeException(BaseException): + """OutcomeException and its subclass instances indicate and contain info + about test and collection outcomes.""" + + def __init__(self, msg: str | None = None, pytrace: bool = True) -> None: + if msg is not None and not isinstance(msg, str): + error_msg = ( # type: ignore[unreachable] + "{} expected string as 'msg' parameter, got '{}' instead.\n" + "Perhaps you meant to use a mark?" + ) + raise TypeError(error_msg.format(type(self).__name__, type(msg).__name__)) + super().__init__(msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self) -> str: + if self.msg is not None: + return self.msg + return f"<{self.__class__.__name__} instance>" + + __str__ = __repr__ + + +TEST_OUTCOME = (OutcomeException, Exception) + + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = "builtins" + + def __init__( + self, + msg: str | None = None, + pytrace: bool = True, + allow_module_level: bool = False, + *, + _use_item_location: bool = False, + ) -> None: + super().__init__(msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + # If true, the skip location is reported as the item's location, + # instead of the place that raises the exception/calls skip(). + self._use_item_location = _use_item_location + + +class Failed(OutcomeException): + """Raised from an explicit call to pytest.fail().""" + + __module__ = "builtins" + + +class Exit(Exception): + """Raised for immediate program exits (no tracebacks/summaries).""" + + def __init__( + self, msg: str = "unknown reason", returncode: int | None = None + ) -> None: + self.msg = msg + self.returncode = returncode + super().__init__(msg) + + +# We need a callable protocol to add attributes, for discussion see +# https://github.com/python/mypy/issues/2087. + +_F = TypeVar("_F", bound=Callable[..., object]) +_ET = TypeVar("_ET", bound=type[BaseException]) + + +class _WithException(Protocol[_F, _ET]): + Exception: _ET + __call__: _F + + +def _with_exception(exception_type: _ET) -> Callable[[_F], _WithException[_F, _ET]]: + def decorate(func: _F) -> _WithException[_F, _ET]: + func_with_exception = cast(_WithException[_F, _ET], func) + func_with_exception.Exception = exception_type + return func_with_exception + + return decorate + + +# Exposed helper methods. + + +@_with_exception(Exit) +def exit( + reason: str = "", + returncode: int | None = None, +) -> NoReturn: + """Exit testing process. + + :param reason: + The message to show as the reason for exiting pytest. 
reason has a default value + only because `msg` is deprecated. + + :param returncode: + Return code to be used when exiting pytest. None means the same as ``0`` (no error), same as :func:`sys.exit`. + + :raises pytest.exit.Exception: + The exception that is raised. + """ + __tracebackhide__ = True + raise Exit(reason, returncode) + + +@_with_exception(Skipped) +def skip( + reason: str = "", + *, + allow_module_level: bool = False, +) -> NoReturn: + """Skip an executing test with the given message. + + This function should be called only during testing (setup, call or teardown) or + during collection by using the ``allow_module_level`` flag. This function can + be called in doctests as well. + + :param reason: + The message to show the user as reason for the skip. + + :param allow_module_level: + Allows this function to be called at module level. + Raising the skip exception at module level will stop + the execution of the module and prevent the collection of all tests in the module, + even those defined before the `skip` call. + + Defaults to False. + + :raises pytest.skip.Exception: + The exception that is raised. + + .. note:: + It is better to use the :ref:`pytest.mark.skipif ref` marker when + possible to declare a test to be skipped under certain conditions + like mismatching platforms or dependencies. + Similarly, use the ``# doctest: +SKIP`` directive (see :py:data:`doctest.SKIP`) + to skip a doctest statically. + """ + __tracebackhide__ = True + raise Skipped(msg=reason, allow_module_level=allow_module_level) + + +@_with_exception(Failed) +def fail(reason: str = "", pytrace: bool = True) -> NoReturn: + """Explicitly fail an executing test with the given message. + + :param reason: + The message to show the user as reason for the failure. + + :param pytrace: + If False, msg represents the full failure information and no + python traceback will be reported. + + :raises pytest.fail.Exception: + The exception that is raised. + """ + __tracebackhide__ = True + raise Failed(msg=reason, pytrace=pytrace) + + +class XFailed(Failed): + """Raised from an explicit call to pytest.xfail().""" + + +@_with_exception(XFailed) +def xfail(reason: str = "") -> NoReturn: + """Imperatively xfail an executing test or setup function with the given reason. + + This function should be called only during testing (setup, call or teardown). + + No other code is executed after using ``xfail()`` (it is implemented + internally by raising an exception). + + :param reason: + The message to show the user as reason for the xfail. + + .. note:: + It is better to use the :ref:`pytest.mark.xfail ref` marker when + possible to declare a test to be xfailed under certain conditions + like known bugs or missing features. + + :raises pytest.xfail.Exception: + The exception that is raised. + """ + __tracebackhide__ = True + raise XFailed(reason) + + +def importorskip( + modname: str, + minversion: str | None = None, + reason: str | None = None, + *, + exc_type: type[ImportError] | None = None, +) -> Any: + """Import and return the requested module ``modname``, or skip the + current test if the module cannot be imported. + + :param modname: + The name of the module to import. + :param minversion: + If given, the imported module's ``__version__`` attribute must be at + least this minimal version, otherwise the test is still skipped. + :param reason: + If given, this reason is shown as the message when the module cannot + be imported. + :param exc_type: + The exception that should be captured in order to skip modules. 
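The outcome helpers defined here are what `pytest.skip`, `pytest.fail` and `pytest.xfail` dispatch to. Imperatively they look like this; the three condition helpers are hypothetical, used only to show where each call fits:

    import pytest

    def test_outcomes() -> None:
        if not have_fixture_data():       # hypothetical helper
            pytest.skip("fixture data not available")
        if platform_has_known_bug():      # hypothetical helper
            pytest.xfail("known bug on this platform")
        if result_is_wrong():             # hypothetical helper
            pytest.fail("bad result", pytrace=False)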
+ Must be :py:class:`ImportError` or a subclass. + + If the module can be imported but raises :class:`ImportError`, pytest will + issue a warning to the user, as often users expect the module not to be + found (which would raise :class:`ModuleNotFoundError` instead). + + This warning can be suppressed by passing ``exc_type=ImportError`` explicitly. + + See :ref:`import-or-skip-import-error` for details. + + + :returns: + The imported module. This should be assigned to its canonical name. + + :raises pytest.skip.Exception: + If the module cannot be imported. + + Example:: + + docutils = pytest.importorskip("docutils") + + .. versionadded:: 8.2 + + The ``exc_type`` parameter. + """ + import warnings + + __tracebackhide__ = True + compile(modname, "", "eval") # to catch syntaxerrors + + # Until pytest 9.1, we will warn the user if we catch ImportError (instead of ModuleNotFoundError), + # as this might be hiding an installation/environment problem, which is not usually what is intended + # when using importorskip() (#11523). + # In 9.1, to keep the function signature compatible, we just change the code below to: + # 1. Use `exc_type = ModuleNotFoundError` if `exc_type` is not given. + # 2. Remove `warn_on_import` and the warning handling. + if exc_type is None: + exc_type = ImportError + warn_on_import_error = True + else: + warn_on_import_error = False + + skipped: Skipped | None = None + warning: Warning | None = None + + with warnings.catch_warnings(): + # Make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file. + warnings.simplefilter("ignore") + + try: + __import__(modname) + except exc_type as exc: + # Do not raise or issue warnings inside the catch_warnings() block. + if reason is None: + reason = f"could not import {modname!r}: {exc}" + skipped = Skipped(reason, allow_module_level=True) + + if warn_on_import_error and not isinstance(exc, ModuleNotFoundError): + lines = [ + "", + f"Module '{modname}' was found, but when imported by pytest it raised:", + f" {exc!r}", + "In pytest 9.1 this warning will become an error by default.", + "You can fix the underlying problem, or alternatively overwrite this behavior and silence this " + "warning by passing exc_type=ImportError explicitly.", + "See https://docs.pytest.org/en/stable/deprecations.html#pytest-importorskip-default-behavior-regarding-importerror", + ] + warning = PytestDeprecationWarning("\n".join(lines)) + + if warning: + warnings.warn(warning, stacklevel=2) + if skipped: + raise skipped + + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, "__version__", None) + if minversion is not None: + # Imported lazily to improve start-up time. 
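Typical `importorskip` usage matching the behaviour described above; the module names and version are only examples:

    import pytest

    # Skip the containing module at collection time unless numpy >= 1.20 is importable.
    np = pytest.importorskip("numpy", minversion="1.20")

    # Opt out of the ImportError deprecation warning described above by being explicit:
    yaml = pytest.importorskip("yaml", exc_type=ImportError)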
+ from packaging.version import Version + + if verattr is None or Version(verattr) < Version(minversion): + raise Skipped( + f"module {modname!r} has __version__ {verattr!r}, required is: {minversion!r}", + allow_module_level=True, + ) + return mod diff --git a/venv/lib/python3.10/site-packages/_pytest/pastebin.py b/venv/lib/python3.10/site-packages/_pytest/pastebin.py new file mode 100644 index 0000000000000000000000000000000000000000..c7b39d96f029c31d00a6a85125a77f4314fc97a5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/pastebin.py @@ -0,0 +1,117 @@ +# mypy: allow-untyped-defs +"""Submit failure or test session information to a pastebin service.""" + +from __future__ import annotations + +from io import StringIO +import tempfile +from typing import IO + +from _pytest.config import Config +from _pytest.config import create_terminal_writer +from _pytest.config.argparsing import Parser +from _pytest.stash import StashKey +from _pytest.terminal import TerminalReporter +import pytest + + +pastebinfile_key = StashKey[IO[bytes]]() + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting") + group.addoption( + "--pastebin", + metavar="mode", + action="store", + dest="pastebin", + default=None, + choices=["failed", "all"], + help="Send failed|all info to bpaste.net pastebin service", + ) + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config: Config) -> None: + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin("terminalreporter") + # If no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a worker node + # when using pytest-xdist, for example. + if tr is not None: + # pastebin file will be UTF-8 encoded binary file. + config.stash[pastebinfile_key] = tempfile.TemporaryFile("w+b") + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if isinstance(s, str): + s = s.encode("utf-8") + config.stash[pastebinfile_key].write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config: Config) -> None: + if pastebinfile_key in config.stash: + pastebinfile = config.stash[pastebinfile_key] + # Get terminal contents and delete file. + pastebinfile.seek(0) + sessionlog = pastebinfile.read() + pastebinfile.close() + del config.stash[pastebinfile_key] + # Undo our patching in the terminal reporter. + tr = config.pluginmanager.getplugin("terminalreporter") + del tr._tw.__dict__["write"] + # Write summary. + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line(f"pastebin session-log: {pastebinurl}\n") + + +def create_new_paste(contents: str | bytes) -> str: + """Create a new paste using the bpaste.net service. + + :contents: Paste contents string. + :returns: URL to the pasted contents, or an error message. + """ + import re + from urllib.error import HTTPError + from urllib.parse import urlencode + from urllib.request import urlopen + + params = {"code": contents, "lexer": "text", "expiry": "1week"} + url = "https://bpa.st" + try: + response: str = ( + urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8") + ) + except HTTPError as e: + with e: # HTTPErrors are also http responses that must be closed! 
+ return f"bad response: {e}" + except OSError as e: # eg urllib.error.URLError + return f"bad response: {e}" + m = re.search(r'href="/raw/(\w+)"', response) + if m: + return f"{url}/show/{m.group(1)}" + else: + return "bad response: invalid format ('" + response + "')" + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + if terminalreporter.config.option.pastebin != "failed": + return + if "failed" in terminalreporter.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats["failed"]: + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = terminalreporter._getfailureheadline(rep) + file = StringIO() + tw = create_terminal_writer(terminalreporter.config, file) + rep.toterminal(tw) + s = file.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + terminalreporter.write_line(f"{msg} --> {pastebinurl}") diff --git a/venv/lib/python3.10/site-packages/_pytest/pathlib.py b/venv/lib/python3.10/site-packages/_pytest/pathlib.py new file mode 100644 index 0000000000000000000000000000000000000000..b69e85404e7edd7e01a4557f317d943b6f9737ea --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/pathlib.py @@ -0,0 +1,1055 @@ +from __future__ import annotations + +import atexit +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +import contextlib +from enum import Enum +from errno import EBADF +from errno import ELOOP +from errno import ENOENT +from errno import ENOTDIR +import fnmatch +from functools import partial +from importlib.machinery import ModuleSpec +from importlib.machinery import PathFinder +import importlib.util +import itertools +import os +from os.path import expanduser +from os.path import expandvars +from os.path import isabs +from os.path import sep +from pathlib import Path +from pathlib import PurePath +from posixpath import sep as posix_sep +import shutil +import sys +import types +from types import ModuleType +from typing import Any +from typing import TypeVar +import uuid +import warnings + +from _pytest.compat import assert_never +from _pytest.outcomes import skip +from _pytest.warning_types import PytestWarning + + +if sys.version_info < (3, 11): + from importlib._bootstrap_external import _NamespaceLoader as NamespaceLoader +else: + from importlib.machinery import NamespaceLoader + +LOCK_TIMEOUT = 60 * 60 * 24 * 3 + +_AnyPurePath = TypeVar("_AnyPurePath", bound=PurePath) + +# The following function, variables and comments were +# copied from cpython 3.9 Lib/pathlib.py file. + +# EBADF - guard against macOS `stat` throwing EBADF +_IGNORED_ERRORS = (ENOENT, ENOTDIR, EBADF, ELOOP) + +_IGNORED_WINERRORS = ( + 21, # ERROR_NOT_READY - drive exists but is not accessible + 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself +) + + +def _ignore_error(exception: Exception) -> bool: + return ( + getattr(exception, "errno", None) in _IGNORED_ERRORS + or getattr(exception, "winerror", None) in _IGNORED_WINERRORS + ) + + +def get_lock_path(path: _AnyPurePath) -> _AnyPurePath: + return path.joinpath(".lock") + + +def on_rm_rf_error( + func: Callable[..., Any] | None, + path: str, + excinfo: BaseException + | tuple[type[BaseException], BaseException, types.TracebackType | None], + *, + start_path: Path, +) -> bool: + """Handle known read-only errors during rmtree. + + The returned value is used only by our own tests. 
+ """ + if isinstance(excinfo, BaseException): + exc = excinfo + else: + exc = excinfo[1] + + # Another process removed the file in the middle of the "rm_rf" (xdist for example). + # More context: https://github.com/pytest-dev/pytest/issues/5974#issuecomment-543799018 + if isinstance(exc, FileNotFoundError): + return False + + if not isinstance(exc, PermissionError): + warnings.warn( + PytestWarning(f"(rm_rf) error removing {path}\n{type(exc)}: {exc}") + ) + return False + + if func not in (os.rmdir, os.remove, os.unlink): + if func not in (os.open,): + warnings.warn( + PytestWarning( + f"(rm_rf) unknown function {func} when removing {path}:\n{type(exc)}: {exc}" + ) + ) + return False + + # Chmod + retry. + import stat + + def chmod_rw(p: str) -> None: + mode = os.stat(p).st_mode + os.chmod(p, mode | stat.S_IRUSR | stat.S_IWUSR) + + # For files, we need to recursively go upwards in the directories to + # ensure they all are also writable. + p = Path(path) + if p.is_file(): + for parent in p.parents: + chmod_rw(str(parent)) + # Stop when we reach the original path passed to rm_rf. + if parent == start_path: + break + chmod_rw(str(path)) + + func(path) + return True + + +def ensure_extended_length_path(path: Path) -> Path: + """Get the extended-length version of a path (Windows). + + On Windows, by default, the maximum length of a path (MAX_PATH) is 260 + characters, and operations on paths longer than that fail. But it is possible + to overcome this by converting the path to "extended-length" form before + performing the operation: + https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file#maximum-path-length-limitation + + On Windows, this function returns the extended-length absolute version of path. + On other platforms it returns path unchanged. + """ + if sys.platform.startswith("win32"): + path = path.resolve() + path = Path(get_extended_length_path_str(str(path))) + return path + + +def get_extended_length_path_str(path: str) -> str: + """Convert a path to a Windows extended length path.""" + long_path_prefix = "\\\\?\\" + unc_long_path_prefix = "\\\\?\\UNC\\" + if path.startswith((long_path_prefix, unc_long_path_prefix)): + return path + # UNC + if path.startswith("\\\\"): + return unc_long_path_prefix + path[2:] + return long_path_prefix + path + + +def rm_rf(path: Path) -> None: + """Remove the path contents recursively, even if some elements + are read-only.""" + path = ensure_extended_length_path(path) + onerror = partial(on_rm_rf_error, start_path=path) + if sys.version_info >= (3, 12): + shutil.rmtree(str(path), onexc=onerror) + else: + shutil.rmtree(str(path), onerror=onerror) + + +def find_prefixed(root: Path, prefix: str) -> Iterator[os.DirEntry[str]]: + """Find all elements in root that begin with the prefix, case-insensitive.""" + l_prefix = prefix.lower() + for x in os.scandir(root): + if x.name.lower().startswith(l_prefix): + yield x + + +def extract_suffixes(iter: Iterable[os.DirEntry[str]], prefix: str) -> Iterator[str]: + """Return the parts of the paths following the prefix. + + :param iter: Iterator over path names. + :param prefix: Expected prefix of the path names. 
+ """ + p_len = len(prefix) + for entry in iter: + yield entry.name[p_len:] + + +def find_suffixes(root: Path, prefix: str) -> Iterator[str]: + """Combine find_prefixes and extract_suffixes.""" + return extract_suffixes(find_prefixed(root, prefix), prefix) + + +def parse_num(maybe_num: str) -> int: + """Parse number path suffixes, returns -1 on error.""" + try: + return int(maybe_num) + except ValueError: + return -1 + + +def _force_symlink(root: Path, target: str | PurePath, link_to: str | Path) -> None: + """Helper to create the current symlink. + + It's full of race conditions that are reasonably OK to ignore + for the context of best effort linking to the latest test run. + + The presumption being that in case of much parallelism + the inaccuracy is going to be acceptable. + """ + current_symlink = root.joinpath(target) + try: + current_symlink.unlink() + except OSError: + pass + try: + current_symlink.symlink_to(link_to) + except Exception: + pass + + +def make_numbered_dir(root: Path, prefix: str, mode: int = 0o700) -> Path: + """Create a directory with an increased number as suffix for the given prefix.""" + for i in range(10): + # try up to 10 times to create the folder + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + new_number = max_existing + 1 + new_path = root.joinpath(f"{prefix}{new_number}") + try: + new_path.mkdir(mode=mode) + except Exception: + pass + else: + _force_symlink(root, prefix + "current", new_path) + return new_path + else: + raise OSError( + "could not create numbered dir with prefix " + f"{prefix} in {root} after 10 tries" + ) + + +def create_cleanup_lock(p: Path) -> Path: + """Create a lock to prevent premature folder cleanup.""" + lock_path = get_lock_path(p) + try: + fd = os.open(str(lock_path), os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0o644) + except FileExistsError as e: + raise OSError(f"cannot create lockfile in {p}") from e + else: + pid = os.getpid() + spid = str(pid).encode() + os.write(fd, spid) + os.close(fd) + if not lock_path.is_file(): + raise OSError("lock path got renamed after successful creation") + return lock_path + + +def register_cleanup_lock_removal( + lock_path: Path, register: Any = atexit.register +) -> Any: + """Register a cleanup function for removing a lock, by default on atexit.""" + pid = os.getpid() + + def cleanup_on_exit(lock_path: Path = lock_path, original_pid: int = pid) -> None: + current_pid = os.getpid() + if current_pid != original_pid: + # fork + return + try: + lock_path.unlink() + except OSError: + pass + + return register(cleanup_on_exit) + + +def maybe_delete_a_numbered_dir(path: Path) -> None: + """Remove a numbered directory if its lock can be obtained and it does + not seem to be in use.""" + path = ensure_extended_length_path(path) + lock_path = None + try: + lock_path = create_cleanup_lock(path) + parent = path.parent + + garbage = parent.joinpath(f"garbage-{uuid.uuid4()}") + path.rename(garbage) + rm_rf(garbage) + except OSError: + # known races: + # * other process did a cleanup at the same time + # * deletable folder was found + # * process cwd (Windows) + return + finally: + # If we created the lock, ensure we remove it even if we failed + # to properly remove the numbered dir. 
+ if lock_path is not None: + try: + lock_path.unlink() + except OSError: + pass + + +def ensure_deletable(path: Path, consider_lock_dead_if_created_before: float) -> bool: + """Check if `path` is deletable based on whether the lock file is expired.""" + if path.is_symlink(): + return False + lock = get_lock_path(path) + try: + if not lock.is_file(): + return True + except OSError: + # we might not have access to the lock file at all, in this case assume + # we don't have access to the entire directory (#7491). + return False + try: + lock_time = lock.stat().st_mtime + except Exception: + return False + else: + if lock_time < consider_lock_dead_if_created_before: + # We want to ignore any errors while trying to remove the lock such as: + # - PermissionDenied, like the file permissions have changed since the lock creation; + # - FileNotFoundError, in case another pytest process got here first; + # and any other cause of failure. + with contextlib.suppress(OSError): + lock.unlink() + return True + return False + + +def try_cleanup(path: Path, consider_lock_dead_if_created_before: float) -> None: + """Try to cleanup a folder if we can ensure it's deletable.""" + if ensure_deletable(path, consider_lock_dead_if_created_before): + maybe_delete_a_numbered_dir(path) + + +def cleanup_candidates(root: Path, prefix: str, keep: int) -> Iterator[Path]: + """List candidates for numbered directories to be removed - follows py.path.""" + max_existing = max(map(parse_num, find_suffixes(root, prefix)), default=-1) + max_delete = max_existing - keep + entries = find_prefixed(root, prefix) + entries, entries2 = itertools.tee(entries) + numbers = map(parse_num, extract_suffixes(entries2, prefix)) + for entry, number in zip(entries, numbers): + if number <= max_delete: + yield Path(entry) + + +def cleanup_dead_symlinks(root: Path) -> None: + for left_dir in root.iterdir(): + if left_dir.is_symlink(): + if not left_dir.resolve().exists(): + left_dir.unlink() + + +def cleanup_numbered_dir( + root: Path, prefix: str, keep: int, consider_lock_dead_if_created_before: float +) -> None: + """Cleanup for lock driven numbered directories.""" + if not root.exists(): + return + for path in cleanup_candidates(root, prefix, keep): + try_cleanup(path, consider_lock_dead_if_created_before) + for path in root.glob("garbage-*"): + try_cleanup(path, consider_lock_dead_if_created_before) + + cleanup_dead_symlinks(root) + + +def make_numbered_dir_with_cleanup( + root: Path, + prefix: str, + keep: int, + lock_timeout: float, + mode: int, +) -> Path: + """Create a numbered dir with a cleanup lock and remove old ones.""" + e = None + for i in range(10): + try: + p = make_numbered_dir(root, prefix, mode) + # Only lock the current dir when keep is not 0 + if keep != 0: + lock_path = create_cleanup_lock(p) + register_cleanup_lock_removal(lock_path) + except Exception as exc: + e = exc + else: + consider_lock_dead_if_created_before = p.stat().st_mtime - lock_timeout + # Register a cleanup for program exit + atexit.register( + cleanup_numbered_dir, + root, + prefix, + keep, + consider_lock_dead_if_created_before, + ) + return p + assert e is not None + raise e + + +def resolve_from_str(input: str, rootpath: Path) -> Path: + input = expanduser(input) + input = expandvars(input) + if isabs(input): + return Path(input) + else: + return rootpath.joinpath(input) + + +def fnmatch_ex(pattern: str, path: str | os.PathLike[str]) -> bool: + """A port of FNMatcher from py.path.common which works with PurePath() instances. 
+ + The difference between this algorithm and PurePath.match() is that the + latter matches "**" glob expressions for each part of the path, while + this algorithm uses the whole path instead. + + For example: + "tests/foo/bar/doc/test_foo.py" matches pattern "tests/**/doc/test*.py" + with this algorithm, but not with PurePath.match(). + + This algorithm was ported to keep backward-compatibility with existing + settings which assume paths match according to this logic. + + References: + * https://bugs.python.org/issue29249 + * https://bugs.python.org/issue34731 + """ + path = PurePath(path) + iswin32 = sys.platform.startswith("win") + + if iswin32 and sep not in pattern and posix_sep in pattern: + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posix_sep, sep) + + if sep not in pattern: + name = path.name + else: + name = str(path) + if path.is_absolute() and not os.path.isabs(pattern): + pattern = f"*{os.sep}{pattern}" + return fnmatch.fnmatch(name, pattern) + + +def parts(s: str) -> set[str]: + parts = s.split(sep) + return {sep.join(parts[: i + 1]) or sep for i in range(len(parts))} + + +def symlink_or_skip( + src: os.PathLike[str] | str, + dst: os.PathLike[str] | str, + **kwargs: Any, +) -> None: + """Make a symlink, or skip the test in case symlinks are not supported.""" + try: + os.symlink(src, dst, **kwargs) + except OSError as e: + skip(f"symlinks not supported: {e}") + + +class ImportMode(Enum): + """Possible values for `mode` parameter of `import_path`.""" + + prepend = "prepend" + append = "append" + importlib = "importlib" + + +class ImportPathMismatchError(ImportError): + """Raised on import_path() if there is a mismatch of __file__'s. + + This can happen when `import_path` is called multiple times with different filenames that have + the same basename but reside in packages + (for example "/tests1/test_foo.py" and "/tests2/test_foo.py"). + """ + + +def import_path( + path: str | os.PathLike[str], + *, + mode: str | ImportMode = ImportMode.prepend, + root: Path, + consider_namespace_packages: bool, +) -> ModuleType: + """ + Import and return a module from the given path, which can be a file (a module) or + a directory (a package). + + :param path: + Path to the file to import. + + :param mode: + Controls the underlying import mechanism that will be used: + + * ImportMode.prepend: the directory containing the module (or package, taking + `__init__.py` files into account) will be put at the *start* of `sys.path` before + being imported with `importlib.import_module`. + + * ImportMode.append: same as `prepend`, but the directory will be appended + to the end of `sys.path`, if not already in `sys.path`. + + * ImportMode.importlib: uses more fine control mechanisms provided by `importlib` + to import the module, which avoids having to muck with `sys.path` at all. It effectively + allows having same-named test modules in different places. + + :param root: + Used as an anchor when mode == ImportMode.importlib to obtain + a unique name for the module being imported so it can safely be stored + into ``sys.modules``. + + :param consider_namespace_packages: + If True, consider namespace packages when resolving module names. + + :raises ImportPathMismatchError: + If, after importing, the given `path` and the module's `__file__` + are different. Only raised in `prepend` and `append` modes.
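The docstring's example stated concretely; `fnmatch_ex` is private and imported here only to illustrate the whole-path semantics on current CPython versions:

    from pathlib import PurePath

    from _pytest.pathlib import fnmatch_ex  # private API

    p = "tests/foo/bar/doc/test_foo.py"
    assert fnmatch_ex("tests/**/doc/test*.py", p)          # whole-path match
    assert not PurePath(p).match("tests/**/doc/test*.py")  # per-component "**" fails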
+ """ + path = Path(path) + mode = ImportMode(mode) + + if not path.exists(): + raise ImportError(path) + + if mode is ImportMode.importlib: + # Try to import this module using the standard import mechanisms, but + # without touching sys.path. + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pass + else: + # If the given module name is already in sys.modules, do not import it again. + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, pkg_root, insert_modules=False + ) + if mod is not None: + return mod + + # Could not import the module with the current sys.path, so we fall back + # to importing the file as a single module, not being a part of a package. + module_name = module_name_from_path(path, root) + with contextlib.suppress(KeyError): + return sys.modules[module_name] + + mod = _import_module_using_spec( + module_name, path, path.parent, insert_modules=True + ) + if mod is None: + raise ImportError(f"Can't find module {module_name} at location {path}") + return mod + + try: + pkg_root, module_name = resolve_pkg_root_and_module_name( + path, consider_namespace_packages=consider_namespace_packages + ) + except CouldNotResolvePathError: + pkg_root, module_name = path.parent, path.stem + + # Change sys.path permanently: restoring it at the end of this function would cause surprising + # problems because of delayed imports: for example, a conftest.py file imported by this function + # might have local imports, which would fail at runtime if we restored sys.path. + if mode is ImportMode.append: + if str(pkg_root) not in sys.path: + sys.path.append(str(pkg_root)) + elif mode is ImportMode.prepend: + if str(pkg_root) != sys.path[0]: + sys.path.insert(0, str(pkg_root)) + else: + assert_never(mode) + + importlib.import_module(module_name) + + mod = sys.modules[module_name] + if path.name == "__init__.py": + return mod + + ignore = os.environ.get("PY_IGNORE_IMPORTMISMATCH", "") + if ignore != "1": + module_file = mod.__file__ + if module_file is None: + raise ImportPathMismatchError(module_name, module_file, path) + + if module_file.endswith((".pyc", ".pyo")): + module_file = module_file[:-1] + if module_file.endswith(os.sep + "__init__.py"): + module_file = module_file[: -(len(os.sep + "__init__.py"))] + + try: + is_same = _is_same(str(path), module_file) + except FileNotFoundError: + is_same = False + + if not is_same: + raise ImportPathMismatchError(module_name, module_file, path) + + return mod + + +def _import_module_using_spec( + module_name: str, module_path: Path, module_location: Path, *, insert_modules: bool +) -> ModuleType | None: + """ + Tries to import a module by its canonical name, path, and its parent location. + + :param module_name: + The expected module name, will become the key of `sys.modules`. + + :param module_path: + The file path of the module, for example `/foo/bar/test_demo.py`. + If module is a package, pass the path to the `__init__.py` of the package. + If module is a namespace package, pass directory path. + + :param module_location: + The parent location of the module. + If module is a package, pass the directory containing the `__init__.py` file. + + :param insert_modules: + If True, will call `insert_missing_modules` to create empty intermediate modules + with made-up module names (when importing test files not reachable from `sys.path`). 
+ + Example 1 of parent_module_*: + + module_name: "a.b.c.demo" + module_path: Path("a/b/c/demo.py") + module_location: Path("a/b/c/") + if "a.b.c" is package ("a/b/c/__init__.py" exists), then + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c/__init__.py") + parent_module_location: Path("a/b/c/") + else: + parent_module_name: "a.b.c" + parent_module_path: Path("a/b/c") + parent_module_location: Path("a/b/") + + Example 2 of parent_module_*: + + module_name: "a.b.c" + module_path: Path("a/b/c/__init__.py") + module_location: Path("a/b/c/") + if "a.b" is package ("a/b/__init__.py" exists), then + parent_module_name: "a.b" + parent_module_path: Path("a/b/__init__.py") + parent_module_location: Path("a/b/") + else: + parent_module_name: "a.b" + parent_module_path: Path("a/b/") + parent_module_location: Path("a/") + """ + # Attempt to import the parent module; it seems to be our responsibility, see: + # https://github.com/python/cpython/blob/73906d5c908c1e0b73c5436faeff7d93698fc074/Lib/importlib/_bootstrap.py#L1308-L1311 + parent_module_name, _, name = module_name.rpartition(".") + parent_module: ModuleType | None = None + if parent_module_name: + parent_module = sys.modules.get(parent_module_name) + # If the parent_module lacks the `__path__` attribute, finding a submodule's spec raises + # AttributeError, so the parent must be re-imported based on its path. + need_reimport = not hasattr(parent_module, "__path__") + if parent_module is None or need_reimport: + # Get parent_location based on location, get parent_path based on path. + if module_path.name == "__init__.py": + # If the current module is in a package, + # we need to step out of the package before entering the parent module. + parent_module_path = module_path.parent.parent + else: + parent_module_path = module_path.parent + + if (parent_module_path / "__init__.py").is_file(): + # If the parent module is a package, load it via its __init__.py file. + parent_module_path = parent_module_path / "__init__.py" + + parent_module = _import_module_using_spec( + parent_module_name, + parent_module_path, + parent_module_path.parent, + insert_modules=insert_modules, + ) + + # Checking with sys.meta_path first in case one of its hooks can import this module, + # such as our own assertion-rewrite hook. + for meta_importer in sys.meta_path: + module_name_of_meta = getattr(meta_importer.__class__, "__module__", "") + if module_name_of_meta == "_pytest.assertion.rewrite" and module_path.is_file(): + # Import modules in subdirectories by module_path + # to ensure assertion rewrites are not missed (#12659). + find_spec_path = [str(module_location), str(module_path)] + else: + find_spec_path = [str(module_location)] + + spec = meta_importer.find_spec(module_name, find_spec_path) + + if spec_matches_module_path(spec, module_path): + break + else: + loader = None + if module_path.is_dir(): + # The `spec_from_file_location` matches a loader based on the file extension by default. + # For a namespace package, a loader must be specified manually. + loader = NamespaceLoader(name, module_path, PathFinder()) # type: ignore[arg-type] + + spec = importlib.util.spec_from_file_location( + module_name, str(module_path), loader=loader + ) + + if spec_matches_module_path(spec, module_path): + assert spec is not None + # Spec found; import the module. + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) # type: ignore[union-attr] + + # Set this module as an attribute of the parent module (#12194).
+ if parent_module is not None: + setattr(parent_module, name, mod) + + if insert_modules: + insert_missing_modules(sys.modules, module_name) + return mod + + return None + + +def spec_matches_module_path(module_spec: ModuleSpec | None, module_path: Path) -> bool: + """Return true if the given ModuleSpec can be used to import the given module path.""" + if module_spec is None: + return False + + if module_spec.origin: + return Path(module_spec.origin) == module_path + + # Compare the path with the `module_spec.submodule_search_locations` in case + # the module is part of a namespace package. + # https://docs.python.org/3/library/importlib.html#importlib.machinery.ModuleSpec.submodule_search_locations + if module_spec.submodule_search_locations: # can be None. + for path in module_spec.submodule_search_locations: + if Path(path) == module_path: + return True + + return False + + +# Implement a special _is_same function on Windows which returns True if the two filenames +# compare equal, to circumvent os.path.samefile returning False for mounts in UNC (#7678). +if sys.platform.startswith("win"): + + def _is_same(f1: str, f2: str) -> bool: + return Path(f1) == Path(f2) or os.path.samefile(f1, f2) + +else: + + def _is_same(f1: str, f2: str) -> bool: + return os.path.samefile(f1, f2) + + +def module_name_from_path(path: Path, root: Path) -> str: + """ + Return a dotted module name based on the given path, anchored on root. + + For example: path="/projects/src/tests/test_foo.py" and root="/projects", the + resulting module name will be "src.tests.test_foo". + """ + path = path.with_suffix("") + try: + relative_path = path.relative_to(root) + except ValueError: + # If we can't get a relative path to root, use the full path, except + # for the first part ("d:\\" or "/" depending on the platform, for example). + path_parts = path.parts[1:] + else: + # Use the parts for the relative path to the root path. + path_parts = relative_path.parts + + # Module names for packages do not contain the __init__ file, unless + # the `__init__.py` file is at the root. + if len(path_parts) >= 2 and path_parts[-1] == "__init__": + path_parts = path_parts[:-1] + + # Module names cannot contain ".", normalize them to "_". This prevents + # a directory having a "." in the name (".env.310" for example) causing extra intermediate modules. + # Also, it is important to replace "." at the start of paths, as those are considered relative imports. + path_parts = tuple(x.replace(".", "_") for x in path_parts) + + return ".".join(path_parts) + + +def insert_missing_modules(modules: dict[str, ModuleType], module_name: str) -> None: + """ + Used by ``import_path`` to create intermediate modules when using mode=importlib. + + When we want to import a module as "src.tests.test_foo" for example, we need + to create empty modules "src" and "src.tests" after inserting "src.tests.test_foo", + otherwise "src.tests.test_foo" is not importable by ``__import__``. + """ + module_parts = module_name.split(".") + while module_name: + parent_module_name, _, child_name = module_name.rpartition(".") + if parent_module_name: + parent_module = modules.get(parent_module_name) + if parent_module is None: + try: + # If sys.meta_path is empty, calling import_module will issue + # a warning and raise ModuleNotFoundError. To avoid the + # warning, we check sys.meta_path explicitly and raise the error + # ourselves to fall back to creating a dummy module.
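Concrete behaviour of `module_name_from_path` under the rules above; the paths are hypothetical:

    from pathlib import Path

    from _pytest.pathlib import module_name_from_path  # private API

    assert module_name_from_path(
        Path("/proj/src/tests/test_foo.py"), root=Path("/proj")
    ) == "src.tests.test_foo"
    # "." in directory names is normalized to "_" to avoid bogus intermediate modules:
    assert module_name_from_path(
        Path("/proj/.env.310/demo.py"), root=Path("/proj")
    ) == "_env_310.demo"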
+ if not sys.meta_path: + raise ModuleNotFoundError + parent_module = importlib.import_module(parent_module_name) + except ModuleNotFoundError: + parent_module = ModuleType( + module_name, + doc="Empty module created by pytest's importmode=importlib.", + ) + modules[parent_module_name] = parent_module + + # Add child attribute to the parent that can reference the child + # modules. + if not hasattr(parent_module, child_name): + setattr(parent_module, child_name, modules[module_name]) + + module_parts.pop(-1) + module_name = ".".join(module_parts) + + +def resolve_package_path(path: Path) -> Path | None: + """Return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + + Returns None if it cannot be determined. + """ + result = None + for parent in itertools.chain((path,), path.parents): + if parent.is_dir(): + if not (parent / "__init__.py").is_file(): + break + if not parent.name.isidentifier(): + break + result = parent + return result + + +def resolve_pkg_root_and_module_name( + path: Path, *, consider_namespace_packages: bool = False +) -> tuple[Path, str]: + """ + Return the path to the directory of the root package that contains the + given Python file, and its module name: + + src/ + app/ + __init__.py + core/ + __init__.py + models.py + + Passing the full path to `models.py` will yield Path("src") and "app.core.models". + + If consider_namespace_packages is True, then we additionally check upwards in the hierarchy + for namespace packages: + + https://packaging.python.org/en/latest/guides/packaging-namespace-packages + + Raises CouldNotResolvePathError if the given path does not belong to a package (missing any __init__.py files). + """ + pkg_root: Path | None = None + pkg_path = resolve_package_path(path) + if pkg_path is not None: + pkg_root = pkg_path.parent + if consider_namespace_packages: + start = pkg_root if pkg_root is not None else path.parent + for candidate in (start, *start.parents): + module_name = compute_module_name(candidate, path) + if module_name and is_importable(module_name, path): + # Point the pkg_root to the root of the namespace package. + pkg_root = candidate + break + + if pkg_root is not None: + module_name = compute_module_name(pkg_root, path) + if module_name: + return pkg_root, module_name + + raise CouldNotResolvePathError(f"Could not resolve for {path}") + + +def is_importable(module_name: str, module_path: Path) -> bool: + """ + Return if the given module path could be imported normally by Python, akin to the user + entering the REPL and importing the corresponding module name directly, and corresponds + to the module_path specified. + + :param module_name: + Full module name that we want to check if is importable. + For example, "app.models". + + :param module_path: + Full path to the python module/package we want to check if is importable. + For example, "/projects/src/app/models.py". + """ + try: + # Note this is different from what we do in ``_import_module_using_spec``, where we explicitly search through + # sys.meta_path to be able to pass the path of the module that we want to import (``meta_importer.find_spec``). + # Using importlib.util.find_spec() is different, it gives the same results as trying to import + # the module normally in the REPL. 
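+        # Hypothetical example (added for clarity, not upstream code): with
+        # module_name="app.models" and module_path=Path("/projects/src/app/models.py"),
+        # this is equivalent to asking whether `import app.models` in a fresh
+        # interpreter would load that exact file.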
+ spec = importlib.util.find_spec(module_name) + except (ImportError, ValueError, ImportWarning): + return False + else: + return spec_matches_module_path(spec, module_path) + + +def compute_module_name(root: Path, module_path: Path) -> str | None: + """Compute a module name based on a path and a root anchor.""" + try: + path_without_suffix = module_path.with_suffix("") + except ValueError: + # Empty paths (such as Path.cwd()) might break meta_path hooks (like our own assertion rewriter). + return None + + try: + relative = path_without_suffix.relative_to(root) + except ValueError: # pragma: no cover + return None + names = list(relative.parts) + if not names: + return None + if names[-1] == "__init__": + names.pop() + return ".".join(names) + + +class CouldNotResolvePathError(Exception): + """Custom exception raised by resolve_pkg_root_and_module_name.""" + + +def scandir( + path: str | os.PathLike[str], + sort_key: Callable[[os.DirEntry[str]], object] = lambda entry: entry.name, +) -> list[os.DirEntry[str]]: + """Scan a directory recursively, in breadth-first order. + + The returned entries are sorted according to the given key. + The default is to sort by name. + If the directory does not exist, return an empty list. + """ + entries = [] + # Attempt to create a scandir iterator for the given path. + try: + scandir_iter = os.scandir(path) + except FileNotFoundError: + # If the directory does not exist, return an empty list. + return [] + # Use the scandir iterator in a context manager to ensure it is properly closed. + with scandir_iter as s: + for entry in s: + try: + entry.is_file() + except OSError as err: + if _ignore_error(err): + continue + # Reraise non-ignorable errors to avoid hiding issues. + raise + entries.append(entry) + entries.sort(key=sort_key) # type: ignore[arg-type] + return entries + + +def visit( + path: str | os.PathLike[str], recurse: Callable[[os.DirEntry[str]], bool] +) -> Iterator[os.DirEntry[str]]: + """Walk a directory recursively, in breadth-first order. + + The `recurse` predicate determines whether a directory is recursed. + + Entries at each directory level are sorted. + """ + entries = scandir(path) + yield from entries + for entry in entries: + if entry.is_dir() and recurse(entry): + yield from visit(entry.path, recurse) + + +def absolutepath(path: str | os.PathLike[str]) -> Path: + """Convert a path to an absolute path using os.path.abspath. + + Prefer this over Path.resolve() (see #6523). + Prefer this over Path.absolute() (not public, doesn't normalize). + """ + return Path(os.path.abspath(path)) + + +def commonpath(path1: Path, path2: Path) -> Path | None: + """Return the common part shared with the other path, or None if there is + no common part. + + If one path is relative and one is absolute, returns None. + """ + try: + return Path(os.path.commonpath((str(path1), str(path2)))) + except ValueError: + return None + + +def bestrelpath(directory: Path, dest: Path) -> str: + """Return a string which is a relative path from directory to dest such + that directory/bestrelpath == dest. + + The paths must be either both absolute or both relative. + + If no such path can be determined, returns dest. + """ + assert isinstance(directory, Path) + assert isinstance(dest, Path) + if dest == directory: + return os.curdir + # Find the longest common directory. + base = commonpath(directory, dest) + # Can be the case on Windows for two absolute paths on different drives. + # Can be the case for two relative paths without common prefix. 
+ # Can be the case for a relative path and an absolute path. + if not base: + return str(dest) + reldirectory = directory.relative_to(base) + reldest = dest.relative_to(base) + return os.path.join( + # Back from directory to base. + *([os.pardir] * len(reldirectory.parts)), + # Forward from base to dest. + *reldest.parts, + ) + + +def safe_exists(p: Path) -> bool: + """Like Path.exists(), but account for input arguments that might be too long (#11394).""" + try: + return p.exists() + except (ValueError, OSError): + # ValueError: stat: path too long for Windows + # OSError: [WinError 123] The filename, directory name, or volume label syntax is incorrect + return False diff --git a/venv/lib/python3.10/site-packages/_pytest/py.typed b/venv/lib/python3.10/site-packages/_pytest/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/_pytest/pytester.py b/venv/lib/python3.10/site-packages/_pytest/pytester.py new file mode 100644 index 0000000000000000000000000000000000000000..59d2b0befe98389849f7d89314a52fc0e2bda67d --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/pytester.py @@ -0,0 +1,1775 @@ +# mypy: allow-untyped-defs +"""(Disabled by default) support for testing pytest and pytest plugins. + +PYTEST_DONT_REWRITE +""" + +from __future__ import annotations + +import collections.abc +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Sequence +import contextlib +from fnmatch import fnmatch +import gc +import importlib +from io import StringIO +import locale +import os +from pathlib import Path +import platform +import re +import shutil +import subprocess +import sys +import traceback +from typing import Any +from typing import Final +from typing import final +from typing import IO +from typing import Literal +from typing import overload +from typing import TextIO +from typing import TYPE_CHECKING +from weakref import WeakKeyDictionary + +from iniconfig import IniConfig +from iniconfig import SectionWrapper + +from _pytest import timing +from _pytest._code import Source +from _pytest.capture import _get_multicapture +from _pytest.compat import NOTSET +from _pytest.compat import NotSetType +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config import main +from _pytest.config import PytestPluginManager +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.main import Session +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import importorskip +from _pytest.outcomes import skip +from _pytest.pathlib import bestrelpath +from _pytest.pathlib import make_numbered_dir +from _pytest.reports import CollectReport +from _pytest.reports import TestReport +from _pytest.tmpdir import TempPathFactory +from _pytest.warning_types import PytestFDWarning + + +if TYPE_CHECKING: + import pexpect + + +pytest_plugins = ["pytester_assertions"] + + +IGNORE_PAM = [ # filenames added when obtaining details about the current user + "/var/lib/sss/mc/passwd" +] + + +def pytest_addoption(parser: Parser) -> None: + parser.addoption( + "--lsof", 
+ action="store_true", + dest="lsof", + default=False, + help="Run FD checks if lsof is available", + ) + + parser.addoption( + "--runpytest", + default="inprocess", + dest="runpytest", + choices=("inprocess", "subprocess"), + help=( + "Run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method" + ), + ) + + parser.addini( + "pytester_example_dir", help="Directory to take the pytester example files from" + ) + + +def pytest_configure(config: Config) -> None: + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + config.addinivalue_line( + "markers", + "pytester_example_path(*path_segments): join the given path " + "segments to `pytester_example_dir` for this test.", + ) + + +class LsofFdLeakChecker: + def get_open_files(self) -> list[tuple[str, str]]: + if sys.version_info >= (3, 11): + # New in Python 3.11, ignores utf-8 mode + encoding = locale.getencoding() + else: + encoding = locale.getpreferredencoding(False) + out = subprocess.run( + ("lsof", "-Ffn0", "-p", str(os.getpid())), + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + check=True, + text=True, + encoding=encoding, + ).stdout + + def isopen(line: str) -> bool: + return line.startswith("f") and ( + "deleted" not in line + and "mem" not in line + and "txt" not in line + and "cwd" not in line + ) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split("\0") + fd = fields[0][1:] + filename = fields[1][1:] + if filename in IGNORE_PAM: + continue + if filename.startswith("/"): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self) -> bool: + try: + subprocess.run(("lsof", "-v"), check=True) + except (OSError, subprocess.CalledProcessError): + return False + else: + return True + + @hookimpl(wrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item: Item) -> Generator[None, object, object]: + lines1 = self.get_open_files() + try: + return (yield) + finally: + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = {t[0] for t in lines2} - {t[0] for t in lines1} + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [ + f"***** {len(leaked_files)} FD leakage detected", + *(str(f) for f in leaked_files), + "*** Before:", + *(str(f) for f in lines1), + "*** After:", + *(str(f) for f in lines2), + f"***** {len(leaked_files)} FD leakage detected", + "*** function {}:{}: {} ".format(*item.location), + "See issue #2366", + ] + item.warn(PytestFDWarning("\n".join(error))) + + +# used at least by pytest-xdist plugin + + +@fixture +def _pytest(request: FixtureRequest) -> PytestArg: + """Return a helper which offers a gethookrecorder(hook) method which + returns a HookRecorder instance which helps to make assertions about called + hooks.""" + return PytestArg(request) + + +class PytestArg: + def __init__(self, request: FixtureRequest) -> None: + self._request = request + + def gethookrecorder(self, hook) -> HookRecorder: + hookrecorder = HookRecorder(hook._pm) + self._request.addfinalizer(hookrecorder.finish_recording) + return hookrecorder + + +def get_public_names(values: Iterable[str]) -> list[str]: + """Only return names from iterator values without a leading underscore.""" + return [x for x in values if x[0] != "_"] + + +@final +class RecordedHookCall: + """A recorded call to a hook. + + The arguments to the hook call are set as attributes. 
+    For example:
+
+    .. code-block:: python
+
+        calls = hook_recorder.getcalls("pytest_runtest_setup")
+        # Suppose pytest_runtest_setup was called once with `item=an_item`.
+        assert calls[0].item is an_item
+    """
+
+    def __init__(self, name: str, kwargs) -> None:
+        self.__dict__.update(kwargs)
+        self._name = name
+
+    def __repr__(self) -> str:
+        d = self.__dict__.copy()
+        del d["_name"]
+        return f"<RecordedHookCall {self._name!r}(**{d!r})>"
+
+    if TYPE_CHECKING:
+        # The class has undetermined attributes, this tells mypy about it.
+        def __getattr__(self, key: str): ...
+
+
+@final
+class HookRecorder:
+    """Record all hooks called in a plugin manager.
+
+    Hook recorders are created by :class:`Pytester`.
+
+    This wraps all the hook calls in the plugin manager, recording each call
+    before propagating the normal calls.
+    """
+
+    def __init__(
+        self, pluginmanager: PytestPluginManager, *, _ispytest: bool = False
+    ) -> None:
+        check_ispytest(_ispytest)
+
+        self._pluginmanager = pluginmanager
+        self.calls: list[RecordedHookCall] = []
+        self.ret: int | ExitCode | None = None
+
+        def before(hook_name: str, hook_impls, kwargs) -> None:
+            self.calls.append(RecordedHookCall(hook_name, kwargs))
+
+        def after(outcome, hook_name: str, hook_impls, kwargs) -> None:
+            pass
+
+        self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+    def finish_recording(self) -> None:
+        self._undo_wrapping()
+
+    def getcalls(self, names: str | Iterable[str]) -> list[RecordedHookCall]:
+        """Get all recorded calls to hooks with the given names (or name)."""
+        if isinstance(names, str):
+            names = names.split()
+        return [call for call in self.calls if call._name in names]
+
+    def assert_contains(self, entries: Sequence[tuple[str, str]]) -> None:
+        __tracebackhide__ = True
+        i = 0
+        entries = list(entries)
+        # Since Python 3.13, f_locals is not a dict, but eval requires a dict.
+        backlocals = dict(sys._getframe(1).f_locals)
+        while entries:
+            name, check = entries.pop(0)
+            for ind, call in enumerate(self.calls[i:]):
+                if call._name == name:
+                    print("NAMEMATCH", name, call)
+                    if eval(check, backlocals, call.__dict__):
+                        print("CHECKERMATCH", repr(check), "->", call)
+                    else:
+                        print("NOCHECKERMATCH", repr(check), "-", call)
+                        continue
+                    i += ind + 1
+                    break
+                print("NONAMEMATCH", name, "with", call)
+            else:
+                fail(f"could not find {name!r} check {check!r}")
+
+    def popcall(self, name: str) -> RecordedHookCall:
+        __tracebackhide__ = True
+        for i, call in enumerate(self.calls):
+            if call._name == name:
+                del self.calls[i]
+                return call
+        lines = [f"could not find call {name!r}, in:"]
+        lines.extend([f"  {x}" for x in self.calls])
+        fail("\n".join(lines))
+
+    def getcall(self, name: str) -> RecordedHookCall:
+        values = self.getcalls(name)
+        assert len(values) == 1, (name, values)
+        return values[0]
+
+    # functionality for test reports
+
+    @overload
+    def getreports(
+        self,
+        names: Literal["pytest_collectreport"],
+    ) -> Sequence[CollectReport]: ...
+
+    @overload
+    def getreports(
+        self,
+        names: Literal["pytest_runtest_logreport"],
+    ) -> Sequence[TestReport]: ...
+
+    @overload
+    def getreports(
+        self,
+        names: str | Iterable[str] = (
+            "pytest_collectreport",
+            "pytest_runtest_logreport",
+        ),
+    ) -> Sequence[CollectReport | TestReport]: ...
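+
+    # Illustrative usage (not upstream code): the overloads above narrow the
+    # return type by hook name, e.g.
+    #
+    #     reports = hook_recorder.getreports("pytest_runtest_logreport")
+    #     failed = [r for r in reports if r.failed]  # Sequence[TestReport]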
+ + def getreports( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [x.report for x in self.getcalls(names)] + + def matchreport( + self, + inamepart: str = "", + names: str | Iterable[str] = ( + "pytest_runtest_logreport", + "pytest_collectreport", + ), + when: str | None = None, + ) -> CollectReport | TestReport: + """Return a testreport whose dotted import path matches.""" + values = [] + for rep in self.getreports(names=names): + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + if when and rep.when != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + values.append(rep) + if not values: + raise ValueError( + f"could not find test report matching {inamepart!r}: " + "no test reports at all!" + ) + if len(values) > 1: + raise ValueError( + f"found 2 or more testreports matching {inamepart!r}: {values}" + ) + return values[0] + + @overload + def getfailures( + self, + names: Literal["pytest_collectreport"], + ) -> Sequence[CollectReport]: ... + + @overload + def getfailures( + self, + names: Literal["pytest_runtest_logreport"], + ) -> Sequence[TestReport]: ... + + @overload + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: ... + + def getfailures( + self, + names: str | Iterable[str] = ( + "pytest_collectreport", + "pytest_runtest_logreport", + ), + ) -> Sequence[CollectReport | TestReport]: + return [rep for rep in self.getreports(names) if rep.failed] + + def getfailedcollections(self) -> Sequence[CollectReport]: + return self.getfailures("pytest_collectreport") + + def listoutcomes( + self, + ) -> tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ]: + passed = [] + skipped = [] + failed = [] + for rep in self.getreports( + ("pytest_collectreport", "pytest_runtest_logreport") + ): + if rep.passed: + if rep.when == "call": + assert isinstance(rep, TestReport) + passed.append(rep) + elif rep.skipped: + skipped.append(rep) + else: + assert rep.failed, f"Unexpected outcome: {rep!r}" + failed.append(rep) + return passed, skipped, failed + + def countoutcomes(self) -> list[int]: + return [len(x) for x in self.listoutcomes()] + + def assertoutcome(self, passed: int = 0, skipped: int = 0, failed: int = 0) -> None: + __tracebackhide__ = True + from _pytest.pytester_assertions import assertoutcome + + outcomes = self.listoutcomes() + assertoutcome( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + ) + + def clear(self) -> None: + self.calls[:] = [] + + +@fixture +def linecomp() -> LineComp: + """A :class: `LineComp` instance for checking that an input linearly + contains a sequence of strings.""" + return LineComp() + + +@fixture(name="LineMatcher") +def LineMatcher_fixture(request: FixtureRequest) -> type[LineMatcher]: + """A reference to the :class: `LineMatcher`. + + This is instantiable with a list of lines (without their trailing newlines). + This is useful for testing large texts, such as the output of commands. 
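+
+    A minimal sketch (illustrative; ``captured_text`` stands in for any text
+    under test)::
+
+        def test_output(LineMatcher, captured_text):
+            matcher = LineMatcher(captured_text.splitlines())
+            matcher.fnmatch_lines(["*1 passed*"])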
+ """ + return LineMatcher + + +@fixture +def pytester( + request: FixtureRequest, tmp_path_factory: TempPathFactory, monkeypatch: MonkeyPatch +) -> Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to ``path`` and environment variables during initialization. + + It is particularly useful for testing plugins. It is similar to the :fixture:`tmp_path` + fixture but provides methods which aid in testing pytest itself. + """ + return Pytester(request, tmp_path_factory, monkeypatch, _ispytest=True) + + +@fixture +def _sys_snapshot() -> Generator[None]: + snappaths = SysPathsSnapshot() + snapmods = SysModulesSnapshot() + yield + snapmods.restore() + snappaths.restore() + + +@fixture +def _config_for_test() -> Generator[Config]: + from _pytest.config import get_config + + config = get_config() + yield config + config._ensure_unconfigure() # cleanup, e.g. capman closing tmpfiles. + + +# Regex to match the session duration string in the summary: "74.34s". +rex_session_duration = re.compile(r"\d+\.\d\ds") +# Regex to match all the counts and phrases in the summary line: "34 passed, 111 skipped". +rex_outcome = re.compile(r"(\d+) (\w+)") + + +@final +class RunResult: + """The result of running a command from :class:`~pytest.Pytester`.""" + + def __init__( + self, + ret: int | ExitCode, + outlines: list[str], + errlines: list[str], + duration: float, + ) -> None: + try: + self.ret: int | ExitCode = ExitCode(ret) + """The return value.""" + except ValueError: + self.ret = ret + self.outlines = outlines + """List of lines captured from stdout.""" + self.errlines = errlines + """List of lines captured from stderr.""" + self.stdout = LineMatcher(outlines) + """:class:`~pytest.LineMatcher` of stdout. + + Use e.g. :func:`str(stdout) ` to reconstruct stdout, or the commonly used + :func:`stdout.fnmatch_lines() ` method. + """ + self.stderr = LineMatcher(errlines) + """:class:`~pytest.LineMatcher` of stderr.""" + self.duration = duration + """Duration in seconds.""" + + def __repr__(self) -> str: + return ( + f"" + ) + + def parseoutcomes(self) -> dict[str, int]: + """Return a dictionary of outcome noun -> count from parsing the terminal + output that the test process produced. + + The returned nouns will always be in plural form:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. + """ + return self.parse_summary_nouns(self.outlines) + + @classmethod + def parse_summary_nouns(cls, lines) -> dict[str, int]: + """Extract the nouns from a pytest terminal summary line. + + It always returns the plural noun for consistency:: + + ======= 1 failed, 1 passed, 1 warning, 1 error in 0.13s ==== + + Will return ``{"failed": 1, "passed": 1, "warnings": 1, "errors": 1}``. 
+ """ + for line in reversed(lines): + if rex_session_duration.search(line): + outcomes = rex_outcome.findall(line) + ret = {noun: int(count) for (count, noun) in outcomes} + break + else: + raise ValueError("Pytest terminal summary report not found") + + to_plural = { + "warning": "warnings", + "error": "errors", + } + return {to_plural.get(k, k): v for k, v in ret.items()} + + def assert_outcomes( + self, + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, + ) -> None: + """ + Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run. + + ``warnings`` and ``deselected`` are only checked if not None. + """ + __tracebackhide__ = True + from _pytest.pytester_assertions import assert_outcomes + + outcomes = self.parseoutcomes() + assert_outcomes( + outcomes, + passed=passed, + skipped=skipped, + failed=failed, + errors=errors, + xpassed=xpassed, + xfailed=xfailed, + warnings=warnings, + deselected=deselected, + ) + + +class SysModulesSnapshot: + def __init__(self, preserve: Callable[[str], bool] | None = None) -> None: + self.__preserve = preserve + self.__saved = dict(sys.modules) + + def restore(self) -> None: + if self.__preserve: + self.__saved.update( + (k, m) for k, m in sys.modules.items() if self.__preserve(k) + ) + sys.modules.clear() + sys.modules.update(self.__saved) + + +class SysPathsSnapshot: + def __init__(self) -> None: + self.__saved = list(sys.path), list(sys.meta_path) + + def restore(self) -> None: + sys.path[:], sys.meta_path[:] = self.__saved + + +@final +class Pytester: + """ + Facilities to write tests/configuration files, execute pytest in isolation, and match + against expected output, perfect for black-box testing of pytest plugins. + + It attempts to isolate the test run from external factors as much as possible, modifying + the current working directory to :attr:`path` and environment variables during initialization. + """ + + __test__ = False + + CLOSE_STDIN: Final = NOTSET + + class TimeoutExpired(Exception): + pass + + def __init__( + self, + request: FixtureRequest, + tmp_path_factory: TempPathFactory, + monkeypatch: MonkeyPatch, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._request = request + self._mod_collections: WeakKeyDictionary[Collector, list[Item | Collector]] = ( + WeakKeyDictionary() + ) + if request.function: + name: str = request.function.__name__ + else: + name = request.node.name + self._name = name + self._path: Path = tmp_path_factory.mktemp(name, numbered=True) + #: A list of plugins to use with :py:meth:`parseconfig` and + #: :py:meth:`runpytest`. Initially this is an empty list but plugins can + #: be added to the list. The type of items to add to the list depends on + #: the method using them so refer to them for details. + self.plugins: list[str | _PluggyPlugin] = [] + self._sys_path_snapshot = SysPathsSnapshot() + self._sys_modules_snapshot = self.__take_sys_modules_snapshot() + self._request.addfinalizer(self._finalize) + self._method = self._request.config.getoption("--runpytest") + self._test_tmproot = tmp_path_factory.mktemp(f"tmp-{name}", numbered=True) + + self._monkeypatch = mp = monkeypatch + self.chdir() + mp.setenv("PYTEST_DEBUG_TEMPROOT", str(self._test_tmproot)) + # Ensure no unexpected caching via tox. + mp.delenv("TOX_ENV_DIR", raising=False) + # Discard outer pytest options. 
+ mp.delenv("PYTEST_ADDOPTS", raising=False) + # Ensure no user config is used. + tmphome = str(self.path) + mp.setenv("HOME", tmphome) + mp.setenv("USERPROFILE", tmphome) + # Do not use colors for inner runs by default. + mp.setenv("PY_COLORS", "0") + + @property + def path(self) -> Path: + """Temporary directory path used to create files/run tests from, etc.""" + return self._path + + def __repr__(self) -> str: + return f"" + + def _finalize(self) -> None: + """ + Clean up global state artifacts. + + Some methods modify the global interpreter state and this tries to + clean this up. It does not remove the temporary directory however so + it can be looked at after the test run has finished. + """ + self._sys_modules_snapshot.restore() + self._sys_path_snapshot.restore() + + def __take_sys_modules_snapshot(self) -> SysModulesSnapshot: + # Some zope modules used by twisted-related tests keep internal state + # and can't be deleted; we had some trouble in the past with + # `zope.interface` for example. + # + # Preserve readline due to https://bugs.python.org/issue41033. + # pexpect issues a SIGWINCH. + def preserve_module(name): + return name.startswith(("zope", "readline")) + + return SysModulesSnapshot(preserve=preserve_module) + + def make_hook_recorder(self, pluginmanager: PytestPluginManager) -> HookRecorder: + """Create a new :class:`HookRecorder` for a :class:`PytestPluginManager`.""" + pluginmanager.reprec = reprec = HookRecorder(pluginmanager, _ispytest=True) # type: ignore[attr-defined] + self._request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self) -> None: + """Cd into the temporary directory. + + This is done automatically upon instantiation. + """ + self._monkeypatch.chdir(self.path) + + def _makefile( + self, + ext: str, + lines: Sequence[Any | bytes], + files: dict[str, str], + encoding: str = "utf-8", + ) -> Path: + items = list(files.items()) + + if ext is None: + raise TypeError("ext must not be None") + + if ext and not ext.startswith("."): + raise ValueError( + f"pytester.makefile expects a file extension, try .{ext} instead of {ext}" + ) + + def to_text(s: Any | bytes) -> str: + return s.decode(encoding) if isinstance(s, bytes) else str(s) + + if lines: + source = "\n".join(to_text(x) for x in lines) + basename = self._name + items.insert(0, (basename, source)) + + ret = None + for basename, value in items: + p = self.path.joinpath(basename).with_suffix(ext) + p.parent.mkdir(parents=True, exist_ok=True) + source_ = Source(value) + source = "\n".join(to_text(line) for line in source_.lines) + p.write_text(source.strip(), encoding=encoding) + if ret is None: + ret = p + assert ret is not None + return ret + + def makefile(self, ext: str, *args: str, **kwargs: str) -> Path: + r"""Create new text file(s) in the test directory. + + :param ext: + The extension the file(s) should use, including the dot, e.g. `.py`. + :param args: + All args are treated as strings and joined using newlines. + The result is written as contents to the file. The name of the + file is based on the test function requesting this fixture. + :param kwargs: + Each keyword is the name of a file, while the value of it will + be written as contents of the file. + :returns: + The first created file. + + Examples: + + .. code-block:: python + + pytester.makefile(".txt", "line1", "line2") + + pytester.makefile(".ini", pytest="[pytest]\naddopts=-rs\n") + + To create binary files, use :meth:`pathlib.Path.write_bytes` directly: + + .. 
code-block:: python + + filename = pytester.path.joinpath("foo.bin") + filename.write_bytes(b"...") + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source: str) -> Path: + """Write a conftest.py file. + + :param source: The contents. + :returns: The conftest.py file. + """ + return self.makepyfile(conftest=source) + + def makeini(self, source: str) -> Path: + """Write a tox.ini file. + + :param source: The contents. + :returns: The tox.ini file. + """ + return self.makefile(".ini", tox=source) + + def getinicfg(self, source: str) -> SectionWrapper: + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return IniConfig(str(p))["pytest"] + + def makepyprojecttoml(self, source: str) -> Path: + """Write a pyproject.toml file. + + :param source: The contents. + :returns: The pyproject.ini file. + + .. versionadded:: 6.0 + """ + return self.makefile(".toml", pyproject=source) + + def makepyfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .py extension. + + Defaults to the test name with a '.py' extension, e.g test_foobar.py, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.py. + pytester.makepyfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.makepyfile(custom="foobar") + # At this point, both 'test_something.py' & 'custom.py' exist in the test directory. + + """ + return self._makefile(".py", args, kwargs) + + def maketxtfile(self, *args, **kwargs) -> Path: + r"""Shortcut for .makefile() with a .txt extension. + + Defaults to the test name with a '.txt' extension, e.g test_foobar.txt, overwriting + existing files. + + Examples: + + .. code-block:: python + + def test_something(pytester): + # Initial file is created test_something.txt. + pytester.maketxtfile("foobar") + # To create multiple files, pass kwargs accordingly. + pytester.maketxtfile(custom="foobar") + # At this point, both 'test_something.txt' & 'custom.txt' exist in the test directory. + + """ + return self._makefile(".txt", args, kwargs) + + def syspathinsert(self, path: str | os.PathLike[str] | None = None) -> None: + """Prepend a directory to sys.path, defaults to :attr:`path`. + + This is undone automatically when this object dies at the end of each + test. + + :param path: + The path. + """ + if path is None: + path = self.path + + self._monkeypatch.syspath_prepend(str(path)) + + def mkdir(self, name: str | os.PathLike[str]) -> Path: + """Create a new (sub)directory. + + :param name: + The name of the directory, relative to the pytester path. + :returns: + The created directory. + :rtype: pathlib.Path + """ + p = self.path / name + p.mkdir() + return p + + def mkpydir(self, name: str | os.PathLike[str]) -> Path: + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` file so it + gets recognised as a Python package. + """ + p = self.path / name + p.mkdir() + p.joinpath("__init__.py").touch() + return p + + def copy_example(self, name: str | None = None) -> Path: + """Copy file from project's directory into the testdir. + + :param name: + The name of the file to copy. + :return: + Path to the copied directory (inside ``self.path``). 
+ :rtype: pathlib.Path + """ + example_dir_ = self._request.config.getini("pytester_example_dir") + if example_dir_ is None: + raise ValueError("pytester_example_dir is unset, can't copy examples") + example_dir: Path = self._request.config.rootpath / example_dir_ + + for extra_element in self._request.node.iter_markers("pytester_example_path"): + assert extra_element.args + example_dir = example_dir.joinpath(*extra_element.args) + + if name is None: + func_name = self._name + maybe_dir = example_dir / func_name + maybe_file = example_dir / (func_name + ".py") + + if maybe_dir.is_dir(): + example_path = maybe_dir + elif maybe_file.is_file(): + example_path = maybe_file + else: + raise LookupError( + f"{func_name} can't be found as module or package in {example_dir}" + ) + else: + example_path = example_dir.joinpath(name) + + if example_path.is_dir() and not example_path.joinpath("__init__.py").is_file(): + shutil.copytree(example_path, self.path, symlinks=True, dirs_exist_ok=True) + return self.path + elif example_path.is_file(): + result = self.path.joinpath(example_path.name) + shutil.copy(example_path, result) + return result + else: + raise LookupError( + f'example "{example_path}" is not found as a file or directory' + ) + + def getnode(self, config: Config, arg: str | os.PathLike[str]) -> Collector | Item: + """Get the collection node of a file. + + :param config: + A pytest config. + See :py:meth:`parseconfig` and :py:meth:`parseconfigure` for creating it. + :param arg: + Path to the file. + :returns: + The node. + """ + session = Session.from_config(config) + assert "::" not in str(arg) + p = Path(os.path.abspath(arg)) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def getpathnode(self, path: str | os.PathLike[str]) -> Collector | Item: + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses :py:meth:`parseconfigure` to + create the (configured) pytest Config instance. + + :param path: + Path to the file. + :returns: + The node. + """ + path = Path(path) + config = self.parseconfigure(path) + session = Session.from_config(config) + x = bestrelpath(session.path, path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=ExitCode.OK) + return res + + def genitems(self, colitems: Sequence[Item | Collector]) -> list[Item]: + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of all the + test items contained within. + + :param colitems: + The collection nodes. + :returns: + The collected items. + """ + session = colitems[0].session + result: list[Item] = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source: str) -> Any: + """Run the "test_func" Item. + + The calling test instance (class containing the test method) must + provide a ``.getrunner()`` method which should return a runner which + can run the test protocol for a single item, e.g. + ``_pytest.runner.runtestprotocol``. 
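+
+        A minimal sketch (illustrative) of such a calling class::
+
+            class TestMyRunner:
+                def getrunner(self):
+                    from _pytest.runner import runtestprotocol
+
+                    return runtestprotocol
+
+                def test_runs(self, pytester):
+                    reports = pytester.runitem("def test_func(): pass")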
+ """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self._request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source: str, *cmdlineargs) -> HookRecorder: + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` instance + for the result. + + :param source: The source code of the test module. + :param cmdlineargs: Any extra command line arguments to use. + """ + p = self.makepyfile(source) + values = [*list(cmdlineargs), p] + return self.inline_run(*values) + + def inline_genitems(self, *args) -> tuple[list[Item], HookRecorder]: + """Run ``pytest.main(['--collect-only'])`` in-process. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself like :py:meth:`inline_run`, but returns a + tuple of the collected items and a :py:class:`HookRecorder` instance. + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run( + self, + *args: str | os.PathLike[str], + plugins=(), + no_reraise_ctrlc: bool = False, + ) -> HookRecorder: + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + Runs the :py:func:`pytest.main` function to run all of pytest inside + the test process itself. This means it can return a + :py:class:`HookRecorder` instance which gives more detailed results + from that run than can be done by matching stdout/stderr from + :py:meth:`runpytest`. + + :param args: + Command line arguments to pass to :py:func:`pytest.main`. + :param plugins: + Extra plugin instances the ``pytest.main()`` instance should use. + :param no_reraise_ctrlc: + Typically we reraise keyboard interrupts from the child run. If + True, the KeyboardInterrupt exception is captured. + """ + from _pytest.unraisableexception import gc_collect_iterations_key + + # (maybe a cpython bug?) the importlib cache sometimes isn't updated + # properly between file creation and inline_run (especially if imports + # are interspersed with file creation) + importlib.invalidate_caches() + + plugins = list(plugins) + finalizers = [] + try: + # Any sys.module or sys.path changes done while running pytest + # inline should be reverted after the test run completes to avoid + # clashing with later inline tests run within the same pytest test, + # e.g. just because they use matching test module names. + finalizers.append(self.__take_sys_modules_snapshot().restore) + finalizers.append(SysPathsSnapshot().restore) + + # Important note: + # - our tests should not leave any other references/registrations + # laying around other than possibly loaded test modules + # referenced from sys.modules, as nothing will clean those up + # automatically + + rec = [] + + class PytesterHelperPlugin: + @staticmethod + def pytest_configure(config: Config) -> None: + rec.append(self.make_hook_recorder(config.pluginmanager)) + + # The unraisable plugin GC collect slows down inline + # pytester runs too much. 
+ config.stash[gc_collect_iterations_key] = 0 + + plugins.append(PytesterHelperPlugin()) + ret = main([str(x) for x in args], plugins=plugins) + if len(rec) == 1: + reprec = rec.pop() + else: + + class reprec: # type: ignore + pass + + reprec.ret = ret + + # Typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing. + if ret == ExitCode.INTERRUPTED and not no_reraise_ctrlc: + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + finally: + for finalizer in finalizers: + finalizer() + + def runpytest_inprocess( + self, *args: str | os.PathLike[str], **kwargs: Any + ) -> RunResult: + """Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides.""" + syspathinsert = kwargs.pop("syspathinsert", False) + + if syspathinsert: + self.syspathinsert() + instant = timing.Instant() + capture = _get_multicapture("sys") + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + ret = e.args[0] + try: + ret = ExitCode(e.args[0]) + except ValueError: + pass + + class reprec: # type: ignore + ret = ret + + except Exception: + traceback.print_exc() + + class reprec: # type: ignore + ret = ExitCode(3) + + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + assert reprec.ret is not None + res = RunResult( + reprec.ret, out.splitlines(), err.splitlines(), instant.elapsed().seconds + ) + res.reprec = reprec # type: ignore + return res + + def runpytest(self, *args: str | os.PathLike[str], **kwargs: Any) -> RunResult: + """Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`~pytest.RunResult`.""" + new_args = self._ensure_basetemp(args) + if self._method == "inprocess": + return self.runpytest_inprocess(*new_args, **kwargs) + elif self._method == "subprocess": + return self.runpytest_subprocess(*new_args, **kwargs) + raise RuntimeError(f"Unrecognized runpytest option: {self._method}") + + def _ensure_basetemp( + self, args: Sequence[str | os.PathLike[str]] + ) -> list[str | os.PathLike[str]]: + new_args = list(args) + for x in new_args: + if str(x).startswith("--basetemp"): + break + else: + new_args.append( + "--basetemp={}".format(self.path.parent.joinpath("basetemp")) + ) + return new_args + + def parseconfig(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest :class:`pytest.Config` instance from given + commandline args. + + This invokes the pytest bootstrapping code in _pytest.config to create a + new :py:class:`pytest.PytestPluginManager` and call the + :hook:`pytest_cmdline_parse` hook to create a new :class:`pytest.Config` + instance. + + If :attr:`plugins` has been populated they should be plugin modules + to be registered with the plugin manager. 
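+
+        Example (illustrative sketch)::
+
+            config = pytester.parseconfig("-v")
+            # "-v" is parsed into the option namespace (verbose count).
+            assert config.option.verbose == 1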
+ """ + import _pytest.config + + new_args = self._ensure_basetemp(args) + new_args = [str(x) for x in new_args] + + config = _pytest.config._prepareconfig(new_args, self.plugins) # type: ignore[arg-type] + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self._request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args: str | os.PathLike[str]) -> Config: + """Return a new pytest configured Config instance. + + Returns a new :py:class:`pytest.Config` instance like + :py:meth:`parseconfig`, but also calls the :hook:`pytest_configure` + hook. + """ + config = self.parseconfig(*args) + config._do_configure() + return config + + def getitem( + self, source: str | os.PathLike[str], funcname: str = "test_func" + ) -> Item: + """Return the test item for a test function. + + Writes the source to a python file and runs pytest's collection on + the resulting module, returning the test item for the requested + function name. + + :param source: + The module source. + :param funcname: + The name of the test function for which to return a test item. + :returns: + The test item. + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, f"{funcname!r} item not found in module:\n{source}\nitems: {items}" + + def getitems(self, source: str | os.PathLike[str]) -> list[Item]: + """Return all test items collected from the module. + + Writes the source to a Python file and runs pytest's collection on + the resulting module, returning all test items contained within. + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol( + self, + source: str | os.PathLike[str], + configargs=(), + *, + withinit: bool = False, + ): + """Return the module collection node for ``source``. + + Writes ``source`` to a file using :py:meth:`makepyfile` and then + runs the pytest collection on it, returning the collection node for the + test module. + + :param source: + The source code of the module to collect. + + :param configargs: + Any extra arguments to pass to :py:meth:`parseconfigure`. + + :param withinit: + Whether to also write an ``__init__.py`` file to the same + directory to ensure it is a package. + """ + if isinstance(source, os.PathLike): + path = self.path.joinpath(source) + assert not withinit, "not supported for paths" + else: + kw = {self._name: str(source)} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__="#") + self.config = config = self.parseconfigure(path, *configargs) + return self.getnode(config, path) + + def collect_by_name(self, modcol: Collector, name: str) -> Item | Collector | None: + """Return the collection node for name from the module collection. + + Searches a module collection node for a collection node matching the + given name. + + :param modcol: A module collection node; see :py:meth:`getmodulecol`. + :param name: The name of the node to return. 
+ """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + return None + + def popen( + self, + cmdargs: Sequence[str | os.PathLike[str]], + stdout: int | TextIO = subprocess.PIPE, + stderr: int | TextIO = subprocess.PIPE, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + **kw, + ): + """Invoke :py:class:`subprocess.Popen`. + + Calls :py:class:`subprocess.Popen` making sure the current working + directory is in ``PYTHONPATH``. + + You probably want to use :py:meth:`run` instead. + """ + env = os.environ.copy() + env["PYTHONPATH"] = os.pathsep.join( + filter(None, [os.getcwd(), env.get("PYTHONPATH", "")]) + ) + kw["env"] = env + + if stdin is self.CLOSE_STDIN: + kw["stdin"] = subprocess.PIPE + elif isinstance(stdin, bytes): + kw["stdin"] = subprocess.PIPE + else: + kw["stdin"] = stdin + + popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) + if stdin is self.CLOSE_STDIN: + assert popen.stdin is not None + popen.stdin.close() + elif isinstance(stdin, bytes): + assert popen.stdin is not None + popen.stdin.write(stdin) + + return popen + + def run( + self, + *cmdargs: str | os.PathLike[str], + timeout: float | None = None, + stdin: NotSetType | bytes | IO[Any] | int = CLOSE_STDIN, + ) -> RunResult: + """Run a command with arguments. + + Run a process using :py:class:`subprocess.Popen` saving the stdout and + stderr. + + :param cmdargs: + The sequence of arguments to pass to :py:class:`subprocess.Popen`, + with path-like objects being converted to :py:class:`str` + automatically. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :param stdin: + Optional standard input. + + - If it is ``CLOSE_STDIN`` (Default), then this method calls + :py:class:`subprocess.Popen` with ``stdin=subprocess.PIPE``, and + the standard input is closed immediately after the new command is + started. + + - If it is of type :py:class:`bytes`, these bytes are sent to the + standard input of the command. + + - Otherwise, it is passed through to :py:class:`subprocess.Popen`. + For further information in this case, consult the document of the + ``stdin`` parameter in :py:class:`subprocess.Popen`. + :type stdin: _pytest.compat.NotSetType | bytes | IO[Any] | int + :returns: + The result. 
+ + """ + __tracebackhide__ = True + + cmdargs = tuple(os.fspath(arg) for arg in cmdargs) + p1 = self.path.joinpath("stdout") + p2 = self.path.joinpath("stderr") + print("running:", *cmdargs) + print(" in:", Path.cwd()) + + with p1.open("w", encoding="utf8") as f1, p2.open("w", encoding="utf8") as f2: + instant = timing.Instant() + popen = self.popen( + cmdargs, + stdin=stdin, + stdout=f1, + stderr=f2, + close_fds=(sys.platform != "win32"), + ) + if popen.stdin is not None: + popen.stdin.close() + + def handle_timeout() -> None: + __tracebackhide__ = True + + timeout_message = f"{timeout} second timeout expired running: {cmdargs}" + + popen.kill() + popen.wait() + raise self.TimeoutExpired(timeout_message) + + if timeout is None: + ret = popen.wait() + else: + try: + ret = popen.wait(timeout) + except subprocess.TimeoutExpired: + handle_timeout() + + with p1.open(encoding="utf8") as f1, p2.open(encoding="utf8") as f2: + out = f1.read().splitlines() + err = f2.read().splitlines() + + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + + with contextlib.suppress(ValueError): + ret = ExitCode(ret) + return RunResult(ret, out, err, instant.elapsed().seconds) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print(f"couldn't print to {fp} because of encoding") + + def _getpytestargs(self) -> tuple[str, ...]: + return sys.executable, "-mpytest" + + def runpython(self, script: os.PathLike[str]) -> RunResult: + """Run a python script using sys.executable as interpreter.""" + return self.run(sys.executable, script) + + def runpython_c(self, command: str) -> RunResult: + """Run ``python -c "command"``.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess( + self, *args: str | os.PathLike[str], timeout: float | None = None + ) -> RunResult: + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will be added using the + ``-p`` command line option. Additionally ``--basetemp`` is used to put + any temporary files and directories in a numbered directory prefixed + with "runpytest-" to not conflict with the normal numbered pytest + location for temporary files and directories. + + :param args: + The sequence of arguments to pass to the pytest subprocess. + :param timeout: + The period in seconds after which to timeout and raise + :py:class:`Pytester.TimeoutExpired`. + :returns: + The result. + """ + __tracebackhide__ = True + p = make_numbered_dir(root=self.path, prefix="runpytest-", mode=0o700) + args = (f"--basetemp={p}", *args) + plugins = [x for x in self.plugins if isinstance(x, str)] + if plugins: + args = ("-p", plugins[0], *args) + args = self._getpytestargs() + args + return self.run(*args, timeout=timeout) + + def spawn_pytest(self, string: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the temporary + directory locations. + + The pexpect child is returned. + """ + basetemp = self.path / "temp-pexpect" + basetemp.mkdir(mode=0o700) + invoke = " ".join(map(str, self._getpytestargs())) + cmd = f"{invoke} --basetemp={basetemp} {string}" + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd: str, expect_timeout: float = 10.0) -> pexpect.spawn: + """Run a command using pexpect. + + The pexpect child is returned. 
+ """ + pexpect = importorskip("pexpect", "3.0") + if hasattr(sys, "pypy_version_info") and "64" in platform.machine(): + skip("pypy-64 bit not supported") + if not hasattr(pexpect, "spawn"): + skip("pexpect.spawn not available") + logfile = self.path.joinpath("spawn.out").open("wb") + + child = pexpect.spawn(cmd, logfile=logfile, timeout=expect_timeout) + self._request.addfinalizer(logfile.close) + return child + + +class LineComp: + def __init__(self) -> None: + self.stringio = StringIO() + """:class:`python:io.StringIO()` instance used for input.""" + + def assert_contains_lines(self, lines2: Sequence[str]) -> None: + """Assert that ``lines2`` are contained (linearly) in :attr:`stringio`'s value. + + Lines are matched using :func:`LineMatcher.fnmatch_lines `. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing newlines, i.e. + ``text.splitlines()``. + """ + + def __init__(self, lines: list[str]) -> None: + self.lines = lines + self._log_output: list[str] = [] + + def __str__(self) -> str: + """Return the entire original text. + + .. versionadded:: 6.2 + You can use :meth:`str` in older versions. + """ + return "\n".join(self.lines) + + def _getlines(self, lines2: str | Sequence[str] | Source) -> Sequence[str]: + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:fnmatch.fnmatch`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, fnmatch) + + def re_match_lines_random(self, lines2: Sequence[str]) -> None: + """Check lines exist in the output in any order (using :func:`python:re.match`).""" + __tracebackhide__ = True + self._match_lines_random(lines2, lambda name, pat: bool(re.match(pat, name))) + + def _match_lines_random( + self, lines2: Sequence[str], match_func: Callable[[str, str], bool] + ) -> None: + __tracebackhide__ = True + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or match_func(x, line): + self._log("matched: ", repr(line)) + break + else: + msg = f"line {line!r} not found in output" + self._log(msg) + self._fail(msg) + + def get_lines_after(self, fnline: str) -> Sequence[str]: + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i + 1 :] + raise ValueError(f"line {fnline!r} not found in output") + + def _log(self, *args) -> None: + self._log_output.append(" ".join(str(x) for x in args)) + + @property + def _log_text(self) -> str: + return "\n".join(self._log_output) + + def fnmatch_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:fnmatch.fnmatch`). + + The argument is a list of lines which have to match and can use glob + wildcards. If they do not match a pytest.fail() is called. The + matches and non-matches are also shown as part of the error message. + + :param lines2: String patterns to match. 
+ :param consecutive: Match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines(lines2, fnmatch, "fnmatch", consecutive=consecutive) + + def re_match_lines( + self, lines2: Sequence[str], *, consecutive: bool = False + ) -> None: + """Check lines exist in the output (using :func:`python:re.match`). + + The argument is a list of lines which have to match using ``re.match``. + If they do not match a pytest.fail() is called. + + The matches and non-matches are also shown as part of the error message. + + :param lines2: string patterns to match. + :param consecutive: match lines consecutively? + """ + __tracebackhide__ = True + self._match_lines( + lines2, + lambda name, pat: bool(re.match(pat, name)), + "re.match", + consecutive=consecutive, + ) + + def _match_lines( + self, + lines2: Sequence[str], + match_func: Callable[[str, str], bool], + match_nickname: str, + *, + consecutive: bool = False, + ) -> None: + """Underlying implementation of ``fnmatch_lines`` and ``re_match_lines``. + + :param Sequence[str] lines2: + List of string patterns to match. The actual format depends on + ``match_func``. + :param match_func: + A callable ``match_func(line, pattern)`` where line is the + captured line from stdout/stderr and pattern is the matching + pattern. + :param str match_nickname: + The nickname for the match function that will be logged to stdout + when a match occurs. + :param consecutive: + Match lines consecutively? + """ + if not isinstance(lines2, collections.abc.Sequence): + raise TypeError(f"invalid type for lines2: {type(lines2).__name__}") + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + extralines = [] + __tracebackhide__ = True + wnick = len(match_nickname) + 1 + started = False + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + started = True + break + elif match_func(nextline, line): + self._log(f"{match_nickname}:", repr(line)) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + started = True + break + else: + if consecutive and started: + msg = f"no consecutive match: {line!r}" + self._log(msg) + self._log( + "{:>{width}}".format("with:", width=wnick), repr(nextline) + ) + self._fail(msg) + if not nomatchprinted: + self._log( + "{:>{width}}".format("nomatch:", width=wnick), repr(line) + ) + nomatchprinted = True + self._log("{:>{width}}".format("and:", width=wnick), repr(nextline)) + extralines.append(nextline) + else: + msg = f"remains unmatched: {line!r}" + self._log(msg) + self._fail(msg) + self._log_output = [] + + def no_fnmatch_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. + """ + __tracebackhide__ = True + self._no_match_line(pat, fnmatch, "fnmatch") + + def no_re_match_line(self, pat: str) -> None: + """Ensure captured lines do not match the given pattern, using ``re.match``. + + :param str pat: The regular expression to match lines. + """ + __tracebackhide__ = True + self._no_match_line( + pat, lambda name, pat: bool(re.match(pat, name)), "re.match" + ) + + def _no_match_line( + self, pat: str, match_func: Callable[[str, str], bool], match_nickname: str + ) -> None: + """Ensure captured lines does not have a the given pattern, using ``fnmatch.fnmatch``. + + :param str pat: The pattern to match lines. 
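+
+        Example via the public wrappers (illustrative sketch)::
+
+            matcher = LineMatcher(["1 passed in 0.12s"])
+            matcher.no_fnmatch_line("*failed*")  # passes: no line matches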
+ """ + __tracebackhide__ = True + nomatch_printed = False + wnick = len(match_nickname) + 1 + for line in self.lines: + if match_func(line, pat): + msg = f"{match_nickname}: {pat!r}" + self._log(msg) + self._log("{:>{width}}".format("with:", width=wnick), repr(line)) + self._fail(msg) + else: + if not nomatch_printed: + self._log("{:>{width}}".format("nomatch:", width=wnick), repr(pat)) + nomatch_printed = True + self._log("{:>{width}}".format("and:", width=wnick), repr(line)) + self._log_output = [] + + def _fail(self, msg: str) -> None: + __tracebackhide__ = True + log_text = self._log_text + self._log_output = [] + fail(log_text) + + def str(self) -> str: + """Return the entire original text.""" + return str(self) diff --git a/venv/lib/python3.10/site-packages/_pytest/pytester_assertions.py b/venv/lib/python3.10/site-packages/_pytest/pytester_assertions.py new file mode 100644 index 0000000000000000000000000000000000000000..915cc8a10ff4781da48a44d88bf591788b8ab673 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/pytester_assertions.py @@ -0,0 +1,74 @@ +"""Helper plugin for pytester; should not be loaded on its own.""" + +# This plugin contains assertions used by pytester. pytester cannot +# contain them itself, since it is imported by the `pytest` module, +# hence cannot be subject to assertion rewriting, which requires a +# module to not be already imported. +from __future__ import annotations + +from collections.abc import Sequence + +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +def assertoutcome( + outcomes: tuple[ + Sequence[TestReport], + Sequence[CollectReport | TestReport], + Sequence[CollectReport | TestReport], + ], + passed: int = 0, + skipped: int = 0, + failed: int = 0, +) -> None: + __tracebackhide__ = True + + realpassed, realskipped, realfailed = outcomes + obtained = { + "passed": len(realpassed), + "skipped": len(realskipped), + "failed": len(realfailed), + } + expected = {"passed": passed, "skipped": skipped, "failed": failed} + assert obtained == expected, outcomes + + +def assert_outcomes( + outcomes: dict[str, int], + passed: int = 0, + skipped: int = 0, + failed: int = 0, + errors: int = 0, + xpassed: int = 0, + xfailed: int = 0, + warnings: int | None = None, + deselected: int | None = None, +) -> None: + """Assert that the specified outcomes appear with the respective + numbers (0 means it didn't occur) in the text output from a test run.""" + __tracebackhide__ = True + + obtained = { + "passed": outcomes.get("passed", 0), + "skipped": outcomes.get("skipped", 0), + "failed": outcomes.get("failed", 0), + "errors": outcomes.get("errors", 0), + "xpassed": outcomes.get("xpassed", 0), + "xfailed": outcomes.get("xfailed", 0), + } + expected = { + "passed": passed, + "skipped": skipped, + "failed": failed, + "errors": errors, + "xpassed": xpassed, + "xfailed": xfailed, + } + if warnings is not None: + obtained["warnings"] = outcomes.get("warnings", 0) + expected["warnings"] = warnings + if deselected is not None: + obtained["deselected"] = outcomes.get("deselected", 0) + expected["deselected"] = deselected + assert obtained == expected diff --git a/venv/lib/python3.10/site-packages/_pytest/python.py b/venv/lib/python3.10/site-packages/_pytest/python.py new file mode 100644 index 0000000000000000000000000000000000000000..8e4fb0415320ff20fe63bc710934e92f075b90a2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/python.py @@ -0,0 +1,1723 @@ +# mypy: allow-untyped-defs +"""Python test discovery, setup and 
run of test functions.""" + +from __future__ import annotations + +import abc +from collections import Counter +from collections import defaultdict +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import enum +import fnmatch +from functools import partial +import inspect +import itertools +import os +from pathlib import Path +import re +import types +from typing import Any +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING +import warnings + +import _pytest +from _pytest import fixtures +from _pytest import nodes +from _pytest._code import filter_traceback +from _pytest._code import getfslineno +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest._code.code import Traceback +from _pytest._io.saferepr import saferepr +from _pytest.compat import ascii_escaped +from _pytest.compat import get_default_arg_names +from _pytest.compat import get_real_func +from _pytest.compat import getimfunc +from _pytest.compat import is_async_function +from _pytest.compat import LEGACY_PATH +from _pytest.compat import NOTSET +from _pytest.compat import safe_getattr +from _pytest.compat import safe_isclass +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import FixtureRequest +from _pytest.fixtures import FuncFixtureInfo +from _pytest.fixtures import get_scope_node +from _pytest.main import Session +from _pytest.mark import ParameterSet +from _pytest.mark.structures import _HiddenParam +from _pytest.mark.structures import get_unpacked_marks +from _pytest.mark.structures import HIDDEN_PARAM +from _pytest.mark.structures import Mark +from _pytest.mark.structures import MarkDecorator +from _pytest.mark.structures import normalize_mark_list +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.pathlib import ImportPathMismatchError +from _pytest.pathlib import scandir +from _pytest.scope import _ScopeName +from _pytest.scope import Scope +from _pytest.stash import StashKey +from _pytest.warning_types import PytestCollectionWarning +from _pytest.warning_types import PytestReturnNotNoneWarning + + +if TYPE_CHECKING: + from typing_extensions import Self + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "python_files", + type="args", + # NOTE: default is also used in AssertionRewritingHook. 
+ default=["test_*.py", "*_test.py"], + help="Glob-style file patterns for Python test module discovery", + ) + parser.addini( + "python_classes", + type="args", + default=["Test"], + help="Prefixes or glob names for Python test class discovery", + ) + parser.addini( + "python_functions", + type="args", + default=["test"], + help="Prefixes or glob names for Python test function and method discovery", + ) + parser.addini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support", + type="bool", + default=False, + help="Disable string escape non-ASCII characters, might cause unwanted " + "side effects(use at your own risk)", + ) + + +def pytest_generate_tests(metafunc: Metafunc) -> None: + for marker in metafunc.definition.iter_markers(name="parametrize"): + metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) + + +def pytest_configure(config: Config) -> None: + config.addinivalue_line( + "markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see https://docs.pytest.org/en/stable/how-to/parametrize.html for more info " + "and examples.", + ) + config.addinivalue_line( + "markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. see " + "https://docs.pytest.org/en/stable/explanation/fixtures.html#usefixtures ", + ) + + +def async_fail(nodeid: str) -> None: + msg = ( + "async def functions are not natively supported.\n" + "You need to install a suitable plugin for your async framework, for example:\n" + " - anyio\n" + " - pytest-asyncio\n" + " - pytest-tornasync\n" + " - pytest-trio\n" + " - pytest-twisted" + ) + fail(msg, pytrace=False) + + +@hookimpl(trylast=True) +def pytest_pyfunc_call(pyfuncitem: Function) -> object | None: + testfunction = pyfuncitem.obj + if is_async_function(testfunction): + async_fail(pyfuncitem.nodeid) + funcargs = pyfuncitem.funcargs + testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} + result = testfunction(**testargs) + if hasattr(result, "__await__") or hasattr(result, "__aiter__"): + async_fail(pyfuncitem.nodeid) + elif result is not None: + warnings.warn( + PytestReturnNotNoneWarning( + f"Test functions should return None, but {pyfuncitem.nodeid} returned {type(result)!r}.\n" + "Did you mean to use `assert` instead of `return`?\n" + "See https://docs.pytest.org/en/stable/how-to/assert.html#return-not-none for more information." + ) + ) + return True + + +def pytest_collect_directory( + path: Path, parent: nodes.Collector +) -> nodes.Collector | None: + pkginit = path / "__init__.py" + try: + has_pkginit = pkginit.is_file() + except PermissionError: + # See https://github.com/pytest-dev/pytest/issues/12120#issuecomment-2106349096. 
+ return None + if has_pkginit: + return Package.from_parent(parent, path=path) + return None + + +def pytest_collect_file(file_path: Path, parent: nodes.Collector) -> Module | None: + if file_path.suffix == ".py": + if not parent.session.isinitpath(file_path): + if not path_matches_patterns( + file_path, parent.config.getini("python_files") + ): + return None + ihook = parent.session.gethookproxy(file_path) + module: Module = ihook.pytest_pycollect_makemodule( + module_path=file_path, parent=parent + ) + return module + return None + + +def path_matches_patterns(path: Path, patterns: Iterable[str]) -> bool: + """Return whether path matches any of the patterns in the list of globs given.""" + return any(fnmatch_ex(pattern, path) for pattern in patterns) + + +def pytest_pycollect_makemodule(module_path: Path, parent) -> Module: + return Module.from_parent(parent, path=module_path) + + +@hookimpl(trylast=True) +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> None | nodes.Item | nodes.Collector | list[nodes.Item | nodes.Collector]: + assert isinstance(collector, (Class, Module)), type(collector) + # Nothing was collected elsewhere, let's do it here. + if safe_isclass(obj): + if collector.istestclass(obj, name): + return Class.from_parent(collector, name=name, obj=obj) + elif collector.istestfunction(obj, name): + # mock seems to store unbound methods (issue473), normalize it. + obj = getattr(obj, "__func__", obj) + # We need to try and unwrap the function if it's a functools.partial + # or a functools.wrapped. + # We mustn't if it's been wrapped with mock.patch (python 2 only). + if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))): + filename, lineno = getfslineno(obj) + warnings.warn_explicit( + message=PytestCollectionWarning( + f"cannot collect {name!r} because it is not a function." + ), + category=None, + filename=str(filename), + lineno=lineno + 1, + ) + elif getattr(obj, "__test__", True): + if inspect.isgeneratorfunction(obj): + fail( + f"'yield' keyword is allowed in fixtures, but not in tests ({name})", + pytrace=False, + ) + return list(collector._genfunctions(name, obj)) + return None + return None + + +class PyobjMixin(nodes.Node): + """this mix-in inherits from Node to carry over the typing information + + as its intended to always mix in before a node + its position in the mro is unaffected""" + + _ALLOW_MARKERS = True + + @property + def module(self): + """Python module object this node was collected from (can be None).""" + node = self.getparent(Module) + return node.obj if node is not None else None + + @property + def cls(self): + """Python class object this node was collected from (can be None).""" + node = self.getparent(Class) + return node.obj if node is not None else None + + @property + def instance(self): + """Python instance object the function is bound to. + + Returns None if not a test method, e.g. for a standalone test function, + a class or a module. + """ + # Overridden by Function. + return None + + @property + def obj(self): + """Underlying Python object.""" + obj = getattr(self, "_obj", None) + if obj is None: + self._obj = obj = self._getobj() + # XXX evil hack + # used to avoid Function marker duplication + if self._ALLOW_MARKERS: + self.own_markers.extend(get_unpacked_marks(self.obj)) + # This assumes that `obj` is called before there is a chance + # to add custom keys to `self.keywords`, so no fear of overriding. 
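`pytest_pycollect_makeitem` above consults a `__test__` attribute, which gives objects whose names happen to match the discovery patterns a way to opt out of collection. A small sketch (hypothetical names):

```python
def test_shared_helper():
    """Matches ``python_functions`` but must never run as a test."""
    raise RuntimeError("helper only")


test_shared_helper.__test__ = False  # the makeitem hook now skips it


class TestUtilities:
    # Collection of the whole class is skipped, methods included.
    __test__ = False

    def test_placeholder(self):
        pass
```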
+ self.keywords.update((mark.name, mark) for mark in self.own_markers) + return obj + + @obj.setter + def obj(self, value): + self._obj = value + + def _getobj(self): + """Get the underlying Python object. May be overwritten by subclasses.""" + # TODO: Improve the type of `parent` such that assert/ignore aren't needed. + assert self.parent is not None + obj = self.parent.obj # type: ignore[attr-defined] + return getattr(obj, self.name) + + def getmodpath(self, stopatmodule: bool = True, includemodule: bool = False) -> str: + """Return Python path relative to the containing module.""" + parts = [] + for node in self.iter_parents(): + name = node.name + if isinstance(node, Module): + name = os.path.splitext(name)[0] + if stopatmodule: + if includemodule: + parts.append(name) + break + parts.append(name) + parts.reverse() + return ".".join(parts) + + def reportinfo(self) -> tuple[os.PathLike[str] | str, int | None, str]: + # XXX caching? + path, lineno = getfslineno(self.obj) + modpath = self.getmodpath() + return path, lineno, modpath + + +# As an optimization, these builtin attribute names are pre-ignored when +# iterating over an object during collection -- the pytest_pycollect_makeitem +# hook is not called for them. +# fmt: off +class _EmptyClass: pass # noqa: E701 +IGNORED_ATTRIBUTES = frozenset.union( + frozenset(), + # Module. + dir(types.ModuleType("empty_module")), + # Some extra module attributes the above doesn't catch. + {"__builtins__", "__file__", "__cached__"}, + # Class. + dir(_EmptyClass), + # Instance. + dir(_EmptyClass()), +) +del _EmptyClass +# fmt: on + + +class PyCollector(PyobjMixin, nodes.Collector, abc.ABC): + def funcnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_functions", name) + + def isnosetest(self, obj: object) -> bool: + """Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator. + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, "__test__", False) is True + + def classnamefilter(self, name: str) -> bool: + return self._matches_prefix_or_glob_option("python_classes", name) + + def istestfunction(self, obj: object, name: str) -> bool: + if self.funcnamefilter(name) or self.isnosetest(obj): + if isinstance(obj, (staticmethod, classmethod)): + # staticmethods and classmethods need to be unwrapped. + obj = safe_getattr(obj, "__func__", False) + return callable(obj) and fixtures.getfixturemarker(obj) is None + else: + return False + + def istestclass(self, obj: object, name: str) -> bool: + if not (self.classnamefilter(name) or self.isnosetest(obj)): + return False + if inspect.isabstract(obj): + return False + return True + + def _matches_prefix_or_glob_option(self, option_name: str, name: str) -> bool: + """Check if the given name matches the prefix or glob-pattern defined + in ini configuration.""" + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # Check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call. + elif ("*" in option or "?" 
in option or "[" in option) and fnmatch.fnmatch( + name, option + ): + return True + return False + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not getattr(self.obj, "__test__", True): + return [] + + # Avoid random getattrs and peek in the __dict__ instead. + dicts = [getattr(self.obj, "__dict__", {})] + if isinstance(self.obj, type): + for basecls in self.obj.__mro__: + dicts.append(basecls.__dict__) + + # In each class, nodes should be definition ordered. + # __dict__ is definition ordered. + seen: set[str] = set() + dict_values: list[list[nodes.Item | nodes.Collector]] = [] + collect_imported_tests = self.session.config.getini("collect_imported_tests") + ihook = self.ihook + for dic in dicts: + values: list[nodes.Item | nodes.Collector] = [] + # Note: seems like the dict can change during iteration - + # be careful not to remove the list() without consideration. + for name, obj in list(dic.items()): + if name in IGNORED_ATTRIBUTES: + continue + if name in seen: + continue + seen.add(name) + + if not collect_imported_tests and isinstance(self, Module): + # Do not collect functions and classes from other modules. + if inspect.isfunction(obj) or inspect.isclass(obj): + if obj.__module__ != self._getobj().__name__: + continue + + res = ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj + ) + if res is None: + continue + elif isinstance(res, list): + values.extend(res) + else: + values.append(res) + dict_values.append(values) + + # Between classes in the class hierarchy, reverse-MRO order -- nodes + # inherited from base classes should come before subclasses. + result = [] + for values in reversed(dict_values): + result.extend(values) + return result + + def _genfunctions(self, name: str, funcobj) -> Iterator[Function]: + modulecol = self.getparent(Module) + assert modulecol is not None + module = modulecol.obj + clscol = self.getparent(Class) + cls = (clscol and clscol.obj) or None + + definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj) + fixtureinfo = definition._fixtureinfo + + # pytest_generate_tests impls call metafunc.parametrize() which fills + # metafunc._calls, the outcome of the hook. + metafunc = Metafunc( + definition=definition, + fixtureinfo=fixtureinfo, + config=self.config, + cls=cls, + module=module, + _ispytest=True, + ) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if cls is not None and hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc)) + + if not metafunc._calls: + yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo) + else: + metafunc._recompute_direct_params_indices() + # Direct parametrizations taking place in module/class-specific + # `metafunc.parametrize` calls may have shadowed some fixtures, so make sure + # we update what the function really needs a.k.a its fixture closure. Note that + # direct parametrizations using `@pytest.mark.parametrize` have already been considered + # into making the closure using `ignore_args` arg to `getfixtureclosure`. 
+ fixtureinfo.prune_dependency_tree() + + for callspec in metafunc._calls: + subname = f"{name}[{callspec.id}]" if callspec._idlist else name + yield Function.from_parent( + self, + name=subname, + callspec=callspec, + fixtureinfo=fixtureinfo, + keywords={callspec.id: True}, + originalname=name, + ) + + +def importtestmodule( + path: Path, + config: Config, +): + # We assume we are only called once per module. + importmode = config.getoption("--import-mode") + try: + mod = import_path( + path, + mode=importmode, + root=config.rootpath, + consider_namespace_packages=config.getini("consider_namespace_packages"), + ) + except SyntaxError as e: + raise nodes.Collector.CollectError( + ExceptionInfo.from_current().getrepr(style="short") + ) from e + except ImportPathMismatchError as e: + raise nodes.Collector.CollectError( + "import file mismatch:\n" + "imported module {!r} has this __file__ attribute:\n" + " {}\n" + "which is not the same as the test file we want to collect:\n" + " {}\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules".format(*e.args) + ) from e + except ImportError as e: + exc_info = ExceptionInfo.from_current() + if config.get_verbosity() < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = ( + exc_info.getrepr(style="short") + if exc_info.traceback + else exc_info.exconly() + ) + formatted_tb = str(exc_repr) + raise nodes.Collector.CollectError( + f"ImportError while importing test module '{path}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + f"{formatted_tb}" + ) from e + except skip.Exception as e: + if e.allow_module_level: + raise + raise nodes.Collector.CollectError( + "Using pytest.skip outside of a test will skip the entire module. " + "If that's your intention, pass `allow_module_level=True`. " + "If you want to skip a specific test or an entire class, " + "use the @pytest.mark.skip or @pytest.mark.skipif decorators." + ) from e + config.pluginmanager.consider_module(mod) + return mod + + +class Module(nodes.File, PyCollector): + """Collector for test classes and functions in a Python module.""" + + def _getobj(self): + return importtestmodule(self.path, self.config) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + self._register_setup_module_fixture() + self._register_setup_function_fixture() + self.session._fixturemanager.parsefactories(self) + return super().collect() + + def _register_setup_module_fixture(self) -> None: + """Register an autouse, module-scoped fixture for the collected module object + that invokes setUpModule/tearDownModule if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_module = _get_first_non_fixture_func( + self.obj, ("setUpModule", "setup_module") + ) + teardown_module = _get_first_non_fixture_func( + self.obj, ("tearDownModule", "teardown_module") + ) + + if setup_module is None and teardown_module is None: + return + + def xunit_setup_module_fixture(request) -> Generator[None]: + module = request.module + if setup_module is not None: + _call_with_optional_argument(setup_module, module) + yield + if teardown_module is not None: + _call_with_optional_argument(teardown_module, module) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
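For reference, the module-level xunit hooks that `xunit_setup_module_fixture` wraps look like this; either hook may accept the module object or take no arguments at all (see `_call_with_optional_argument` further down):

```python
state = {}


def setup_module(module):
    # Runs once, before the first test in this module.
    state["ready"] = True


def teardown_module(module):
    # Runs once, after the last test in this module.
    state.clear()


def test_module_state():
    assert state["ready"]
```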
+ name=f"_xunit_setup_module_fixture_{self.obj.__name__}", + func=xunit_setup_module_fixture, + nodeid=self.nodeid, + scope="module", + autouse=True, + ) + + def _register_setup_function_fixture(self) -> None: + """Register an autouse, function-scoped fixture for the collected module object + that invokes setup_function/teardown_function if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",)) + teardown_function = _get_first_non_fixture_func( + self.obj, ("teardown_function",) + ) + if setup_function is None and teardown_function is None: + return + + def xunit_setup_function_fixture(request) -> Generator[None]: + if request.instance is not None: + # in this case we are bound to an instance, so we need to let + # setup_method handle this + yield + return + function = request.function + if setup_function is not None: + _call_with_optional_argument(setup_function, function) + yield + if teardown_function is not None: + _call_with_optional_argument(teardown_function, function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_function_fixture_{self.obj.__name__}", + func=xunit_setup_function_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class Package(nodes.Directory): + """Collector for files and directories in a Python packages -- directories + with an `__init__.py` file. + + .. note:: + + Directories without an `__init__.py` file are instead collected by + :class:`~pytest.Dir` by default. Both are :class:`~pytest.Directory` + collectors. + + .. versionchanged:: 8.0 + + Now inherits from :class:`~pytest.Directory`. + """ + + def __init__( + self, + fspath: LEGACY_PATH | None, + parent: nodes.Collector, + # NOTE: following args are unused: + config=None, + session=None, + nodeid=None, + path: Path | None = None, + ) -> None: + # NOTE: Could be just the following, but kept as-is for compat. + # super().__init__(self, fspath, parent=parent) + session = parent.session + super().__init__( + fspath=fspath, + path=path, + parent=parent, + config=config, + session=session, + nodeid=nodeid, + ) + + def setup(self) -> None: + init_mod = importtestmodule(self.path / "__init__.py", self.config) + + # Not using fixtures to call setup_module here because autouse fixtures + # from packages are not called automatically (#4085). + setup_module = _get_first_non_fixture_func( + init_mod, ("setUpModule", "setup_module") + ) + if setup_module is not None: + _call_with_optional_argument(setup_module, init_mod) + + teardown_module = _get_first_non_fixture_func( + init_mod, ("tearDownModule", "teardown_module") + ) + if teardown_module is not None: + func = partial(_call_with_optional_argument, teardown_module, init_mod) + self.addfinalizer(func) + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + # Always collect __init__.py first. 
+ def sort_key(entry: os.DirEntry[str]) -> object: + return (entry.name != "__init__.py", entry.name) + + config = self.config + col: nodes.Collector | None + cols: Sequence[nodes.Collector] + ihook = self.ihook + for direntry in scandir(self.path, sort_key): + if direntry.is_dir(): + path = Path(direntry.path) + if not self.session.isinitpath(path, with_parents=True): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + col = ihook.pytest_collect_directory(path=path, parent=self) + if col is not None: + yield col + + elif direntry.is_file(): + path = Path(direntry.path) + if not self.session.isinitpath(path): + if ihook.pytest_ignore_collect(collection_path=path, config=config): + continue + cols = ihook.pytest_collect_file(file_path=path, parent=self) + yield from cols + + +def _call_with_optional_argument(func, arg) -> None: + """Call the given function with the given argument if func accepts one argument, otherwise + calls func without arguments.""" + arg_count = func.__code__.co_argcount + if inspect.ismethod(func): + arg_count -= 1 + if arg_count: + func(arg) + else: + func() + + +def _get_first_non_fixture_func(obj: object, names: Iterable[str]) -> object | None: + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to avoid calling it twice. + """ + for name in names: + meth: object | None = getattr(obj, name, None) + if meth is not None and fixtures.getfixturemarker(meth) is None: + return meth + return None + + +class Class(PyCollector): + """Collector for test methods (and nested classes) in a Python class.""" + + @classmethod + def from_parent(cls, parent, *, name, obj=None, **kw) -> Self: # type: ignore[override] + """The public constructor.""" + return super().from_parent(name=name, parent=parent, **kw) + + def newinstance(self): + return self.obj() + + def collect(self) -> Iterable[nodes.Item | nodes.Collector]: + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__init__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + elif hasnew(self.obj): + assert self.parent is not None + self.warn( + PytestCollectionWarning( + f"cannot collect test class {self.obj.__name__!r} because it has a " + f"__new__ constructor (from: {self.parent.nodeid})" + ) + ) + return [] + + self._register_setup_class_fixture() + self._register_setup_method_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + return super().collect() + + def _register_setup_class_fixture(self) -> None: + """Register an autouse, class scoped fixture into the collected class object + that invokes setup_class/teardown_class if either or both are available. + + Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with + other fixtures (#517). 
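Since test classes may not define `__init__` or `__new__` (see the collection warnings above), per-class and per-test state goes through the xunit hooks these class fixtures wrap. A sketch:

```python
class TestDatabase:
    @classmethod
    def setup_class(cls):
        # Once, before the first test method in the class.
        cls.log = []

    def setup_method(self, method):
        # Before every test; each test also runs on a fresh instance.
        self.log.append(f"start:{method.__name__}")

    def teardown_method(self, method):
        self.log.append(f"end:{method.__name__}")

    @classmethod
    def teardown_class(cls):
        # Once, after the last test method in the class.
        del cls.log

    def test_insert(self):
        assert self.log[-1] == "start:test_insert"
```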
+ """ + setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",)) + teardown_class = _get_first_non_fixture_func(self.obj, ("teardown_class",)) + if setup_class is None and teardown_class is None: + return + + def xunit_setup_class_fixture(request) -> Generator[None]: + cls = request.cls + if setup_class is not None: + func = getimfunc(setup_class) + _call_with_optional_argument(func, cls) + yield + if teardown_class is not None: + func = getimfunc(teardown_class) + _call_with_optional_argument(func, cls) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_class_fixture_{self.obj.__qualname__}", + func=xunit_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_setup_method_fixture(self) -> None: + """Register an autouse, function scoped fixture into the collected class object + that invokes setup_method/teardown_method if either or both are available. + + Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with + other fixtures (#517). + """ + setup_name = "setup_method" + setup_method = _get_first_non_fixture_func(self.obj, (setup_name,)) + teardown_name = "teardown_method" + teardown_method = _get_first_non_fixture_func(self.obj, (teardown_name,)) + if setup_method is None and teardown_method is None: + return + + def xunit_setup_method_fixture(request) -> Generator[None]: + instance = request.instance + method = request.function + if setup_method is not None: + func = getattr(instance, setup_name) + _call_with_optional_argument(func, method) + yield + if teardown_method is not None: + func = getattr(instance, teardown_name) + _call_with_optional_argument(func, method) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_xunit_setup_method_fixture_{self.obj.__qualname__}", + func=xunit_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +def hasinit(obj: object) -> bool: + init: object = getattr(obj, "__init__", None) + if init: + return init != object.__init__ + return False + + +def hasnew(obj: object) -> bool: + new: object = getattr(obj, "__new__", None) + if new: + return new != object.__new__ + return False + + +@final +@dataclasses.dataclass(frozen=True) +class IdMaker: + """Make IDs for a parametrization.""" + + __slots__ = ( + "argnames", + "config", + "func_name", + "idfn", + "ids", + "nodeid", + "parametersets", + ) + + # The argnames of the parametrization. + argnames: Sequence[str] + # The ParameterSets of the parametrization. + parametersets: Sequence[ParameterSet] + # Optionally, a user-provided callable to make IDs for parameters in a + # ParameterSet. + idfn: Callable[[Any], object | None] | None + # Optionally, explicit IDs for ParameterSets by index. + ids: Sequence[object | None] | None + # Optionally, the pytest config. + # Used for controlling ASCII escaping, and for calling the + # :hook:`pytest_make_parametrize_id` hook. + config: Config | None + # Optionally, the ID of the node being parametrized. + # Used only for clearer error messages. + nodeid: str | None + # Optionally, the ID of the function being parametrized. + # Used only for clearer error messages. + func_name: str | None + + def make_unique_parameterset_ids(self) -> list[str | _HiddenParam]: + """Make a unique identifier for each ParameterSet, that may be used to + identify the parametrization in a node ID. 
+ + Format is <prm_1_token>-...-<prm_n_token>[counter], where prm_x_token is + - user-provided id, if given + - else an id derived from the value, applicable for certain types + - else <argname><parameterset index> + The counter suffix is appended only in case a string wouldn't be unique + otherwise. + """ + resolved_ids = list(self._resolve_ids()) + # All IDs must be unique! + if len(resolved_ids) != len(set(resolved_ids)): + # Record the number of occurrences of each ID. + id_counts = Counter(resolved_ids) + # Map the ID to its next suffix. + id_suffixes: dict[str, int] = defaultdict(int) + # Suffix non-unique IDs to make them unique. + for index, id in enumerate(resolved_ids): + if id_counts[id] > 1: + if id is HIDDEN_PARAM: + self._complain_multiple_hidden_parameter_sets() + suffix = "" + if id and id[-1].isdigit(): + suffix = "_" + new_id = f"{id}{suffix}{id_suffixes[id]}" + while new_id in set(resolved_ids): + id_suffixes[id] += 1 + new_id = f"{id}{suffix}{id_suffixes[id]}" + resolved_ids[index] = new_id + id_suffixes[id] += 1 + assert len(resolved_ids) == len(set(resolved_ids)), ( + f"Internal error: {resolved_ids=}" + ) + return resolved_ids + + def _resolve_ids(self) -> Iterable[str | _HiddenParam]: + """Resolve IDs for all ParameterSets (may contain duplicates).""" + for idx, parameterset in enumerate(self.parametersets): + if parameterset.id is not None: + # ID provided directly - pytest.param(..., id="...") + if parameterset.id is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield _ascii_escaped_by_config(parameterset.id, self.config) + elif self.ids and idx < len(self.ids) and self.ids[idx] is not None: + # ID provided in the IDs list - parametrize(..., ids=[...]). + if self.ids[idx] is HIDDEN_PARAM: + yield HIDDEN_PARAM + else: + yield self._idval_from_value_required(self.ids[idx], idx) + else: + # ID not provided - generate it.
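The suffixing loop above is what keeps node IDs unique when several parameter sets resolve to the same token. For example (the duplicate values are deliberate):

```python
import pytest


@pytest.mark.parametrize("word", ["hello", "hello", "hello"])
def test_greeting(word):
    assert word == "hello"


# All three sets resolve to the id "hello", so IdMaker appends counters:
#   test_greeting[hello0]
#   test_greeting[hello1]
#   test_greeting[hello2]
```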
+ yield "-".join( + self._idval(val, argname, idx) + for val, argname in zip(parameterset.values, self.argnames) + ) + + def _idval(self, val: object, argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet.""" + idval = self._idval_from_function(val, argname, idx) + if idval is not None: + return idval + idval = self._idval_from_hook(val, argname) + if idval is not None: + return idval + idval = self._idval_from_value(val) + if idval is not None: + return idval + return self._idval_from_argname(argname, idx) + + def _idval_from_function(self, val: object, argname: str, idx: int) -> str | None: + """Try to make an ID for a parameter in a ParameterSet using the + user-provided id callable, if given.""" + if self.idfn is None: + return None + try: + id = self.idfn(val) + except Exception as e: + prefix = f"{self.nodeid}: " if self.nodeid is not None else "" + msg = "error raised while trying to determine id of parameter '{}' at position {}" + msg = prefix + msg.format(argname, idx) + raise ValueError(msg) from e + if id is None: + return None + return self._idval_from_value(id) + + def _idval_from_hook(self, val: object, argname: str) -> str | None: + """Try to make an ID for a parameter in a ParameterSet by calling the + :hook:`pytest_make_parametrize_id` hook.""" + if self.config: + id: str | None = self.config.hook.pytest_make_parametrize_id( + config=self.config, val=val, argname=argname + ) + return id + return None + + def _idval_from_value(self, val: object) -> str | None: + """Try to make an ID for a parameter in a ParameterSet from its value, + if the value type is supported.""" + if isinstance(val, (str, bytes)): + return _ascii_escaped_by_config(val, self.config) + elif val is None or isinstance(val, (float, int, bool, complex)): + return str(val) + elif isinstance(val, re.Pattern): + return ascii_escaped(val.pattern) + elif val is NOTSET: + # Fallback to default. Note that NOTSET is an enum.Enum. + pass + elif isinstance(val, enum.Enum): + return str(val) + elif isinstance(getattr(val, "__name__", None), str): + # Name of a class, function, module, etc. + name: str = getattr(val, "__name__") + return name + return None + + def _idval_from_value_required(self, val: object, idx: int) -> str: + """Like _idval_from_value(), but fails if the type is not supported.""" + id = self._idval_from_value(val) + if id is not None: + return id + + # Fail. + prefix = self._make_error_prefix() + msg = ( + f"{prefix}ids contains unsupported value {saferepr(val)} (type: {type(val)!r}) at index {idx}. " + "Supported types are: str, bytes, int, float, complex, bool, enum, regex or anything with a __name__." + ) + fail(msg, pytrace=False) + + @staticmethod + def _idval_from_argname(argname: str, idx: int) -> str: + """Make an ID for a parameter in a ParameterSet from the argument name + and the index of the ParameterSet.""" + return str(argname) + str(idx) + + def _complain_multiple_hidden_parameter_sets(self) -> NoReturn: + fail( + f"{self._make_error_prefix()}multiple instances of HIDDEN_PARAM " + "cannot be used in the same parametrize call, " + "because the tests names need to be unique." + ) + + def _make_error_prefix(self) -> str: + if self.func_name is not None: + return f"In {self.func_name}: " + elif self.nodeid is not None: + return f"In {self.nodeid}: " + else: + return "" + + +@final +@dataclasses.dataclass(frozen=True) +class CallSpec2: + """A planned parameterized invocation of a test function. 
+ + Calculated during collection for a given test function's Metafunc. + Once collection is over, each callspec is turned into a single Item + and stored in item.callspec. + """ + + # arg name -> arg value which will be passed to a fixture or pseudo-fixture + # of the same name. (indirect or direct parametrization respectively) + params: dict[str, object] = dataclasses.field(default_factory=dict) + # arg name -> arg index. + indices: dict[str, int] = dataclasses.field(default_factory=dict) + # arg name -> parameter scope. + # Used for sorting parametrized resources. + _arg2scope: Mapping[str, Scope] = dataclasses.field(default_factory=dict) + # Parts which will be added to the item's name in `[..]` separated by "-". + _idlist: Sequence[str] = dataclasses.field(default_factory=tuple) + # Marks which will be applied to the item. + marks: list[Mark] = dataclasses.field(default_factory=list) + + def setmulti( + self, + *, + argnames: Iterable[str], + valset: Iterable[object], + id: str | _HiddenParam, + marks: Iterable[Mark | MarkDecorator], + scope: Scope, + param_index: int, + nodeid: str, + ) -> CallSpec2: + params = self.params.copy() + indices = self.indices.copy() + arg2scope = dict(self._arg2scope) + for arg, val in zip(argnames, valset): + if arg in params: + raise nodes.Collector.CollectError( + f"{nodeid}: duplicate parametrization of {arg!r}" + ) + params[arg] = val + indices[arg] = param_index + arg2scope[arg] = scope + return CallSpec2( + params=params, + indices=indices, + _arg2scope=arg2scope, + _idlist=self._idlist if id is HIDDEN_PARAM else [*self._idlist, id], + marks=[*self.marks, *normalize_mark_list(marks)], + ) + + def getparam(self, name: str) -> object: + try: + return self.params[name] + except KeyError as e: + raise ValueError(name) from e + + @property + def id(self) -> str: + return "-".join(self._idlist) + + +def get_direct_param_fixture_func(request: FixtureRequest) -> Any: + return request.param + + +# Used for storing pseudo fixturedefs for direct parametrization. +name2pseudofixturedef_key = StashKey[dict[str, FixtureDef[Any]]]() + + +@final +class Metafunc: + """Objects passed to the :hook:`pytest_generate_tests` hook. + + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. + """ + + def __init__( + self, + definition: FunctionDefinition, + fixtureinfo: fixtures.FuncFixtureInfo, + config: Config, + cls=None, + module=None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + + #: Access to the underlying :class:`_pytest.python.FunctionDefinition`. + self.definition = definition + + #: Access to the :class:`pytest.Config` object for the test session. + self.config = config + + #: The module object where the test function is defined in. + self.module = module + + #: Underlying Python test function. + self.function = definition.obj + + #: Set of fixture names required by the test function. + self.fixturenames = fixtureinfo.names_closure + + #: Class object where the test function is defined in or ``None``. + self.cls = cls + + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + # Result of parametrize(). 
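Because each `parametrize()` call multiplies the existing `_calls` (see `setmulti` above and the loop at the end of `parametrize()` below), stacked decorators yield the cartesian product of their parameter sets:

```python
import pytest


@pytest.mark.parametrize("x", [0, 1])
@pytest.mark.parametrize("y", ["a", "b"])
def test_grid(x, y):
    # Four items are collected, one per (x, y) combination, with both
    # id tokens joined into the node id, e.g. test_grid[a-0].
    assert x in (0, 1) and y in ("a", "b")
```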
+ self._calls: list[CallSpec2] = [] + + self._params_directness: dict[str, Literal["indirect", "direct"]] = {} + + def parametrize( + self, + argnames: str | Sequence[str], + argvalues: Iterable[ParameterSet | Sequence[object] | object], + indirect: bool | Sequence[str] = False, + ids: Iterable[object | None] | Callable[[Any], object | None] | None = None, + scope: _ScopeName | None = None, + *, + _param_mark: Mark | None = None, + ) -> None: + """Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting ``indirect`` to do it at test setup time instead. + + Can be called multiple times per test function (but only on different + argument names), in which case each call parametrizes all previous + parametrizations, e.g. + + :: + + unparametrized: t + parametrize ["x", "y"]: t[x], t[y] + parametrize [1, 2]: t[x-1], t[x-2], t[y-1], t[y-2] + + :param argnames: + A comma-separated string denoting one or more argument names, or + a list/tuple of argument strings. + + :param argvalues: + The list of argvalues determines how often a test is invoked with + different argument values. + + If only one argname was specified argvalues is a list of values. + If N argnames were specified, argvalues must be a list of + N-tuples, where each tuple-element specifies a value for its + respective argname. + + :param indirect: + A list of arguments' names (subset of argnames) or a boolean. + If True the list contains all names from the argnames. Each + argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :param ids: + Sequence of (or generator for) ids for ``argvalues``, + or a callable to return part of the id for each argvalue. + + With sequences (and generators like ``itertools.count()``) the + returned ids should be of type ``string``, ``int``, ``float``, + ``bool``, or ``None``. + They are mapped to the corresponding index in ``argvalues``. + ``None`` means to use the auto-generated id. + + .. versionadded:: 8.4 + :ref:`hidden-param` means to hide the parameter set + from the test name. Can only be used at most 1 time, as + test names need to be unique. + + If it is a callable it will be called for each entry in + ``argvalues``, and the return value is used as part of the + auto-generated id for the whole set (where parts are joined with + dashes ("-")). + This is useful to provide more specific ids for certain items, e.g. + dates. Returning ``None`` will use an auto-generated id. + + If no ids are provided they will be generated automatically from + the argvalues. + + :param scope: + If specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
+ """ + nodeid = self.definition.nodeid + + argnames, parametersets = ParameterSet._for_parametrize( + argnames, + argvalues, + self.function, + self.config, + nodeid=self.definition.nodeid, + ) + del argvalues + + if "request" in argnames: + fail( + f"{nodeid}: 'request' is a reserved name and cannot be used in @pytest.mark.parametrize", + pytrace=False, + ) + + if scope is not None: + scope_ = Scope.from_user( + scope, descr=f"parametrize() call in {self.function.__name__}" + ) + else: + scope_ = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + self._validate_if_using_arg_names(argnames, indirect) + + # Use any already (possibly) generated ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from: + generated_ids = _param_mark._param_ids_from._param_ids_generated + if generated_ids is not None: + ids = generated_ids + + ids = self._resolve_parameter_set_ids( + argnames, ids, parametersets, nodeid=self.definition.nodeid + ) + + # Store used (possibly generated) ids with parametrize Marks. + if _param_mark and _param_mark._param_ids_from and generated_ids is None: + object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids) + + # Calculate directness. + arg_directness = self._resolve_args_directness(argnames, indirect) + self._params_directness.update(arg_directness) + + # Add direct parametrizations as fixturedefs to arg2fixturedefs by + # registering artificial "pseudo" FixtureDef's such that later at test + # setup time we can rely on FixtureDefs to exist for all argnames. + node = None + # For scopes higher than function, a "pseudo" FixtureDef might have + # already been created for the scope. We thus store and cache the + # FixtureDef on the node related to the scope. + if scope_ is Scope.Function: + name2pseudofixturedef = None + else: + collector = self.definition.parent + assert collector is not None + node = get_scope_node(collector, scope_) + if node is None: + # If used class scope and there is no class, use module-level + # collector (for now). + if scope_ is Scope.Class: + assert isinstance(collector, Module) + node = collector + # If used package scope and there is no package, use session + # (for now). + elif scope_ is Scope.Package: + node = collector.session + else: + assert False, f"Unhandled missing scope: {scope}" + default: dict[str, FixtureDef[Any]] = {} + name2pseudofixturedef = node.stash.setdefault( + name2pseudofixturedef_key, default + ) + for argname in argnames: + if arg_directness[argname] == "indirect": + continue + if name2pseudofixturedef is not None and argname in name2pseudofixturedef: + fixturedef = name2pseudofixturedef[argname] + else: + fixturedef = FixtureDef( + config=self.config, + baseid="", + argname=argname, + func=get_direct_param_fixture_func, + scope=scope_, + params=None, + ids=None, + _ispytest=True, + ) + if name2pseudofixturedef is not None: + name2pseudofixturedef[argname] = fixturedef + self._arg2fixturedefs[argname] = [fixturedef] + + # Create the new calls: if we are parametrize() multiple times (by applying the decorator + # more than once) then we accumulate those calls generating the cartesian product + # of all calls. 
+ newcalls = [] + for callspec in self._calls or [CallSpec2()]: + for param_index, (param_id, param_set) in enumerate( + zip(ids, parametersets) + ): + newcallspec = callspec.setmulti( + argnames=argnames, + valset=param_set.values, + id=param_id, + marks=param_set.marks, + scope=scope_, + param_index=param_index, + nodeid=nodeid, + ) + newcalls.append(newcallspec) + self._calls = newcalls + + def _resolve_parameter_set_ids( + self, + argnames: Sequence[str], + ids: Iterable[object | None] | Callable[[Any], object | None] | None, + parametersets: Sequence[ParameterSet], + nodeid: str, + ) -> list[str | _HiddenParam]: + """Resolve the actual ids for the given parameter sets. + + :param argnames: + Argument names passed to ``parametrize()``. + :param ids: + The `ids` parameter of the ``parametrize()`` call (see docs). + :param parametersets: + The parameter sets, each containing a set of values corresponding + to ``argnames``. + :param nodeid str: + The nodeid of the definition item that generated this + parametrization. + :returns: + List with ids for each parameter set given. + """ + if ids is None: + idfn = None + ids_ = None + elif callable(ids): + idfn = ids + ids_ = None + else: + idfn = None + ids_ = self._validate_ids(ids, parametersets, self.function.__name__) + id_maker = IdMaker( + argnames, + parametersets, + idfn, + ids_, + self.config, + nodeid=nodeid, + func_name=self.function.__name__, + ) + return id_maker.make_unique_parameterset_ids() + + def _validate_ids( + self, + ids: Iterable[object | None], + parametersets: Sequence[ParameterSet], + func_name: str, + ) -> list[object | None]: + try: + num_ids = len(ids) # type: ignore[arg-type] + except TypeError: + try: + iter(ids) + except TypeError as e: + raise TypeError("ids must be a callable or an iterable") from e + num_ids = len(parametersets) + + # num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849 + if num_ids != len(parametersets) and num_ids != 0: + msg = "In {}: {} parameter sets specified, with different number of ids: {}" + fail(msg.format(func_name, len(parametersets), num_ids), pytrace=False) + + return list(itertools.islice(ids, num_ids)) + + def _resolve_args_directness( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> dict[str, Literal["indirect", "direct"]]: + """Resolve if each parametrized argument must be considered an indirect + parameter to a fixture of the same name, or a direct parameter to the + parametrized function, based on the ``indirect`` parameter of the + parametrized() call. + + :param argnames: + List of argument names passed to ``parametrize()``. + :param indirect: + Same as the ``indirect`` parameter of ``parametrize()``. + :returns + A dict mapping each arg name to either "indirect" or "direct". 
+ """ + arg_directness: dict[str, Literal["indirect", "direct"]] + if isinstance(indirect, bool): + arg_directness = dict.fromkeys( + argnames, "indirect" if indirect else "direct" + ) + elif isinstance(indirect, Sequence): + arg_directness = dict.fromkeys(argnames, "direct") + for arg in indirect: + if arg not in argnames: + fail( + f"In {self.function.__name__}: indirect fixture '{arg}' doesn't exist", + pytrace=False, + ) + arg_directness[arg] = "indirect" + else: + fail( + f"In {self.function.__name__}: expected Sequence or boolean" + f" for indirect, got {type(indirect).__name__}", + pytrace=False, + ) + return arg_directness + + def _validate_if_using_arg_names( + self, + argnames: Sequence[str], + indirect: bool | Sequence[str], + ) -> None: + """Check if all argnames are being used, by default values, or directly/indirectly. + + :param List[str] argnames: List of argument names passed to ``parametrize()``. + :param indirect: Same as the ``indirect`` parameter of ``parametrize()``. + :raises ValueError: If validation fails. + """ + default_arg_names = set(get_default_arg_names(self.function)) + func_name = self.function.__name__ + for arg in argnames: + if arg not in self.fixturenames: + if arg in default_arg_names: + fail( + f"In {func_name}: function already takes an argument '{arg}' with a default value", + pytrace=False, + ) + else: + if isinstance(indirect, Sequence): + name = "fixture" if arg in indirect else "argument" + else: + name = "fixture" if indirect else "argument" + fail( + f"In {func_name}: function uses no {name} '{arg}'", + pytrace=False, + ) + + def _recompute_direct_params_indices(self) -> None: + for argname, param_type in self._params_directness.items(): + if param_type == "direct": + for i, callspec in enumerate(self._calls): + callspec.indices[argname] = i + + +def _find_parametrized_scope( + argnames: Sequence[str], + arg2fixturedefs: Mapping[str, Sequence[fixtures.FixtureDef[object]]], + indirect: bool | Sequence[str], +) -> Scope: + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + if isinstance(indirect, Sequence): + all_arguments_are_fixtures = len(indirect) == len(argnames) + else: + all_arguments_are_fixtures = bool(indirect) + + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [ + fixturedef[-1]._scope + for name, fixturedef in fixturedefs.items() + if name in argnames + ] + # Takes the most narrow scope from used fixtures. + return min(used_scopes, default=Scope.Function) + + return Scope.Function + + +def _ascii_escaped_by_config(val: str | bytes, config: Config | None) -> str: + if config is None: + escape_option = False + else: + escape_option = config.getini( + "disable_test_id_escaping_and_forfeit_all_rights_to_community_support" + ) + # TODO: If escaping is turned off and the user passes bytes, + # will return a bytes. For now we ignore this but the + # code *probably* doesn't handle this case. + return val if escape_option else ascii_escaped(val) # type: ignore + + +class Function(PyobjMixin, nodes.Item): + """Item responsible for setting up and executing a Python test function. 
+ + :param name: + The full function name, including any decorations like those + added by parametrization (``my_func[my_param]``). + :param parent: + The parent Node. + :param config: + The pytest Config object. + :param callspec: + If given, this function has been parametrized and the callspec contains + meta information about the parametrization. + :param callobj: + If given, the object which will be called when the Function is invoked, + otherwise the callobj will be obtained from ``parent`` using ``originalname``. + :param keywords: + Keywords bound to the function object for "-k" matching. + :param session: + The pytest Session object. + :param fixtureinfo: + Fixture information already resolved at this fixture node.. + :param originalname: + The attribute name to use for accessing the underlying function object. + Defaults to ``name``. Set this if name is different from the original name, + for example when it contains decorations like those added by parametrization + (``my_func[my_param]``). + """ + + # Disable since functions handle it themselves. + _ALLOW_MARKERS = False + + def __init__( + self, + name: str, + parent, + config: Config | None = None, + callspec: CallSpec2 | None = None, + callobj=NOTSET, + keywords: Mapping[str, Any] | None = None, + session: Session | None = None, + fixtureinfo: FuncFixtureInfo | None = None, + originalname: str | None = None, + ) -> None: + super().__init__(name, parent, config=config, session=session) + + if callobj is not NOTSET: + self._obj = callobj + self._instance = getattr(callobj, "__self__", None) + + #: Original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names), used to access + #: the underlying function object from ``parent`` (in case ``callobj`` is not given + #: explicitly). + #: + #: .. versionadded:: 3.0 + self.originalname = originalname or name + + # Note: when FunctionDefinition is introduced, we should change ``originalname`` + # to a readonly property that returns FunctionDefinition.name. + + self.own_markers.extend(get_unpacked_marks(self.obj)) + if callspec: + self.callspec = callspec + self.own_markers.extend(callspec.marks) + + # todo: this is a hell of a hack + # https://github.com/pytest-dev/pytest/issues/4569 + # Note: the order of the updates is important here; indicates what + # takes priority (ctor argument over function attributes over markers). + # Take own_markers only; NodeKeywords handles parent traversal on its own. + self.keywords.update((mark.name, mark) for mark in self.own_markers) + self.keywords.update(self.obj.__dict__) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, self.obj, self.cls) + self._fixtureinfo: FuncFixtureInfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + # todo: determine sound type limitations + @classmethod + def from_parent(cls, parent, **kw) -> Self: + """The public constructor.""" + return super().from_parent(parent=parent, **kw) + + def _initrequest(self) -> None: + self.funcargs: dict[str, object] = {} + self._request = fixtures.TopRequest(self, _ispytest=True) + + @property + def function(self): + """Underlying python 'function' object.""" + return getimfunc(self.obj) + + @property + def instance(self): + try: + return self._instance + except AttributeError: + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. 
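The `name`/`originalname` split documented above is easiest to see on a parametrized function, where only `originalname` remains a valid attribute name on the parent module:

```python
import pytest


@pytest.mark.parametrize("n", [1, 2])
def test_square(n):
    assert n * n in (1, 4)


# The two collected Function items carry:
#   name:         "test_square[1]" and "test_square[2]"
#   originalname: "test_square" (the attribute looked up on the module)
```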
+ self._instance = self._getinstance() + else: + self._instance = None + return self._instance + + def _getinstance(self): + if isinstance(self.parent, Class): + # Each Function gets a fresh class instance. + return self.parent.newinstance() + else: + return None + + def _getobj(self): + instance = self.instance + if instance is not None: + parent_obj = instance + else: + assert self.parent is not None + parent_obj = self.parent.obj # type: ignore[attr-defined] + return getattr(parent_obj, self.originalname) + + @property + def _pyfuncitem(self): + """(compatonly) for code expecting pytest-2.2 style request objects.""" + return self + + def runtest(self) -> None: + """Execute the underlying test function.""" + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self) -> None: + self._request._fillfixtures() + + def _traceback_filter(self, excinfo: ExceptionInfo[BaseException]) -> Traceback: + if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False): + code = _pytest._code.Code.from_function(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + ntraceback = ntraceback.filter(excinfo) + + # issue364: mark all but first and last frames to + # only show a single-line message for each frame. + if self.config.getoption("tbstyle", "auto") == "auto": + if len(ntraceback) > 2: + ntraceback = Traceback( + ( + ntraceback[0], + *(t.with_repr_style("short") for t in ntraceback[1:-1]), + ntraceback[-1], + ) + ) + + return ntraceback + return excinfo.traceback + + # TODO: Type ignored -- breaks Liskov Substitution. 
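+    # (An explanatory note, assuming the base signature is unchanged: the override
+    # below drops the optional ``style`` parameter that ``Node.repr_failure``
+    # accepts, which is what breaks substitutability and needs the ignore.)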
+ def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> str | TerminalRepr: + style = self.config.getoption("tbstyle", "auto") + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class FunctionDefinition(Function): + """This class is a stop gap solution until we evolve to have actual function + definition nodes and manage to get rid of ``metafunc``.""" + + def runtest(self) -> None: + raise RuntimeError("function definitions are not supposed to be run as tests") + + setup = runtest diff --git a/venv/lib/python3.10/site-packages/_pytest/python_api.py b/venv/lib/python3.10/site-packages/_pytest/python_api.py new file mode 100644 index 0000000000000000000000000000000000000000..74346c349769fe0cbf725f881240b7170d2f9b9a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/python_api.py @@ -0,0 +1,809 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Collection +from collections.abc import Mapping +from collections.abc import Sequence +from collections.abc import Sized +from decimal import Decimal +import math +from numbers import Complex +import pprint +import sys +from typing import Any +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from numpy import ndarray + + +def _compare_approx( + full_object: object, + message_data: Sequence[tuple[str, str, str]], + number_of_elements: int, + different_ids: Sequence[object], + max_abs_diff: float, + max_rel_diff: float, +) -> list[str]: + message_list = list(message_data) + message_list.insert(0, ("Index", "Obtained", "Expected")) + max_sizes = [0, 0, 0] + for index, obtained, expected in message_list: + max_sizes[0] = max(max_sizes[0], len(index)) + max_sizes[1] = max(max_sizes[1], len(obtained)) + max_sizes[2] = max(max_sizes[2], len(expected)) + explanation = [ + f"comparison failed. Mismatched elements: {len(different_ids)} / {number_of_elements}:", + f"Max absolute difference: {max_abs_diff}", + f"Max relative difference: {max_rel_diff}", + ] + [ + f"{indexes:<{max_sizes[0]}} | {obtained:<{max_sizes[1]}} | {expected:<{max_sizes[2]}}" + for indexes, obtained, expected in message_list + ] + return explanation + + +# builtin pytest.approx helper + + +class ApproxBase: + """Provide shared utilities for making approximate comparisons between + numbers or sequences of numbers.""" + + # Tell numpy to use our `__eq__` operator instead of its. + __array_ufunc__ = None + __array_priority__ = 100 + + def __init__(self, expected, rel=None, abs=None, nan_ok: bool = False) -> None: + __tracebackhide__ = True + self.expected = expected + self.abs = abs + self.rel = rel + self.nan_ok = nan_ok + self._check_type() + + def __repr__(self) -> str: + raise NotImplementedError + + def _repr_compare(self, other_side: Any) -> list[str]: + return [ + "comparison failed", + f"Obtained: {other_side}", + f"Expected: {self}", + ] + + def __eq__(self, actual) -> bool: + return all( + a == self._approx_scalar(x) for a, x in self._yield_comparisons(actual) + ) + + def __bool__(self): + __tracebackhide__ = True + raise AssertionError( + "approx() is not supported in a boolean context.\nDid you mean: `assert a == approx(b)`?" + ) + + # Ignore type because of https://github.com/python/mypy/issues/4266. 
+    __hash__ = None  # type: ignore
+
+    def __ne__(self, actual) -> bool:
+        return not (actual == self)
+
+    def _approx_scalar(self, x) -> ApproxScalar:
+        if isinstance(x, Decimal):
+            return ApproxDecimal(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+        return ApproxScalar(x, rel=self.rel, abs=self.abs, nan_ok=self.nan_ok)
+
+    def _yield_comparisons(self, actual):
+        """Yield all the pairs of numbers to be compared.
+
+        This is used to implement the `__eq__` method.
+        """
+        raise NotImplementedError
+
+    def _check_type(self) -> None:
+        """Raise a TypeError if the expected value is not a valid type."""
+        # This is only a concern if the expected value is a sequence. In every
+        # other case, the approx() function ensures that the expected value has
+        # a numeric type. For this reason, the default is to do nothing. The
+        # classes that deal with sequences should reimplement this method to
+        # raise if there are any non-numeric elements in the sequence.
+
+
+def _recursive_sequence_map(f, x):
+    """Recursively map a function over a sequence of arbitrary depth"""
+    if isinstance(x, (list, tuple)):
+        seq_type = type(x)
+        return seq_type(_recursive_sequence_map(f, xi) for xi in x)
+    elif _is_sequence_like(x):
+        return [_recursive_sequence_map(f, xi) for xi in x]
+    else:
+        return f(x)
+
+
+class ApproxNumpy(ApproxBase):
+    """Perform approximate comparisons where the expected value is a numpy array."""
+
+    def __repr__(self) -> str:
+        list_scalars = _recursive_sequence_map(
+            self._approx_scalar, self.expected.tolist()
+        )
+        return f"approx({list_scalars!r})"
+
+    def _repr_compare(self, other_side: ndarray | list[Any]) -> list[str]:
+        import itertools
+        import math
+
+        def get_value_from_nested_list(
+            nested_list: list[Any], nd_index: tuple[Any, ...]
+        ) -> Any:
+            """
+            Helper function to get the value out of a nested list, given an n-dimensional index.
+            This mimics numpy's indexing, but for raw nested python lists.
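+
+            For example (illustrative):
+            ``get_value_from_nested_list([[1, 2], [3, 4]], (1, 0))`` returns ``3``.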
+ """ + value: Any = nested_list + for i in nd_index: + value = value[i] + return value + + np_array_shape = self.expected.shape + approx_side_as_seq = _recursive_sequence_map( + self._approx_scalar, self.expected.tolist() + ) + + # convert other_side to numpy array to ensure shape attribute is available + other_side_as_array = _as_numpy_array(other_side) + assert other_side_as_array is not None + + if np_array_shape != other_side_as_array.shape: + return [ + "Impossible to compare arrays with different shapes.", + f"Shapes: {np_array_shape} and {other_side_as_array.shape}", + ] + + number_of_elements = self.expected.size + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for index in itertools.product(*(range(i) for i in np_array_shape)): + approx_value = get_value_from_nested_list(approx_side_as_seq, index) + other_value = get_value_from_nested_list(other_side_as_array, index) + if approx_value != other_value: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + if other_value == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value)) + different_ids.append(index) + + message_data = [ + ( + str(index), + str(get_value_from_nested_list(other_side_as_array, index)), + str(get_value_from_nested_list(approx_side_as_seq, index)), + ) + for index in different_ids + ] + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + import numpy as np + + # self.expected is supposed to always be an array here. + + if not np.isscalar(actual): + try: + actual = np.asarray(actual) + except Exception as e: + raise TypeError(f"cannot compare '{actual}' to numpy.ndarray") from e + + if not np.isscalar(actual) and actual.shape != self.expected.shape: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + import numpy as np + + # `actual` can either be a numpy array or a scalar, it is treated in + # `__eq__` before being passed to `ApproxBase.__eq__`, which is the + # only method that calls this one. 
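+        # For example (illustrative): with expected np.array([1.0, 2.0]), a scalar
+        # actual of 1.5 yields the pairs (1.5, 1.0) and (1.5, 2.0); an array actual
+        # yields elementwise (actual, expected) pairs instead.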
+ + if np.isscalar(actual): + for i in np.ndindex(self.expected.shape): + yield actual, self.expected[i].item() + else: + for i in np.ndindex(self.expected.shape): + yield actual[i].item(), self.expected[i].item() + + +class ApproxMapping(ApproxBase): + """Perform approximate comparisons where the expected value is a mapping + with numeric values (the keys can be anything).""" + + def __repr__(self) -> str: + return f"approx({ ({k: self._approx_scalar(v) for k, v in self.expected.items()})!r})" + + def _repr_compare(self, other_side: Mapping[object, float]) -> list[str]: + import math + + approx_side_as_map = { + k: self._approx_scalar(v) for k, v in self.expected.items() + } + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for (approx_key, approx_value), other_value in zip( + approx_side_as_map.items(), other_side.values() + ): + if approx_value != other_value: + if approx_value.expected is not None and other_value is not None: + try: + max_abs_diff = max( + max_abs_diff, abs(approx_value.expected - other_value) + ) + if approx_value.expected == 0.0: + max_rel_diff = math.inf + else: + max_rel_diff = max( + max_rel_diff, + abs( + (approx_value.expected - other_value) + / approx_value.expected + ), + ) + except ZeroDivisionError: + pass + different_ids.append(approx_key) + + message_data = [ + (str(key), str(other_side[key]), str(approx_side_as_map[key])) + for key in different_ids + ] + + return _compare_approx( + self.expected, + message_data, + number_of_elements, + different_ids, + max_abs_diff, + max_rel_diff, + ) + + def __eq__(self, actual) -> bool: + try: + if set(actual.keys()) != set(self.expected.keys()): + return False + except AttributeError: + return False + + return super().__eq__(actual) + + def _yield_comparisons(self, actual): + for k in self.expected.keys(): + yield actual[k], self.expected[k] + + def _check_type(self) -> None: + __tracebackhide__ = True + for key, value in self.expected.items(): + if isinstance(value, type(self.expected)): + msg = "pytest.approx() does not support nested dictionaries: key={!r} value={!r}\n full mapping={}" + raise TypeError(msg.format(key, value, pprint.pformat(self.expected))) + + +class ApproxSequenceLike(ApproxBase): + """Perform approximate comparisons where the expected value is a sequence of numbers.""" + + def __repr__(self) -> str: + seq_type = type(self.expected) + if seq_type not in (tuple, list): + seq_type = list + return f"approx({seq_type(self._approx_scalar(x) for x in self.expected)!r})" + + def _repr_compare(self, other_side: Sequence[float]) -> list[str]: + import math + + if len(self.expected) != len(other_side): + return [ + "Impossible to compare lists with different sizes.", + f"Lengths: {len(self.expected)} and {len(other_side)}", + ] + + approx_side_as_map = _recursive_sequence_map(self._approx_scalar, self.expected) + + number_of_elements = len(approx_side_as_map) + max_abs_diff = -math.inf + max_rel_diff = -math.inf + different_ids = [] + for i, (approx_value, other_value) in enumerate( + zip(approx_side_as_map, other_side) + ): + if approx_value != other_value: + try: + abs_diff = abs(approx_value.expected - other_value) + max_abs_diff = max(max_abs_diff, abs_diff) + # Ignore non-numbers for the diff calculations (#13012). 
+                except TypeError:
+                    pass
+                else:
+                    if other_value == 0.0:
+                        max_rel_diff = math.inf
+                    else:
+                        max_rel_diff = max(max_rel_diff, abs_diff / abs(other_value))
+                different_ids.append(i)
+        message_data = [
+            (str(i), str(other_side[i]), str(approx_side_as_map[i]))
+            for i in different_ids
+        ]
+
+        return _compare_approx(
+            self.expected,
+            message_data,
+            number_of_elements,
+            different_ids,
+            max_abs_diff,
+            max_rel_diff,
+        )
+
+    def __eq__(self, actual) -> bool:
+        try:
+            if len(actual) != len(self.expected):
+                return False
+        except TypeError:
+            return False
+        return super().__eq__(actual)
+
+    def _yield_comparisons(self, actual):
+        return zip(actual, self.expected)
+
+    def _check_type(self) -> None:
+        __tracebackhide__ = True
+        for index, x in enumerate(self.expected):
+            if isinstance(x, type(self.expected)):
+                msg = "pytest.approx() does not support nested data structures: {!r} at index {}\n full sequence: {}"
+                raise TypeError(msg.format(x, index, pprint.pformat(self.expected)))
+
+
+class ApproxScalar(ApproxBase):
+    """Perform approximate comparisons where the expected value is a single number."""
+
+    # Using Real should be better than this Union, but not possible yet:
+    # https://github.com/python/typeshed/pull/3108
+    DEFAULT_ABSOLUTE_TOLERANCE: float | Decimal = 1e-12
+    DEFAULT_RELATIVE_TOLERANCE: float | Decimal = 1e-6
+
+    def __repr__(self) -> str:
+        """Return a string communicating both the expected value and the
+        tolerance for the comparison being made.
+
+        For example, ``1.0 ± 1e-6``, ``(3+4j) ± 5e-6 ∠ ±180°``.
+        """
+        # Don't show a tolerance for values that aren't compared using
+        # tolerances, i.e. non-numerics and infinities. Need to call abs to
+        # handle complex numbers, e.g. (inf + 1j).
+        if (
+            isinstance(self.expected, bool)
+            or (not isinstance(self.expected, (Complex, Decimal)))
+            or math.isinf(abs(self.expected))
+        ):
+            return str(self.expected)
+
+        # If a sensible tolerance can't be calculated, self.tolerance will
+        # raise a ValueError. In this case, display '???'.
+        try:
+            if 1e-3 <= self.tolerance < 1e3:
+                vetted_tolerance = f"{self.tolerance:n}"
+            else:
+                vetted_tolerance = f"{self.tolerance:.1e}"
+
+            if (
+                isinstance(self.expected, Complex)
+                and self.expected.imag
+                and not math.isinf(self.tolerance)
+            ):
+                vetted_tolerance += " ∠ ±180°"
+        except ValueError:
+            vetted_tolerance = "???"
+
+        return f"{self.expected} ± {vetted_tolerance}"
+
+    def __eq__(self, actual) -> bool:
+        """Return whether the given value is equal to the expected value
+        within the pre-specified tolerance."""
+
+        def is_bool(val: Any) -> bool:
+            # Check if `val` is a native bool or numpy bool.
+            if isinstance(val, bool):
+                return True
+            if np := sys.modules.get("numpy"):
+                return isinstance(val, np.bool_)
+            return False
+
+        asarray = _as_numpy_array(actual)
+        if asarray is not None:
+            # Call ``__eq__()`` manually to prevent infinite-recursion with
+            # numpy<1.13. See #3748.
+            return all(self.__eq__(a) for a in asarray.flat)
+
+        # Short-circuit exact equality, except for bool and np.bool_
+        if is_bool(self.expected) and not is_bool(actual):
+            return False
+        elif actual == self.expected:
+            return True
+
+        # If either type is non-numeric, fall back to strict equality.
+        # NB: we need Complex, rather than just Number, to ensure that __abs__,
+        # __sub__, and __float__ are defined. Also, consider bool to be
+        # non-numeric, even though it has the required arithmetic.
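+        # For example (illustrative): ``"foo" == approx("foo")`` is handled by the
+        # exact-equality short-circuit above, while ``"foo" == approx(1.0)`` falls
+        # through to here and returns False, since str is neither Complex nor Decimal.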
+ if is_bool(self.expected) or not ( + isinstance(self.expected, (Complex, Decimal)) + and isinstance(actual, (Complex, Decimal)) + ): + return False + + # Allow the user to control whether NaNs are considered equal to each + # other or not. The abs() calls are for compatibility with complex + # numbers. + if math.isnan(abs(self.expected)): + return self.nan_ok and math.isnan(abs(actual)) + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + result: bool = abs(self.expected - actual) <= self.tolerance + return result + + # Ignore type because of https://github.com/python/mypy/issues/4266. + __hash__ = None # type: ignore + + @property + def tolerance(self): + """Return the tolerance for the comparison. + + This could be either an absolute tolerance or a relative tolerance, + depending on what the user specified or which would be larger. + """ + + def set_default(x, default): + return x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, self.DEFAULT_ABSOLUTE_TOLERANCE) + + if absolute_tolerance < 0: + raise ValueError( + f"absolute tolerance can't be negative: {absolute_tolerance}" + ) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default( + self.rel, self.DEFAULT_RELATIVE_TOLERANCE + ) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError( + f"relative tolerance can't be negative: {relative_tolerance}" + ) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +class ApproxDecimal(ApproxScalar): + """Perform approximate comparisons where the expected value is a Decimal.""" + + DEFAULT_ABSOLUTE_TOLERANCE = Decimal("1e-12") + DEFAULT_RELATIVE_TOLERANCE = Decimal("1e-6") + + def __repr__(self) -> str: + if isinstance(self.rel, float): + rel = Decimal.from_float(self.rel) + else: + rel = self.rel + + if isinstance(self.abs, float): + abs_ = Decimal.from_float(self.abs) + else: + abs_ = self.abs + + tol_str = "???" 
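+        # Sketch of the intent of the branches below: show the relative tolerance
+        # when one was given and falls in a readable range, otherwise fall back to
+        # the absolute tolerance; the "???" placeholder remains if neither was set.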
+ if rel is not None and Decimal("1e-3") <= rel <= Decimal("1e3"): + tol_str = f"{rel:.1e}" + elif abs_ is not None: + tol_str = f"{abs_:.1e}" + + return f"{self.expected} ± {tol_str}" + + +def approx(expected, rel=None, abs=None, nan_ok: bool = False) -> ApproxBase: + """Assert that two numbers (or two ordered sequences of numbers) are equal to each other + within some tolerance. + + Due to the :doc:`python:tutorial/floatingpoint`, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works for ordered sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + ``numpy`` arrays:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.4]) == approx(np.array([0.3, 0.6])) # doctest: +SKIP + True + + And for a ``numpy`` array against a scalar:: + + >>> import numpy as np # doctest: +SKIP + >>> np.array([0.1, 0.2]) + np.array([0.2, 0.1]) == approx(0.3) # doctest: +SKIP + True + + Only ordered sequences are supported, because ``approx`` needs + to infer the relative position of the sequences without ambiguity. This means + ``sets`` and other unordered sequences are not supported. + + Finally, dictionary *values* can also be compared:: + + >>> {'a': 0.1 + 0.2, 'b': 0.2 + 0.4} == approx({'a': 0.3, 'b': 0.6}) + True + + The comparison will be true if both mappings have the same keys and their + respective values match the expected tolerances. + + **Tolerances** + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinity and NaN are special cases. Infinity is only considered + equal to itself, regardless of the relative tolerance. NaN is not + considered equal to anything by default, but you can make it be equal to + itself by setting the ``nan_ok`` argument to True. (This is meant to + facilitate comparing arrays that use NaN to mean "no data".) 
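+
+    For example (illustrating the defaults described above)::
+
+        >>> float("nan") == approx(float("nan"))
+        False
+        >>> float("nan") == approx(float("nan"), nan_ok=True)
+        True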
+
+    Both the relative and absolute tolerances can be changed by passing
+    arguments to the ``approx`` constructor::
+
+        >>> 1.0001 == approx(1)
+        False
+        >>> 1.0001 == approx(1, rel=1e-3)
+        True
+        >>> 1.0001 == approx(1, abs=1e-3)
+        True
+
+    If you specify ``abs`` but not ``rel``, the comparison will not consider
+    the relative tolerance at all. In other words, two numbers that are within
+    the default relative tolerance of ``1e-6`` will still be considered unequal
+    if they exceed the specified absolute tolerance. If you specify both
+    ``abs`` and ``rel``, the numbers will be considered equal if either
+    tolerance is met::
+
+        >>> 1 + 1e-8 == approx(1)
+        True
+        >>> 1 + 1e-8 == approx(1, abs=1e-12)
+        False
+        >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
+        True
+
+    **Non-numeric types**
+
+    You can also use ``approx`` to compare non-numeric types, or dicts and
+    sequences containing non-numeric types, in which case it falls back to
+    strict equality. This can be useful for comparing dicts and sequences that
+    can contain optional values::
+
+        >>> {"required": 1.0000005, "optional": None} == approx({"required": 1, "optional": None})
+        True
+        >>> [None, 1.0000005] == approx([None,1])
+        True
+        >>> ["foo", 1.0000005] == approx([None,1])
+        False
+
+    If you're thinking about using ``approx``, then you might want to know how
+    it compares to other good ways of comparing floating-point numbers. All of
+    these algorithms are based on relative and absolute tolerances and should
+    agree for the most part, but they do have meaningful differences:
+
+    - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+      tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+      tolerance is met. Because the relative tolerance is calculated w.r.t.
+      both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+      ``b`` is a "reference value"). You have to specify an absolute tolerance
+      if you want to compare to ``0.0`` because there is no tolerance by
+      default. More information: :py:func:`math.isclose`.
+
+    - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+      between ``a`` and ``b`` is less than the sum of the relative tolerance
+      w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+      is only calculated w.r.t. ``b``, this test is asymmetric and you can
+      think of ``b`` as the reference value. Support for comparing sequences
+      is provided by :py:func:`numpy.allclose`. More information:
+      :std:doc:`numpy:reference/generated/numpy.isclose`.
+
+    - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+      are within an absolute tolerance of ``1e-7``. No relative tolerance is
+      considered, so this function is not appropriate for very large or very
+      small numbers. Also, it's only available in subclasses of ``unittest.TestCase``
+      and it's ugly because it doesn't follow PEP8. More information:
+      :py:meth:`unittest.TestCase.assertAlmostEqual`.
+
+    - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+      tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+      Because the relative tolerance is only calculated w.r.t. ``b``, this test
+      is asymmetric and you can think of ``b`` as the reference value. In the
+      special case that you explicitly specify an absolute tolerance but not a
+      relative tolerance, only the absolute tolerance is considered.
+
+    .. note::
+
+        ``approx`` can handle numpy arrays, but we recommend the
+        specialised test helpers in :std:doc:`numpy:reference/routines.testing`
+        if you need support for comparisons, NaNs, or ULP-based tolerances.
+
+        To match strings using regex, you can use
+        `Matches <https://github.com/asottile/re-assert>`_
+        from the
+        `re_assert package <https://github.com/asottile/re-assert>`_.
+
+    .. note::
+
+        Unlike built-in equality, this function considers
+        booleans unequal to numeric zero or one. For example::
+
+            >>> 1 == approx(True)
+            False
+
+    .. warning::
+
+        .. versionchanged:: 3.2
+
+        In order to avoid inconsistent behavior, :py:exc:`TypeError` is
+        raised for ``>``, ``>=``, ``<`` and ``<=`` comparisons.
+        The example below illustrates the problem::
+
+            assert approx(0.1) > 0.1 + 1e-10  # calls approx(0.1).__gt__(0.1 + 1e-10)
+            assert 0.1 + 1e-10 > approx(0.1)  # calls approx(0.1).__lt__(0.1 + 1e-10)
+
+        In the second example one expects ``approx(0.1).__le__(0.1 + 1e-10)``
+        to be called. But instead, ``approx(0.1).__lt__(0.1 + 1e-10)`` is used for
+        the comparison. This is because the call hierarchy of rich comparisons
+        follows a fixed behavior. More information: :py:meth:`object.__ge__`
+
+    .. versionchanged:: 3.7.1
+        ``approx`` raises ``TypeError`` when it encounters a dict value or
+        sequence element of non-numeric type.
+
+    .. versionchanged:: 6.1.0
+        ``approx`` falls back to strict equality for non-numeric types instead
+        of raising ``TypeError``.
+    """
+    # Delegate the comparison to a class that knows how to deal with the type
+    # of the expected value (e.g. int, float, list, dict, numpy.array, etc).
+    #
+    # The primary responsibility of these classes is to implement ``__eq__()``
+    # and ``__repr__()``. The former is used to actually check if some
+    # "actual" value is equivalent to the given expected value within the
+    # allowed tolerance. The latter is used to show the user the expected
+    # value and tolerance, in the case that a test failed.
+    #
+    # The actual logic for making approximate comparisons can be found in
+    # ApproxScalar, which is used to compare individual numbers. All of the
+    # other Approx classes eventually delegate to this class. The ApproxBase
+    # class provides some convenient methods and overloads, but isn't really
+    # essential.
+
+    __tracebackhide__ = True
+
+    if isinstance(expected, Decimal):
+        cls: type[ApproxBase] = ApproxDecimal
+    elif isinstance(expected, Mapping):
+        cls = ApproxMapping
+    elif _is_numpy_array(expected):
+        expected = _as_numpy_array(expected)
+        cls = ApproxNumpy
+    elif _is_sequence_like(expected):
+        cls = ApproxSequenceLike
+    elif isinstance(expected, Collection) and not isinstance(expected, (str, bytes)):
+        msg = f"pytest.approx() only supports ordered sequences, but got: {expected!r}"
+        raise TypeError(msg)
+    else:
+        cls = ApproxScalar
+
+    return cls(expected, rel, abs, nan_ok)
+
+
+def _is_sequence_like(expected: object) -> bool:
+    return (
+        hasattr(expected, "__getitem__")
+        and isinstance(expected, Sized)
+        and not isinstance(expected, (str, bytes))
+    )
+
+
+def _is_numpy_array(obj: object) -> bool:
+    """
+    Return true if the given object is implicitly convertible to ndarray,
+    and numpy is already imported.
+    """
+    return _as_numpy_array(obj) is not None
+
+
+def _as_numpy_array(obj: object) -> ndarray | None:
+    """
+    Return an ndarray if the given object is implicitly convertible to ndarray,
+    and numpy is already imported, otherwise None.
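+
+    For example (illustrative): a plain list has neither ``__array__`` nor
+    ``__array_interface__`` and yields None, while an ``np.ndarray`` is
+    returned unchanged.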
+    """
+    np: Any = sys.modules.get("numpy")
+    if np is not None:
+        # avoid infinite recursion on numpy scalars, which have __array__
+        if np.isscalar(obj):
+            return None
+        elif isinstance(obj, np.ndarray):
+            return obj
+        elif hasattr(obj, "__array__") or hasattr(obj, "__array_interface__"):
+            return np.asarray(obj)
+    return None
diff --git a/venv/lib/python3.10/site-packages/_pytest/raises.py b/venv/lib/python3.10/site-packages/_pytest/raises.py
new file mode 100644
index 0000000000000000000000000000000000000000..78fae6ddcde74e3187a95a958eb397857f21e64a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/_pytest/raises.py
@@ -0,0 +1,1519 @@
+from __future__ import annotations
+
+from abc import ABC
+from abc import abstractmethod
+import re
+from re import Pattern
+import sys
+from textwrap import indent
+from typing import Any
+from typing import cast
+from typing import final
+from typing import Generic
+from typing import get_args
+from typing import get_origin
+from typing import Literal
+from typing import overload
+from typing import TYPE_CHECKING
+import warnings
+
+from _pytest._code import ExceptionInfo
+from _pytest._code.code import stringify_exception
+from _pytest.outcomes import fail
+from _pytest.warning_types import PytestWarning
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from collections.abc import Sequence
+
+    # for some reason Sphinx does not play well with 'from types import TracebackType'
+    import types
+
+    from typing_extensions import ParamSpec
+    from typing_extensions import TypeGuard
+    from typing_extensions import TypeVar
+
+    P = ParamSpec("P")
+
+    # this conditional definition is because we want to allow a TypeVar default
+    BaseExcT_co_default = TypeVar(
+        "BaseExcT_co_default",
+        bound=BaseException,
+        default=BaseException,
+        covariant=True,
+    )
+
+    # Use short name because it shows up in docs.
+    E = TypeVar("E", bound=BaseException, default=BaseException)
+else:
+    from typing import TypeVar
+
+    BaseExcT_co_default = TypeVar(
+        "BaseExcT_co_default", bound=BaseException, covariant=True
+    )
+
+# RaisesGroup doesn't work with a default.
+BaseExcT_co = TypeVar("BaseExcT_co", bound=BaseException, covariant=True)
+BaseExcT_1 = TypeVar("BaseExcT_1", bound=BaseException)
+BaseExcT_2 = TypeVar("BaseExcT_2", bound=BaseException)
+ExcT_1 = TypeVar("ExcT_1", bound=Exception)
+ExcT_2 = TypeVar("ExcT_2", bound=Exception)
+
+if sys.version_info < (3, 11):
+    from exceptiongroup import BaseExceptionGroup
+    from exceptiongroup import ExceptionGroup
+
+
+# String patterns default to including the unicode flag.
+_REGEX_NO_FLAGS = re.compile(r"").flags
+
+
+# pytest.raises helper
+@overload
+def raises(
+    expected_exception: type[E] | tuple[type[E], ...],
+    *,
+    match: str | re.Pattern[str] | None = ...,
+    check: Callable[[E], bool] = ...,
+) -> RaisesExc[E]: ...
+
+
+@overload
+def raises(
+    *,
+    match: str | re.Pattern[str],
+    # If exception_type is not provided, check() must do any typechecks itself.
+    check: Callable[[BaseException], bool] = ...,
+) -> RaisesExc[BaseException]: ...
+
+
+@overload
+def raises(*, check: Callable[[BaseException], bool]) -> RaisesExc[BaseException]: ...
+
+
+@overload
+def raises(
+    expected_exception: type[E] | tuple[type[E], ...],
+    func: Callable[..., Any],
+    *args: Any,
+    **kwargs: Any,
+) -> ExceptionInfo[E]: ...
+
+
+def raises(
+    expected_exception: type[E] | tuple[type[E], ...] | None = None,
+    *args: Any,
+    **kwargs: Any,
+) -> RaisesExc[BaseException] | ExceptionInfo[E]:
+    r"""Assert that a code block/function call raises an exception type, or one of its subclasses.
+
+    :param expected_exception:
+        The expected exception type, or a tuple if one of multiple possible
+        exception types is expected. Note that subclasses of the passed exceptions
+        will also match.
+
+        This is not a required parameter; you may opt to only use ``match`` and/or
+        ``check`` for verifying the raised exception.
+
+    :kwparam str | re.Pattern[str] | None match:
+        If specified, a string containing a regular expression,
+        or a regular expression object, that is tested against the string
+        representation of the exception and its :pep:`678` ``__notes__``
+        using :func:`re.search`.
+
+        To match a literal string that may contain :ref:`special characters
+        <re-syntax>`, the pattern can first be escaped with :func:`re.escape`.
+
+        (This is only used when ``pytest.raises`` is used as a context manager,
+        and passed through to the function otherwise.
+        When using ``pytest.raises`` as a function, you can use:
+        ``pytest.raises(Exc, func, match="passed on").match("my pattern")``.)
+
+    :kwparam Callable[[BaseException], bool] check:
+
+        .. versionadded:: 8.4
+
+        If specified, a callable that will be called with the exception as a parameter
+        after checking the type and the match regex if specified.
+        If it returns ``True`` it will be considered a match, if not it will
+        be considered a failed match.
+
+    Use ``pytest.raises`` as a context manager, which will capture the exception of the given
+    type, or any of its subclasses::
+
+        >>> import pytest
+        >>> with pytest.raises(ZeroDivisionError):
+        ...     1/0
+
+    If the code block does not raise the expected exception (:class:`ZeroDivisionError` in the example
+    above), or no exception at all, the check will fail instead.
+
+    You can also use the keyword argument ``match`` to assert that the
+    exception matches a text or regex::
+
+        >>> with pytest.raises(ValueError, match='must be 0 or None'):
+        ...     raise ValueError("value must be 0 or None")
+
+        >>> with pytest.raises(ValueError, match=r'must be \d+$'):
+        ...     raise ValueError("value must be 42")
+
+    The ``match`` argument searches the formatted exception string, which includes any
+    `PEP-678 <https://peps.python.org/pep-0678/>`__ ``__notes__``:
+
+    >>> with pytest.raises(ValueError, match=r"had a note added"):  # doctest: +SKIP
+    ...     e = ValueError("value must be 42")
+    ...     e.add_note("had a note added")
+    ...     raise e
+
+    The ``check`` argument, if provided, must return True when passed the raised exception
+    for the match to be successful, otherwise an :exc:`AssertionError` is raised.
+
+    >>> import errno
+    >>> with pytest.raises(OSError, check=lambda e: e.errno == errno.EACCES):
+    ...     raise OSError(errno.EACCES, "no permission to view")
+
+    The context manager produces an :class:`ExceptionInfo` object which can be used to inspect the
+    details of the captured exception::
+
+        >>> with pytest.raises(ValueError) as exc_info:
+        ...     raise ValueError("value must be 42")
+        >>> assert exc_info.type is ValueError
+        >>> assert exc_info.value.args[0] == "value must be 42"
+
+    .. warning::
+
+        Given that ``pytest.raises`` matches subclasses, be wary of using it to match :class:`Exception` like this::
+
+            # Careful, this will catch ANY exception raised.
+            with pytest.raises(Exception):
+                some_function()
+
+        Because :class:`Exception` is the base class of almost all exceptions, it is easy for this to hide
+        real bugs, where the user wrote this expecting a specific exception, but some other exception is being
+        raised due to a bug introduced during a refactoring.
+
+        Avoid using ``pytest.raises`` to catch :class:`Exception` unless certain that you really want to catch
+        **any** exception raised.
+
+    .. note::
+
+        When using ``pytest.raises`` as a context manager, it's worthwhile to
+        note that normal context manager rules apply and that the exception
+        raised *must* be the final line in the scope of the context manager.
+        Lines of code after that, within the scope of the context manager, will
+        not be executed. For example::
+
+            >>> value = 15
+            >>> with pytest.raises(ValueError) as exc_info:
+            ...     if value > 10:
+            ...         raise ValueError("value must be <= 10")
+            ...     assert exc_info.type is ValueError  # This will not execute.
+
+        Instead, the following approach must be taken (note the difference in
+        scope)::
+
+            >>> with pytest.raises(ValueError) as exc_info:
+            ...     if value > 10:
+            ...         raise ValueError("value must be <= 10")
+            ...
+            >>> assert exc_info.type is ValueError
+
+    **Expecting exception groups**
+
+    When expecting exceptions wrapped in :exc:`BaseExceptionGroup` or
+    :exc:`ExceptionGroup`, you should instead use :class:`pytest.RaisesGroup`.
+
+    **Using with** ``pytest.mark.parametrize``
+
+    When using :ref:`pytest.mark.parametrize ref`
+    it is possible to parametrize tests such that
+    some runs raise an exception and others do not.
+
+    See :ref:`parametrizing_conditional_raising` for an example.
+
+    .. seealso::
+
+        :ref:`assertraises` for more examples and detailed discussion.
+
+    **Legacy form**
+
+    It is possible to specify a callable by passing a to-be-called lambda::
+
+        >>> raises(ZeroDivisionError, lambda: 1/0)
+        <ExceptionInfo ...>
+
+    or you can specify an arbitrary callable with arguments::
+
+        >>> def f(x): return 1/x
+        ...
+        >>> raises(ZeroDivisionError, f, 0)
+        <ExceptionInfo ...>
+        >>> raises(ZeroDivisionError, f, x=0)
+        <ExceptionInfo ...>
+
+    The form above is fully supported but discouraged for new code because the
+    context manager form is regarded as more readable and less error-prone.
+
+    .. note::
+        Similar to caught exception objects in Python, explicitly clearing
+        local references to returned ``ExceptionInfo`` objects can
+        help the Python interpreter speed up its garbage collection.
+
+        Clearing those references breaks a reference cycle
+        (``ExceptionInfo`` --> caught exception --> frame stack raising
+        the exception --> current frame stack --> local variables -->
+        ``ExceptionInfo``) which makes Python keep all objects referenced
+        from that cycle (including all local variables in the current
+        frame) alive until the next cyclic garbage collection run.
+        More detailed information can be found in the official Python
+        documentation for :ref:`the try statement <try>`.
+    """
+    __tracebackhide__ = True
+
+    if not args:
+        if set(kwargs) - {"match", "check", "expected_exception"}:
+            msg = "Unexpected keyword arguments passed to pytest.raises: "
+            msg += ", ".join(sorted(kwargs))
+            msg += "\nUse context-manager form instead?"
+            raise TypeError(msg)
+
+        if expected_exception is None:
+            return RaisesExc(**kwargs)
+        return RaisesExc(expected_exception, **kwargs)
+
+    if not expected_exception:
+        raise ValueError(
+            f"Expected an exception type or a tuple of exception types, but got `{expected_exception!r}`. "
+            f"Raising exceptions is already understood as failing the test, so you don't need "
+            f"any special code to say 'this should never raise an exception'."
+        )
+    func = args[0]
+    if not callable(func):
+        raise TypeError(f"{func!r} object (type: {type(func)}) must be callable")
+    with RaisesExc(expected_exception) as excinfo:
+        func(*args[1:], **kwargs)
+    try:
+        return excinfo
+    finally:
+        del excinfo
+
+
+# note: RaisesExc/RaisesGroup uses fail() internally, so this alias
+# indicates (to [internal] plugins?) that `pytest.raises` will
+# raise `_pytest.outcomes.Failed`, where
+# `outcomes.Failed is outcomes.fail.Exception is raises.Exception`
+# note: this is *not* the same as `_pytest.main.Failed`
+# note: mypy does not recognize this attribute, and it's not possible
+# to use a protocol/decorator like the others in outcomes due to
+# https://github.com/python/mypy/issues/18715
+raises.Exception = fail.Exception  # type: ignore[attr-defined]
+
+
+def _match_pattern(match: Pattern[str]) -> str | Pattern[str]:
+    """Helper function to remove redundant `re.compile` calls when printing regex"""
+    return match.pattern if match.flags == _REGEX_NO_FLAGS else match
+
+
+def repr_callable(fun: Callable[[BaseExcT_1], bool]) -> str:
+    """Get the repr of a ``check`` parameter.
+
+    Split out so it can be monkeypatched (e.g. by hypothesis)
+    """
+    return repr(fun)
+
+
+def backquote(s: str) -> str:
+    return "`" + s + "`"
+
+
+def _exception_type_name(
+    e: type[BaseException] | tuple[type[BaseException], ...],
+) -> str:
+    if isinstance(e, type):
+        return e.__name__
+    if len(e) == 1:
+        return e[0].__name__
+    return "(" + ", ".join(ee.__name__ for ee in e) + ")"
+
+
+def _check_raw_type(
+    expected_type: type[BaseException] | tuple[type[BaseException], ...] | None,
+    exception: BaseException,
+) -> str | None:
+    if expected_type is None or expected_type == ():
+        return None
+
+    if not isinstance(
+        exception,
+        expected_type,
+    ):
+        actual_type_str = backquote(_exception_type_name(type(exception)) + "()")
+        expected_type_str = backquote(_exception_type_name(expected_type))
+        if (
+            isinstance(exception, BaseExceptionGroup)
+            and isinstance(expected_type, type)
+            and not issubclass(expected_type, BaseExceptionGroup)
+        ):
+            return f"Unexpected nested {actual_type_str}, expected {expected_type_str}"
+        return f"{actual_type_str} is not an instance of {expected_type_str}"
+    return None
+
+
+def is_fully_escaped(s: str) -> bool:
+    # we know we won't compile with re.VERBOSE, so whitespace doesn't need to be escaped
+    metacharacters = "{}()+.*?^$[]"
+    return not any(
+        c in metacharacters and (i == 0 or s[i - 1] != "\\") for (i, c) in enumerate(s)
+    )
+
+
+def unescape(s: str) -> str:
+    return re.sub(r"\\([{}()+\-.*?^$\[\]\s\\])", r"\1", s)
+
+
+# These classes conceptually differ from ExceptionInfo in that ExceptionInfo is tied to,
+# and constructed from, a particular exception - whereas these are constructed with
+# expected exceptions, and later allow matching against particular exceptions.
+# But there's overlap in `ExceptionInfo.match` and `AbstractRaises._check_match`, as with
+# `AbstractRaises.matches` and `ExceptionInfo.errisinstance`+`ExceptionInfo.group_contains`.
+# The interaction between these classes should perhaps be improved.
+class AbstractRaises(ABC, Generic[BaseExcT_co]):
+    """ABC with common functionality shared between RaisesExc and RaisesGroup."""
+
+    def __init__(
+        self,
+        *,
+        match: str | Pattern[str] | None,
+        check: Callable[[BaseExcT_co], bool] | None,
+    ) -> None:
+        if isinstance(match, str):
+            # juggle the error to avoid adding exception context when calling fail() (necessary?)
+            re_error = None
+            try:
+                self.match: Pattern[str] | None = re.compile(match)
+            except re.error as e:
+                re_error = e
+            if re_error is not None:
+                fail(f"Invalid regex pattern provided to 'match': {re_error}")
+            if match == "":
+                warnings.warn(
+                    PytestWarning(
+                        "matching against an empty string will *always* pass. If you want "
+                        "to check for an empty message you need to pass '^$'. If you don't "
+                        "want to match you should pass `None` or leave out the parameter."
+                    ),
+                    stacklevel=2,
+                )
+        else:
+            self.match = match
+
+        # check if this is a fully escaped regex and has ^$ to match fully,
+        # in which case we can do a proper diff on error
+        self.rawmatch: str | None = None
+        if isinstance(match, str) or (
+            isinstance(match, Pattern) and match.flags == _REGEX_NO_FLAGS
+        ):
+            if isinstance(match, Pattern):
+                match = match.pattern
+            if (
+                match
+                and match[0] == "^"
+                and match[-1] == "$"
+                and is_fully_escaped(match[1:-1])
+            ):
+                self.rawmatch = unescape(match[1:-1])
+
+        self.check = check
+        self._fail_reason: str | None = None
+
+        # used to suppress repeated printing of `repr(self.check)`
+        self._nested: bool = False
+
+        # set in self._parse_exc
+        self.is_baseexception = False
+
+    def _parse_exc(
+        self, exc: type[BaseExcT_1] | types.GenericAlias, expected: str
+    ) -> type[BaseExcT_1]:
+        if isinstance(exc, type) and issubclass(exc, BaseException):
+            if not issubclass(exc, Exception):
+                self.is_baseexception = True
+            return exc
+        # because RaisesGroup does not support a variable number of exceptions there's
+        # still a use for RaisesExc(ExceptionGroup[Exception]).
+        origin_exc: type[BaseException] | None = get_origin(exc)
+        if origin_exc and issubclass(origin_exc, BaseExceptionGroup):
+            exc_type = get_args(exc)[0]
+            if (
+                issubclass(origin_exc, ExceptionGroup) and exc_type in (Exception, Any)
+            ) or (
+                issubclass(origin_exc, BaseExceptionGroup)
+                and exc_type in (BaseException, Any)
+            ):
+                if not issubclass(origin_exc, Exception):
+                    self.is_baseexception = True
+                return cast(type[BaseExcT_1], origin_exc)
+            else:
+                raise ValueError(
+                    f"Only `ExceptionGroup[Exception]` or `BaseExceptionGroup[BaseException]` "
+                    f"are accepted as generic types but got `{exc}`. "
+                    f"As `raises` will catch all instances of the specified group regardless of the "
+                    f"generic argument, specific nested exceptions have to be checked "
+                    f"with `RaisesGroup`."
+                )
+        # unclear if the Type/ValueError distinction is even helpful here
+        msg = f"expected exception must be {expected}, not "
+        if isinstance(exc, type):
+            raise ValueError(msg + f"{exc.__name__!r}")
+        if isinstance(exc, BaseException):
+            raise TypeError(msg + f"an exception instance ({type(exc).__name__})")
+        raise TypeError(msg + repr(type(exc).__name__))
+
+    @property
+    def fail_reason(self) -> str | None:
+        """Set after a call to :meth:`matches` to give a human-readable reason for why the match failed.
+        When used as a context manager the string will be printed as the reason for the
+        test failing."""
+        return self._fail_reason
+
+    def _check_check(
+        self: AbstractRaises[BaseExcT_1],
+        exception: BaseExcT_1,
+    ) -> bool:
+        if self.check is None:
+            return True
+
+        if self.check(exception):
+            return True
+
+        check_repr = "" if self._nested else " " + repr_callable(self.check)
+        self._fail_reason = f"check{check_repr} did not return True"
+        return False
+
+    # TODO: harmonize with ExceptionInfo.match
+    def _check_match(self, e: BaseException) -> bool:
+        if self.match is None or re.search(
+            self.match,
+            stringified_exception := stringify_exception(
+                e, include_subexception_msg=False
+            ),
+        ):
+            return True
+
+        # if we're matching a group, make sure we're explicit to reduce confusion
+        # if they're trying to match an exception contained within the group
+        maybe_specify_type = (
+            f" the `{_exception_type_name(type(e))}()`"
+            if isinstance(e, BaseExceptionGroup)
+            else ""
+        )
+        if isinstance(self.rawmatch, str):
+            # TODO: it instructs to use `-v` to print leading text, but that doesn't work
+            # I also don't know if this is the proper entry point, or tool to use at all
+            from _pytest.assertion.util import _diff_text
+            from _pytest.assertion.util import dummy_highlighter
+
+            diff = _diff_text(self.rawmatch, stringified_exception, dummy_highlighter)
+            self._fail_reason = ("\n" if diff[0][0] == "-" else "") + "\n".join(diff)
+            return False
+
+        # I don't love "Regex"+"Input" vs something like "expected regex"+"exception message";
+        # when they're similar, it's not always obvious which is which
+        self._fail_reason = (
+            f"Regex pattern did not match{maybe_specify_type}.\n"
+            f" Regex: {_match_pattern(self.match)!r}\n"
+            f" Input: {stringified_exception!r}"
+        )
+        if _match_pattern(self.match) == stringified_exception:
+            self._fail_reason += "\n Did you mean to `re.escape()` the regex?"
+        return False
+
+    @abstractmethod
+    def matches(
+        self: AbstractRaises[BaseExcT_1], exception: BaseException
+    ) -> TypeGuard[BaseExcT_1]:
+        """Check if an exception matches the requirements of this AbstractRaises.
+        If it fails, :attr:`AbstractRaises.fail_reason` should be set.
+        """
+
+
+@final
+class RaisesExc(AbstractRaises[BaseExcT_co_default]):
+    """
+    .. versionadded:: 8.4
+
+    This is the class constructed when calling :func:`pytest.raises`, but may be used
+    directly as a helper class with :class:`RaisesGroup` when you want to specify
+    requirements on sub-exceptions.
+
+    You don't need this if you only want to specify the type, since :class:`RaisesGroup`
+    accepts ``type[BaseException]``.
+
+    :param type[BaseException] | tuple[type[BaseException]] | None expected_exception:
+        The expected type, or one of several possible types.
+        May be ``None`` in order to only make use of ``match`` and/or ``check``.
+
+        The type is checked with :func:`isinstance`, and does not need to be an exact match.
+        If that is wanted you can use the ``check`` parameter.
+
+    :kwparam str | Pattern[str] match:
+        A regex to match.
+
+    :kwparam Callable[[BaseException], bool] check:
+        If specified, a callable that will be called with the exception as a parameter
+        after checking the type and the match regex if specified.
+        If it returns ``True`` it will be considered a match, if not it will
+        be considered a failed match.
+
+    :meth:`RaisesExc.matches` can also be used standalone to check individual exceptions.
+
+    Examples::
+
+        with RaisesGroup(RaisesExc(ValueError, match="string")):
+            ...
+        with RaisesGroup(RaisesExc(check=lambda x: x.args == (3, "hello"))):
+            ...
+        with RaisesGroup(RaisesExc(check=lambda x: type(x) is ValueError)):
+            ...
+    """
+
+    # Trio bundled hypothesis monkeypatching; we will probably instead assume that
+    # hypothesis will handle that in their pytest plugin by the time this is released.
+    # Alternatively we could add a version of get_pretty_function_description ourselves
+    # https://github.com/HypothesisWorks/hypothesis/blob/8ced2f59f5c7bea3344e35d2d53e1f8f8eb9fcd8/hypothesis-python/src/hypothesis/internal/reflection.py#L439
+
+    # At least one of the three parameters must be passed.
+    @overload
+    def __init__(
+        self,
+        expected_exception: (
+            type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...]
+        ),
+        /,
+        *,
+        match: str | Pattern[str] | None = ...,
+        check: Callable[[BaseExcT_co_default], bool] | None = ...,
+    ) -> None: ...
+
+    @overload
+    def __init__(
+        self: RaisesExc[BaseException],  # Give E a value.
+        /,
+        *,
+        match: str | Pattern[str] | None,
+        # If exception_type is not provided, check() must do any typechecks itself.
+        check: Callable[[BaseException], bool] | None = ...,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, /, *, check: Callable[[BaseException], bool]) -> None: ...
+
+    def __init__(
+        self,
+        expected_exception: (
+            type[BaseExcT_co_default] | tuple[type[BaseExcT_co_default], ...] | None
+        ) = None,
+        /,
+        *,
+        match: str | Pattern[str] | None = None,
+        check: Callable[[BaseExcT_co_default], bool] | None = None,
+    ):
+        super().__init__(match=match, check=check)
+        if isinstance(expected_exception, tuple):
+            expected_exceptions = expected_exception
+        elif expected_exception is None:
+            expected_exceptions = ()
+        else:
+            expected_exceptions = (expected_exception,)
+
+        if (expected_exceptions == ()) and match is None and check is None:
+            raise ValueError("You must specify at least one parameter to match on.")
+
+        self.expected_exceptions = tuple(
+            self._parse_exc(e, expected="a BaseException type")
+            for e in expected_exceptions
+        )
+
+        self._just_propagate = False
+
+    def matches(
+        self,
+        exception: BaseException | None,
+    ) -> TypeGuard[BaseExcT_co_default]:
+        """Check if an exception matches the requirements of this :class:`RaisesExc`.
+        If it fails, :attr:`RaisesExc.fail_reason` will be set.
+
+        Examples::
+
+            assert RaisesExc(ValueError).matches(my_exception)
+            # is equivalent to
+            assert isinstance(my_exception, ValueError)
+
+            # this can be useful when checking e.g. the ``__cause__`` of an exception.
+            with pytest.raises(ValueError) as excinfo:
+                ...
+            assert RaisesExc(SyntaxError, match="foo").matches(excinfo.value.__cause__)
+            # above line is equivalent to
+            assert isinstance(excinfo.value.__cause__, SyntaxError)
+            assert re.search("foo", str(excinfo.value.__cause__))
+
+        """
+        self._just_propagate = False
+        if exception is None:
+            self._fail_reason = "exception is None"
+            return False
+        if not self._check_type(exception):
+            self._just_propagate = True
+            return False
+
+        if not self._check_match(exception):
+            return False
+
+        return self._check_check(exception)
+
+    def __repr__(self) -> str:
+        parameters = []
+        if self.expected_exceptions:
+            parameters.append(_exception_type_name(self.expected_exceptions))
+        if self.match is not None:
+            # If no flags were specified, discard the redundant re.compile() here.
+            parameters.append(
+                f"match={_match_pattern(self.match)!r}",
+            )
+        if self.check is not None:
+            parameters.append(f"check={repr_callable(self.check)}")
+        return f"RaisesExc({', '.join(parameters)})"
+
+    def _check_type(self, exception: BaseException) -> TypeGuard[BaseExcT_co_default]:
+        self._fail_reason = _check_raw_type(self.expected_exceptions, exception)
+        return self._fail_reason is None
+
+    def __enter__(self) -> ExceptionInfo[BaseExcT_co_default]:
+        self.excinfo: ExceptionInfo[BaseExcT_co_default] = ExceptionInfo.for_later()
+        return self.excinfo
+
+    # TODO: move common code into superclass
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: types.TracebackType | None,
+    ) -> bool:
+        __tracebackhide__ = True
+        if exc_type is None:
+            if not self.expected_exceptions:
+                fail("DID NOT RAISE any exception")
+            if len(self.expected_exceptions) > 1:
+                fail(f"DID NOT RAISE any of {self.expected_exceptions!r}")
+
+            fail(f"DID NOT RAISE {self.expected_exceptions[0]!r}")
+
+        assert self.excinfo is not None, (
+            "Internal error - should have been constructed in __enter__"
+        )
+
+        if not self.matches(exc_val):
+            if self._just_propagate:
+                return False
+            raise AssertionError(self._fail_reason)
+
+        # Cast to narrow the exception type now that it's verified,
+        # even though the TypeGuard in self.matches should be narrowing it already.
+        exc_info = cast(
+            "tuple[type[BaseExcT_co_default], BaseExcT_co_default, types.TracebackType]",
+            (exc_type, exc_val, exc_tb),
+        )
+        self.excinfo.fill_unfilled(exc_info)
+        return True
+
+
+@final
+class RaisesGroup(AbstractRaises[BaseExceptionGroup[BaseExcT_co]]):
+    """
+    .. versionadded:: 8.4
+
+    Context manager for checking for an expected :exc:`ExceptionGroup`.
+    This works similarly to :func:`pytest.raises`, but allows for specifying the structure of an :exc:`ExceptionGroup`.
+    :meth:`ExceptionInfo.group_contains` also tries to handle exception groups,
+    but it is very bad at checking that you *didn't* get unexpected exceptions.
+
+    The catching behaviour differs from :ref:`except* <except_star>`, being much
+    stricter about the structure by default.
+    By using ``allow_unwrapped=True`` and ``flatten_subgroups=True`` you can match
+    :ref:`except* <except_star>` fully when expecting a single exception.
+
+    :param args:
+        Any number of exception types, :class:`RaisesGroup` or :class:`RaisesExc`
+        to specify the exceptions contained in this exception.
+        All specified exceptions must be present in the raised group, *and no others*.
+
+        If you expect a variable number of exceptions you need to use
+        :func:`pytest.raises(ExceptionGroup) <pytest.raises>` and manually check
+        the contained exceptions. Consider making use of :meth:`RaisesExc.matches`.
+
+        It does not care about the order of the exceptions, so
+        ``RaisesGroup(ValueError, TypeError)``
+        is equivalent to
+        ``RaisesGroup(TypeError, ValueError)``.
+    :kwparam str | re.Pattern[str] | None match:
+        If specified, a string containing a regular expression,
+        or a regular expression object, that is tested against the string
+        representation of the exception group and its :pep:`678` ``__notes__``
+        using :func:`re.search`.
+
+        To match a literal string that may contain :ref:`special characters
+        <re-syntax>`, the pattern can first be escaped with :func:`re.escape`.
+
+        Note that " (5 subgroups)" will be stripped from the ``repr`` before matching.
+    :kwparam Callable[[E], bool] check:
+        If specified, a callable that will be called with the group as a parameter
+        after successfully matching the expected exceptions.
If it returns ``True`` + it will be considered a match, if not it will be considered a failed match. + :kwparam bool allow_unwrapped: + If expecting a single exception or :class:`RaisesExc` it will match even + if the exception is not inside an exceptiongroup. + + Using this together with ``match``, ``check`` or expecting multiple exceptions + will raise an error. + :kwparam bool flatten_subgroups: + "flatten" any groups inside the raised exception group, extracting all exceptions + inside any nested groups, before matching. Without this it expects you to + fully specify the nesting structure by passing :class:`RaisesGroup` as expected + parameter. + + Examples:: + + with RaisesGroup(ValueError): + raise ExceptionGroup("", (ValueError(),)) + # match + with RaisesGroup( + ValueError, + ValueError, + RaisesExc(TypeError, match="^expected int$"), + match="^my group$", + ): + raise ExceptionGroup( + "my group", + [ + ValueError(), + TypeError("expected int"), + ValueError(), + ], + ) + # check + with RaisesGroup( + KeyboardInterrupt, + match="^hello$", + check=lambda x: isinstance(x.__cause__, ValueError), + ): + raise BaseExceptionGroup("hello", [KeyboardInterrupt()]) from ValueError + # nested groups + with RaisesGroup(RaisesGroup(ValueError)): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # flatten_subgroups + with RaisesGroup(ValueError, flatten_subgroups=True): + raise ExceptionGroup("", (ExceptionGroup("", (ValueError(),)),)) + + # allow_unwrapped + with RaisesGroup(ValueError, allow_unwrapped=True): + raise ValueError + + + :meth:`RaisesGroup.matches` can also be used directly to check a standalone exception group. + + + The matching algorithm is greedy, which means cases such as this may fail:: + + with RaisesGroup(ValueError, RaisesExc(ValueError, match="hello")): + raise ExceptionGroup("", (ValueError("hello"), ValueError("goodbye"))) + + even though it generally does not care about the order of the exceptions in the group. + To avoid the above you should specify the first :exc:`ValueError` with a :class:`RaisesExc` as well. + + .. note:: + When raised exceptions don't match the expected ones, you'll get a detailed error + message explaining why. This includes ``repr(check)`` if set, which in Python can be + overly verbose, showing memory locations etc etc. + + If installed and imported (in e.g. ``conftest.py``), the ``hypothesis`` library will + monkeypatch this output to provide shorter & more readable repr's. + """ + + # allow_unwrapped=True requires: singular exception, exception not being + # RaisesGroup instance, match is None, check is None + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *, + allow_unwrapped: Literal[True], + flatten_subgroups: bool = False, + ) -> None: ... + + # flatten_subgroups = True also requires no nested RaisesGroup + @overload + def __init__( + self, + expected_exception: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + /, + *other_exceptions: type[BaseExcT_co] | RaisesExc[BaseExcT_co], + flatten_subgroups: Literal[True], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_co]], bool] | None = None, + ) -> None: ... + + # simplify the typevars if possible (the following 3 are equivalent but go simpler->complicated) + # ... the first handles RaisesGroup[ValueError], the second RaisesGroup[ExceptionGroup[ValueError]], + # the third RaisesGroup[ValueError | ExceptionGroup[ValueError]]. + # ... 
otherwise, we will get results like RaisesGroup[ValueError | ExceptionGroup[Never]] (I think) + # (technically correct but misleading) + @overload + def __init__( + self: RaisesGroup[ExcT_1], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExceptionGroup[ExcT_2]], + expected_exception: RaisesGroup[ExcT_2], + /, + *other_exceptions: RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: Callable[[ExceptionGroup[ExceptionGroup[ExcT_2]]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[ExcT_1 | ExceptionGroup[ExcT_2]], + expected_exception: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + /, + *other_exceptions: type[ExcT_1] | RaisesExc[ExcT_1] | RaisesGroup[ExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[ExceptionGroup[ExcT_1 | ExceptionGroup[ExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + # same as the above 3 but handling BaseException + @overload + def __init__( + self: RaisesGroup[BaseExcT_1], + expected_exception: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + /, + *other_exceptions: type[BaseExcT_1] | RaisesExc[BaseExcT_1], + match: str | Pattern[str] | None = None, + check: Callable[[BaseExceptionGroup[BaseExcT_1]], bool] | None = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExceptionGroup[BaseExcT_2]], + expected_exception: RaisesGroup[BaseExcT_2], + /, + *other_exceptions: RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExceptionGroup[BaseExcT_2]]], bool] | None + ) = None, + ) -> None: ... + + @overload + def __init__( + self: RaisesGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + match: str | Pattern[str] | None = None, + check: ( + Callable[ + [BaseExceptionGroup[BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]]], + bool, + ] + | None + ) = None, + ) -> None: ... 
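+ + # Illustrative sketch (not part of the pytest source): roughly what the overloads + # above give a type checker; `excinfo` is a hypothetical local name. + # + # with RaisesGroup(ValueError) as excinfo: + # ... # ExceptionInfo[ExceptionGroup[ValueError]] + # with RaisesGroup(RaisesGroup(ValueError)) as excinfo: + # ... # ExceptionInfo[ExceptionGroup[ExceptionGroup[ValueError]]] + # with RaisesGroup(KeyboardInterrupt) as excinfo: + # ... # ExceptionInfo[BaseExceptionGroup[KeyboardInterrupt]]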
+ + def __init__( + self: RaisesGroup[ExcT_1 | BaseExcT_1 | BaseExceptionGroup[BaseExcT_2]], + expected_exception: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + /, + *other_exceptions: type[BaseExcT_1] + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2], + allow_unwrapped: bool = False, + flatten_subgroups: bool = False, + match: str | Pattern[str] | None = None, + check: ( + Callable[[BaseExceptionGroup[BaseExcT_1]], bool] + | Callable[[ExceptionGroup[ExcT_1]], bool] + | None + ) = None, + ): + # The type hint on the `self` and `check` parameters uses different formats + # that are *very* hard to reconcile while adhering to the overloads, so we cast + # it to avoid an error when passing it to super().__init__ + check = cast( + "Callable[[BaseExceptionGroup[ExcT_1|BaseExcT_1|BaseExceptionGroup[BaseExcT_2]]], bool]", + check, + ) + super().__init__(match=match, check=check) + self.allow_unwrapped = allow_unwrapped + self.flatten_subgroups: bool = flatten_subgroups + self.is_baseexception = False + + if allow_unwrapped and other_exceptions: + raise ValueError( + "You cannot specify multiple exceptions with `allow_unwrapped=True`." + " If you want to match one of multiple possible exceptions you should" + " use a `RaisesExc`." + " E.g. `RaisesExc(check=lambda e: isinstance(e, (...)))`", + ) + if allow_unwrapped and isinstance(expected_exception, RaisesGroup): + raise ValueError( + "`allow_unwrapped=True` has no effect when expecting a `RaisesGroup`." + " You might want it in the expected `RaisesGroup`, or" + " `flatten_subgroups=True` if you don't care about the structure.", + ) + if allow_unwrapped and (match is not None or check is not None): + raise ValueError( + "`allow_unwrapped=True` bypasses the `match` and `check` parameters" + " if the exception is unwrapped. If you intended to match/check the" + " exception you should use a `RaisesExc` object. If you want to match/check" + " the exceptiongroup when the exception *is* wrapped you need to" + " do e.g. `if isinstance(exc.value, ExceptionGroup):" + " assert RaisesGroup(...).matches(exc.value)` afterwards.", + ) + + self.expected_exceptions: tuple[ + type[BaseExcT_co] | RaisesExc[BaseExcT_co] | RaisesGroup[BaseException], ... + ] = tuple( + self._parse_excgroup(e, "a BaseException type, RaisesExc, or RaisesGroup") + for e in ( + expected_exception, + *other_exceptions, + ) + ) + + def _parse_excgroup( + self, + exc: ( + type[BaseExcT_co] + | types.GenericAlias + | RaisesExc[BaseExcT_1] + | RaisesGroup[BaseExcT_2] + ), + expected: str, + ) -> type[BaseExcT_co] | RaisesExc[BaseExcT_1] | RaisesGroup[BaseExcT_2]: + # verify exception type and set `self.is_baseexception` + if isinstance(exc, RaisesGroup): + if self.flatten_subgroups: + raise ValueError( + "You cannot specify a nested structure inside a RaisesGroup with" + " `flatten_subgroups=True`.
The parameter will flatten subgroups" + " in the raised exceptiongroup before matching, which would never" + " match a nested structure.", + ) + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, RaisesExc): + self.is_baseexception |= exc.is_baseexception + exc._nested = True + return exc + elif isinstance(exc, tuple): + raise TypeError( + f"expected exception must be {expected}, not {type(exc).__name__!r}.\n" + "RaisesGroup does not support tuples of exception types when expecting one of " + "several possible exception types like RaisesExc.\n" + "If you meant to expect a group with multiple exceptions, list them as separate arguments." + ) + else: + return super()._parse_exc(exc, expected) + + @overload + def __enter__( + self: RaisesGroup[ExcT_1], + ) -> ExceptionInfo[ExceptionGroup[ExcT_1]]: ... + @overload + def __enter__( + self: RaisesGroup[BaseExcT_1], + ) -> ExceptionInfo[BaseExceptionGroup[BaseExcT_1]]: ... + + def __enter__(self) -> ExceptionInfo[BaseExceptionGroup[BaseException]]: + self.excinfo: ExceptionInfo[BaseExceptionGroup[BaseExcT_co]] = ( + ExceptionInfo.for_later() + ) + return self.excinfo + + def __repr__(self) -> str: + reqs = [ + e.__name__ if isinstance(e, type) else repr(e) + for e in self.expected_exceptions + ] + if self.allow_unwrapped: + reqs.append(f"allow_unwrapped={self.allow_unwrapped}") + if self.flatten_subgroups: + reqs.append(f"flatten_subgroups={self.flatten_subgroups}") + if self.match is not None: + # If no flags were specified, discard the redundant re.compile() here. + reqs.append(f"match={_match_pattern(self.match)!r}") + if self.check is not None: + reqs.append(f"check={repr_callable(self.check)}") + return f"RaisesGroup({', '.join(reqs)})" + + def _unroll_exceptions( + self, + exceptions: Sequence[BaseException], + ) -> Sequence[BaseException]: + """Used if `flatten_subgroups=True`.""" + res: list[BaseException] = [] + for exc in exceptions: + if isinstance(exc, BaseExceptionGroup): + res.extend(self._unroll_exceptions(exc.exceptions)) + + else: + res.append(exc) + return res + + @overload + def matches( + self: RaisesGroup[ExcT_1], + exception: BaseException | None, + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def matches( + self: RaisesGroup[BaseExcT_1], + exception: BaseException | None, + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... + + def matches( + self, + exception: BaseException | None, + ) -> bool: + """Check if an exception matches the requirements of this RaisesGroup. + If it fails, `RaisesGroup.fail_reason` will be set. + + Example:: + + with pytest.raises(TypeError) as excinfo: + ... 
+ assert RaisesGroup(ValueError).matches(excinfo.value.__cause__) + # the above line is equivalent to + myexc = excinfo.value.__cause__ + assert isinstance(myexc, BaseExceptionGroup) + assert len(myexc.exceptions) == 1 + assert isinstance(myexc.exceptions[0], ValueError) + """ + self._fail_reason = None + if exception is None: + self._fail_reason = "exception is None" + return False + if not isinstance(exception, BaseExceptionGroup): + # we opt to only print type of the exception here, as the repr would + # likely be quite long + not_group_msg = f"`{type(exception).__name__}()` is not an exception group" + if len(self.expected_exceptions) > 1: + self._fail_reason = not_group_msg + return False + # if we have 1 expected exception, check if it would work even if + # allow_unwrapped is not set + res = self._check_expected(self.expected_exceptions[0], exception) + if res is None and self.allow_unwrapped: + return True + + if res is None: + self._fail_reason = ( + f"{not_group_msg}, but would match with `allow_unwrapped=True`" + ) + elif self.allow_unwrapped: + self._fail_reason = res + else: + self._fail_reason = not_group_msg + return False + + actual_exceptions: Sequence[BaseException] = exception.exceptions + if self.flatten_subgroups: + actual_exceptions = self._unroll_exceptions(actual_exceptions) + + if not self._check_match(exception): + self._fail_reason = cast(str, self._fail_reason) + old_reason = self._fail_reason + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + and isinstance(actual := actual_exceptions[0], expected) + and self._check_match(actual) + ): + assert self.match is not None, "can't be None if _check_match failed" + assert self._fail_reason is old_reason is not None + self._fail_reason += ( + f"\n" + f" but matched the expected `{self._repr_expected(expected)}`.\n" + f" You might want " + f"`RaisesGroup(RaisesExc({expected.__name__}, match={_match_pattern(self.match)!r}))`" + ) + else: + self._fail_reason = old_reason + return False + + # do the full check on expected exceptions + if not self._check_exceptions( + exception, + actual_exceptions, + ): + self._fail_reason = cast(str, self._fail_reason) + assert self._fail_reason is not None + old_reason = self._fail_reason + # if we're not expecting a nested structure, and there is one, do a second + # pass where we try flattening it + if ( + not self.flatten_subgroups + and not any( + isinstance(e, RaisesGroup) for e in self.expected_exceptions + ) + and any(isinstance(e, BaseExceptionGroup) for e in actual_exceptions) + and self._check_exceptions( + exception, + self._unroll_exceptions(exception.exceptions), + ) + ): + # only indent if it's a single-line reason. In a multi-line there's already + # indented lines that this does not belong to. + indent = " " if "\n" not in self._fail_reason else "" + self._fail_reason = ( + old_reason + + f"\n{indent}Did you mean to use `flatten_subgroups=True`?" + ) + else: + self._fail_reason = old_reason + return False + + # Only run `self.check` once we know `exception` is of the correct type.
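+ # (Running `check` any earlier could hand a group whose contents haven't been + # verified yet to a callback written against the narrowed type, which could + # then crash on an unexpected structure.)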
+ if not self._check_check(exception): + reason = ( + cast(str, self._fail_reason) + f" on the {type(exception).__name__}" + ) + if ( + len(actual_exceptions) == len(self.expected_exceptions) == 1 + and isinstance(expected := self.expected_exceptions[0], type) + # we explicitly break typing here :) + and self._check_check(actual_exceptions[0]) # type: ignore[arg-type] + ): + self._fail_reason = reason + ( + f", but did return True for the expected {self._repr_expected(expected)}." + f" You might want RaisesGroup(RaisesExc({expected.__name__}, check=<...>))" + ) + else: + self._fail_reason = reason + return False + + return True + + @staticmethod + def _check_expected( + expected_type: ( + type[BaseException] | RaisesExc[BaseException] | RaisesGroup[BaseException] + ), + exception: BaseException, + ) -> str | None: + """Helper method for `RaisesGroup.matches` and `RaisesGroup._check_exceptions` + to check one of potentially several expected exceptions.""" + if isinstance(expected_type, type): + return _check_raw_type(expected_type, exception) + res = expected_type.matches(exception) + if res: + return None + assert expected_type.fail_reason is not None + if expected_type.fail_reason.startswith("\n"): + return f"\n{expected_type!r}: {indent(expected_type.fail_reason, ' ')}" + return f"{expected_type!r}: {expected_type.fail_reason}" + + @staticmethod + def _repr_expected(e: type[BaseException] | AbstractRaises[BaseException]) -> str: + """Get the repr of an expected type/RaisesExc/RaisesGroup, but we only want + the name if it's a type""" + if isinstance(e, type): + return _exception_type_name(e) + return repr(e) + + @overload + def _check_exceptions( + self: RaisesGroup[ExcT_1], + _exception: Exception, + actual_exceptions: Sequence[Exception], + ) -> TypeGuard[ExceptionGroup[ExcT_1]]: ... + @overload + def _check_exceptions( + self: RaisesGroup[BaseExcT_1], + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> TypeGuard[BaseExceptionGroup[BaseExcT_1]]: ... 
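+ + # A rough sketch of the greedy pairing below, with hypothetical values: given + # expected = (ValueError, RaisesExc(ValueError, match="a")) and + # actual = (ValueError("a"), ValueError("b")), the bare ValueError greedily + # claims ValueError("a"), so the RaisesExc is left to fail against + # ValueError("b") even though swapping the pairing would succeed. + # possible_match() at the bottom of this file detects exactly that case.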
+ + def _check_exceptions( + self, + _exception: BaseException, + actual_exceptions: Sequence[BaseException], + ) -> bool: + """Helper method for RaisesGroup.matches that attempts to pair up expected and actual exceptions""" + # The _exception parameter is not used, but necessary for the TypeGuard + + # full table with all results + results = ResultHolder(self.expected_exceptions, actual_exceptions) + + # (indexes of) raised exceptions that haven't (yet) found an expected + remaining_actual = list(range(len(actual_exceptions))) + # (indexes of) expected exceptions that haven't found a matching raised + failed_expected: list[int] = [] + # successful greedy matches + matches: dict[int, int] = {} + + # loop over expected exceptions first to get a more predictable result + for i_exp, expected in enumerate(self.expected_exceptions): + for i_rem in remaining_actual: + res = self._check_expected(expected, actual_exceptions[i_rem]) + results.set_result(i_exp, i_rem, res) + if res is None: + remaining_actual.remove(i_rem) + matches[i_exp] = i_rem + break + else: + failed_expected.append(i_exp) + + # All exceptions matched up successfully + if not remaining_actual and not failed_expected: + return True + + # in case of a single expected and single raised we simplify the output + if 1 == len(actual_exceptions) == len(self.expected_exceptions): + assert not matches + self._fail_reason = res + return False + + # The test case is failing, so we can do a slow and exhaustive check to find + # duplicate matches etc that will be helpful in debugging + for i_exp, expected in enumerate(self.expected_exceptions): + for i_actual, actual in enumerate(actual_exceptions): + if results.has_result(i_exp, i_actual): + continue + results.set_result( + i_exp, i_actual, self._check_expected(expected, actual) + ) + + successful_str = ( + f"{len(matches)} matched exception{'s' if len(matches) > 1 else ''}. " + if matches + else "" + ) + + # all expected were found + if not failed_expected and results.no_match_for_actual(remaining_actual): + self._fail_reason = ( + f"{successful_str}Unexpected exception(s):" + f" {[actual_exceptions[i] for i in remaining_actual]!r}" + ) + return False + # all raised exceptions were expected + if not remaining_actual and results.no_match_for_expected(failed_expected): + no_match_for_str = ", ".join( + self._repr_expected(self.expected_exceptions[i]) + for i in failed_expected + ) + self._fail_reason = f"{successful_str}Too few exceptions raised, found no match for: [{no_match_for_str}]" + return False + + # if there's only one remaining and one failed, and the unmatched didn't match anything else, + # we elect to only print why the remaining and the failed didn't match. + if ( + 1 == len(remaining_actual) == len(failed_expected) + and results.no_match_for_actual(remaining_actual) + and results.no_match_for_expected(failed_expected) + ): + self._fail_reason = f"{successful_str}{results.get_result(failed_expected[0], remaining_actual[0])}" + return False + + # there's both expected and raised exceptions without matches + s = "" + if matches: + s += f"\n{successful_str}" + indent_1 = " " * 2 + indent_2 = " " * 4 + + if not remaining_actual: + s += "\nToo few exceptions raised!" + elif not failed_expected: + s += "\nUnexpected exception(s)!" 
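+ # From here on we assemble the multi-line failure report: first the expected + # entries that found no partner (plus what they *would* have matched), then the + # leftover raised exceptions, with per-expected failure reasons indented below.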
+ + if failed_expected: + s += "\nThe following expected exceptions did not find a match:" + rev_matches = {v: k for k, v in matches.items()} + for i_failed in failed_expected: + s += ( + f"\n{indent_1}{self._repr_expected(self.expected_exceptions[i_failed])}" + ) + for i_actual, actual in enumerate(actual_exceptions): + if results.get_result(i_failed, i_actual) is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(repr(actual))} which was paired with " + + backquote( + self._repr_expected( + self.expected_exceptions[rev_matches[i_actual]] + ) + ) + ) + + if remaining_actual: + s += "\nThe following raised exceptions did not find a match:" + for i_actual in remaining_actual: + s += f"\n{indent_1}{actual_exceptions[i_actual]!r}:" + for i_exp, expected in enumerate(self.expected_exceptions): + res = results.get_result(i_exp, i_actual) + if i_exp in failed_expected: + assert res is not None + if res[0] != "\n": + s += "\n" + s += indent(res, indent_2) + if res is None: + # we print full repr of match target + s += ( + f"\n{indent_2}It matches {backquote(self._repr_expected(expected))} " + f"which was paired with {backquote(repr(actual_exceptions[matches[i_exp]]))}" + ) + + if len(self.expected_exceptions) == len(actual_exceptions) and possible_match( + results + ): + s += ( + "\nThere exists a possible match when attempting an exhaustive check," + " but RaisesGroup uses a greedy algorithm. " + "Please make your expected exceptions more stringent with `RaisesExc` etc" + " so the greedy algorithm can function." + ) + self._fail_reason = s + return False + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool: + __tracebackhide__ = True + if exc_type is None: + fail(f"DID NOT RAISE any exception, expected `{self.expected_type()}`") + + assert self.excinfo is not None, ( + "Internal error - should have been constructed in __enter__" + ) + + # group_str is the only thing that differs between RaisesExc and RaisesGroup... + # I might just scrap it? Or make it part of fail_reason + group_str = ( + "(group)" + if self.allow_unwrapped and not issubclass(exc_type, BaseExceptionGroup) + else "group" + ) + + if not self.matches(exc_val): + fail(f"Raised exception {group_str} did not match: {self._fail_reason}") + + # Cast to narrow the exception type now that it's verified.... + # even though the TypeGuard in self.matches should be narrowing + exc_info = cast( + "tuple[type[BaseExceptionGroup[BaseExcT_co]], BaseExceptionGroup[BaseExcT_co], types.TracebackType]", + (exc_type, exc_val, exc_tb), + ) + self.excinfo.fill_unfilled(exc_info) + return True + + def expected_type(self) -> str: + subexcs = [] + for e in self.expected_exceptions: + if isinstance(e, RaisesExc): + subexcs.append(repr(e)) + elif isinstance(e, RaisesGroup): + subexcs.append(e.expected_type()) + elif isinstance(e, type): + subexcs.append(e.__name__) + else: # pragma: no cover + raise AssertionError("unknown type") + group_type = "Base" if self.is_baseexception else "" + return f"{group_type}ExceptionGroup({', '.join(subexcs)})" + + +@final +class NotChecked: + """Singleton for unchecked values in ResultHolder""" + + +class ResultHolder: + """Container for results of checking exceptions. + Used in RaisesGroup._check_exceptions and possible_match. + """ + + def __init__( + self, + expected_exceptions: tuple[ + type[BaseException] | AbstractRaises[BaseException], ...
+ ], + actual_exceptions: Sequence[BaseException], + ) -> None: + self.results: list[list[str | type[NotChecked] | None]] = [ + [NotChecked for _ in expected_exceptions] for _ in actual_exceptions + ] + + def set_result(self, expected: int, actual: int, result: str | None) -> None: + self.results[actual][expected] = result + + def get_result(self, expected: int, actual: int) -> str | None: + res = self.results[actual][expected] + assert res is not NotChecked + # mypy doesn't support identity checking against anything but None + return res # type: ignore[return-value] + + def has_result(self, expected: int, actual: int) -> bool: + return self.results[actual][expected] is not NotChecked + + def no_match_for_expected(self, expected: list[int]) -> bool: + for i in expected: + for actual_results in self.results: + assert actual_results[i] is not NotChecked + if actual_results[i] is None: + return False + return True + + def no_match_for_actual(self, actual: list[int]) -> bool: + for i in actual: + for res in self.results[i]: + assert res is not NotChecked + if res is None: + return False + return True + + +def possible_match(results: ResultHolder, used: set[int] | None = None) -> bool: + if used is None: + used = set() + curr_row = len(used) + if curr_row == len(results.results): + return True + return any( + val is None and i not in used and possible_match(results, used | {i}) + for (i, val) in enumerate(results.results[curr_row]) + ) diff --git a/venv/lib/python3.10/site-packages/_pytest/recwarn.py b/venv/lib/python3.10/site-packages/_pytest/recwarn.py new file mode 100644 index 0000000000000000000000000000000000000000..440e3efac8acea4cc42f6e9dfb43aa0df2b422c6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/recwarn.py @@ -0,0 +1,365 @@ +# mypy: allow-untyped-defs +"""Record warnings during test function execution.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterator +from pprint import pformat +import re +from types import TracebackType +from typing import Any +from typing import final +from typing import overload +from typing import TYPE_CHECKING +from typing import TypeVar + + +if TYPE_CHECKING: + from typing_extensions import Self + +import warnings + +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.outcomes import Exit +from _pytest.outcomes import fail + + +T = TypeVar("T") + + +@fixture +def recwarn() -> Generator[WarningsRecorder]: + """Return a :class:`WarningsRecorder` instance that records all warnings emitted by test functions. + + See :ref:`warnings` for information on warning categories. + """ + wrec = WarningsRecorder(_ispytest=True) + with wrec: + warnings.simplefilter("default") + yield wrec + + +@overload +def deprecated_call( + *, match: str | re.Pattern[str] | None = ... +) -> WarningsRecorder: ... + + +@overload +def deprecated_call(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: ... + + +def deprecated_call( + func: Callable[..., Any] | None = None, *args: Any, **kwargs: Any +) -> WarningsRecorder | Any: + """Assert that code produces a ``DeprecationWarning`` or ``PendingDeprecationWarning`` or ``FutureWarning``. + + This function can be used as a context manager:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> import pytest + >>> with pytest.deprecated_call(): + ... 
assert api_call_v2() == 200 + + It can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of + the warnings types above. The return value is the return value of the function. + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex. + + The context manager produces a list of :class:`warnings.WarningMessage` objects, + one for each warning raised. + """ + __tracebackhide__ = True + if func is not None: + args = (func, *args) + return warns( + (DeprecationWarning, PendingDeprecationWarning, FutureWarning), *args, **kwargs + ) + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = ..., + *, + match: str | re.Pattern[str] | None = ..., +) -> WarningsChecker: ... + + +@overload +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...], + func: Callable[..., T], + *args: Any, + **kwargs: Any, +) -> T: ... + + +def warns( + expected_warning: type[Warning] | tuple[type[Warning], ...] = Warning, + *args: Any, + match: str | re.Pattern[str] | None = None, + **kwargs: Any, +) -> WarningsChecker | Any: + r"""Assert that code raises a particular class of warning. + + Specifically, the parameter ``expected_warning`` can be a warning class or tuple + of warning classes, and the code inside the ``with`` block must issue at least one + warning of that class or classes. + + This helper produces a list of :class:`warnings.WarningMessage` objects, one for + each warning emitted (regardless of whether it is an ``expected_warning`` or not). + Since pytest 8.0, unmatched warnings are also re-emitted when the context closes. + + This function can be used as a context manager:: + + >>> import pytest + >>> with pytest.warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + + In the context manager form you may use the keyword argument ``match`` to assert + that the warning matches a text or regex:: + + >>> with pytest.warns(UserWarning, match='must be 0 or None'): + ... warnings.warn("value must be 0 or None", UserWarning) + + >>> with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("value must be 42", UserWarning) + + >>> with pytest.warns(UserWarning): # catch re-emitted warning + ... with pytest.warns(UserWarning, match=r'must be \d+$'): + ... warnings.warn("this is not here", UserWarning) + Traceback (most recent call last): + ... + Failed: DID NOT WARN. No warnings of type ...UserWarning... were emitted... + + **Using with** ``pytest.mark.parametrize`` + + When using :ref:`pytest.mark.parametrize ref` it is possible to parametrize tests + such that some runs raise a warning and others do not. + + This could be achieved in the same way as with exceptions, see + :ref:`parametrizing_conditional_raising` for an example. + + """ + __tracebackhide__ = True + if not args: + if kwargs: + argnames = ", ".join(sorted(kwargs)) + raise TypeError( + f"Unexpected keyword arguments passed to pytest.warns: {argnames}" + "\nUse context-manager form instead?" 
+ ) + return WarningsChecker(expected_warning, match_expr=match, _ispytest=True) + else: + func = args[0] + if not callable(func): + raise TypeError(f"{func!r} object (type: {type(func)}) must be callable") + with WarningsChecker(expected_warning, _ispytest=True): + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): # type:ignore[type-arg] + """A context manager to record raised warnings. + + Each recorded warning is an instance of :class:`warnings.WarningMessage`. + + Adapted from `warnings.catch_warnings`. + + .. note:: + ``DeprecationWarning`` and ``PendingDeprecationWarning`` are treated + differently; see :ref:`ensuring_function_triggers`. + + """ + + def __init__(self, *, _ispytest: bool = False) -> None: + check_ispytest(_ispytest) + super().__init__(record=True) + self._entered = False + self._list: list[warnings.WarningMessage] = [] + + @property + def list(self) -> list[warnings.WarningMessage]: + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i: int) -> warnings.WarningMessage: + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self) -> Iterator[warnings.WarningMessage]: + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self) -> int: + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls: type[Warning] = Warning) -> warnings.WarningMessage: + """Pop the first recorded warning which is an instance of ``cls``, + but not an instance of a child class of any other match. + Raises ``AssertionError`` if there is no match. + """ + best_idx: int | None = None + for i, w in enumerate(self._list): + if w.category == cls: + return self._list.pop(i) # exact match, stop looking + if issubclass(w.category, cls) and ( + best_idx is None + or not issubclass(w.category, self._list[best_idx].category) + ): + best_idx = i + if best_idx is not None: + return self._list.pop(best_idx) + __tracebackhide__ = True + raise AssertionError(f"{cls!r} not found in warning list") + + def clear(self) -> None: + """Clear the list of recorded warnings.""" + self._list[:] = [] + + def __enter__(self) -> Self: + if self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot enter {self!r} twice") + _list = super().__enter__() + # record=True means it's not None. + assert _list is not None + self._list = _list + warnings.simplefilter("always") + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if not self._entered: + __tracebackhide__ = True + raise RuntimeError(f"Cannot exit {self!r} without entering first") + + super().__exit__(exc_type, exc_val, exc_tb) + + # Built-in catch_warnings does not reset entered state so we do it + # manually here for this context manager to become reusable. + self._entered = False + + +@final +class WarningsChecker(WarningsRecorder): + def __init__( + self, + expected_warning: type[Warning] | tuple[type[Warning], ...]
= Warning, + match_expr: str | re.Pattern[str] | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + super().__init__(_ispytest=True) + + msg = "exceptions must be derived from Warning, not %s" + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not issubclass(exc, Warning): + raise TypeError(msg % type(exc)) + expected_warning_tup = expected_warning + elif isinstance(expected_warning, type) and issubclass( + expected_warning, Warning + ): + expected_warning_tup = (expected_warning,) + else: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning_tup + self.match_expr = match_expr + + def matches(self, warning: warnings.WarningMessage) -> bool: + assert self.expected_warning is not None + return issubclass(warning.category, self.expected_warning) and bool( + self.match_expr is None or re.search(self.match_expr, str(warning.message)) + ) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + super().__exit__(exc_type, exc_val, exc_tb) + + __tracebackhide__ = True + + # BaseExceptions like pytest.{skip,fail,xfail,exit} or Ctrl-C within + # pytest.warns should *not* trigger "DID NOT WARN" and get suppressed + # when the warning doesn't happen. Control-flow exceptions should always + # propagate. + if exc_val is not None and ( + not isinstance(exc_val, Exception) + # Exit is an Exception, not a BaseException, for some reason. + or isinstance(exc_val, Exit) + ): + return + + def found_str() -> str: + return pformat([record.message for record in self], indent=2) + + try: + if not any(issubclass(w.category, self.expected_warning) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} were emitted.\n" + f" Emitted warnings: {found_str()}." + ) + elif not any(self.matches(w) for w in self): + fail( + f"DID NOT WARN. No warnings of type {self.expected_warning} matching the regex were emitted.\n" + f" Regex: {self.match_expr}\n" + f" Emitted warnings: {found_str()}." + ) + finally: + # Whether or not any warnings matched, we want to re-emit all unmatched warnings. + for w in self: + if not self.matches(w): + warnings.warn_explicit( + message=w.message, + category=w.category, + filename=w.filename, + lineno=w.lineno, + module=w.__module__, + source=w.source, + ) + + # Currently in Python it is possible to pass other types than an + # `str` message when creating `Warning` instances, however this + # causes an exception when :func:`warnings.filterwarnings` is used + # to filter those warnings. See + # https://github.com/python/cpython/issues/103577 for a discussion. + # While this can be considered a bug in CPython, we put guards in + # pytest as the error message produced without this check in place + # is confusing (#10865). + for w in self: + if type(w.message) is not UserWarning: + # If the warning was of an incorrect type then `warnings.warn()` + # creates a UserWarning. Any other warning must have been specified + # explicitly. + continue + if not w.message.args: + # UserWarning() without arguments must have been specified explicitly. + continue + msg = w.message.args[0] + if isinstance(msg, str): + continue + # It's possible that UserWarning was explicitly specified, and + # its first argument was not a string. But that case can't be + # distinguished from an invalid type. 
+ raise TypeError( + f"Warning must be str or Warning, got {msg!r} (type {type(msg).__name__})" + ) diff --git a/venv/lib/python3.10/site-packages/_pytest/reports.py b/venv/lib/python3.10/site-packages/_pytest/reports.py new file mode 100644 index 0000000000000000000000000000000000000000..480ffae1f9cfd68559294ee982cb62d5fa281245 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/reports.py @@ -0,0 +1,637 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +from io import StringIO +import os +from pprint import pprint +from typing import Any +from typing import cast +from typing import final +from typing import Literal +from typing import NoReturn +from typing import TYPE_CHECKING + +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._code.code import ReprEntry +from _pytest._code.code import ReprEntryNative +from _pytest._code.code import ReprExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import ReprFuncArgs +from _pytest._code.code import ReprLocals +from _pytest._code.code import ReprTraceback +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.config import Config +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip + + +if TYPE_CHECKING: + from typing_extensions import Self + + from _pytest.runner import CallInfo + + +def getworkerinfoline(node): + try: + return node._workerinfocache + except AttributeError: + d = node.workerinfo + ver = "{}.{}.{}".format(*d["version_info"][:3]) + node._workerinfocache = s = "[{}] {} -- Python {} {}".format( + d["id"], d["sysplatform"], ver, d["executable"] + ) + return s + + +class BaseReport: + when: str | None + location: tuple[str, int | None, str] | None + longrepr: ( + None | ExceptionInfo[BaseException] | tuple[str, int, str] | str | TerminalRepr + ) + sections: list[tuple[str, str]] + nodeid: str + outcome: Literal["passed", "failed", "skipped"] + + def __init__(self, **kw: Any) -> None: + self.__dict__.update(kw) + + if TYPE_CHECKING: + # Can have arbitrary fields given to __init__(). + def __getattr__(self, key: str) -> Any: ... + + def toterminal(self, out: TerminalWriter) -> None: + if hasattr(self, "node"): + worker_info = getworkerinfoline(self.node) + if worker_info: + out.line(worker_info) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, "toterminal"): + longrepr_terminal = cast(TerminalRepr, longrepr) + longrepr_terminal.toterminal(out) + else: + try: + s = str(longrepr) + except UnicodeEncodeError: + s = "" + out.line(s) + + def get_sections(self, prefix: str) -> Iterator[tuple[str, str]]: + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self) -> str: + """Read-only property that returns the full string representation of + ``longrepr``. + + .. versionadded:: 3.0 + """ + file = StringIO() + tw = TerminalWriter(file) + tw.hasmarkup = False + self.toterminal(tw) + exc = file.getvalue() + return exc.strip() + + @property + def caplog(self) -> str: + """Return captured log lines, if log capturing is enabled. + + .. 
versionadded:: 3.5 + """ + return "\n".join( + content for (prefix, content) in self.get_sections("Captured log") + ) + + @property + def capstdout(self) -> str: + """Return captured text from stdout, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stdout") + ) + + @property + def capstderr(self) -> str: + """Return captured text from stderr, if capturing is enabled. + + .. versionadded:: 3.0 + """ + return "".join( + content for (prefix, content) in self.get_sections("Captured stderr") + ) + + @property + def passed(self) -> bool: + """Whether the outcome is passed.""" + return self.outcome == "passed" + + @property + def failed(self) -> bool: + """Whether the outcome is failed.""" + return self.outcome == "failed" + + @property + def skipped(self) -> bool: + """Whether the outcome is skipped.""" + return self.outcome == "skipped" + + @property + def fspath(self) -> str: + """The path portion of the reported node, as a string.""" + return self.nodeid.split("::")[0] + + @property + def count_towards_summary(self) -> bool: + """**Experimental** Whether this report should be counted towards the + totals shown at the end of the test session: "1 passed, 1 failure, etc". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + return True + + @property + def head_line(self) -> str | None: + """**Experimental** The head line shown with longrepr output for this + report, more commonly during traceback representation during + failures:: + + ________ Test.foo ________ + + + In the example above, the head_line is "Test.foo". + + .. note:: + + This function is considered **experimental**, so beware that it is subject to changes + even in patch releases. + """ + if self.location is not None: + fspath, lineno, domain = self.location + return domain + return None + + def _get_verbose_word_with_markup( + self, config: Config, default_markup: Mapping[str, bool] + ) -> tuple[str, Mapping[str, bool]]: + _category, _short, verbose = config.hook.pytest_report_teststatus( + report=self, config=config + ) + + if isinstance(verbose, str): + return verbose, default_markup + + if isinstance(verbose, Sequence) and len(verbose) == 2: + word, markup = verbose + if isinstance(word, str) and isinstance(markup, Mapping): + return word, markup + + fail( # pragma: no cover + "pytest_report_teststatus() hook (from a plugin) returned " + f"an invalid verbose value: {verbose!r}.\nExpected either a string " + "or a tuple of (word, markup)." + ) + + def _to_json(self) -> dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. + """ + return _report_to_json(self) + + @classmethod + def _from_json(cls, reportdict: dict[str, object]) -> Self: + """Create either a TestReport or CollectReport, depending on the calling class. + + It is the callers responsibility to know which class to pass here. + + This was originally the serialize_report() function from xdist (ca03269). + + Experimental method. 
+ """ + kwargs = _report_kwargs_from_json(reportdict) + return cls(**kwargs) + + +def _report_unserialization_failure( + type_name: str, report_class: type[BaseReport], reportdict +) -> NoReturn: + url = "https://github.com/pytest-dev/pytest/issues" + stream = StringIO() + pprint("-" * 100, stream=stream) + pprint(f"INTERNALERROR: Unknown entry type returned: {type_name}", stream=stream) + pprint(f"report_name: {report_class}", stream=stream) + pprint(reportdict, stream=stream) + pprint(f"Please report this bug at {url}", stream=stream) + pprint("-" * 100, stream=stream) + raise RuntimeError(stream.getvalue()) + + +@final +class TestReport(BaseReport): + """Basic test report object (also used for setup and teardown calls if + they fail). + + Reports can contain arbitrary extra attributes. + """ + + __test__ = False + + # Defined by skipping plugin. + # xfail reason if xfailed, otherwise not defined. Use hasattr to distinguish. + wasxfail: str + + def __init__( + self, + nodeid: str, + location: tuple[str, int | None, str], + keywords: Mapping[str, Any], + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + when: Literal["setup", "call", "teardown"], + sections: Iterable[tuple[str, str]] = (), + duration: float = 0, + start: float = 0, + stop: float = 0, + user_properties: Iterable[tuple[str, object]] | None = None, + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: A (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + #: The filesystempath may be relative to ``config.rootdir``. + #: The line number is 0-based. + self.location: tuple[str, int | None, str] = location + + #: A name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords: Mapping[str, Any] = keywords + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: One of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when: Literal["setup", "call", "teardown"] = when + + #: User properties is a list of tuples (name, value) that holds user + #: defined properties of the test. + self.user_properties = list(user_properties or []) + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + #: Time it took to run just the test. + self.duration: float = duration + + #: The system time when the call started, in seconds since the epoch. + self.start: float = start + #: The system time when the call ended, in seconds since the epoch. + self.stop: float = stop + + self.__dict__.update(extra) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.nodeid!r} when={self.when!r} outcome={self.outcome!r}>" + + @classmethod + def from_item_and_call(cls, item: Item, call: CallInfo[None]) -> TestReport: + """Create and fill a TestReport with standard item and call info. + + :param item: The item. + :param call: The call info. + """ + when = call.when + # Remove "collect" from the Literal type -- only for collection calls. 
+ assert when != "collect" + duration = call.duration + start = call.start + stop = call.stop + keywords = {x: 1 for x in item.keywords} + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome: Literal["passed", "failed", "skipped"] = "passed" + longrepr: ( + None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr + ) = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif isinstance(excinfo.value, skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + assert r is not None, ( + "There should always be a traceback entry for skipping a test." + ) + if excinfo.value._use_item_location: + path, line = item.reportinfo()[:2] + assert line is not None + longrepr = os.fspath(path), line + 1, r.message + else: + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + if call.when == "call": + longrepr = item.repr_failure(excinfo) + else: # exception in setup or teardown + longrepr = item._repr_failure_py( + excinfo, style=item.config.getoption("tbstyle", "auto") + ) + for rwhen, key, content in item._report_sections: + sections.append((f"Captured {key} {rwhen}", content)) + return cls( + item.nodeid, + item.location, + keywords, + outcome, + longrepr, + when, + sections, + duration, + start, + stop, + user_properties=item.user_properties, + ) + + +@final +class CollectReport(BaseReport): + """Collection report object. + + Reports can contain arbitrary extra attributes. + """ + + when = "collect" + + def __init__( + self, + nodeid: str, + outcome: Literal["passed", "failed", "skipped"], + longrepr: None + | ExceptionInfo[BaseException] + | tuple[str, int, str] + | str + | TerminalRepr, + result: list[Item | Collector] | None, + sections: Iterable[tuple[str, str]] = (), + **extra, + ) -> None: + #: Normalized collection nodeid. + self.nodeid = nodeid + + #: Test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: The collected items and collection nodes. + self.result = result or [] + + #: Tuples of str ``(heading, content)`` with extra information + #: for the test report. Used by pytest to add text captured + #: from ``stdout``, ``stderr``, and intercepted logging events. May + #: be used by other plugins to add arbitrary information to reports. + self.sections = list(sections) + + self.__dict__.update(extra) + + @property + def location( # type:ignore[override] + self, + ) -> tuple[str, int | None, str] | None: + return (self.fspath, None, self.fspath) + + def __repr__(self) -> str: + return f"<CollectReport {self.nodeid!r} lenresult={len(self.result)} outcome={self.outcome!r}>" + + +class CollectErrorRepr(TerminalRepr): + def __init__(self, msg: str) -> None: + self.longrepr = msg + + def toterminal(self, out: TerminalWriter) -> None: + out.line(self.longrepr, red=True) + + +def pytest_report_to_serializable( + report: CollectReport | TestReport, +) -> dict[str, Any] | None: + if isinstance(report, (TestReport, CollectReport)): + data = report._to_json() + data["$report_type"] = report.__class__.__name__ + return data + # TODO: Check if this is actually reachable.
+ return None # type: ignore[unreachable] + + +def pytest_report_from_serializable( + data: dict[str, Any], +) -> CollectReport | TestReport | None: + if "$report_type" in data: + if data["$report_type"] == "TestReport": + return TestReport._from_json(data) + elif data["$report_type"] == "CollectReport": + return CollectReport._from_json(data) + assert False, "Unknown report_type unserialize data: {}".format( + data["$report_type"] + ) + return None + + +def _report_to_json(report: BaseReport) -> dict[str, Any]: + """Return the contents of this report as a dict of builtin entries, + suitable for serialization. + + This was originally the serialize_report() function from xdist (ca03269). + """ + + def serialize_repr_entry( + entry: ReprEntry | ReprEntryNative, + ) -> dict[str, Any]: + data = dataclasses.asdict(entry) + for key, value in data.items(): + if hasattr(value, "__dict__"): + data[key] = dataclasses.asdict(value) + entry_data = {"type": type(entry).__name__, "data": data} + return entry_data + + def serialize_repr_traceback(reprtraceback: ReprTraceback) -> dict[str, Any]: + result = dataclasses.asdict(reprtraceback) + result["reprentries"] = [ + serialize_repr_entry(x) for x in reprtraceback.reprentries + ] + return result + + def serialize_repr_crash( + reprcrash: ReprFileLocation | None, + ) -> dict[str, Any] | None: + if reprcrash is not None: + return dataclasses.asdict(reprcrash) + else: + return None + + def serialize_exception_longrepr(rep: BaseReport) -> dict[str, Any]: + assert rep.longrepr is not None + # TODO: Investigate whether the duck typing is really necessary here. + longrepr = cast(ExceptionRepr, rep.longrepr) + result: dict[str, Any] = { + "reprcrash": serialize_repr_crash(longrepr.reprcrash), + "reprtraceback": serialize_repr_traceback(longrepr.reprtraceback), + "sections": longrepr.sections, + } + if isinstance(longrepr, ExceptionChainRepr): + result["chain"] = [] + for repr_traceback, repr_crash, description in longrepr.chain: + result["chain"].append( + ( + serialize_repr_traceback(repr_traceback), + serialize_repr_crash(repr_crash), + description, + ) + ) + else: + result["chain"] = None + return result + + d = report.__dict__.copy() + if hasattr(report.longrepr, "toterminal"): + if hasattr(report.longrepr, "reprtraceback") and hasattr( + report.longrepr, "reprcrash" + ): + d["longrepr"] = serialize_exception_longrepr(report) + else: + d["longrepr"] = str(report.longrepr) + else: + d["longrepr"] = report.longrepr + for name in d: + if isinstance(d[name], os.PathLike): + d[name] = os.fspath(d[name]) + elif name == "result": + d[name] = None # for now + return d + + +def _report_kwargs_from_json(reportdict: dict[str, Any]) -> dict[str, Any]: + """Return **kwargs that can be used to construct a TestReport or + CollectReport instance. + + This was originally the serialize_report() function from xdist (ca03269). 
+ """ + + def deserialize_repr_entry(entry_data): + data = entry_data["data"] + entry_type = entry_data["type"] + if entry_type == "ReprEntry": + reprfuncargs = None + reprfileloc = None + reprlocals = None + if data["reprfuncargs"]: + reprfuncargs = ReprFuncArgs(**data["reprfuncargs"]) + if data["reprfileloc"]: + reprfileloc = ReprFileLocation(**data["reprfileloc"]) + if data["reprlocals"]: + reprlocals = ReprLocals(data["reprlocals"]["lines"]) + + reprentry: ReprEntry | ReprEntryNative = ReprEntry( + lines=data["lines"], + reprfuncargs=reprfuncargs, + reprlocals=reprlocals, + reprfileloc=reprfileloc, + style=data["style"], + ) + elif entry_type == "ReprEntryNative": + reprentry = ReprEntryNative(data["lines"]) + else: + _report_unserialization_failure(entry_type, TestReport, reportdict) + return reprentry + + def deserialize_repr_traceback(repr_traceback_dict): + repr_traceback_dict["reprentries"] = [ + deserialize_repr_entry(x) for x in repr_traceback_dict["reprentries"] + ] + return ReprTraceback(**repr_traceback_dict) + + def deserialize_repr_crash(repr_crash_dict: dict[str, Any] | None): + if repr_crash_dict is not None: + return ReprFileLocation(**repr_crash_dict) + else: + return None + + if ( + reportdict["longrepr"] + and "reprcrash" in reportdict["longrepr"] + and "reprtraceback" in reportdict["longrepr"] + ): + reprtraceback = deserialize_repr_traceback( + reportdict["longrepr"]["reprtraceback"] + ) + reprcrash = deserialize_repr_crash(reportdict["longrepr"]["reprcrash"]) + if reportdict["longrepr"]["chain"]: + chain = [] + for repr_traceback_data, repr_crash_data, description in reportdict[ + "longrepr" + ]["chain"]: + chain.append( + ( + deserialize_repr_traceback(repr_traceback_data), + deserialize_repr_crash(repr_crash_data), + description, + ) + ) + exception_info: ExceptionChainRepr | ReprExceptionInfo = ExceptionChainRepr( + chain + ) + else: + exception_info = ReprExceptionInfo( + reprtraceback=reprtraceback, + reprcrash=reprcrash, + ) + + for section in reportdict["longrepr"]["sections"]: + exception_info.addsection(*section) + reportdict["longrepr"] = exception_info + + return reportdict diff --git a/venv/lib/python3.10/site-packages/_pytest/runner.py b/venv/lib/python3.10/site-packages/_pytest/runner.py new file mode 100644 index 0000000000000000000000000000000000000000..26e4e838b77c38df214a0c1485e6659c2d12d5bb --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/runner.py @@ -0,0 +1,571 @@ +# mypy: allow-untyped-defs +"""Basic collect and runtest protocol implementations.""" + +from __future__ import annotations + +import bdb +from collections.abc import Callable +import dataclasses +import os +import sys +import types +from typing import cast +from typing import final +from typing import Generic +from typing import Literal +from typing import TYPE_CHECKING +from typing import TypeVar + +from .reports import BaseReport +from .reports import CollectErrorRepr +from .reports import CollectReport +from .reports import TestReport +from _pytest import timing +from _pytest._code.code import ExceptionChainRepr +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import TerminalRepr +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.nodes import Collector +from _pytest.nodes import Directory +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.outcomes import Exit +from _pytest.outcomes import OutcomeException +from _pytest.outcomes import Skipped +from 
_pytest.outcomes import TEST_OUTCOME + + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + +if TYPE_CHECKING: + from _pytest.main import Session + from _pytest.terminal import TerminalReporter + +# +# pytest plugin hooks. + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group.addoption( + "--durations", + action="store", + type=int, + default=None, + metavar="N", + help="Show N slowest setup/test durations (N=0 for all)", + ) + group.addoption( + "--durations-min", + action="store", + type=float, + default=None, + metavar="N", + help="Minimal duration in seconds for inclusion in slowest list. " + "Default: 0.005 (or 0.0 if -vv is given).", + ) + + +def pytest_terminal_summary(terminalreporter: TerminalReporter) -> None: + durations = terminalreporter.config.option.durations + durations_min = terminalreporter.config.option.durations_min + verbose = terminalreporter.config.get_verbosity() + if durations is None: + return + if durations_min is None: + durations_min = 0.005 if verbose < 2 else 0.0 + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, "duration"): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration, reverse=True) + if not durations: + tr.write_sep("=", "slowest durations") + else: + tr.write_sep("=", f"slowest {durations} durations") + dlist = dlist[:durations] + + for i, rep in enumerate(dlist): + if rep.duration < durations_min: + tr.write_line("") + message = f"({len(dlist) - i} durations < {durations_min:g}s hidden." + if terminalreporter.config.option.durations_min is None: + message += " Use -vv to show these durations." + message += ")" + tr.write_line(message) + break + tr.write_line(f"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}") + + +def pytest_sessionstart(session: Session) -> None: + session._setupstate = SetupState() + + +def pytest_sessionfinish(session: Session) -> None: + session._setupstate.teardown_exact(None) + + +def pytest_runtest_protocol(item: Item, nextitem: Item | None) -> bool: + ihook = item.ihook + ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location) + runtestprotocol(item, nextitem=nextitem) + ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location) + return True + + +def runtestprotocol( + item: Item, log: bool = True, nextitem: Item | None = None +) -> list[TestReport]: + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: # type: ignore[attr-defined] + # This only happens if the item is re-run, as is done by + # pytest-rerunfailures. + item._initrequest() # type: ignore[attr-defined] + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.getoption("setupshow", False): + show_test_item(item) + if not item.config.getoption("setuponly", False): + reports.append(call_and_report(item, "call", log)) + # If the session is about to fail or stop, teardown everything - this is + # necessary to correctly report fixture teardown errors (see #11706) + if item.session.shouldfail or item.session.shouldstop: + nextitem = None + reports.append(call_and_report(item, "teardown", log, nextitem=nextitem)) + # After all teardown hooks have been called + # want funcargs and request info to go away. 
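+ # (Dropping these references allows fixture values held by the finished item + # to be garbage collected instead of staying alive until the session ends.)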
+ if hasrequest: + item._request = False # type: ignore[attr-defined] + item.funcargs = None # type: ignore[attr-defined] + return reports + + +def show_test_item(item: Item) -> None: + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(" " * 8) + tw.write(item.nodeid) + used_fixtures = sorted(getattr(item, "fixturenames", [])) + if used_fixtures: + tw.write(" (fixtures used: {})".format(", ".join(used_fixtures))) + tw.flush() + + +def pytest_runtest_setup(item: Item) -> None: + _update_current_test_var(item, "setup") + item.session._setupstate.setup(item) + + +def pytest_runtest_call(item: Item) -> None: + _update_current_test_var(item, "call") + try: + del sys.last_type + del sys.last_value + del sys.last_traceback + if sys.version_info >= (3, 12, 0): + del sys.last_exc # type:ignore[attr-defined] + except AttributeError: + pass + try: + item.runtest() + except Exception as e: + # Store trace info to allow postmortem debugging + sys.last_type = type(e) + sys.last_value = e + if sys.version_info >= (3, 12, 0): + sys.last_exc = e # type:ignore[attr-defined] + assert e.__traceback__ is not None + # Skip *this* frame + sys.last_traceback = e.__traceback__.tb_next + raise + + +def pytest_runtest_teardown(item: Item, nextitem: Item | None) -> None: + _update_current_test_var(item, "teardown") + item.session._setupstate.teardown_exact(nextitem) + _update_current_test_var(item, None) + + +def _update_current_test_var( + item: Item, when: Literal["setup", "call", "teardown"] | None +) -> None: + """Update :envvar:`PYTEST_CURRENT_TEST` to reflect the current item and stage. + + If ``when`` is None, delete ``PYTEST_CURRENT_TEST`` from the environment. + """ + var_name = "PYTEST_CURRENT_TEST" + if when: + value = f"{item.nodeid} ({when})" + # don't allow null bytes on environment variables (see #2644, #2957) + value = value.replace("\x00", "(null)") + os.environ[var_name] = value + else: + os.environ.pop(var_name) + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + return None + + +# +# Implementation + + +def call_and_report( + item: Item, when: Literal["setup", "call", "teardown"], log: bool = True, **kwds +) -> TestReport: + ihook = item.ihook + if when == "setup": + runtest_hook: Callable[..., None] = ihook.pytest_runtest_setup + elif when == "call": + runtest_hook = ihook.pytest_runtest_call + elif when == "teardown": + runtest_hook = ihook.pytest_runtest_teardown + else: + assert False, f"Unhandled runtest hook case: {when}" + reraise: tuple[type[BaseException], ...] = (Exit,) + if not item.config.getoption("usepdb", False): + reraise += (KeyboardInterrupt,) + call = CallInfo.from_call( + lambda: runtest_hook(item=item, **kwds), when=when, reraise=reraise + ) + report: TestReport = ihook.pytest_runtest_makereport(item=item, call=call) + if log: + ihook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + ihook.pytest_exception_interact(node=item, call=call, report=report) + return report + + +def check_interactive_exception(call: CallInfo[object], report: BaseReport) -> bool: + """Check whether the call raised an exception that should be reported as + interactive.""" + if call.excinfo is None: + # Didn't raise. 
+ return False + if hasattr(report, "wasxfail"): + # Exception was expected. + return False + if isinstance(call.excinfo.value, (Skipped, bdb.BdbQuit)): + # Special control flow exception. + return False + return True + + +TResult = TypeVar("TResult", covariant=True) + + +@final +@dataclasses.dataclass +class CallInfo(Generic[TResult]): + """Result/Exception info of a function invocation.""" + + _result: TResult | None + #: The captured exception of the call, if it raised. + excinfo: ExceptionInfo[BaseException] | None + #: The system time when the call started, in seconds since the epoch. + start: float + #: The system time when the call ended, in seconds since the epoch. + stop: float + #: The call duration, in seconds. + duration: float + #: The context of invocation: "collect", "setup", "call" or "teardown". + when: Literal["collect", "setup", "call", "teardown"] + + def __init__( + self, + result: TResult | None, + excinfo: ExceptionInfo[BaseException] | None, + start: float, + stop: float, + duration: float, + when: Literal["collect", "setup", "call", "teardown"], + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + self._result = result + self.excinfo = excinfo + self.start = start + self.stop = stop + self.duration = duration + self.when = when + + @property + def result(self) -> TResult: + """The return value of the call, if it didn't raise. + + Can only be accessed if excinfo is None. + """ + if self.excinfo is not None: + raise AttributeError(f"{self!r} has no valid result") + # The cast is safe because an exception wasn't raised, hence + # _result has the expected function return type (which may be + # None, that's why a cast and not an assert). + return cast(TResult, self._result) + + @classmethod + def from_call( + cls, + func: Callable[[], TResult], + when: Literal["collect", "setup", "call", "teardown"], + reraise: type[BaseException] | tuple[type[BaseException], ...] | None = None, + ) -> CallInfo[TResult]: + """Call func, wrapping the result in a CallInfo. + + :param func: + The function to call. Called without arguments. + :type func: Callable[[], _pytest.runner.TResult] + :param when: + The phase in which the function is called. + :param reraise: + Exception or exceptions that shall propagate if raised by the + function, instead of being wrapped in the CallInfo. + """ + excinfo = None + instant = timing.Instant() + try: + result: TResult | None = func() + except BaseException: + excinfo = ExceptionInfo.from_current() + if reraise is not None and isinstance(excinfo.value, reraise): + raise + result = None + duration = instant.elapsed() + return cls( + start=duration.start.time, + stop=duration.stop.time, + duration=duration.seconds, + when=when, + result=result, + excinfo=excinfo, + _ispytest=True, + ) + + def __repr__(self) -> str: + if self.excinfo is None: + return f"<CallInfo when={self.when!r} result: {self._result!r}>" + return f"<CallInfo when={self.when!r} excinfo={self.excinfo!r}>" + + +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> TestReport: + return TestReport.from_item_and_call(item, call) + + +def pytest_make_collect_report(collector: Collector) -> CollectReport: + def collect() -> list[Item | Collector]: + # Before collecting, if this is a Directory, load the conftests. + # If a conftest import fails to load, it is considered a collection + # error of the Directory collector. This is why it's done inside of the + # CallInfo wrapper. + # + # Note: initial conftests are loaded early, not here.
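Aside: a small sketch of the CallInfo.from_call() contract defined above; the object stores either a result or an ExceptionInfo, never both:

    from pytest import CallInfo  # public re-export of the class above

    ok = CallInfo.from_call(lambda: 1 + 1, when="call")
    assert ok.excinfo is None and ok.result == 2

    bad = CallInfo.from_call(lambda: 1 / 0, when="call")
    assert bad.excinfo is not None
    # Accessing bad.result would raise AttributeError, per the `result` property.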
+ if isinstance(collector, Directory): + collector.config.pluginmanager._loadconftestmodules( + collector.path, + collector.config.getoption("importmode"), + rootpath=collector.config.rootpath, + consider_namespace_packages=collector.config.getini( + "consider_namespace_packages" + ), + ) + + return list(collector.collect()) + + call = CallInfo.from_call( + collect, "collect", reraise=(KeyboardInterrupt, SystemExit) + ) + longrepr: None | tuple[str, int, str] | str | TerminalRepr = None + if not call.excinfo: + outcome: Literal["passed", "skipped", "failed"] = "passed" + else: + skip_exceptions = [Skipped] + unittest = sys.modules.get("unittest") + if unittest is not None: + skip_exceptions.append(unittest.SkipTest) + if isinstance(call.excinfo.value, tuple(skip_exceptions)): + outcome = "skipped" + r_ = collector._repr_failure_py(call.excinfo, "line") + assert isinstance(r_, ExceptionChainRepr), repr(r_) + r = r_.reprcrash + assert r + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + assert isinstance(errorinfo, str) + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + result = call.result if not call.excinfo else None + rep = CollectReport(collector.nodeid, outcome, longrepr, result) + rep.call = call # type: ignore # see collect_one_node + return rep + + +class SetupState: + """Shared state for setting up/tearing down test items or collectors + in a session. + + Suppose we have a collection tree as follows: + + <Session session> + <Module mod1> + <Function item1> + <Module mod2> + <Function item2> + + The SetupState maintains a stack. The stack starts out empty: + + [] + + During the setup phase of item1, setup(item1) is called. What it does + is: + + push session to stack, run session.setup() + push mod1 to stack, run mod1.setup() + push item1 to stack, run item1.setup() + + The stack is: + + [session, mod1, item1] + + While the stack is in this shape, it is allowed to add finalizers to + each of session, mod1, item1 using addfinalizer(). + + During the teardown phase of item1, teardown_exact(item2) is called, + where item2 is the item that follows item1. What it does is: + + pop item1 from stack, run its teardowns + pop mod1 from stack, run its teardowns + + mod1 was popped because it ended its purpose with item1. The stack is: + + [session] + + During the setup phase of item2, setup(item2) is called. What it does + is: + + push mod2 to stack, run mod2.setup() + push item2 to stack, run item2.setup() + + Stack: + + [session, mod2, item2] + + During the teardown phase of item2, teardown_exact(None) is called, + because item2 is the last item. What it does is: + + pop item2 from stack, run its teardowns + pop mod2 from stack, run its teardowns + pop session from stack, run its teardowns + + Stack: + + [] + + The end! + """ + + def __init__(self) -> None: + # The stack is in the dict insertion order. + self.stack: dict[ + Node, + tuple[ + # Node's finalizers. + list[Callable[[], object]], + # Node's exception and original traceback, if its setup raised. + tuple[OutcomeException | Exception, types.TracebackType | None] | None, + ], + ] = {} + + def setup(self, item: Item) -> None: + """Setup objects along the collector chain to the item.""" + needed_collectors = item.listchain() + + # If a collector fails its setup, fail its entire subtree of items. + # The setup is not retried for each item - the same exception is used.
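Aside: the stack walkthrough in the docstring above is what Node.addfinalizer() ultimately feeds into; a hedged conftest.py sketch (assumed plugin code, not part of this diff):

    def pytest_runtest_setup(item):
        # Registered while `item` is on the SetupState stack; runs LIFO when
        # teardown_exact() pops the item.
        item.addfinalizer(lambda: print(f"torn down: {item.nodeid}"))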
+ for col, (finalizers, exc) in self.stack.items(): + assert col in needed_collectors, "previous item was not torn down properly" + if exc: + raise exc[0].with_traceback(exc[1]) + + for col in needed_collectors[len(self.stack) :]: + assert col not in self.stack + # Push onto the stack. + self.stack[col] = ([col.teardown], None) + try: + col.setup() + except TEST_OUTCOME as exc: + self.stack[col] = (self.stack[col][0], (exc, exc.__traceback__)) + raise + + def addfinalizer(self, finalizer: Callable[[], object], node: Node) -> None: + """Attach a finalizer to the given node. + + The node must be currently active in the stack. + """ + assert node and not isinstance(node, tuple) + assert callable(finalizer) + assert node in self.stack, (node, self.stack) + self.stack[node][0].append(finalizer) + + def teardown_exact(self, nextitem: Item | None) -> None: + """Teardown the current stack up until reaching nodes that nextitem + also descends from. + + When nextitem is None (meaning we're at the last item), the entire + stack is torn down. + """ + needed_collectors = (nextitem and nextitem.listchain()) or [] + exceptions: list[BaseException] = [] + while self.stack: + if list(self.stack.keys()) == needed_collectors[: len(self.stack)]: + break + node, (finalizers, _) = self.stack.popitem() + these_exceptions = [] + while finalizers: + fin = finalizers.pop() + try: + fin() + except TEST_OUTCOME as e: + these_exceptions.append(e) + + if len(these_exceptions) == 1: + exceptions.extend(these_exceptions) + elif these_exceptions: + msg = f"errors while tearing down {node!r}" + exceptions.append(BaseExceptionGroup(msg, these_exceptions[::-1])) + + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup("errors during test teardown", exceptions[::-1]) + if nextitem is None: + assert not self.stack + + +def collect_one_node(collector: Collector) -> CollectReport: + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep: CollectReport = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep diff --git a/venv/lib/python3.10/site-packages/_pytest/scope.py b/venv/lib/python3.10/site-packages/_pytest/scope.py new file mode 100644 index 0000000000000000000000000000000000000000..2b007e878936a2c7c8b40d7470ea6ac457c5251b --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/scope.py @@ -0,0 +1,91 @@ +""" +Scope definition and related utilities. + +Those are defined here, instead of in the 'fixtures' module because +their use is spread across many other pytest modules, and centralizing it in 'fixtures' +would cause circular references. + +Also this makes the module light to import, as it should. +""" + +from __future__ import annotations + +from enum import Enum +from functools import total_ordering +from typing import Literal + + +_ScopeName = Literal["session", "package", "module", "class", "function"] + + +@total_ordering +class Scope(Enum): + """ + Represents one of the possible fixture scopes in pytest. + + Scopes are ordered from lower to higher, that is: + + ->>> higher ->>> + + Function < Class < Module < Package < Session + + <<<- lower <<<- + """ + + # Scopes need to be listed from lower to higher. 
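Aside: because the enum is decorated with @total_ordering and __lt__ compares declaration indices, the members declared just below compare exactly as the docstring's diagram shows; a small sketch:

    from _pytest.scope import Scope

    assert Scope.Function < Scope.Class < Scope.Module < Scope.Package < Scope.Session
    assert Scope("module") is Scope.Module           # plain Enum lookup by value
    assert Scope.Class.next_higher() is Scope.Module
    assert Scope.Class.next_lower() is Scope.Function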
+ Function = "function" + Class = "class" + Module = "module" + Package = "package" + Session = "session" + + def next_lower(self) -> Scope: + """Return the next lower scope.""" + index = _SCOPE_INDICES[self] + if index == 0: + raise ValueError(f"{self} is the lower-most scope") + return _ALL_SCOPES[index - 1] + + def next_higher(self) -> Scope: + """Return the next higher scope.""" + index = _SCOPE_INDICES[self] + if index == len(_SCOPE_INDICES) - 1: + raise ValueError(f"{self} is the upper-most scope") + return _ALL_SCOPES[index + 1] + + def __lt__(self, other: Scope) -> bool: + self_index = _SCOPE_INDICES[self] + other_index = _SCOPE_INDICES[other] + return self_index < other_index + + @classmethod + def from_user( + cls, scope_name: _ScopeName, descr: str, where: str | None = None + ) -> Scope: + """ + Given a scope name from the user, return the equivalent Scope enum. Should be used + whenever we want to convert a user provided scope name to its enum object. + + If the scope name is invalid, construct a user friendly message and call pytest.fail. + """ + from _pytest.outcomes import fail + + try: + # Holding this reference is necessary for mypy at the moment. + scope = Scope(scope_name) + except ValueError: + fail( + "{} {}got an unexpected scope value '{}'".format( + descr, f"from {where} " if where else "", scope_name + ), + pytrace=False, + ) + return scope + + +_ALL_SCOPES = list(Scope) +_SCOPE_INDICES = {scope: index for index, scope in enumerate(_ALL_SCOPES)} + + +# Ordered list of scopes which can contain many tests (in practice all except Function). +HIGH_SCOPES = [x for x in Scope if x is not Scope.Function] diff --git a/venv/lib/python3.10/site-packages/_pytest/setuponly.py b/venv/lib/python3.10/site-packages/_pytest/setuponly.py new file mode 100644 index 0000000000000000000000000000000000000000..7e6b46bcdb4ab6e646653df9f847b11b5f50e943 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/setuponly.py @@ -0,0 +1,98 @@ +from __future__ import annotations + +from collections.abc import Generator + +from _pytest._io.saferepr import saferepr +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +from _pytest.scope import Scope +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setuponly", + "--setup-only", + action="store_true", + help="Only setup fixtures, do not execute tests", + ) + group.addoption( + "--setupshow", + "--setup-show", + action="store_true", + help="Show setup of fixtures while executing tests", + ) + + +@pytest.hookimpl(wrapper=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> Generator[None, object, object]: + try: + return (yield) + finally: + if request.config.option.setupshow: + if hasattr(request, "param"): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). 
+ if fixturedef.ids: + if callable(fixturedef.ids): + param = fixturedef.ids(request.param) + else: + param = fixturedef.ids[request.param_index] + else: + param = request.param + fixturedef.cached_param = param # type: ignore[attr-defined] + _show_fixture_action(fixturedef, request.config, "SETUP") + + +def pytest_fixture_post_finalizer( + fixturedef: FixtureDef[object], request: SubRequest +) -> None: + if fixturedef.cached_result is not None: + config = request.config + if config.option.setupshow: + _show_fixture_action(fixturedef, request.config, "TEARDOWN") + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action( + fixturedef: FixtureDef[object], config: Config, msg: str +) -> None: + capman = config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture() + + tw = config.get_terminal_writer() + tw.line() + # Use smaller indentation the higher the scope: Session = 0, Package = 1, etc. + scope_indent = list(reversed(Scope)).index(fixturedef._scope) + tw.write(" " * 2 * scope_indent) + + scopename = fixturedef.scope[0].upper() + tw.write(f"{msg:<8} {scopename} {fixturedef.argname}") + + if msg == "SETUP": + deps = sorted(arg for arg in fixturedef.argnames if arg != "request") + if deps: + tw.write(" (fixtures used: {})".format(", ".join(deps))) + + if hasattr(fixturedef, "cached_param"): + tw.write(f"[{saferepr(fixturedef.cached_param, maxsize=42)}]") + + tw.flush() + + if capman: + capman.resume_global_capture() + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setuponly: + config.option.setupshow = True + return None diff --git a/venv/lib/python3.10/site-packages/_pytest/setupplan.py b/venv/lib/python3.10/site-packages/_pytest/setupplan.py new file mode 100644 index 0000000000000000000000000000000000000000..4e124cce2434e02d928c8ceb8cc8eaf63e271404 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/setupplan.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureDef +from _pytest.fixtures import SubRequest +import pytest + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("debugconfig") + group.addoption( + "--setupplan", + "--setup-plan", + action="store_true", + help="Show what fixtures and tests would be executed but " + "don't execute anything", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup( + fixturedef: FixtureDef[object], request: SubRequest +) -> object | None: + # Will return a dummy fixture if the setuponly option is provided. 
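Aside: the wrapper=True style used by pytest_fixture_setup in setuponly.py above surrounds all other implementations of the hook; a minimal sketch of the pattern (assumed plugin code, not part of this diff):

    import pytest

    @pytest.hookimpl(wrapper=True)
    def pytest_fixture_setup(fixturedef, request):
        # Code before the yield runs first; the yield resumes with the
        # combined result (or exception) of the inner implementations.
        try:
            return (yield)
        finally:
            pass  # e.g. timing or logging around fixture setup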
+ if request.config.option.setupplan: + my_cache_key = fixturedef.cache_key(request) + fixturedef.cached_result = (None, my_cache_key, None) + return fixturedef.cached_result + return None + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config: Config) -> int | ExitCode | None: + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True + return None diff --git a/venv/lib/python3.10/site-packages/_pytest/skipping.py b/venv/lib/python3.10/site-packages/_pytest/skipping.py new file mode 100644 index 0000000000000000000000000000000000000000..ec118f2c92fae8431d5313bdbc35cb9e1b11a05c --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/skipping.py @@ -0,0 +1,316 @@ +# mypy: allow-untyped-defs +"""Support for skip/xfail functions and markers.""" + +from __future__ import annotations + +from collections.abc import Generator +from collections.abc import Mapping +import dataclasses +import os +import platform +import sys +import traceback +from typing import Optional + +from _pytest.config import Config +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.mark.structures import Mark +from _pytest.nodes import Item +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.raises import AbstractRaises +from _pytest.reports import BaseReport +from _pytest.reports import TestReport +from _pytest.runner import CallInfo +from _pytest.stash import StashKey + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--runxfail", + action="store_true", + dest="runxfail", + default=False, + help="Report the results of xfail tests as if they were not marked", + ) + + parser.addini( + "xfail_strict", + "Default for the strict parameter of xfail " + "markers when not given explicitly (default: False)", + default=False, + type="bool", + ) + + +def pytest_configure(config: Config) -> None: + if config.option.runxfail: + # yay a hack + import pytest + + old = pytest.xfail + config.add_cleanup(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = xfail.Exception # type: ignore[attr-defined] + setattr(pytest, "xfail", nop) + + config.addinivalue_line( + "markers", + "skip(reason=None): skip the given test function with an optional reason. " + 'Example: skip(reason="no way of currently testing this") skips the ' + "test.", + ) + config.addinivalue_line( + "markers", + "skipif(condition, ..., *, reason=...): " + "skip the given test function if any of the conditions evaluate to True. " + "Example: skipif(sys.platform == 'win32') skips the test if we are on the win32 platform. " + "See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-skipif", + ) + config.addinivalue_line( + "markers", + "xfail(condition, ..., *, reason=..., run=True, raises=None, strict=xfail_strict): " + "mark the test function as an expected failure if any of the conditions " + "evaluate to True. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. 
See https://docs.pytest.org/en/stable/reference/reference.html#pytest-mark-xfail", + ) + + +def evaluate_condition(item: Item, mark: Mark, condition: object) -> tuple[bool, str]: + """Evaluate a single skipif/xfail condition. + + If an old-style string condition is given, it is eval()'d, otherwise the + condition is bool()'d. If this fails, an appropriately formatted pytest.fail + is raised. + + Returns (result, reason). The reason is only relevant if the result is True. + """ + # String condition. + if isinstance(condition, str): + globals_ = { + "os": os, + "sys": sys, + "platform": platform, + "config": item.config, + } + for dictionary in reversed( + item.ihook.pytest_markeval_namespace(config=item.config) + ): + if not isinstance(dictionary, Mapping): + raise ValueError( + f"pytest_markeval_namespace() needs to return a dict, got {dictionary!r}" + ) + globals_.update(dictionary) + if hasattr(item, "obj"): + globals_.update(item.obj.__globals__) + try: + filename = f"<{mark.name} condition>" + condition_code = compile(condition, filename, "eval") + result = eval(condition_code, globals_) + except SyntaxError as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + " " + " " * (exc.offset or 0) + "^", + "SyntaxError: invalid syntax", + ] + fail("\n".join(msglines), pytrace=False) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition", + " " + condition, + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + # Boolean condition. + else: + try: + result = bool(condition) + except Exception as exc: + msglines = [ + f"Error evaluating {mark.name!r} condition as a boolean", + *traceback.format_exception_only(type(exc), exc), + ] + fail("\n".join(msglines), pytrace=False) + + reason = mark.kwargs.get("reason", None) + if reason is None: + if isinstance(condition, str): + reason = "condition: " + condition + else: + # XXX better be checked at collection time + msg = ( + f"Error evaluating {mark.name!r}: " + + "you need to specify reason=STRING when using booleans as conditions." + ) + fail(msg, pytrace=False) + + return result, reason + + +@dataclasses.dataclass(frozen=True) +class Skip: + """The result of evaluate_skip_marks().""" + + reason: str = "unconditional skip" + + +def evaluate_skip_marks(item: Item) -> Skip | None: + """Evaluate skip and skipif marks on item, returning Skip if triggered.""" + for mark in item.iter_markers(name="skipif"): + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Skip(reason) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Skip(reason) + + for mark in item.iter_markers(name="skip"): + try: + return Skip(*mark.args, **mark.kwargs) + except TypeError as e: + raise TypeError(str(e) + " - maybe you meant pytest.mark.skipif?") from None + + return None + + +@dataclasses.dataclass(frozen=True) +class Xfail: + """The result of evaluate_xfail_marks().""" + + __slots__ = ("raises", "reason", "run", "strict") + + reason: str + run: bool + strict: bool + raises: ( + type[BaseException] + | tuple[type[BaseException], ...] 
+ | AbstractRaises[BaseException] + | None + ) + + +def evaluate_xfail_marks(item: Item) -> Xfail | None: + """Evaluate xfail marks on item, returning Xfail if triggered.""" + for mark in item.iter_markers(name="xfail"): + run = mark.kwargs.get("run", True) + strict = mark.kwargs.get("strict", item.config.getini("xfail_strict")) + raises = mark.kwargs.get("raises", None) + if "condition" not in mark.kwargs: + conditions = mark.args + else: + conditions = (mark.kwargs["condition"],) + + # Unconditional. + if not conditions: + reason = mark.kwargs.get("reason", "") + return Xfail(reason, run, strict, raises) + + # If any of the conditions are true. + for condition in conditions: + result, reason = evaluate_condition(item, mark, condition) + if result: + return Xfail(reason, run, strict, raises) + + return None + + +# Saves the xfail mark evaluation. Can be refreshed during call if None. +xfailed_key = StashKey[Optional[Xfail]]() + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item: Item) -> None: + skipped = evaluate_skip_marks(item) + if skipped: + raise skip.Exception(skipped.reason, _use_item_location=True) + + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + +@hookimpl(wrapper=True) +def pytest_runtest_call(item: Item) -> Generator[None]: + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + if xfailed and not item.config.option.runxfail and not xfailed.run: + xfail("[NOTRUN] " + xfailed.reason) + + try: + return (yield) + finally: + # The test run may have added an xfail mark dynamically. + xfailed = item.stash.get(xfailed_key, None) + if xfailed is None: + item.stash[xfailed_key] = xfailed = evaluate_xfail_marks(item) + + +@hookimpl(wrapper=True) +def pytest_runtest_makereport( + item: Item, call: CallInfo[None] +) -> Generator[None, TestReport, TestReport]: + rep = yield + xfailed = item.stash.get(xfailed_key, None) + if item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): + assert call.excinfo.value.msg is not None + rep.wasxfail = call.excinfo.value.msg + rep.outcome = "skipped" + elif not rep.skipped and xfailed: + if call.excinfo: + raises = xfailed.raises + if raises is None or ( + ( + isinstance(raises, (type, tuple)) + and isinstance(call.excinfo.value, raises) + ) + or ( + isinstance(raises, AbstractRaises) + and raises.matches(call.excinfo.value) + ) + ): + rep.outcome = "skipped" + rep.wasxfail = xfailed.reason + else: + rep.outcome = "failed" + elif call.when == "call": + if xfailed.strict: + rep.outcome = "failed" + rep.longrepr = "[XPASS(strict)] " + xfailed.reason + else: + rep.outcome = "passed" + rep.wasxfail = xfailed.reason + return rep + + +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str] | None: + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "XFAIL" + elif report.passed: + return "xpassed", "X", "XPASS" + return None diff --git a/venv/lib/python3.10/site-packages/_pytest/stash.py b/venv/lib/python3.10/site-packages/_pytest/stash.py new file mode 100644 index 0000000000000000000000000000000000000000..6a9ff884e04880129db61e4b521be72e18c31919 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/stash.py @@ -0,0 +1,116 @@ +from __future__ import annotations + +from typing import Any +from typing import cast +from 
typing import Generic +from typing import TypeVar + + +__all__ = ["Stash", "StashKey"] + + +T = TypeVar("T") +D = TypeVar("D") + + +class StashKey(Generic[T]): + """``StashKey`` is an object used as a key to a :class:`Stash`. + + A ``StashKey`` is associated with the type ``T`` of the value of the key. + + A ``StashKey`` is unique and cannot conflict with another key. + + .. versionadded:: 7.0 + """ + + __slots__ = () + + +class Stash: + r"""``Stash`` is a type-safe heterogeneous mutable mapping that + allows keys and value types to be defined separately from + where it (the ``Stash``) is created. + + Usually you will be given an object which has a ``Stash``, for example + :class:`~pytest.Config` or a :class:`~_pytest.nodes.Node`: + + .. code-block:: python + + stash: Stash = some_object.stash + + If a module or plugin wants to store data in this ``Stash``, it creates + :class:`StashKey`\s for its keys (at the module level): + + .. code-block:: python + + # At the top-level of the module + some_str_key = StashKey[str]() + some_bool_key = StashKey[bool]() + + To store information: + + .. code-block:: python + + # Value type must match the key. + stash[some_str_key] = "value" + stash[some_bool_key] = True + + To retrieve the information: + + .. code-block:: python + + # The static type of some_str is str. + some_str = stash[some_str_key] + # The static type of some_bool is bool. + some_bool = stash[some_bool_key] + + .. versionadded:: 7.0 + """ + + __slots__ = ("_storage",) + + def __init__(self) -> None: + self._storage: dict[StashKey[Any], object] = {} + + def __setitem__(self, key: StashKey[T], value: T) -> None: + """Set a value for key.""" + self._storage[key] = value + + def __getitem__(self, key: StashKey[T]) -> T: + """Get the value for key. + + Raises ``KeyError`` if the key wasn't set before. + """ + return cast(T, self._storage[key]) + + def get(self, key: StashKey[T], default: D) -> T | D: + """Get the value for key, or return default if the key wasn't set + before.""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: StashKey[T], default: T) -> T: + """Return the value of key if already set, otherwise set the value + of key to default and return default.""" + try: + return self[key] + except KeyError: + self[key] = default + return default + + def __delitem__(self, key: StashKey[T]) -> None: + """Delete the value for key. + + Raises ``KeyError`` if the key wasn't set before. 
+ """ + del self._storage[key] + + def __contains__(self, key: StashKey[T]) -> bool: + """Return whether key was set.""" + return key in self._storage + + def __len__(self) -> int: + """Return how many items exist in the stash.""" + return len(self._storage) diff --git a/venv/lib/python3.10/site-packages/_pytest/stepwise.py b/venv/lib/python3.10/site-packages/_pytest/stepwise.py new file mode 100644 index 0000000000000000000000000000000000000000..8901540eb59760822b1346861f8f8098fabe83a4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/stepwise.py @@ -0,0 +1,209 @@ +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timedelta +from typing import Any +from typing import TYPE_CHECKING + +from _pytest import nodes +from _pytest.cacheprovider import Cache +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.main import Session +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from typing_extensions import Self + +STEPWISE_CACHE_DIR = "cache/stepwise" + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("general") + group.addoption( + "--sw", + "--stepwise", + action="store_true", + default=False, + dest="stepwise", + help="Exit on test failure and continue from last failing test next time", + ) + group.addoption( + "--sw-skip", + "--stepwise-skip", + action="store_true", + default=False, + dest="stepwise_skip", + help="Ignore the first failing test but stop on the next failing test. " + "Implicitly enables --stepwise.", + ) + group.addoption( + "--sw-reset", + "--stepwise-reset", + action="store_true", + default=False, + dest="stepwise_reset", + help="Resets stepwise state, restarting the stepwise workflow. " + "Implicitly enables --stepwise.", + ) + + +def pytest_configure(config: Config) -> None: + # --stepwise-skip/--stepwise-reset implies stepwise. + if config.option.stepwise_skip or config.option.stepwise_reset: + config.option.stepwise = True + if config.getoption("stepwise"): + config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin") + + +def pytest_sessionfinish(session: Session) -> None: + if not session.config.getoption("stepwise"): + assert session.config.cache is not None + if hasattr(session.config, "workerinput"): + # Do not update cache if this process is a xdist worker to prevent + # race conditions (#10641). + return + + +@dataclasses.dataclass +class StepwiseCacheInfo: + # The nodeid of the last failed test. + last_failed: str | None + + # The number of tests in the last time --stepwise was run. + # We use this information as a simple way to invalidate the cache information, avoiding + # confusing behavior in case the cache is stale. + last_test_count: int | None + + # The date when the cache was last updated, for information purposes only. 
+ last_cache_date_str: str + + @property + def last_cache_date(self) -> datetime: + return datetime.fromisoformat(self.last_cache_date_str) + + @classmethod + def empty(cls) -> Self: + return cls( + last_failed=None, + last_test_count=None, + last_cache_date_str=datetime.now().isoformat(), + ) + + def update_date_to_now(self) -> None: + self.last_cache_date_str = datetime.now().isoformat() + + +class StepwisePlugin: + def __init__(self, config: Config) -> None: + self.config = config + self.session: Session | None = None + self.report_status: list[str] = [] + assert config.cache is not None + self.cache: Cache = config.cache + self.skip: bool = config.getoption("stepwise_skip") + self.reset: bool = config.getoption("stepwise_reset") + self.cached_info = self._load_cached_info() + + def _load_cached_info(self) -> StepwiseCacheInfo: + cached_dict: dict[str, Any] | None = self.cache.get(STEPWISE_CACHE_DIR, None) + if cached_dict: + try: + return StepwiseCacheInfo( + cached_dict["last_failed"], + cached_dict["last_test_count"], + cached_dict["last_cache_date_str"], + ) + except (KeyError, TypeError) as e: + error = f"{type(e).__name__}: {e}" + self.report_status.append(f"error reading cache, discarding ({error})") + + # Cache not found or error during load, return a new cache. + return StepwiseCacheInfo.empty() + + def pytest_sessionstart(self, session: Session) -> None: + self.session = session + + def pytest_collection_modifyitems( + self, config: Config, items: list[nodes.Item] + ) -> None: + last_test_count = self.cached_info.last_test_count + self.cached_info.last_test_count = len(items) + + if self.reset: + self.report_status.append("resetting state, not skipping.") + self.cached_info.last_failed = None + return + + if not self.cached_info.last_failed: + self.report_status.append("no previously failed tests, not skipping.") + return + + if last_test_count is not None and last_test_count != len(items): + self.report_status.append( + f"test count changed, not skipping (now {len(items)} tests, previously {last_test_count})." + ) + self.cached_info.last_failed = None + return + + # Check all item nodes until we find a match on last failed. + failed_index = None + for index, item in enumerate(items): + if item.nodeid == self.cached_info.last_failed: + failed_index = index + break + + # If the previously failed test was not found among the test items, + # do not skip any tests. + if failed_index is None: + self.report_status.append("previously failed test not found, not skipping.") + else: + cache_age = datetime.now() - self.cached_info.last_cache_date + # Round up to avoid showing microseconds. + cache_age = timedelta(seconds=int(cache_age.total_seconds())) + self.report_status.append( + f"skipping {failed_index} already passed items (cache from {cache_age} ago," + f" use --sw-reset to discard)." + ) + deselected = items[:failed_index] + del items[:failed_index] + config.hook.pytest_deselected(items=deselected) + + def pytest_runtest_logreport(self, report: TestReport) -> None: + if report.failed: + if self.skip: + # Remove test from the failed ones (if it exists) and unset the skip option + # to make sure the following tests will not be skipped. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + self.skip = False + else: + # Mark test as the last failing and interrupt the test session. + self.cached_info.last_failed = report.nodeid + assert self.session is not None + self.session.shouldstop = ( + "Test failed, continuing from this test next run." 
+ ) + + else: + # If the test was actually run and did pass. + if report.when == "call": + # Remove test from the failed ones, if it exists. + if report.nodeid == self.cached_info.last_failed: + self.cached_info.last_failed = None + + def pytest_report_collectionfinish(self) -> list[str] | None: + if self.config.get_verbosity() >= 0 and self.report_status: + return [f"stepwise: {x}" for x in self.report_status] + return None + + def pytest_sessionfinish(self) -> None: + if hasattr(self.config, "workerinput"): + # Do not update cache if this process is an xdist worker to prevent + # race conditions (#10641). + return + self.cached_info.update_date_to_now() + self.cache.set(STEPWISE_CACHE_DIR, dataclasses.asdict(self.cached_info)) diff --git a/venv/lib/python3.10/site-packages/_pytest/terminal.py b/venv/lib/python3.10/site-packages/_pytest/terminal.py new file mode 100644 index 0000000000000000000000000000000000000000..a95f79ba6b66bda5197069af3754fddea6b6bf4b --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/terminal.py @@ -0,0 +1,1643 @@ +# mypy: allow-untyped-defs +"""Terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" + +from __future__ import annotations + +import argparse +from collections import Counter +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Mapping +from collections.abc import Sequence +import dataclasses +import datetime +from functools import partial +import inspect +from pathlib import Path +import platform +import sys +import textwrap +from typing import Any +from typing import ClassVar +from typing import final +from typing import Literal +from typing import NamedTuple +from typing import TextIO +from typing import TYPE_CHECKING +import warnings + +import pluggy + +from _pytest import compat +from _pytest import nodes +from _pytest import timing +from _pytest._code import ExceptionInfo +from _pytest._code.code import ExceptionRepr +from _pytest._io import TerminalWriter +from _pytest._io.wcwidth import wcswidth +import _pytest._version +from _pytest.assertion.util import running_on_ci +from _pytest.config import _PluggyPlugin +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.nodes import Item +from _pytest.nodes import Node +from _pytest.pathlib import absolutepath +from _pytest.pathlib import bestrelpath +from _pytest.reports import BaseReport +from _pytest.reports import CollectReport +from _pytest.reports import TestReport + + +if TYPE_CHECKING: + from _pytest.main import Session + + +REPORT_COLLECTING_RESOLUTION = 0.5 + +KNOWN_TYPES = ( + "failed", + "passed", + "skipped", + "deselected", + "xfailed", + "xpassed", + "warnings", + "error", +) + +_REPORTCHARS_DEFAULT = "fE" + + +class MoreQuietAction(argparse.Action): + """A modified copy of the argparse count action which counts down and updates + the legacy quiet attribute at the same time. + + Used to unify verbosity handling.
+ """ + + def __init__( + self, + option_strings: Sequence[str], + dest: str, + default: object = None, + required: bool = False, + help: str | None = None, + ) -> None: + super().__init__( + option_strings=option_strings, + dest=dest, + nargs=0, + default=default, + required=required, + help=help, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: argparse.Namespace, + values: str | Sequence[object] | None, + option_string: str | None = None, + ) -> None: + new_count = getattr(namespace, self.dest, 0) - 1 + setattr(namespace, self.dest, new_count) + # todo Deprecate config.quiet + namespace.quiet = getattr(namespace, "quiet", 0) + 1 + + +class TestShortLogReport(NamedTuple): + """Used to store the test status result category, shortletter and verbose word. + For example ``"rerun", "R", ("RERUN", {"yellow": True})``. + + :ivar category: + The class of result, for example ``“passed”``, ``“skipped”``, ``“error”``, or the empty string. + + :ivar letter: + The short letter shown as testing progresses, for example ``"."``, ``"s"``, ``"E"``, or the empty string. + + :ivar word: + Verbose word is shown as testing progresses in verbose mode, for example ``"PASSED"``, ``"SKIPPED"``, + ``"ERROR"``, or the empty string. + """ + + category: str + letter: str + word: str | tuple[str, Mapping[str, bool]] + + +def pytest_addoption(parser: Parser) -> None: + group = parser.getgroup("terminal reporting", "Reporting", after="general") + group._addoption( # private to use reserved lower-case short option + "-v", + "--verbose", + action="count", + default=0, + dest="verbose", + help="Increase verbosity", + ) + group.addoption( + "--no-header", + action="store_true", + default=False, + dest="no_header", + help="Disable header", + ) + group.addoption( + "--no-summary", + action="store_true", + default=False, + dest="no_summary", + help="Disable summary", + ) + group.addoption( + "--no-fold-skipped", + action="store_false", + dest="fold_skipped", + default=True, + help="Do not fold skipped tests in short summary.", + ) + group.addoption( + "--force-short-summary", + action="store_true", + dest="force_short_summary", + default=False, + help="Force condensed summary output regardless of verbosity level.", + ) + group._addoption( # private to use reserved lower-case short option + "-q", + "--quiet", + action=MoreQuietAction, + default=0, + dest="verbose", + help="Decrease verbosity", + ) + group.addoption( + "--verbosity", + dest="verbose", + type=int, + default=0, + help="Set verbosity. Default: 0.", + ) + group._addoption( # private to use reserved lower-case short option + "-r", + action="store", + dest="reportchars", + default=_REPORTCHARS_DEFAULT, + metavar="chars", + help="Show extra test summary info as specified by chars: (f)ailed, " + "(E)rror, (s)kipped, (x)failed, (X)passed, " + "(p)assed, (P)assed with output, (a)ll except passed (p/P), or (A)ll. " + "(w)arnings are enabled by default (see --disable-warnings), " + "'N' can be used to reset the list. 
(default: 'fE').", + ) + group.addoption( + "--disable-warnings", + "--disable-pytest-warnings", + default=False, + dest="disable_warnings", + action="store_true", + help="Disable warnings summary", + ) + group._addoption( # private to use reserved lower-case short option + "-l", + "--showlocals", + action="store_true", + dest="showlocals", + default=False, + help="Show locals in tracebacks (disabled by default)", + ) + group.addoption( + "--no-showlocals", + action="store_false", + dest="showlocals", + help="Hide locals in tracebacks (negate --showlocals passed through addopts)", + ) + group.addoption( + "--tb", + metavar="style", + action="store", + dest="tbstyle", + default="auto", + choices=["auto", "long", "short", "no", "line", "native"], + help="Traceback print mode (auto/long/short/line/native/no)", + ) + group.addoption( + "--xfail-tb", + action="store_true", + dest="xfail_tb", + default=False, + help="Show tracebacks for xfail (as long as --tb != no)", + ) + group.addoption( + "--show-capture", + action="store", + dest="showcapture", + choices=["no", "stdout", "stderr", "log", "all"], + default="all", + help="Controls how captured stdout/stderr/log is shown on failed tests. " + "Default: all.", + ) + group.addoption( + "--fulltrace", + "--full-trace", + action="store_true", + default=False, + help="Don't cut any tracebacks (default is to cut)", + ) + group.addoption( + "--color", + metavar="color", + action="store", + dest="color", + default="auto", + choices=["yes", "no", "auto"], + help="Color terminal output (yes/no/auto)", + ) + group.addoption( + "--code-highlight", + default="yes", + choices=["yes", "no"], + help="Whether code should be highlighted (only if --color is also enabled). " + "Default: yes.", + ) + + parser.addini( + "console_output_style", + help='Console output: "classic", or with additional progress information ' + '("progress" (percentage) | "count" | "progress-even-when-capture-no" (forces ' + "progress even when capture=no)", + default="progress", + ) + Config._add_verbosity_ini( + parser, + Config.VERBOSITY_TEST_CASES, + help=( + "Specify a verbosity level for test case execution, overriding the main level. " + "Higher levels will provide more detailed information about each test case executed." + ), + ) + + +def pytest_configure(config: Config) -> None: + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, "terminalreporter") + if config.option.debug or config.option.traceconfig: + + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + + config.trace.root.setprocessor("pytest:config", mywriter) + + +def getreportopt(config: Config) -> str: + reportchars: str = config.option.reportchars + + old_aliases = {"F", "S"} + reportopts = "" + for char in reportchars: + if char in old_aliases: + char = char.lower() + if char == "a": + reportopts = "sxXEf" + elif char == "A": + reportopts = "PpsxXEf" + elif char == "N": + reportopts = "" + elif char not in reportopts: + reportopts += char + + if not config.option.disable_warnings and "w" not in reportopts: + reportopts = "w" + reportopts + elif config.option.disable_warnings and "w" in reportopts: + reportopts = reportopts.replace("w", "") + + return reportopts + + +@hookimpl(trylast=True) # after _pytest.runner +def pytest_report_teststatus(report: BaseReport) -> tuple[str, str, str]: + letter = "F" + if report.passed: + letter = "." 
+ elif report.skipped: + letter = "s" + + outcome: str = report.outcome + if report.when in ("collect", "setup", "teardown") and outcome == "failed": + outcome = "error" + letter = "E" + + return outcome, letter, outcome.upper() + + +@dataclasses.dataclass +class WarningReport: + """Simple structure to hold warnings information captured by ``pytest_warning_recorded``. + + :ivar str message: + User friendly message about the warning. + :ivar str|None nodeid: + nodeid that generated the warning (see ``get_location``). + :ivar tuple fslocation: + File system location of the source of the warning (see ``get_location``). + """ + + message: str + nodeid: str | None = None + fslocation: tuple[str, int] | None = None + + count_towards_summary: ClassVar = True + + def get_location(self, config: Config) -> str | None: + """Return the more user-friendly information about the location of a warning, or None.""" + if self.nodeid: + return self.nodeid + if self.fslocation: + filename, linenum = self.fslocation + relpath = bestrelpath(config.invocation_params.dir, absolutepath(filename)) + return f"{relpath}:{linenum}" + return None + + +@final +class TerminalReporter: + def __init__(self, config: Config, file: TextIO | None = None) -> None: + import _pytest.config + + self.config = config + self._numcollected = 0 + self._session: Session | None = None + self._showfspath: bool | None = None + + self.stats: dict[str, list[Any]] = {} + self._main_color: str | None = None + self._known_types: list[str] | None = None + self.startpath = config.invocation_params.dir + if file is None: + file = sys.stdout + self._tw = _pytest.config.create_terminal_writer(config, file) + self._screen_width = self._tw.fullwidth + self.currentfspath: None | Path | str | int = None + self.reportchars = getreportopt(config) + self.foldskipped = config.option.fold_skipped + self.hasmarkup = self._tw.hasmarkup + # isatty should be a method but was wrongly implemented as a boolean. + # We use CallableBool here to support both. 
+ self.isatty = compat.CallableBool(file.isatty()) + self._progress_nodeids_reported: set[str] = set() + self._timing_nodeids_reported: set[str] = set() + self._show_progress_info = self._determine_show_progress_info() + self._collect_report_last_write = timing.Instant() + self._already_displayed_warnings: int | None = None + self._keyboardinterrupt_memo: ExceptionRepr | None = None + + def _determine_show_progress_info( + self, + ) -> Literal["progress", "count", "times", False]: + """Return whether we should display progress information based on the current config.""" + # do not show progress if we are not capturing output (#3038) unless explicitly + # overridden by progress-even-when-capture-no + if ( + self.config.getoption("capture", "no") == "no" + and self.config.getini("console_output_style") + != "progress-even-when-capture-no" + ): + return False + # do not show progress if we are showing fixture setup/teardown + if self.config.getoption("setupshow", False): + return False + cfg: str = self.config.getini("console_output_style") + if cfg in {"progress", "progress-even-when-capture-no"}: + return "progress" + elif cfg == "count": + return "count" + elif cfg == "times": + return "times" + else: + return False + + @property + def verbosity(self) -> int: + verbosity: int = self.config.option.verbose + return verbosity + + @property + def showheader(self) -> bool: + return self.verbosity >= 0 + + @property + def no_header(self) -> bool: + return bool(self.config.option.no_header) + + @property + def no_summary(self) -> bool: + return bool(self.config.option.no_summary) + + @property + def showfspath(self) -> bool: + if self._showfspath is None: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) >= 0 + return self._showfspath + + @showfspath.setter + def showfspath(self, value: bool | None) -> None: + self._showfspath = value + + @property + def showlongtestinfo(self) -> bool: + return self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) > 0 + + def hasopt(self, char: str) -> bool: + char = {"xfailed": "x", "skipped": "s"}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid: str, res: str, **markup: bool) -> None: + fspath = self.config.rootpath / nodeid.split("::")[0] + if self.currentfspath is None or fspath != self.currentfspath: + if self.currentfspath is not None and self._show_progress_info: + self._write_progress_information_filling_space() + self.currentfspath = fspath + relfspath = bestrelpath(self.startpath, fspath) + self._tw.line() + self._tw.write(relfspath + " ") + self._tw.write(res, flush=True, **markup) + + def write_ensure_prefix(self, prefix: str, extra: str = "", **kwargs) -> None: + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self) -> None: + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def wrap_write( + self, + content: str, + *, + flush: bool = False, + margin: int = 8, + line_sep: str = "\n", + **markup: bool, + ) -> None: + """Wrap message with margin for progress info.""" + width_of_current_line = self._tw.width_of_current_line + wrapped = line_sep.join( + textwrap.wrap( + " " * width_of_current_line + content, + width=self._screen_width - margin, + drop_whitespace=True, + replace_whitespace=False, + ), + ) + wrapped = wrapped[width_of_current_line:] + self._tw.write(wrapped, flush=flush, **markup) + + def write(self, content: 
str, *, flush: bool = False, **markup: bool) -> None: + self._tw.write(content, flush=flush, **markup) + + def flush(self) -> None: + self._tw.flush() + + def write_line(self, line: str | bytes, **markup: bool) -> None: + if not isinstance(line, str): + line = str(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line: str, **markup: bool) -> None: + """Rewinds the terminal cursor to the beginning and writes the given line. + + :param erase: + If True, will also add spaces until the full terminal width to ensure + previous lines are properly erased. + + The rest of the keyword arguments are markup instructions. + """ + erase = markup.pop("erase", False) + if erase: + fill_count = self._tw.fullwidth - len(line) - 1 + fill = " " * fill_count + else: + fill = "" + line = str(line) + self._tw.write("\r" + line + fill, **markup) + + def write_sep( + self, + sep: str, + title: str | None = None, + fullwidth: int | None = None, + **markup: bool, + ) -> None: + self.ensure_newline() + self._tw.sep(sep, title, fullwidth, **markup) + + def section(self, title: str, sep: str = "=", **kw: bool) -> None: + self._tw.sep(sep, title, **kw) + + def line(self, msg: str, **kw: bool) -> None: + self._tw.line(msg, **kw) + + def _add_stats(self, category: str, items: Sequence[Any]) -> None: + set_main_color = category not in self.stats + self.stats.setdefault(category, []).extend(items) + if set_main_color: + self._set_main_color() + + def pytest_internalerror(self, excrepr: ExceptionRepr) -> bool: + for line in str(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return True + + def pytest_warning_recorded( + self, + warning_message: warnings.WarningMessage, + nodeid: str, + ) -> None: + from _pytest.warnings import warning_record_to_str + + fslocation = warning_message.filename, warning_message.lineno + message = warning_record_to_str(warning_message) + + warning_report = WarningReport( + fslocation=fslocation, message=message, nodeid=nodeid + ) + self._add_stats("warnings", [warning_report]) + + def pytest_plugin_registered(self, plugin: _PluggyPlugin) -> None: + if self.config.option.traceconfig: + msg = f"PLUGIN registered: {plugin}" + # XXX This event may happen during setup/teardown time + # which unfortunately captures our output here + # which garbles our output if we use self.write_line. + self.write_line(msg) + + def pytest_deselected(self, items: Sequence[Item]) -> None: + self._add_stats("deselected", items) + + def pytest_runtest_logstart( + self, nodeid: str, location: tuple[str, int | None, str] + ) -> None: + fspath, lineno, domain = location + # Ensure that the path is printed before the + # 1st test of a module starts running. + if self.showlongtestinfo: + line = self._locationline(nodeid, fspath, lineno, domain) + self.write_ensure_prefix(line, "") + self.flush() + elif self.showfspath: + self.write_fspath_result(nodeid, "") + self.flush() + + def pytest_runtest_logreport(self, report: TestReport) -> None: + self._tests_ran = True + rep = report + + res = TestShortLogReport( + *self.config.hook.pytest_report_teststatus(report=rep, config=self.config) + ) + category, letter, word = res.category, res.letter, res.word + if not isinstance(word, tuple): + markup = None + else: + word, markup = word + self._add_stats(category, [rep]) + if not letter and not word: + # Probably passed setup/teardown. 
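Aside: the (category, letter, word) triple unpacked into TestShortLogReport above is the same shape third-party plugins return from pytest_report_teststatus; a sketch mirroring the rerun example quoted in the TestShortLogReport docstring (the `rerun` attribute is hypothetical, plugin-set):

    def pytest_report_teststatus(report, config):
        if getattr(report, "rerun", False):  # hypothetical attribute
            return "rerun", "R", ("RERUN", {"yellow": True})
        return None  # let other implementations decide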
+ return + if markup is None: + was_xfail = hasattr(report, "wasxfail") + if rep.passed and not was_xfail: + markup = {"green": True} + elif rep.passed and was_xfail: + markup = {"yellow": True} + elif rep.failed: + markup = {"red": True} + elif rep.skipped: + markup = {"yellow": True} + else: + markup = {} + self._progress_nodeids_reported.add(rep.nodeid) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0: + self._tw.write(letter, **markup) + # When running in xdist, the logreport and logfinish of multiple + # items are interspersed, e.g. `logreport`, `logreport`, + # `logfinish`, `logfinish`. To avoid the "past edge" calculation + # from getting confused and overflowing (#7166), do the past edge + # printing here and not in logfinish, except for the 100% which + # should only be printed after all teardowns are finished. + if self._show_progress_info and not self._is_last_item: + self._write_progress_information_if_past_edge() + else: + line = self._locationline(rep.nodeid, *rep.location) + running_xdist = hasattr(rep, "node") + if not running_xdist: + self.write_ensure_prefix(line, word, **markup) + if rep.skipped or hasattr(report, "wasxfail"): + reason = _get_raw_skip_reason(rep) + if self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) < 2: + available_width = ( + (self._tw.fullwidth - self._tw.width_of_current_line) + - len(" [100%]") + - 1 + ) + formatted_reason = _format_trimmed( + " ({})", reason, available_width + ) + else: + formatted_reason = f" ({reason})" + + if reason and formatted_reason is not None: + self.wrap_write(formatted_reason) + if self._show_progress_info: + self._write_progress_information_filling_space() + else: + self.ensure_newline() + self._tw.write(f"[{rep.node.gateway.id}]") + if self._show_progress_info: + self._tw.write( + self._get_progress_information_message() + " ", cyan=True + ) + else: + self._tw.write(" ") + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + self.flush() + + @property + def _is_last_item(self) -> bool: + assert self._session is not None + return len(self._progress_nodeids_reported) == self._session.testscollected + + @hookimpl(wrapper=True) + def pytest_runtestloop(self) -> Generator[None, object, object]: + result = yield + + # Write the final/100% progress -- deferred until the loop is complete. 
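Aside: the percentage column produced in _get_progress_information_message() below is plain integer math over the reported node IDs; for example, with 2 of 3 tests reported:

    reported, collected = 2, 3
    assert f" [{reported * 100 // collected:3d}%]" == " [ 66%]"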
+ if ( + self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) <= 0 + and self._show_progress_info + and self._progress_nodeids_reported + ): + self._write_progress_information_filling_space() + + return result + + def _get_progress_information_message(self) -> str: + assert self._session + collected = self._session.testscollected + if self._show_progress_info == "count": + if collected: + progress = len(self._progress_nodeids_reported) + counter_format = f"{{:{len(str(collected))}d}}" + format_string = f" [{counter_format}/{{}}]" + return format_string.format(progress, collected) + return f" [ {collected} / {collected} ]" + if self._show_progress_info == "times": + if not collected: + return "" + all_reports = ( + self._get_reports_to_display("passed") + + self._get_reports_to_display("xpassed") + + self._get_reports_to_display("failed") + + self._get_reports_to_display("xfailed") + + self._get_reports_to_display("skipped") + + self._get_reports_to_display("error") + + self._get_reports_to_display("") + ) + current_location = all_reports[-1].location[0] + not_reported = [ + r for r in all_reports if r.nodeid not in self._timing_nodeids_reported + ] + tests_in_module = sum( + i.location[0] == current_location for i in self._session.items + ) + tests_completed = sum( + r.when == "setup" + for r in not_reported + if r.location[0] == current_location + ) + last_in_module = tests_completed == tests_in_module + if self.showlongtestinfo or last_in_module: + self._timing_nodeids_reported.update(r.nodeid for r in not_reported) + return format_node_duration( + sum(r.duration for r in not_reported if isinstance(r, TestReport)) + ) + return "" + if collected: + return f" [{len(self._progress_nodeids_reported) * 100 // collected:3d}%]" + return " [100%]" + + def _write_progress_information_if_past_edge(self) -> None: + w = self._width_of_current_line + if self._show_progress_info == "count": + assert self._session + num_tests = self._session.testscollected + progress_length = len(f" [{num_tests}/{num_tests}]") + elif self._show_progress_info == "times": + progress_length = len(" 99h 59m") + else: + progress_length = len(" [100%]") + past_edge = w + progress_length + 1 >= self._screen_width + if past_edge: + main_color, _ = self._get_main_color() + msg = self._get_progress_information_message() + self._tw.write(msg + "\n", **{main_color: True}) + + def _write_progress_information_filling_space(self) -> None: + color, _ = self._get_main_color() + msg = self._get_progress_information_message() + w = self._width_of_current_line + fill = self._tw.fullwidth - w - 1 + self.write(msg.rjust(fill), flush=True, **{color: True}) + + @property + def _width_of_current_line(self) -> int: + """Return the width of the current line.""" + return self._tw.width_of_current_line + + def pytest_collection(self) -> None: + if self.isatty(): + if self.config.option.verbose >= 0: + self.write("collecting ... ", flush=True, bold=True) + elif self.config.option.verbose >= 1: + self.write("collecting ... 
", flush=True, bold=True) + + def pytest_collectreport(self, report: CollectReport) -> None: + if report.failed: + self._add_stats("error", [report]) + elif report.skipped: + self._add_stats("skipped", [report]) + items = [x for x in report.result if isinstance(x, Item)] + self._numcollected += len(items) + if self.isatty(): + self.report_collect() + + def report_collect(self, final: bool = False) -> None: + if self.config.option.verbose < 0: + return + + if not final: + # Only write the "collecting" report every `REPORT_COLLECTING_RESOLUTION`. + if ( + self._collect_report_last_write.elapsed().seconds + < REPORT_COLLECTING_RESOLUTION + ): + return + self._collect_report_last_write = timing.Instant() + + errors = len(self.stats.get("error", [])) + skipped = len(self.stats.get("skipped", [])) + deselected = len(self.stats.get("deselected", [])) + selected = self._numcollected - deselected + line = "collected " if final else "collecting " + line += ( + str(self._numcollected) + " item" + ("" if self._numcollected == 1 else "s") + ) + if errors: + line += f" / {errors} error{'s' if errors != 1 else ''}" + if deselected: + line += f" / {deselected} deselected" + if skipped: + line += f" / {skipped} skipped" + if self._numcollected > selected: + line += f" / {selected} selected" + if self.isatty(): + self.rewrite(line, bold=True, erase=True) + if final: + self.write("\n") + else: + self.write_line(line) + + @hookimpl(trylast=True) + def pytest_sessionstart(self, session: Session) -> None: + self._session = session + self._session_start = timing.Instant() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + if not self.no_header: + msg = f"platform {sys.platform} -- Python {verinfo}" + pypy_version_info = getattr(sys, "pypy_version_info", None) + if pypy_version_info: + verinfo = ".".join(map(str, pypy_version_info[:3])) + msg += f"[pypy-{verinfo}-{pypy_version_info[3]}]" + msg += f", pytest-{_pytest._version.version}, pluggy-{pluggy.__version__}" + if ( + self.verbosity > 0 + or self.config.option.debug + or getattr(self.config.option, "pastebin", None) + ): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, start_path=self.startpath + ) + self._write_report_lines_from_hooks(lines) + + def _write_report_lines_from_hooks( + self, lines: Sequence[str | Sequence[str]] + ) -> None: + for line_or_lines in reversed(lines): + if isinstance(line_or_lines, str): + self.write_line(line_or_lines) + else: + for line in line_or_lines: + self.write_line(line) + + def pytest_report_header(self, config: Config) -> list[str]: + result = [f"rootdir: {config.rootpath}"] + + if config.inipath: + result.append("configfile: " + bestrelpath(config.rootpath, config.inipath)) + + if config.args_source == Config.ArgsSource.TESTPATHS: + testpaths: list[str] = config.getini("testpaths") + result.append("testpaths: {}".format(", ".join(testpaths))) + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + result.append( + "plugins: {}".format(", ".join(_plugin_nameversions(plugininfo))) + ) + return result + + def pytest_collection_finish(self, session: Session) -> None: + self.report_collect(True) + + lines = self.config.hook.pytest_report_collectionfinish( + config=self.config, + start_path=self.startpath, + items=session.items, + ) + self._write_report_lines_from_hooks(lines) + + if self.config.getoption("collectonly"): + if session.items: + 
if self.config.option.verbose > -1: + self._tw.line("") + self._printcollecteditems(session.items) + + failed = self.stats.get("failed") + if failed: + self._tw.sep("!", "collection failures") + for rep in failed: + rep.toterminal(self._tw) + + def _printcollecteditems(self, items: Sequence[Item]) -> None: + test_cases_verbosity = self.config.get_verbosity(Config.VERBOSITY_TEST_CASES) + if test_cases_verbosity < 0: + if test_cases_verbosity < -1: + counts = Counter(item.nodeid.split("::", 1)[0] for item in items) + for name, count in sorted(counts.items()): + self._tw.line(f"{name}: {count}") + else: + for item in items: + self._tw.line(item.nodeid) + return + stack: list[Node] = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[: len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack) :]: + stack.append(col) + indent = (len(stack) - 1) * " " + self._tw.line(f"{indent}{col}") + if test_cases_verbosity >= 1: + obj = getattr(col, "obj", None) + doc = inspect.getdoc(obj) if obj else None + if doc: + for line in doc.splitlines(): + self._tw.line("{}{}".format(indent + " ", line)) + + @hookimpl(wrapper=True) + def pytest_sessionfinish( + self, session: Session, exitstatus: int | ExitCode + ) -> Generator[None]: + result = yield + self._tw.line("") + summary_exit_codes = ( + ExitCode.OK, + ExitCode.TESTS_FAILED, + ExitCode.INTERRUPTED, + ExitCode.USAGE_ERROR, + ExitCode.NO_TESTS_COLLECTED, + ) + if exitstatus in summary_exit_codes and not self.no_summary: + self.config.hook.pytest_terminal_summary( + terminalreporter=self, exitstatus=exitstatus, config=self.config + ) + if session.shouldfail: + self.write_sep("!", str(session.shouldfail), red=True) + if exitstatus == ExitCode.INTERRUPTED: + self._report_keyboardinterrupt() + self._keyboardinterrupt_memo = None + elif session.shouldstop: + self.write_sep("!", str(session.shouldstop), red=True) + self.summary_stats() + return result + + @hookimpl(wrapper=True) + def pytest_terminal_summary(self) -> Generator[None]: + self.summary_errors() + self.summary_failures() + self.summary_xfailures() + self.summary_warnings() + self.summary_passes() + self.summary_xpasses() + try: + return (yield) + finally: + self.short_test_summary() + # Display any extra warnings from teardown here (if any). + self.summary_warnings() + + def pytest_keyboard_interrupt(self, excinfo: ExceptionInfo[BaseException]) -> None: + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self) -> None: + if self._keyboardinterrupt_memo is not None: + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self) -> None: + excrepr = self._keyboardinterrupt_memo + assert excrepr is not None + assert excrepr.reprcrash is not None + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + excrepr.reprcrash.toterminal(self._tw) + self._tw.line( + "(to show a full traceback on KeyboardInterrupt use --full-trace)", + yellow=True, + ) + + def _locationline( + self, nodeid: str, fspath: str, lineno: int | None, domain: str + ) -> str: + def mkrel(nodeid: str) -> str: + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[: -len(domain)] + values = domain.split("[") + values[0] = values[0].replace(".", "::") # don't replace '.' 
in params + line += "[".join(values) + return line + + # fspath comes from testid which has a "/"-normalized path. + if fspath: + res = mkrel(nodeid) + if self.verbosity >= 2 and nodeid.split("::")[0] != fspath.replace( + "\\", nodes.SEP + ): + res += " <- " + bestrelpath(self.startpath, Path(fspath)) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + head_line = rep.head_line + if head_line: + return head_line + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # Summaries for sessionfinish. + # + def getreports(self, name: str): + return [x for x in self.stats.get(name, ()) if not hasattr(x, "_pdbshown")] + + def summary_warnings(self) -> None: + if self.hasopt("w"): + all_warnings: list[WarningReport] | None = self.stats.get("warnings") + if not all_warnings: + return + + final = self._already_displayed_warnings is not None + if final: + warning_reports = all_warnings[self._already_displayed_warnings :] + else: + warning_reports = all_warnings + self._already_displayed_warnings = len(warning_reports) + if not warning_reports: + return + + reports_grouped_by_message: dict[str, list[WarningReport]] = {} + for wr in warning_reports: + reports_grouped_by_message.setdefault(wr.message, []).append(wr) + + def collapsed_location_report(reports: list[WarningReport]) -> str: + locations = [] + for w in reports: + location = w.get_location(self.config) + if location: + locations.append(location) + + if len(locations) < 10: + return "\n".join(map(str, locations)) + + counts_by_filename = Counter( + str(loc).split("::", 1)[0] for loc in locations + ) + return "\n".join( + "{}: {} warning{}".format(k, v, "s" if v > 1 else "") + for k, v in counts_by_filename.items() + ) + + title = "warnings summary (final)" if final else "warnings summary" + self.write_sep("=", title, yellow=True, bold=False) + for message, message_reports in reports_grouped_by_message.items(): + maybe_location = collapsed_location_report(message_reports) + if maybe_location: + self._tw.line(maybe_location) + lines = message.splitlines() + indented = "\n".join(" " + x for x in lines) + message = indented.rstrip() + else: + message = message.rstrip() + self._tw.line(message) + self._tw.line() + self._tw.line( + "-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html" + ) + + def summary_passes(self) -> None: + self.summary_passes_combined("passed", "PASSES", "P") + + def summary_xpasses(self) -> None: + self.summary_passes_combined("xpassed", "XPASSES", "X") + + def summary_passes_combined( + self, which_reports: str, sep_title: str, needed_opt: str + ) -> None: + if self.config.option.tbstyle != "no": + if self.hasopt(needed_opt): + reports: list[TestReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + for rep in reports: + if rep.sections: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, green=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def _get_teardown_reports(self, nodeid: str) -> list[TestReport]: + reports = self.getreports("") + return [ + report + for report in reports + if report.when == "teardown" and report.nodeid == nodeid + ] + + def _handle_teardown_sections(self, nodeid: str) -> None: + for report in self._get_teardown_reports(nodeid): + self.print_teardown_sections(report) + + def 
print_teardown_sections(self, rep: TestReport) -> None: + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + if "teardown" in secname: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_failures(self) -> None: + style = self.config.option.tbstyle + self.summary_failures_combined("failed", "FAILURES", style=style) + + def summary_xfailures(self) -> None: + show_tb = self.config.option.xfail_tb + style = self.config.option.tbstyle if show_tb else "no" + self.summary_failures_combined("xfailed", "XFAILURES", style=style) + + def summary_failures_combined( + self, + which_reports: str, + sep_title: str, + *, + style: str, + needed_opt: str | None = None, + ) -> None: + if style != "no": + if not needed_opt or self.hasopt(needed_opt): + reports: list[BaseReport] = self.getreports(which_reports) + if not reports: + return + self.write_sep("=", sep_title) + if style == "line": + for rep in reports: + line = self._getcrashline(rep) + self.write_line(line) + else: + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + self._handle_teardown_sections(rep.nodeid) + + def summary_errors(self) -> None: + if self.config.option.tbstyle != "no": + reports: list[BaseReport] = self.getreports("error") + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats["error"]: + msg = self._getfailureheadline(rep) + if rep.when == "collect": + msg = "ERROR collecting " + msg + else: + msg = f"ERROR at {rep.when} of {msg}" + self.write_sep("_", msg, red=True, bold=True) + self._outrep_summary(rep) + + def _outrep_summary(self, rep: BaseReport) -> None: + rep.toterminal(self._tw) + showcapture = self.config.option.showcapture + if showcapture == "no": + return + for secname, content in rep.sections: + if showcapture != "all" and showcapture not in secname: + continue + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self) -> None: + if self.verbosity < -1: + return + + session_duration = self._session_start.elapsed() + (parts, main_color) = self.build_summary_stats_line() + line_parts = [] + + display_sep = self.verbosity >= 0 + if display_sep: + fullwidth = self._tw.fullwidth + for text, markup in parts: + with_markup = self._tw.markup(text, **markup) + if display_sep: + fullwidth += len(with_markup) - len(text) + line_parts.append(with_markup) + msg = ", ".join(line_parts) + + main_markup = {main_color: True} + duration = f" in {format_session_duration(session_duration.seconds)}" + duration_with_markup = self._tw.markup(duration, **main_markup) + if display_sep: + fullwidth += len(duration_with_markup) - len(duration) + msg += duration_with_markup + + if display_sep: + markup_for_end_sep = self._tw.markup("", **main_markup) + if markup_for_end_sep.endswith("\x1b[0m"): + markup_for_end_sep = markup_for_end_sep[:-4] + fullwidth += len(markup_for_end_sep) + msg += markup_for_end_sep + + if display_sep: + self.write_sep("=", msg, fullwidth=fullwidth, **main_markup) + else: + self.write_line(msg, **main_markup) + + def short_test_summary(self) -> None: + if not self.reportchars: + return + + def show_simple(lines: list[str], *, stat: str) -> None: + failed = self.stats.get(stat, []) + if not failed: + return + config = self.config + 
for rep in failed: + color = _color_for_type.get(stat, _color_for_type_default) + line = _get_line_with_reprcrash_message( + config, rep, self._tw, {color: True} + ) + lines.append(line) + + def show_xfailed(lines: list[str]) -> None: + xfailed = self.stats.get("xfailed", []) + for rep in xfailed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + + lines.append(line) + + def show_xpassed(lines: list[str]) -> None: + xpassed = self.stats.get("xpassed", []) + for rep in xpassed: + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.wasxfail + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped_folded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + fskips = _folded_skips(self.startpath, skipped) if skipped else [] + if not fskips: + return + verbose_word, verbose_markup = skipped[0]._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + prefix = "Skipped: " + for num, fspath, lineno, reason in fskips: + if reason.startswith(prefix): + reason = reason[len(prefix) :] + if lineno is not None: + lines.append(f"{markup_word} [{num}] {fspath}:{lineno}: {reason}") + else: + lines.append(f"{markup_word} [{num}] {fspath}: {reason}") + + def show_skipped_unfolded(lines: list[str]) -> None: + skipped: list[CollectReport] = self.stats.get("skipped", []) + + for rep in skipped: + assert rep.longrepr is not None + assert isinstance(rep.longrepr, tuple), (rep, rep.longrepr) + assert len(rep.longrepr) == 3, (rep, rep.longrepr) + + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + self.config, {_color_for_type["warnings"]: True} + ) + markup_word = self._tw.markup(verbose_word, **verbose_markup) + nodeid = _get_node_id_with_markup(self._tw, self.config, rep) + line = f"{markup_word} {nodeid}" + reason = rep.longrepr[2] + if reason: + line += " - " + str(reason) + lines.append(line) + + def show_skipped(lines: list[str]) -> None: + if self.foldskipped: + show_skipped_folded(lines) + else: + show_skipped_unfolded(lines) + + REPORTCHAR_ACTIONS: Mapping[str, Callable[[list[str]], None]] = { + "x": show_xfailed, + "X": show_xpassed, + "f": partial(show_simple, stat="failed"), + "s": show_skipped, + "p": partial(show_simple, stat="passed"), + "E": partial(show_simple, stat="error"), + } + + lines: list[str] = [] + for char in self.reportchars: + action = REPORTCHAR_ACTIONS.get(char) + if action: # skipping e.g. "P" (passed with output) here. 
+ action(lines) + + if lines: + self.write_sep("=", "short test summary info", cyan=True, bold=True) + for line in lines: + self.write_line(line) + + def _get_main_color(self) -> tuple[str, list[str]]: + if self._main_color is None or self._known_types is None or self._is_last_item: + self._set_main_color() + assert self._main_color + assert self._known_types + return self._main_color, self._known_types + + def _determine_main_color(self, unknown_type_seen: bool) -> str: + stats = self.stats + if "failed" in stats or "error" in stats: + main_color = "red" + elif "warnings" in stats or "xpassed" in stats or unknown_type_seen: + main_color = "yellow" + elif "passed" in stats or not self._is_last_item: + main_color = "green" + else: + main_color = "yellow" + return main_color + + def _set_main_color(self) -> None: + unknown_types: list[str] = [] + for found_type in self.stats: + if found_type: # setup/teardown reports have an empty key, ignore them + if found_type not in KNOWN_TYPES and found_type not in unknown_types: + unknown_types.append(found_type) + self._known_types = list(KNOWN_TYPES) + unknown_types + self._main_color = self._determine_main_color(bool(unknown_types)) + + def build_summary_stats_line(self) -> tuple[list[tuple[str, dict[str, bool]]], str]: + """ + Build the parts used in the last summary stats line. + + The summary stats line is the line shown at the end, "=== 12 passed, 2 errors in Xs===". + + This function builds a list of the "parts" that make up for the text in that line, in + the example above it would be:: + + [ + ("12 passed", {"green": True}), + ("2 errors", {"red": True} + ] + + That last dict for each line is a "markup dictionary", used by TerminalWriter to + color output. + + The final color of the line is also determined by this function, and is the second + element of the returned tuple. 
+ """ + if self.config.getoption("collectonly"): + return self._build_collect_only_summary_stats_line() + else: + return self._build_normal_summary_stats_line() + + def _get_reports_to_display(self, key: str) -> list[Any]: + """Get test/collection reports for the given status key, such as `passed` or `error`.""" + reports = self.stats.get(key, []) + return [x for x in reports if getattr(x, "count_towards_summary", True)] + + def _build_normal_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + main_color, known_types = self._get_main_color() + parts = [] + + for key in known_types: + reports = self._get_reports_to_display(key) + if reports: + count = len(reports) + color = _color_for_type.get(key, _color_for_type_default) + markup = {color: True, "bold": color == main_color} + parts.append(("%d %s" % pluralize(count, key), markup)) # noqa: UP031 + + if not parts: + parts = [("no tests ran", {_color_for_type_default: True})] + + return parts, main_color + + def _build_collect_only_summary_stats_line( + self, + ) -> tuple[list[tuple[str, dict[str, bool]]], str]: + deselected = len(self._get_reports_to_display("deselected")) + errors = len(self._get_reports_to_display("error")) + + if self._numcollected == 0: + parts = [("no tests collected", {"yellow": True})] + main_color = "yellow" + + elif deselected == 0: + main_color = "green" + collected_output = "%d %s collected" % pluralize(self._numcollected, "test") # noqa: UP031 + parts = [(collected_output, {main_color: True})] + else: + all_tests_were_deselected = self._numcollected == deselected + if all_tests_were_deselected: + main_color = "yellow" + collected_output = f"no tests collected ({deselected} deselected)" + else: + main_color = "green" + selected = self._numcollected - deselected + collected_output = f"{selected}/{self._numcollected} tests collected ({deselected} deselected)" + + parts = [(collected_output, {main_color: True})] + + if errors: + main_color = _color_for_type["error"] + parts += [("%d %s" % pluralize(errors, "error"), {main_color: True})] # noqa: UP031 + + return parts, main_color + + +def _get_node_id_with_markup(tw: TerminalWriter, config: Config, rep: BaseReport): + nodeid = config.cwd_relative_nodeid(rep.nodeid) + path, *parts = nodeid.split("::") + if parts: + parts_markup = tw.markup("::".join(parts), bold=True) + return path + "::" + parts_markup + else: + return path + + +def _format_trimmed(format: str, msg: str, available_width: int) -> str | None: + """Format msg into format, ellipsizing it if doesn't fit in available_width. + + Returns None if even the ellipsis can't fit. + """ + # Only use the first line. + i = msg.find("\n") + if i != -1: + msg = msg[:i] + + ellipsis = "..." 
+ format_width = wcswidth(format.format("")) + if format_width + len(ellipsis) > available_width: + return None + + if format_width + wcswidth(msg) > available_width: + available_width -= len(ellipsis) + msg = msg[:available_width] + while format_width + wcswidth(msg) > available_width: + msg = msg[:-1] + msg += ellipsis + + return format.format(msg) + + +def _get_line_with_reprcrash_message( + config: Config, rep: BaseReport, tw: TerminalWriter, word_markup: dict[str, bool] +) -> str: + """Get summary line for a report, trying to add reprcrash message.""" + verbose_word, verbose_markup = rep._get_verbose_word_with_markup( + config, word_markup + ) + word = tw.markup(verbose_word, **verbose_markup) + node = _get_node_id_with_markup(tw, config, rep) + + line = f"{word} {node}" + line_width = wcswidth(line) + + try: + # Type ignored intentionally -- possible AttributeError expected. + msg = rep.longrepr.reprcrash.message # type: ignore[union-attr] + except AttributeError: + pass + else: + if ( + running_on_ci() or config.option.verbose >= 2 + ) and not config.option.force_short_summary: + msg = f" - {msg}" + else: + available_width = tw.fullwidth - line_width + msg = _format_trimmed(" - {}", msg, available_width) + if msg is not None: + line += msg + + return line + + +def _folded_skips( + startpath: Path, + skipped: Sequence[CollectReport], +) -> list[tuple[int, str, int | None, str]]: + d: dict[tuple[str, int | None, str], list[CollectReport]] = {} + for event in skipped: + assert event.longrepr is not None + assert isinstance(event.longrepr, tuple), (event, event.longrepr) + assert len(event.longrepr) == 3, (event, event.longrepr) + fspath, lineno, reason = event.longrepr + # For consistency, report all fspaths in relative form. + fspath = bestrelpath(startpath, Path(fspath)) + keywords = getattr(event, "keywords", {}) + # Folding reports with global pytestmark variable. + # This is a workaround, because for now we cannot identify the scope of a skip marker + # TODO: Revisit after marks scope would be fixed. + if ( + event.when == "setup" + and "skip" in keywords + and "pytestmark" not in keywords + ): + key: tuple[str, int | None, str] = (fspath, None, reason) + else: + key = (fspath, lineno, reason) + d.setdefault(key, []).append(event) + values: list[tuple[int, str, int | None, str]] = [] + for key, events in d.items(): + values.append((len(events), *key)) + return values + + +_color_for_type = { + "failed": "red", + "error": "red", + "warnings": "yellow", + "passed": "green", +} +_color_for_type_default = "yellow" + + +def pluralize(count: int, noun: str) -> tuple[int, str]: + # No need to pluralize words such as `failed` or `passed`. + if noun not in ["error", "warnings", "test"]: + return count, noun + + # The `warnings` key is plural. To avoid API breakage, we keep it that way but + # set it to singular here so we can determine plurality in the same way as we do + # for `error`. + noun = noun.replace("warnings", "warning") + + return count, noun + "s" if count != 1 else noun + + +def _plugin_nameversions(plugininfo) -> list[str]: + values: list[str] = [] + for plugin, dist in plugininfo: + # Gets us name and version! + name = f"{dist.project_name}-{dist.version}" + # Questionable convenience, but it keeps things short. + if name.startswith("pytest-"): + name = name[7:] + # We decided to print python package names they can have more than one plugin. 
+ if name not in values: + values.append(name) + return values + + +def format_session_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the final summary.""" + if seconds < 60: + return f"{seconds:.2f}s" + else: + dt = datetime.timedelta(seconds=int(seconds)) + return f"{seconds:.2f}s ({dt})" + + +def format_node_duration(seconds: float) -> str: + """Format the given seconds in a human readable manner to show in the test progress.""" + # The formatting is designed to be compact and readable, with at most 7 characters + # for durations below 100 hours. + if seconds < 0.00001: + return f" {seconds * 1000000:.3f}us" + if seconds < 0.0001: + return f" {seconds * 1000000:.2f}us" + if seconds < 0.001: + return f" {seconds * 1000000:.1f}us" + if seconds < 0.01: + return f" {seconds * 1000:.3f}ms" + if seconds < 0.1: + return f" {seconds * 1000:.2f}ms" + if seconds < 1: + return f" {seconds * 1000:.1f}ms" + if seconds < 60: + return f" {seconds:.3f}s" + if seconds < 3600: + return f" {seconds // 60:.0f}m {seconds % 60:.0f}s" + return f" {seconds // 3600:.0f}h {(seconds % 3600) // 60:.0f}m" + + +def _get_raw_skip_reason(report: TestReport) -> str: + """Get the reason string of a skip/xfail/xpass test report. + + The string is just the part given by the user. + """ + if hasattr(report, "wasxfail"): + reason = report.wasxfail + if reason.startswith("reason: "): + reason = reason[len("reason: ") :] + return reason + else: + assert report.skipped + assert isinstance(report.longrepr, tuple) + _, _, reason = report.longrepr + if reason.startswith("Skipped: "): + reason = reason[len("Skipped: ") :] + elif reason == "Skipped": + reason = "" + return reason diff --git a/venv/lib/python3.10/site-packages/_pytest/threadexception.py b/venv/lib/python3.10/site-packages/_pytest/threadexception.py new file mode 100644 index 0000000000000000000000000000000000000000..eb57783be261ebf30f1c03a04cab471f5ec6f063 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/threadexception.py @@ -0,0 +1,152 @@ +from __future__ import annotations + +import collections +from collections.abc import Callable +import functools +import sys +import threading +import traceback +from typing import NamedTuple +from typing import TYPE_CHECKING +import warnings + +from _pytest.config import Config +from _pytest.nodes import Item +from _pytest.stash import StashKey +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +if TYPE_CHECKING: + pass + +if sys.version_info < (3, 11): + from exceptiongroup import ExceptionGroup + + +class ThreadExceptionMeta(NamedTuple): + msg: str + cause_msg: str + exc_value: BaseException | None + + +thread_exceptions: StashKey[collections.deque[ThreadExceptionMeta | BaseException]] = ( + StashKey() +) + + +def collect_thread_exception(config: Config) -> None: + pop_thread_exception = config.stash[thread_exceptions].pop + errors: list[pytest.PytestUnhandledThreadExceptionWarning | RuntimeError] = [] + meta = None + hook_error = None + try: + while True: + try: + meta = pop_thread_exception() + except IndexError: + break + + if isinstance(meta, BaseException): + hook_error = RuntimeError("Failed to process thread exception") + hook_error.__cause__ = meta + errors.append(hook_error) + continue + + msg = meta.msg + try: + warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg)) + except pytest.PytestUnhandledThreadExceptionWarning as e: + # This except happens when the warning is treated as an error (e.g. `-Werror`). 
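To make the behaviour concrete, a hedged sketch of the situation this plugin handles (the test name and the deliberate division error are made up for illustration):

```python
# An exception escaping a worker thread does not fail the test directly;
# the threading.excepthook installed by this plugin records it, and pytest
# later emits PytestUnhandledThreadExceptionWarning for it (which the
# collection code above turns into an error under -Werror).
import threading

def test_spawns_failing_thread() -> None:
    t = threading.Thread(target=lambda: 1 / 0)
    t.start()
    t.join()  # the ZeroDivisionError surfaces via the hook, not here
```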
+ if meta.exc_value is not None: + # Exceptions have a better way to show the traceback, but + # warnings do not, so hide the traceback from the msg and + # set the cause so the traceback shows up in the right place. + e.args = (meta.cause_msg,) + e.__cause__ = meta.exc_value + errors.append(e) + + if len(errors) == 1: + raise errors[0] + if errors: + raise ExceptionGroup("multiple thread exception warnings", errors) + finally: + del errors, meta, hook_error + + +def cleanup( + *, config: Config, prev_hook: Callable[[threading.ExceptHookArgs], object] +) -> None: + try: + try: + # We don't join threads here, so exceptions raised from any + # threads still running by the time _threading_atexits joins them + # do not get captured (see #13027). + collect_thread_exception(config) + finally: + threading.excepthook = prev_hook + finally: + del config.stash[thread_exceptions] + + +def thread_exception_hook( + args: threading.ExceptHookArgs, + /, + *, + append: Callable[[ThreadExceptionMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the excepthook finishes and before the metadata object is + # collected by a pytest hook + thread_name = "" if args.thread is None else args.thread.name + summary = f"Exception in thread {thread_name}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + args.exc_type, + args.exc_value, + args.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(args.thread) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + ThreadExceptionMeta( + # Compute these strings here as they might change later + msg=msg, + cause_msg=cause_msg, + exc_value=args.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_thread_exception and once by sys.excepthook + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. + raise + + +def pytest_configure(config: Config) -> None: + prev_hook = threading.excepthook + deque: collections.deque[ThreadExceptionMeta | BaseException] = collections.deque() + config.stash[thread_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + threading.excepthook = functools.partial(thread_exception_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_thread_exception(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_thread_exception(item.config) diff --git a/venv/lib/python3.10/site-packages/_pytest/timing.py b/venv/lib/python3.10/site-packages/_pytest/timing.py new file mode 100644 index 0000000000000000000000000000000000000000..221eeffc4fd00fe8b386de51ddf8e549d22d88cc --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/timing.py @@ -0,0 +1,94 @@ +"""Indirection for time functions. + +We intentionally grab some "time" functions internally to avoid tests mocking "time" to affect +pytest runtime information (issue #185). + +Fixture "mock_timing" also interacts with this module for pytest's own tests. 
+""" + +from __future__ import annotations + +import dataclasses +from datetime import datetime +from datetime import timezone +from time import perf_counter +from time import sleep +from time import time +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from pytest import MonkeyPatch + + +@dataclasses.dataclass(frozen=True) +class Instant: + """ + Represents an instant in time, used to both get the timestamp value and to measure + the duration of a time span. + + Inspired by Rust's `std::time::Instant`. + """ + + # Creation time of this instant, using time.time(), to measure actual time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + time: float = dataclasses.field(default_factory=lambda: time(), init=False) + + # Performance counter tick of the instant, used to measure precise elapsed time. + # Note: using a `lambda` to correctly get the mocked time via `MockTiming`. + perf_count: float = dataclasses.field( + default_factory=lambda: perf_counter(), init=False + ) + + def elapsed(self) -> Duration: + """Measure the duration since `Instant` was created.""" + return Duration(start=self, stop=Instant()) + + def as_utc(self) -> datetime: + """Instant as UTC datetime.""" + return datetime.fromtimestamp(self.time, timezone.utc) + + +@dataclasses.dataclass(frozen=True) +class Duration: + """A span of time as measured by `Instant.elapsed()`.""" + + start: Instant + stop: Instant + + @property + def seconds(self) -> float: + """Elapsed time of the duration in seconds, measured using a performance counter for precise timing.""" + return self.stop.perf_count - self.start.perf_count + + +@dataclasses.dataclass +class MockTiming: + """Mocks _pytest.timing with a known object that can be used to control timing in tests + deterministically. + + pytest itself should always use functions from `_pytest.timing` instead of `time` directly. + + This then allows us more control over time during testing, if testing code also + uses `_pytest.timing` functions. 
+ + Time is static, and only advances through `sleep` calls, thus tests might sleep over large + numbers and obtain accurate time() calls at the end, making tests reliable and instant.""" + + _current_time: float = datetime(2020, 5, 22, 14, 20, 50).timestamp() + + def sleep(self, seconds: float) -> None: + self._current_time += seconds + + def time(self) -> float: + return self._current_time + + def patch(self, monkeypatch: MonkeyPatch) -> None: + from _pytest import timing # noqa: PLW0406 + + monkeypatch.setattr(timing, "sleep", self.sleep) + monkeypatch.setattr(timing, "time", self.time) + monkeypatch.setattr(timing, "perf_counter", self.time) + + +__all__ = ["perf_counter", "sleep", "time"] diff --git a/venv/lib/python3.10/site-packages/_pytest/tmpdir.py b/venv/lib/python3.10/site-packages/_pytest/tmpdir.py new file mode 100644 index 0000000000000000000000000000000000000000..dcd5784f88f631e4e4d413a36c0ec389bff0c440 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/tmpdir.py @@ -0,0 +1,312 @@ +# mypy: allow-untyped-defs +"""Support for providing temporary directories to test functions.""" + +from __future__ import annotations + +from collections.abc import Generator +import dataclasses +import os +from pathlib import Path +import re +from shutil import rmtree +import tempfile +from typing import Any +from typing import final +from typing import Literal + +from .pathlib import cleanup_dead_symlinks +from .pathlib import LOCK_TIMEOUT +from .pathlib import make_numbered_dir +from .pathlib import make_numbered_dir_with_cleanup +from .pathlib import rm_rf +from _pytest.compat import get_user_id +from _pytest.config import Config +from _pytest.config import ExitCode +from _pytest.config import hookimpl +from _pytest.config.argparsing import Parser +from _pytest.deprecated import check_ispytest +from _pytest.fixtures import fixture +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Item +from _pytest.reports import TestReport +from _pytest.stash import StashKey + + +tmppath_result_key = StashKey[dict[str, bool]]() +RetentionType = Literal["all", "failed", "none"] + + +@final +@dataclasses.dataclass +class TempPathFactory: + """Factory for temporary directories under the common base temp directory, + as discussed at :ref:`temporary directory location and retention`. + """ + + _given_basetemp: Path | None + # pluggy TagTracerSub, not currently exposed, so Any. + _trace: Any + _basetemp: Path | None + _retention_count: int + _retention_policy: RetentionType + + def __init__( + self, + given_basetemp: Path | None, + retention_count: int, + retention_policy: RetentionType, + trace, + basetemp: Path | None = None, + *, + _ispytest: bool = False, + ) -> None: + check_ispytest(_ispytest) + if given_basetemp is None: + self._given_basetemp = None + else: + # Use os.path.abspath() to get absolute path instead of resolve() as it + # does not work the same in all platforms (see #4427). + # Path.absolute() exists, but it is not public (see https://bugs.python.org/issue25012). + self._given_basetemp = Path(os.path.abspath(str(given_basetemp))) + self._trace = trace + self._retention_count = retention_count + self._retention_policy = retention_policy + self._basetemp = basetemp + + @classmethod + def from_config( + cls, + config: Config, + *, + _ispytest: bool = False, + ) -> TempPathFactory: + """Create a factory according to pytest configuration. 
+ + :meta private: + """ + check_ispytest(_ispytest) + count = int(config.getini("tmp_path_retention_count")) + if count < 0: + raise ValueError( + f"tmp_path_retention_count must be >= 0. Current input: {count}." + ) + + policy = config.getini("tmp_path_retention_policy") + if policy not in ("all", "failed", "none"): + raise ValueError( + f"tmp_path_retention_policy must be either all, failed, none. Current input: {policy}." + ) + + return cls( + given_basetemp=config.option.basetemp, + trace=config.trace.get("tmpdir"), + retention_count=count, + retention_policy=policy, + _ispytest=True, + ) + + def _ensure_relative_to_basetemp(self, basename: str) -> str: + basename = os.path.normpath(basename) + if (self.getbasetemp() / basename).resolve().parent != self.getbasetemp(): + raise ValueError(f"{basename} is not a normalized and relative path") + return basename + + def mktemp(self, basename: str, numbered: bool = True) -> Path: + """Create a new temporary directory managed by the factory. + + :param basename: + Directory base name, must be a relative path. + + :param numbered: + If ``True``, ensure the directory is unique by adding a numbered + suffix greater than any existing one: ``basename="foo-"`` and ``numbered=True`` + means that this function will create directories named ``"foo-0"``, + ``"foo-1"``, ``"foo-2"`` and so on. + + :returns: + The path to the new directory. + """ + basename = self._ensure_relative_to_basetemp(basename) + if not numbered: + p = self.getbasetemp().joinpath(basename) + p.mkdir(mode=0o700) + else: + p = make_numbered_dir(root=self.getbasetemp(), prefix=basename, mode=0o700) + self._trace("mktemp", p) + return p + + def getbasetemp(self) -> Path: + """Return the base temporary directory, creating it if needed. + + :returns: + The base temporary directory. + """ + if self._basetemp is not None: + return self._basetemp + + if self._given_basetemp is not None: + basetemp = self._given_basetemp + if basetemp.exists(): + rm_rf(basetemp) + basetemp.mkdir(mode=0o700) + basetemp = basetemp.resolve() + else: + from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT") + temproot = Path(from_env or tempfile.gettempdir()).resolve() + user = get_user() or "unknown" + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.joinpath(f"pytest-of-{user}") + try: + rootdir.mkdir(mode=0o700, exist_ok=True) + except OSError: + # getuser() likely returned illegal characters for the platform, use unknown back off mechanism + rootdir = temproot.joinpath("pytest-of-unknown") + rootdir.mkdir(mode=0o700, exist_ok=True) + # Because we use exist_ok=True with a predictable name, make sure + # we are the owners, to prevent any funny business (on unix, where + # temproot is usually shared). + # Also, to keep things private, fixup any world-readable temp + # rootdir's permissions. Historically 0o755 was used, so we can't + # just error out on this, at least for a while. + uid = get_user_id() + if uid is not None: + rootdir_stat = rootdir.stat() + if rootdir_stat.st_uid != uid: + raise OSError( + f"The temporary directory {rootdir} is not owned by the current user. " + "Fix this and try again." 
+ ) + if (rootdir_stat.st_mode & 0o077) != 0: + os.chmod(rootdir, rootdir_stat.st_mode & ~0o077) + keep = self._retention_count + if self._retention_policy == "none": + keep = 0 + basetemp = make_numbered_dir_with_cleanup( + prefix="pytest-", + root=rootdir, + keep=keep, + lock_timeout=LOCK_TIMEOUT, + mode=0o700, + ) + assert basetemp is not None, basetemp + self._basetemp = basetemp + self._trace("new basetemp", basetemp) + return basetemp + + +def get_user() -> str | None: + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010).""" + try: + # In some exotic environments, getpass may not be importable. + import getpass + + return getpass.getuser() + except (ImportError, OSError, KeyError): + return None + + +def pytest_configure(config: Config) -> None: + """Create a TempPathFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmp_path_factory session fixture. + """ + mp = MonkeyPatch() + config.add_cleanup(mp.undo) + _tmp_path_factory = TempPathFactory.from_config(config, _ispytest=True) + mp.setattr(config, "_tmp_path_factory", _tmp_path_factory, raising=False) + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "tmp_path_retention_count", + help="How many sessions should we keep the `tmp_path` directories, according to `tmp_path_retention_policy`.", + default=3, + ) + + parser.addini( + "tmp_path_retention_policy", + help="Controls which directories created by the `tmp_path` fixture are kept around, based on test outcome. " + "(all/failed/none)", + default="all", + ) + + +@fixture(scope="session") +def tmp_path_factory(request: FixtureRequest) -> TempPathFactory: + """Return a :class:`pytest.TempPathFactory` instance for the test session.""" + # Set dynamically by pytest_configure() above. + return request.config._tmp_path_factory # type: ignore + + +def _mk_tmp(request: FixtureRequest, factory: TempPathFactory) -> Path: + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + name = name[:MAXVAL] + return factory.mktemp(name, numbered=True) + + +@fixture +def tmp_path( + request: FixtureRequest, tmp_path_factory: TempPathFactory +) -> Generator[Path]: + """Return a temporary directory (as :class:`pathlib.Path` object) + which is unique to each test function invocation. + The temporary directory is created as a subdirectory + of the base temporary directory, with configurable retention, + as discussed in :ref:`temporary directory location and retention`. + """ + path = _mk_tmp(request, tmp_path_factory) + yield path + + # Remove the tmpdir if the policy is "failed" and the test passed. + policy = tmp_path_factory._retention_policy + result_dict = request.node.stash[tmppath_result_key] + + if policy == "failed" and result_dict.get("call", True): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(path, ignore_errors=True) + + del request.node.stash[tmppath_result_key] + + +def pytest_sessionfinish(session, exitstatus: int | ExitCode): + """After each session, remove base directory if all the tests passed, + the policy is "failed", and the basetemp is not specified by a user. 
+ """ + tmp_path_factory: TempPathFactory = session.config._tmp_path_factory + basetemp = tmp_path_factory._basetemp + if basetemp is None: + return + + policy = tmp_path_factory._retention_policy + if ( + exitstatus == 0 + and policy == "failed" + and tmp_path_factory._given_basetemp is None + ): + if basetemp.is_dir(): + # We do a "best effort" to remove files, but it might not be possible due to some leaked resource, + # permissions, etc, in which case we ignore it. + rmtree(basetemp, ignore_errors=True) + + # Remove dead symlinks. + if basetemp.is_dir(): + cleanup_dead_symlinks(basetemp) + + +@hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_makereport( + item: Item, call +) -> Generator[None, TestReport, TestReport]: + rep = yield + assert rep.when is not None + empty: dict[str, bool] = {} + item.stash.setdefault(tmppath_result_key, empty)[rep.when] = rep.passed + return rep diff --git a/venv/lib/python3.10/site-packages/_pytest/tracemalloc.py b/venv/lib/python3.10/site-packages/_pytest/tracemalloc.py new file mode 100644 index 0000000000000000000000000000000000000000..5d0b19855c734ed8885c1ceb7bec20b34ac66b52 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/tracemalloc.py @@ -0,0 +1,24 @@ +from __future__ import annotations + + +def tracemalloc_message(source: object) -> str: + if source is None: + return "" + + try: + import tracemalloc + except ImportError: + return "" + + tb = tracemalloc.get_object_traceback(source) + if tb is not None: + formatted_tb = "\n".join(tb.format()) + # Use a leading new line to better separate the (large) output + # from the traceback to the previous warning text. + return f"\nObject allocated at:\n{formatted_tb}" + # No need for a leading new line. + url = "https://docs.pytest.org/en/stable/how-to/capture-warnings.html#resource-warnings" + return ( + "Enable tracemalloc to get traceback where the object was allocated.\n" + f"See {url} for more info." 
+ ) diff --git a/venv/lib/python3.10/site-packages/_pytest/unittest.py b/venv/lib/python3.10/site-packages/_pytest/unittest.py new file mode 100644 index 0000000000000000000000000000000000000000..ef6ef64a0623ababf21a375c3efbe0204f13733a --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/unittest.py @@ -0,0 +1,516 @@ +# mypy: allow-untyped-defs +"""Discover and run std-library "unittest" style tests.""" + +from __future__ import annotations + +from collections.abc import Callable +from collections.abc import Generator +from collections.abc import Iterable +from collections.abc import Iterator +from enum import auto +from enum import Enum +import inspect +import sys +import traceback +import types +from typing import TYPE_CHECKING +from typing import Union + +import _pytest._code +from _pytest.compat import is_async_function +from _pytest.config import hookimpl +from _pytest.fixtures import FixtureRequest +from _pytest.monkeypatch import MonkeyPatch +from _pytest.nodes import Collector +from _pytest.nodes import Item +from _pytest.outcomes import exit +from _pytest.outcomes import fail +from _pytest.outcomes import skip +from _pytest.outcomes import xfail +from _pytest.python import Class +from _pytest.python import Function +from _pytest.python import Module +from _pytest.runner import CallInfo +import pytest + + +if sys.version_info[:2] < (3, 11): + from exceptiongroup import ExceptionGroup + +if TYPE_CHECKING: + import unittest + + import twisted.trial.unittest + + +_SysExcInfoType = Union[ + tuple[type[BaseException], BaseException, types.TracebackType], + tuple[None, None, None], +] + + +def pytest_pycollect_makeitem( + collector: Module | Class, name: str, obj: object +) -> UnitTestCase | None: + try: + # Has unittest been imported? + ut = sys.modules["unittest"] + # Is obj a subclass of unittest.TestCase? + # Type ignored because `ut` is an opaque module. + if not issubclass(obj, ut.TestCase): # type: ignore + return None + except Exception: + return None + # Is obj a concrete class? + # Abstract classes can't be instantiated so no point collecting them. + if inspect.isabstract(obj): + return None + # Yes, so let's collect it. + return UnitTestCase.from_parent(collector, name=name, obj=obj) + + +class UnitTestCase(Class): + # Marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs. + nofuncargs = True + + def newinstance(self): + # TestCase __init__ takes the method (test) name. The TestCase + # constructor treats the name "runTest" as a special no-op, so it can be + # used when a dummy instance is needed. While unittest.TestCase has a + # default, some subclasses omit the default (#9610), so always supply + # it. 
+ return self.obj("runTest") + + def collect(self) -> Iterable[Item | Collector]: + from unittest import TestLoader + + cls = self.obj + if not getattr(cls, "__test__", True): + return + + skipped = _is_skipped(cls) + if not skipped: + self._register_unittest_setup_method_fixture(cls) + self._register_unittest_setup_class_fixture(cls) + self._register_setup_class_fixture() + + self.session._fixturemanager.parsefactories(self.newinstance(), self.nodeid) + + loader = TestLoader() + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, "__test__", True): + continue + yield TestCaseFunction.from_parent(self, name=name) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, "runTest", None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction.from_parent(self, name="runTest") + + def _register_unittest_setup_class_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setUpClass and + tearDownClass (#517).""" + setup = getattr(cls, "setUpClass", None) + teardown = getattr(cls, "tearDownClass", None) + if setup is None and teardown is None: + return None + cleanup = getattr(cls, "doClassCleanups", lambda: None) + + def process_teardown_exceptions() -> None: + # tearDown_exceptions is a list set in the class containing exc_infos for errors during + # teardown for the class. + exc_infos = getattr(cls, "tearDown_exceptions", None) + if not exc_infos: + return + exceptions = [exc for (_, exc, _) in exc_infos] + # If a single exception, raise it directly as this provides a more readable + # error (hopefully this will improve in #12255). + if len(exceptions) == 1: + raise exceptions[0] + else: + raise ExceptionGroup("Unittest class cleanup errors", exceptions) + + def unittest_setup_class_fixture( + request: FixtureRequest, + ) -> Generator[None]: + cls = request.cls + if _is_skipped(cls): + reason = cls.__unittest_skip_why__ + raise pytest.skip.Exception(reason, _use_item_location=True) + if setup is not None: + try: + setup() + # unittest does not call the cleanup function for every BaseException, so we + # follow this here. + except Exception: + cleanup() + process_teardown_exceptions() + raise + yield + try: + if teardown is not None: + teardown() + finally: + cleanup() + process_teardown_exceptions() + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. + name=f"_unittest_setUpClass_fixture_{cls.__qualname__}", + func=unittest_setup_class_fixture, + nodeid=self.nodeid, + scope="class", + autouse=True, + ) + + def _register_unittest_setup_method_fixture(self, cls: type) -> None: + """Register an auto-use fixture to invoke setup_method and + teardown_method (#517).""" + setup = getattr(cls, "setup_method", None) + teardown = getattr(cls, "teardown_method", None) + if setup is None and teardown is None: + return None + + def unittest_setup_method_fixture( + request: FixtureRequest, + ) -> Generator[None]: + self = request.instance + if _is_skipped(self): + reason = self.__unittest_skip_why__ + raise pytest.skip.Exception(reason, _use_item_location=True) + if setup is not None: + setup(self, request.function) + yield + if teardown is not None: + teardown(self, request.function) + + self.session._fixturemanager._register_fixture( + # Use a unique name to speed up lookup. 
+ name=f"_unittest_setup_method_fixture_{cls.__qualname__}", + func=unittest_setup_method_fixture, + nodeid=self.nodeid, + scope="function", + autouse=True, + ) + + +class TestCaseFunction(Function): + nofuncargs = True + _excinfo: list[_pytest._code.ExceptionInfo[BaseException]] | None = None + + def _getinstance(self): + assert isinstance(self.parent, UnitTestCase) + return self.parent.obj(self.name) + + # Backward compat for pytest-django; can be removed after pytest-django + # updates + some slack. + @property + def _testcase(self): + return self.instance + + def setup(self) -> None: + # A bound method to be called during teardown() if set (see 'runtest()'). + self._explicit_tearDown: Callable[[], None] | None = None + super().setup() + + def teardown(self) -> None: + if self._explicit_tearDown is not None: + self._explicit_tearDown() + self._explicit_tearDown = None + self._obj = None + del self._instance + super().teardown() + + def startTest(self, testcase: unittest.TestCase) -> None: + pass + + def _addexcinfo(self, rawexcinfo: _SysExcInfoType) -> None: + rawexcinfo = _handle_twisted_exc_info(rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo[BaseException].from_exc_info( + rawexcinfo # type: ignore[arg-type] + ) + # Invoke the attributes to trigger storing the traceback + # trial causes some issue there. + _ = excinfo.value + _ = excinfo.traceback + except TypeError: + try: + try: + values = traceback.format_exception(*rawexcinfo) + values.insert( + 0, + "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n", + ) + fail("".join(values), pytrace=False) + except (fail.Exception, KeyboardInterrupt): + raise + except BaseException: + fail( + "ERROR: Unknown Incompatible Exception " + f"representation:\n{rawexcinfo!r}", + pytrace=False, + ) + except KeyboardInterrupt: + raise + except fail.Exception: + excinfo = _pytest._code.ExceptionInfo.from_current() + self.__dict__.setdefault("_excinfo", []).append(excinfo) + + def addError( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + try: + if isinstance(rawexcinfo[1], exit.Exception): + exit(rawexcinfo[1].msg) + except TypeError: + pass + self._addexcinfo(rawexcinfo) + + def addFailure( + self, testcase: unittest.TestCase, rawexcinfo: _SysExcInfoType + ) -> None: + self._addexcinfo(rawexcinfo) + + def addSkip(self, testcase: unittest.TestCase, reason: str) -> None: + try: + raise pytest.skip.Exception(reason, _use_item_location=True) + except skip.Exception: + self._addexcinfo(sys.exc_info()) + + def addExpectedFailure( + self, + testcase: unittest.TestCase, + rawexcinfo: _SysExcInfoType, + reason: str = "", + ) -> None: + try: + xfail(str(reason)) + except xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess( + self, + testcase: unittest.TestCase, + reason: twisted.trial.unittest.Todo | None = None, + ) -> None: + msg = "Unexpected success" + if reason: + msg += f": {reason.reason}" + # Preserve unittest behaviour - fail the test. Explicitly not an XPASS. 
+ try: + fail(msg, pytrace=False) + except fail.Exception: + self._addexcinfo(sys.exc_info()) + + def addSuccess(self, testcase: unittest.TestCase) -> None: + pass + + def stopTest(self, testcase: unittest.TestCase) -> None: + pass + + def addDuration(self, testcase: unittest.TestCase, elapsed: float) -> None: + pass + + def runtest(self) -> None: + from _pytest.debugging import maybe_wrap_pytest_function_for_tracing + + testcase = self.instance + assert testcase is not None + + maybe_wrap_pytest_function_for_tracing(self) + + # Let the unittest framework handle async functions. + if is_async_function(self.obj): + testcase(result=self) + else: + # When --pdb is given, we want to postpone calling tearDown() otherwise + # when entering the pdb prompt, tearDown() would have probably cleaned up + # instance variables, which makes it difficult to debug. + # Arguably we could always postpone tearDown(), but this changes the moment where the + # TestCase instance interacts with the results object, so better to only do it + # when absolutely needed. + # We need to consider if the test itself is skipped, or the whole class. + assert isinstance(self.parent, UnitTestCase) + skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj) + if self.config.getoption("usepdb") and not skipped: + self._explicit_tearDown = testcase.tearDown + setattr(testcase, "tearDown", lambda *args: None) + + # We need to update the actual bound method with self.obj, because + # wrap_pytest_function_for_tracing replaces self.obj by a wrapper. + setattr(testcase, self.name, self.obj) + try: + testcase(result=self) + finally: + delattr(testcase, self.name) + + def _traceback_filter( + self, excinfo: _pytest._code.ExceptionInfo[BaseException] + ) -> _pytest._code.Traceback: + traceback = super()._traceback_filter(excinfo) + ntraceback = traceback.filter( + lambda x: not x.frame.f_globals.get("__unittest"), + ) + if not ntraceback: + ntraceback = traceback + return ntraceback + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item: Item, call: CallInfo[None]) -> None: + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + + # Convert unittest.SkipTest to pytest.skip. + # This is actually only needed for nose, which reuses unittest.SkipTest for + # its own nose.SkipTest. For unittest TestCases, SkipTest is already + # handled internally, and doesn't reach here. + unittest = sys.modules.get("unittest") + if unittest and call.excinfo and isinstance(call.excinfo.value, unittest.SkipTest): + excinfo = call.excinfo + call2 = CallInfo[None].from_call( + lambda: pytest.skip(str(excinfo.value)), call.when + ) + call.excinfo = call2.excinfo + + +def _is_skipped(obj) -> bool: + """Return True if the given object has been marked with @unittest.skip.""" + return bool(getattr(obj, "__unittest_skip__", False)) + + +def pytest_configure() -> None: + """Register the TestCaseFunction class as an IReporter if twisted.trial is available.""" + if _get_twisted_version() is not TwistedVersion.NotInstalled: + from twisted.trial.itrial import IReporter + from zope.interface import classImplements + + classImplements(TestCaseFunction, IReporter) + + +class TwistedVersion(Enum): + """ + The Twisted version installed in the environment. + + We have different workarounds in place for different versions of Twisted. + """ + + # Twisted version 24 or prior. + Version24 = auto() + # Twisted version 25 or later. 
+ Version25 = auto() + # Twisted version is not available. + NotInstalled = auto() + + +def _get_twisted_version() -> TwistedVersion: + # We need to check if "twisted.trial.unittest" is specifically present in sys.modules. + # This is because we intend to integrate with Trial only when it's actively running + # the test suite, but not needed when only other Twisted components are in use. + if "twisted.trial.unittest" not in sys.modules: + return TwistedVersion.NotInstalled + + import importlib.metadata + + import packaging.version + + version_str = importlib.metadata.version("twisted") + version = packaging.version.parse(version_str) + if version.major <= 24: + return TwistedVersion.Version24 + else: + return TwistedVersion.Version25 + + +# Name of the attribute in `twisted.python.Failure` instances that stores +# the `sys.exc_info()` tuple. +# See twisted.trial support in `pytest_runtest_protocol`. +TWISTED_RAW_EXCINFO_ATTR = "_twisted_raw_excinfo" + + +@hookimpl(wrapper=True) +def pytest_runtest_protocol(item: Item) -> Iterator[None]: + if _get_twisted_version() is TwistedVersion.Version24: + import twisted.python.failure as ut + + # Monkeypatch `Failure.__init__` to store the raw exception info. + original__init__ = ut.Failure.__init__ + + def store_raw_exception_info( + self, exc_value=None, exc_type=None, exc_tb=None, captureVars=None + ): # pragma: no cover + if exc_value is None: + raw_exc_info = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + if exc_tb is None: + exc_tb = sys.exc_info()[2] + raw_exc_info = (exc_type, exc_value, exc_tb) + setattr(self, TWISTED_RAW_EXCINFO_ATTR, tuple(raw_exc_info)) + try: + original__init__( + self, exc_value, exc_type, exc_tb, captureVars=captureVars + ) + except TypeError: # pragma: no cover + original__init__(self, exc_value, exc_type, exc_tb) + + with MonkeyPatch.context() as patcher: + patcher.setattr(ut.Failure, "__init__", store_raw_exception_info) + return (yield) + else: + return (yield) + + +def _handle_twisted_exc_info( + rawexcinfo: _SysExcInfoType | BaseException, +) -> _SysExcInfoType: + """ + Twisted passes a custom Failure instance to `addError()` instead of using `sys.exc_info()`. + Therefore, if `rawexcinfo` is a `Failure` instance, convert it into the equivalent `sys.exc_info()` tuple + as expected by pytest. + """ + twisted_version = _get_twisted_version() + if twisted_version is TwistedVersion.NotInstalled: + # Unfortunately, because we cannot import `twisted.python.failure` at the top of the file + # and use it in the signature, we need to use `type:ignore` here because we cannot narrow + # the type properly in the `if` statement above. + return rawexcinfo # type:ignore[return-value] + elif twisted_version is TwistedVersion.Version24: + # Twisted calls addError() passing its own classes (like `twisted.python.Failure`), which violates + # the `addError()` signature, so we extract the original `sys.exc_info()` tuple which is stored + # in the object. + if hasattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR): + saved_exc_info = getattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR) + # Delete the attribute from the original object to avoid leaks. 
+            delattr(rawexcinfo, TWISTED_RAW_EXCINFO_ATTR)
+            return saved_exc_info  # type:ignore[no-any-return]
+        return rawexcinfo  # type:ignore[return-value]
+    elif twisted_version is TwistedVersion.Version25:
+        if isinstance(rawexcinfo, BaseException):
+            import twisted.python.failure
+
+            if isinstance(rawexcinfo, twisted.python.failure.Failure):
+                tb = rawexcinfo.__traceback__
+                if tb is None:
+                    tb = sys.exc_info()[2]
+                return type(rawexcinfo.value), rawexcinfo.value, tb
+
+        return rawexcinfo  # type:ignore[return-value]
+    else:
+        # Ideally we would use assert_never() here, but it is not available in all Python versions
+        # we support, plus we do not require `typing_extensions` currently.
+        assert False, f"Unexpected Twisted version: {twisted_version}"
diff --git a/venv/lib/python3.10/site-packages/_pytest/unraisableexception.py b/venv/lib/python3.10/site-packages/_pytest/unraisableexception.py
new file mode 100644
index 0000000000000000000000000000000000000000..0faca36aa00e5fe793fb4ff4d1c630eeff540770
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/_pytest/unraisableexception.py
@@ -0,0 +1,163 @@
+from __future__ import annotations
+
+import collections
+from collections.abc import Callable
+import functools
+import gc
+import sys
+import traceback
+from typing import NamedTuple
+from typing import TYPE_CHECKING
+import warnings
+
+from _pytest.config import Config
+from _pytest.nodes import Item
+from _pytest.stash import StashKey
+from _pytest.tracemalloc import tracemalloc_message
+import pytest
+
+
+if TYPE_CHECKING:
+    pass
+
+if sys.version_info < (3, 11):
+    from exceptiongroup import ExceptionGroup
+
+
+# This is a stash item and not a simple constant to allow pytester to override it.
+gc_collect_iterations_key = StashKey[int]()
+
+
+def gc_collect_harder(iterations: int) -> None:
+    for _ in range(iterations):
+        gc.collect()
+
+
+class UnraisableMeta(NamedTuple):
+    msg: str
+    cause_msg: str
+    exc_value: BaseException | None
+
+
+unraisable_exceptions: StashKey[collections.deque[UnraisableMeta | BaseException]] = (
+    StashKey()
+)
+
+
+def collect_unraisable(config: Config) -> None:
+    pop_unraisable = config.stash[unraisable_exceptions].pop
+    errors: list[pytest.PytestUnraisableExceptionWarning | RuntimeError] = []
+    meta = None
+    hook_error = None
+    try:
+        while True:
+            try:
+                meta = pop_unraisable()
+            except IndexError:
+                break
+
+            if isinstance(meta, BaseException):
+                hook_error = RuntimeError("Failed to process unraisable exception")
+                hook_error.__cause__ = meta
+                errors.append(hook_error)
+                continue
+
+            msg = meta.msg
+            try:
+                warnings.warn(pytest.PytestUnraisableExceptionWarning(msg))
+            except pytest.PytestUnraisableExceptionWarning as e:
+                # This except happens when the warning is treated as an error (e.g. `-Werror`).
+                if meta.exc_value is not None:
+                    # Exceptions have a better way to show the traceback, but
+                    # warnings do not, so hide the traceback from the msg and
+                    # set the cause so the traceback shows up in the right place.
+                    e.args = (meta.cause_msg,)
+                    e.__cause__ = meta.exc_value
+                errors.append(e)
+
+        if len(errors) == 1:
+            raise errors[0]
+        if errors:
+            raise ExceptionGroup("multiple unraisable exception warnings", errors)
+    finally:
+        del errors, meta, hook_error
+
+
+def cleanup(
+    *, config: Config, prev_hook: Callable[[sys.UnraisableHookArgs], object]
+) -> None:
+    # A single collection doesn't necessarily collect everything.
+    # Constant determined experimentally by the Trio project.
+ gc_collect_iterations = config.stash.get(gc_collect_iterations_key, 5) + try: + try: + gc_collect_harder(gc_collect_iterations) + collect_unraisable(config) + finally: + sys.unraisablehook = prev_hook + finally: + del config.stash[unraisable_exceptions] + + +def unraisable_hook( + unraisable: sys.UnraisableHookArgs, + /, + *, + append: Callable[[UnraisableMeta | BaseException], object], +) -> None: + try: + # we need to compute these strings here as they might change after + # the unraisablehook finishes and before the metadata object is + # collected by a pytest hook + err_msg = ( + "Exception ignored in" if unraisable.err_msg is None else unraisable.err_msg + ) + summary = f"{err_msg}: {unraisable.object!r}" + traceback_message = "\n\n" + "".join( + traceback.format_exception( + unraisable.exc_type, + unraisable.exc_value, + unraisable.exc_traceback, + ) + ) + tracemalloc_tb = "\n" + tracemalloc_message(unraisable.object) + msg = summary + traceback_message + tracemalloc_tb + cause_msg = summary + tracemalloc_tb + + append( + UnraisableMeta( + msg=msg, + cause_msg=cause_msg, + exc_value=unraisable.exc_value, + ) + ) + except BaseException as e: + append(e) + # Raising this will cause the exception to be logged twice, once in our + # collect_unraisable and once by the unraisablehook calling machinery + # which is fine - this should never happen anyway and if it does + # it should probably be reported as a pytest bug. + raise + + +def pytest_configure(config: Config) -> None: + prev_hook = sys.unraisablehook + deque: collections.deque[UnraisableMeta | BaseException] = collections.deque() + config.stash[unraisable_exceptions] = deque + config.add_cleanup(functools.partial(cleanup, config=config, prev_hook=prev_hook)) + sys.unraisablehook = functools.partial(unraisable_hook, append=deque.append) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_setup(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_call(item: Item) -> None: + collect_unraisable(item.config) + + +@pytest.hookimpl(trylast=True) +def pytest_runtest_teardown(item: Item) -> None: + collect_unraisable(item.config) diff --git a/venv/lib/python3.10/site-packages/_pytest/warning_types.py b/venv/lib/python3.10/site-packages/_pytest/warning_types.py new file mode 100644 index 0000000000000000000000000000000000000000..5e78debb682b1324fac6ac8b8304e6dfc8b9f743 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/warning_types.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import dataclasses +import inspect +from types import FunctionType +from typing import Any +from typing import final +from typing import Generic +from typing import TypeVar +import warnings + + +class PytestWarning(UserWarning): + """Base class for all warnings emitted by pytest.""" + + __module__ = "pytest" + + +@final +class PytestAssertRewriteWarning(PytestWarning): + """Warning emitted by the pytest assert rewrite module.""" + + __module__ = "pytest" + + +@final +class PytestCacheWarning(PytestWarning): + """Warning emitted by the cache plugin in various situations.""" + + __module__ = "pytest" + + +@final +class PytestConfigWarning(PytestWarning): + """Warning emitted for configuration issues.""" + + __module__ = "pytest" + + +@final +class PytestCollectionWarning(PytestWarning): + """Warning emitted when pytest is not able to collect a file or symbol in a module.""" + + __module__ = "pytest" + + +class PytestDeprecationWarning(PytestWarning, DeprecationWarning): + """Warning 
class for features that will be removed in a future version."""
+
+    __module__ = "pytest"
+
+
+class PytestRemovedIn9Warning(PytestDeprecationWarning):
+    """Warning class for features that will be removed in pytest 9."""
+
+    __module__ = "pytest"
+
+
+@final
+class PytestExperimentalApiWarning(PytestWarning, FutureWarning):
+    """Warning category used to denote experiments in pytest.
+
+    Use sparingly as the API might change or even be removed completely in a
+    future version.
+    """
+
+    __module__ = "pytest"
+
+    @classmethod
+    def simple(cls, apiname: str) -> PytestExperimentalApiWarning:
+        return cls(f"{apiname} is an experimental api that may change over time")
+
+
+@final
+class PytestReturnNotNoneWarning(PytestWarning):
+    """
+    Warning emitted when a test function returns a value other than ``None``.
+
+    See :ref:`return-not-none` for details.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnknownMarkWarning(PytestWarning):
+    """Warning emitted on use of unknown markers.
+
+    See :ref:`mark` for details.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnraisableExceptionWarning(PytestWarning):
+    """An unraisable exception was reported.
+
+    Unraisable exceptions are exceptions raised in :meth:`__del__ <object.__del__>`
+    implementations and similar situations when the exception cannot be raised
+    as normal.
+    """
+
+    __module__ = "pytest"
+
+
+@final
+class PytestUnhandledThreadExceptionWarning(PytestWarning):
+    """An unhandled exception occurred in a :class:`~threading.Thread`.
+
+    Such exceptions don't propagate normally.
+    """
+
+    __module__ = "pytest"
+
+
+_W = TypeVar("_W", bound=PytestWarning)
+
+
+@final
+@dataclasses.dataclass
+class UnformattedWarning(Generic[_W]):
+    """A warning meant to be formatted during runtime.
+
+    This is used to hold warnings that need to format their message at runtime,
+    as opposed to a direct message.
+    """
+
+    category: type[_W]
+    template: str
+
+    def format(self, **kwargs: Any) -> _W:
+        """Return an instance of the warning category, formatted with given kwargs."""
+        return self.category(self.template.format(**kwargs))
+
+
+@final
+class PytestFDWarning(PytestWarning):
+    """Warning emitted when the lsof plugin finds leaked file descriptors."""
+
+    __module__ = "pytest"
+
+
+def warn_explicit_for(method: FunctionType, message: PytestWarning) -> None:
+    """
+    Issue the warning :param:`message` for the definition of the given :param:`method`.
+
+    This helps to log warnings for functions defined prior to finding an issue with them
+    (like hook wrappers being marked in a legacy mechanism).
+    """
+    lineno = method.__code__.co_firstlineno
+    filename = inspect.getfile(method)
+    module = method.__module__
+    mod_globals = method.__globals__
+    try:
+        warnings.warn_explicit(
+            message,
+            type(message),
+            filename=filename,
+            module=module,
+            registry=mod_globals.setdefault("__warningregistry__", {}),
+            lineno=lineno,
+        )
+    except Warning as w:
+        # If warnings are errors (e.g. -Werror), location information gets lost, so we add it to the message.
+ raise type(w)(f"{w}\n at {filename}:{lineno}") from None diff --git a/venv/lib/python3.10/site-packages/_pytest/warnings.py b/venv/lib/python3.10/site-packages/_pytest/warnings.py new file mode 100644 index 0000000000000000000000000000000000000000..806681a5020177c7dc47d735f65e2145472f6b07 --- /dev/null +++ b/venv/lib/python3.10/site-packages/_pytest/warnings.py @@ -0,0 +1,152 @@ +# mypy: allow-untyped-defs +from __future__ import annotations + +from collections.abc import Generator +from contextlib import contextmanager +from contextlib import ExitStack +import sys +from typing import Literal +import warnings + +from _pytest.config import apply_warning_filters +from _pytest.config import Config +from _pytest.config import parse_warning_filter +from _pytest.main import Session +from _pytest.nodes import Item +from _pytest.terminal import TerminalReporter +from _pytest.tracemalloc import tracemalloc_message +import pytest + + +@contextmanager +def catch_warnings_for_item( + config: Config, + ihook, + when: Literal["config", "collect", "runtest"], + item: Item | None, + *, + record: bool = True, +) -> Generator[None]: + """Context manager that catches warnings generated in the contained execution block. + + ``item`` can be None if we are not in the context of an item execution. + + Each warning captured triggers the ``pytest_warning_recorded`` hook. + """ + config_filters = config.getini("filterwarnings") + cmdline_filters = config.known_args_namespace.pythonwarnings or [] + with warnings.catch_warnings(record=record) as log: + if not sys.warnoptions: + # If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908). + warnings.filterwarnings("always", category=DeprecationWarning) + warnings.filterwarnings("always", category=PendingDeprecationWarning) + + # To be enabled in pytest 9.0.0. + # warnings.filterwarnings("error", category=pytest.PytestRemovedIn9Warning) + + apply_warning_filters(config_filters, cmdline_filters) + + # apply filters from "filterwarnings" marks + nodeid = "" if item is None else item.nodeid + if item is not None: + for mark in item.iter_markers(name="filterwarnings"): + for arg in mark.args: + warnings.filterwarnings(*parse_warning_filter(arg, escape=False)) + + try: + yield + finally: + if record: + # mypy can't infer that record=True means log is not None; help it. 
+ assert log is not None + + for warning_message in log: + ihook.pytest_warning_recorded.call_historic( + kwargs=dict( + warning_message=warning_message, + nodeid=nodeid, + when=when, + location=None, + ) + ) + + +def warning_record_to_str(warning_message: warnings.WarningMessage) -> str: + """Convert a warnings.WarningMessage to a string.""" + return warnings.formatwarning( + str(warning_message.message), + warning_message.category, + warning_message.filename, + warning_message.lineno, + warning_message.line, + ) + tracemalloc_message(warning_message.source) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_runtest_protocol(item: Item) -> Generator[None, object, object]: + with catch_warnings_for_item( + config=item.config, ihook=item.ihook, when="runtest", item=item + ): + return (yield) + + +@pytest.hookimpl(wrapper=True, tryfirst=True) +def pytest_collection(session: Session) -> Generator[None, object, object]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="collect", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_terminal_summary( + terminalreporter: TerminalReporter, +) -> Generator[None]: + config = terminalreporter.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_sessionfinish(session: Session) -> Generator[None]: + config = session.config + with catch_warnings_for_item( + config=config, ihook=config.hook, when="config", item=None + ): + return (yield) + + +@pytest.hookimpl(wrapper=True) +def pytest_load_initial_conftests( + early_config: Config, +) -> Generator[None]: + with catch_warnings_for_item( + config=early_config, ihook=early_config.hook, when="config", item=None + ): + return (yield) + + +def pytest_configure(config: Config) -> None: + with ExitStack() as stack: + stack.enter_context( + catch_warnings_for_item( + config=config, + ihook=config.hook, + when="config", + item=None, + # this disables recording because the terminalreporter has + # finished by the time it comes to reporting logged warnings + # from the end of config cleanup. So for now, this is only + # useful for setting a warning filter with an 'error' action. + record=False, + ) + ) + config.addinivalue_line( + "markers", + "filterwarnings(warning): add a warning filter to the given test. " + "see https://docs.pytest.org/en/stable/how-to/capture-warnings.html#pytest-mark-filterwarnings ", + ) + config.add_cleanup(stack.pop_all().close) diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..9b259bdfcf9022e7f4999a318ab7261300644e11 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..d3a9c9f086f015c7c6921ce359d7f8facefc5850 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/METADATA @@ -0,0 +1,250 @@ +Metadata-Version: 2.1 +Name: ai2-olmo +Version: 0.2.1 +Summary: Open Language Model (OLMo) +Author-email: Allen Institute for Artificial Intelligence +License: Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Project-URL: Homepage, https://github.com/allenai/LLM +Project-URL: Repository, https://github.com/allenai/LLM +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: numpy +Requires-Dist: torch >=2.0 +Requires-Dist: torchmetrics +Requires-Dist: tokenizers +Requires-Dist: click +Requires-Dist: rich +Requires-Dist: cached-path +Requires-Dist: beaker-gantry +Requires-Dist: omegaconf +Requires-Dist: wandb +Requires-Dist: logzio-python-handler +Requires-Dist: boto3 +Requires-Dist: google-cloud-storage +Requires-Dist: packaging +Requires-Dist: datasets +Requires-Dist: scikit-learn +Requires-Dist: smashed[remote] >=0.21.1 +Requires-Dist: msgspec >=0.14.0 +Requires-Dist: transformers +Provides-Extra: dev +Requires-Dist: ruff ; extra == 'dev' +Requires-Dist: mypy <1.4,>=1.0 ; extra == 'dev' +Requires-Dist: black <24.0,>=23.1 ; extra == 'dev' +Requires-Dist: isort <5.13,>=5.12 ; extra == 'dev' +Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: pytest-sphinx ; extra == 'dev' +Requires-Dist: twine >=1.11.0 ; extra == 'dev' +Requires-Dist: setuptools ; extra == 'dev' +Requires-Dist: wheel ; extra == 'dev' +Requires-Dist: build ; extra == 'dev' + +# OLMo: Open Language Model + +## Installation + +``` +pip install ai2-olmo +``` diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c3751534dc09cbeb1b4593c36da6b1bcbb912e5a --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/RECORD @@ -0,0 +1,61 @@ +ai2_olmo-0.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +ai2_olmo-0.2.1.dist-info/LICENSE,sha256=YvuKOpYh3COIF0yqq-nCMXtpS7mh1GyYvPVlW2j1G-M,11359 +ai2_olmo-0.2.1.dist-info/METADATA,sha256=qxGKsv35iq6SvhRzE2grJtdse3CRpePto6P1gD0ozVg,14354 +ai2_olmo-0.2.1.dist-info/RECORD,, +ai2_olmo-0.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +ai2_olmo-0.2.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +ai2_olmo-0.2.1.dist-info/top_level.txt,sha256=N7EKNbrS-2W1W_d_WN9FcE12Y3ilqd78xMSVyrxMW0c,13 +hf_olmo/__init__.py,sha256=LHYwPvSijoOlPKBehe1MM2b-yc9oADmx__wi5OFhLLs,140 +hf_olmo/__pycache__/__init__.cpython-310.pyc,, 
+hf_olmo/__pycache__/configuration_olmo.cpython-310.pyc,, +hf_olmo/__pycache__/convert_olmo_to_hf.cpython-310.pyc,, +hf_olmo/__pycache__/modeling_olmo.cpython-310.pyc,, +hf_olmo/__pycache__/tokenization_olmo_fast.cpython-310.pyc,, +hf_olmo/configuration_olmo.py,sha256=qd_QqpRUAwtb-ZJ_1sj5872-qDzbbKDcIBKBMea7Cyg,1159 +hf_olmo/convert_olmo_to_hf.py,sha256=MB2GVjWmw8hRB1XJwOVehT-VojE_6Dxpck_31Qc2ukw,3756 +hf_olmo/modeling_olmo.py,sha256=mmkDldsP0ZmY77bpsLEF75sYfee3GxIWfgWWDMdiozY,4778 +hf_olmo/requirements.txt,sha256=GLBaeTo_CSdhnHvbxQ0kzpEHdlLuA_33foIogaWxntI,13 +hf_olmo/tokenization_olmo_fast.py,sha256=ZutbLcrLh0A6GpoisvP2tSyx9-adv1rNbePze98jHv8,645 +olmo/__init__.py,sha256=XHuHnEIFs0Q5aigoEADYkMEirhlvPnuVPuG7tIjEk8I,314 +olmo/__pycache__/__init__.cpython-310.pyc,, +olmo/__pycache__/aliases.cpython-310.pyc,, +olmo/__pycache__/beam_search.cpython-310.pyc,, +olmo/__pycache__/checkpoint.cpython-310.pyc,, +olmo/__pycache__/config.cpython-310.pyc,, +olmo/__pycache__/exceptions.cpython-310.pyc,, +olmo/__pycache__/initialization.cpython-310.pyc,, +olmo/__pycache__/model.cpython-310.pyc,, +olmo/__pycache__/optim.cpython-310.pyc,, +olmo/__pycache__/tokenizer.cpython-310.pyc,, +olmo/__pycache__/torch_util.cpython-310.pyc,, +olmo/__pycache__/train.cpython-310.pyc,, +olmo/__pycache__/util.cpython-310.pyc,, +olmo/__pycache__/version.cpython-310.pyc,, +olmo/aliases.py,sha256=x4w3ViALWYVVFgrzu4uu1lnbLAt8SVN30JPraVp6d0Q,109 +olmo/beam_search.py,sha256=inkGUDmxdneo4B6fMyzHLx3Rkk1WgfOuDdckA7JL0dM,46609 +olmo/checkpoint.py,sha256=QbAo72Zr1099GUwvh0ZLOa10q9wykv0crdhfDNhr1XQ,63570 +olmo/config.py,sha256=a6kkaT112NuPCuyx2GrRWm5TimOb2ZACn3QUFMAaA-0,26766 +olmo/data/__init__.py,sha256=gKYN7ek7oHk0Ks6ouvMx3fNDcTYjG9p3FGxePoLwV64,4464 +olmo/data/__pycache__/__init__.cpython-310.pyc,, +olmo/data/__pycache__/collator.cpython-310.pyc,, +olmo/data/__pycache__/iterable_dataset.cpython-310.pyc,, +olmo/data/__pycache__/memmap_dataset.cpython-310.pyc,, +olmo/data/collator.py,sha256=f55MLVnj6kg7guxoNyJSJZrZOIyU5ebENStg1Yg2HRQ,3647 +olmo/data/iterable_dataset.py,sha256=0_A_j16wVdQAcudTTiduXKblGfqdGXc9lQP71AsTX78,8096 +olmo/data/memmap_dataset.py,sha256=iJEz7IGUovPEfDvC1n0hP9zc7UM2EHo8qOBz_sp9Pio,6441 +olmo/eval/__init__.py,sha256=iX9XMyPXQrHhZaSvRH19d1ByOtl4niDyWFNPHwc_veU,3804 +olmo/eval/__pycache__/__init__.cpython-310.pyc,, +olmo/eval/__pycache__/downstream.cpython-310.pyc,, +olmo/eval/__pycache__/evaluator.cpython-310.pyc,, +olmo/eval/downstream.py,sha256=tbnsjj4b_VtJufwpCPN1uIHs41gdvjt69GB0yrscoUA,34273 +olmo/eval/evaluator.py,sha256=2Qa4Jx04nIU0JGMtlWOC60Je2nHpkC-HS-wuMioLTlI,3363 +olmo/exceptions.py,sha256=JI2nwyRsTZwaIC5cOFfHYo73o2V9cl-cSwh5sKjqgX8,672 +olmo/initialization.py,sha256=auT8_EHigbJTcvg6gl8hQLI71tyXXEeLxDWlIC7_dQM,3501 +olmo/model.py,sha256=D3HvIsOqx-68VHcSzHMS35ABfFI2YA2F8ncpndndYTE,69435 +olmo/optim.py,sha256=Zd1hqI3HSpo86xF1AHAT7JQ-IofsP8CI4r7sPfQjR-c,33498 +olmo/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +olmo/tokenizer.py,sha256=3hnaRqdIZ9et9SbzxTUlXOdzuMPCwNmxlj28bntlphI,6584 +olmo/torch_util.py,sha256=XkW9VXMopNZH0OzvAJdqHNvfefjlUwv5yhREL67eb-A,3851 +olmo/train.py,sha256=pN0PeaU-ze2-DTcHNY6QmIE8r1_zz3uMftbW_o1Cu7s,40127 +olmo/util.py,sha256=dDbowCzUbEWUyy7HZ4pB_b3jH4QqOtbKYAHsX-3JhrE,23162 +olmo/version.py,sha256=iPLv1Isw6ofMyz9Im-7MZCP8FOttO-FGhVOuLVGZkQg,407 diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/REQUESTED new file mode 100644 index 
0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..98c0d20b7a64f4f998d7913e1d38a05dba20916c --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..d82ae61084c9ee0f8f6cb688993bb439485ed33c --- /dev/null +++ b/venv/lib/python3.10/site-packages/ai2_olmo-0.2.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +hf_olmo +olmo diff --git a/venv/lib/python3.10/site-packages/async_timeout/__init__.py b/venv/lib/python3.10/site-packages/async_timeout/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fe4aa581635bb30ee4e880366b1bbab6227fb148 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout/__init__.py @@ -0,0 +1,276 @@ +import asyncio +import enum +import sys +from types import TracebackType +from typing import Optional, Type, final + + +__version__ = "5.0.1" + + +__all__ = ("timeout", "timeout_at", "Timeout") + + +def timeout(delay: Optional[float]) -> "Timeout": + """timeout context manager. + + Useful in cases when you want to apply timeout logic around block + of code or in cases when asyncio.wait_for is not suitable. For example: + + >>> async with timeout(0.001): + ... async with aiohttp.get('https://github.com') as r: + ... await r.text() + + + delay - value in seconds or None to disable timeout logic + """ + loop = asyncio.get_running_loop() + if delay is not None: + deadline = loop.time() + delay # type: Optional[float] + else: + deadline = None + return Timeout(deadline, loop) + + +def timeout_at(deadline: Optional[float]) -> "Timeout": + """Schedule the timeout at absolute time. + + deadline argument points on the time in the same clock system + as loop.time(). + + Please note: it is not POSIX time but a time with + undefined starting base, e.g. the time of the system power on. + + >>> async with timeout_at(loop.time() + 10): + ... async with aiohttp.get('https://github.com') as r: + ... await r.text() + + + """ + loop = asyncio.get_running_loop() + return Timeout(deadline, loop) + + +class _State(enum.Enum): + INIT = "INIT" + ENTER = "ENTER" + TIMEOUT = "TIMEOUT" + EXIT = "EXIT" + + +if sys.version_info >= (3, 11): + + class _Expired: + __slots__ = ("_val",) + + def __init__(self, val: bool) -> None: + self._val = val + + def __call__(self) -> bool: + return self._val + + def __bool__(self) -> bool: + return self._val + + def __repr__(self) -> str: + return repr(self._val) + + def __str__(self) -> str: + return str(self._val) + + @final + class Timeout(asyncio.Timeout): # type: ignore[misc] + # Supports full asyncio.Timeout API. + # Also provides several asyncio_timeout specific methods + # for backward compatibility. 
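+        # For reference, the backward-compatibility surface this adds on top
+        # of asyncio.Timeout: ``expired`` and ``deadline`` are properties here
+        # (asyncio.Timeout exposes ``expired()`` and ``when()`` as methods),
+        # while ``reject()``, ``shift()`` and ``update()`` have no direct
+        # asyncio.Timeout equivalent and are implemented via ``reschedule()``.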
+        def __init__(
+            self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+        ) -> None:
+            super().__init__(deadline)
+
+        @property
+        def expired(self) -> _Expired:
+            # a hacky property that can provide both roles:
+            # timeout.expired() from asyncio
+            # timeout.expired from asyncio_timeout
+            return _Expired(super().expired())
+
+        @property
+        def deadline(self) -> Optional[float]:
+            return self.when()
+
+        def reject(self) -> None:
+            """Reject scheduled timeout if any."""
+            # `cancel` might be a better name, but
+            # task.cancel() raises CancelledError in the asyncio world.
+            self.reschedule(None)
+
+        def shift(self, delay: float) -> None:
+            """Advance timeout on delay seconds.
+
+            The delay can be negative.
+
+            Raise RuntimeError if shift is called when deadline is not scheduled
+            """
+            deadline = self.when()
+            if deadline is None:
+                raise RuntimeError("cannot shift timeout if deadline is not scheduled")
+            self.reschedule(deadline + delay)
+
+        def update(self, deadline: float) -> None:
+            """Set deadline to absolute value.
+
+            deadline argument points on the time in the same clock system
+            as loop.time().
+
+            If new deadline is in the past the timeout is raised immediately.
+
+            Please note: it is not POSIX time but a time with
+            undefined starting base, e.g. the time of the system power on.
+            """
+            self.reschedule(deadline)
+
+else:
+
+    @final
+    class Timeout:
+        # Internal class, please don't instantiate it directly.
+        # Use timeout() and timeout_at() public factories instead.
+        #
+        # Implementation note: `async with timeout()` is preferred
+        # over `with timeout()`.
+        # While technically the Timeout class implementation
+        # doesn't need to be async at all,
+        # the `async with` statement makes it explicit that
+        # the context manager should be used from an async function context.
+        #
+        # This design helps avoid many silly misuses.
+        #
+        # TimeoutError is raised immediately when scheduled
+        # if the deadline is passed.
+        # The purpose is to time out as soon as possible
+        # without waiting for the next await expression.
+
+        __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
+
+        def __init__(
+            self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
+        ) -> None:
+            self._loop = loop
+            self._state = _State.INIT
+
+            self._task: Optional["asyncio.Task[object]"] = None
+            self._timeout_handler = None  # type: Optional[asyncio.Handle]
+            if deadline is None:
+                self._deadline = None  # type: Optional[float]
+            else:
+                self.update(deadline)
+
+        async def __aenter__(self) -> "Timeout":
+            self._do_enter()
+            return self
+
+        async def __aexit__(
+            self,
+            exc_type: Optional[Type[BaseException]],
+            exc_val: Optional[BaseException],
+            exc_tb: Optional[TracebackType],
+        ) -> Optional[bool]:
+            self._do_exit(exc_type)
+            return None
+
+        @property
+        def expired(self) -> bool:
+            """Is timeout expired during execution?"""
+            return self._state == _State.TIMEOUT
+
+        @property
+        def deadline(self) -> Optional[float]:
+            return self._deadline
+
+        def reject(self) -> None:
+            """Reject scheduled timeout if any."""
+            # `cancel` might be a better name, but
+            # task.cancel() raises CancelledError in the asyncio world.
+            if self._state not in (_State.INIT, _State.ENTER):
+                raise RuntimeError(f"invalid state {self._state.value}")
+            self._reject()
+
+        def _reject(self) -> None:
+            self._task = None
+            if self._timeout_handler is not None:
+                self._timeout_handler.cancel()
+                self._timeout_handler = None
+
+        def shift(self, delay: float) -> None:
+            """Advance timeout on delay seconds.
+
+            The delay can be negative.
+ + Raise RuntimeError if shift is called when deadline is not scheduled + """ + deadline = self._deadline + if deadline is None: + raise RuntimeError("cannot shift timeout if deadline is not scheduled") + self.update(deadline + delay) + + def update(self, deadline: float) -> None: + """Set deadline to absolute value. + + deadline argument points on the time in the same clock system + as loop.time(). + + If new deadline is in the past the timeout is raised immediately. + + Please note: it is not POSIX time but a time with + undefined starting base, e.g. the time of the system power on. + """ + if self._state == _State.EXIT: + raise RuntimeError("cannot reschedule after exit from context manager") + if self._state == _State.TIMEOUT: + raise RuntimeError("cannot reschedule expired timeout") + if self._timeout_handler is not None: + self._timeout_handler.cancel() + self._deadline = deadline + if self._state != _State.INIT: + self._reschedule() + + def _reschedule(self) -> None: + assert self._state == _State.ENTER + deadline = self._deadline + if deadline is None: + return + + now = self._loop.time() + if self._timeout_handler is not None: + self._timeout_handler.cancel() + + self._task = asyncio.current_task() + if deadline <= now: + self._timeout_handler = self._loop.call_soon(self._on_timeout) + else: + self._timeout_handler = self._loop.call_at(deadline, self._on_timeout) + + def _do_enter(self) -> None: + if self._state != _State.INIT: + raise RuntimeError(f"invalid state {self._state.value}") + self._state = _State.ENTER + self._reschedule() + + def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None: + if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT: + assert self._task is not None + self._timeout_handler = None + self._task = None + raise asyncio.TimeoutError + # timeout has not expired + self._state = _State.EXIT + self._reject() + return None + + def _on_timeout(self) -> None: + assert self._task is not None + self._task.cancel() + self._state = _State.TIMEOUT + # drop the reference early + self._timeout_handler = None diff --git a/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de7d4635172f4474c11d9d7ade9d276cb663af88 Binary files /dev/null and b/venv/lib/python3.10/site-packages/async_timeout/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/async_timeout/py.typed b/venv/lib/python3.10/site-packages/async_timeout/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..3b94f915737aba1f12a0f067fdba3726bfe02df5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/async_timeout/py.typed @@ -0,0 +1 @@ +Placeholder diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..d081dd012ec8ff4fbb02f8d69fe6fec732813635 --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/METADATA @@ -0,0 +1,352 @@ +Metadata-Version: 2.4 +Name: beaker-gantry +Version: 3.0.0 +Summary: Gantry streamlines running Python experiments in Beaker by managing containers and boilerplate for you +Author-email: Allen Institute for Artificial Intelligence , Pete Walsh +Project-URL: homepage, https://github.com/allenai/beaker-gantry +Project-URL: repository, https://github.com/allenai/beaker-gantry +Classifier: Intended Audience :: Science/Research +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.10 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: beaker-py<3.0,>=2.4.7 +Requires-Dist: GitPython<4.0,>=3.0 +Requires-Dist: rich +Requires-Dist: click +Requires-Dist: click-help-colors +Requires-Dist: click-option-group +Requires-Dist: petname<3.0,>=2.6 +Requires-Dist: requests +Requires-Dist: packaging +Provides-Extra: dev +Requires-Dist: ruff; extra == "dev" +Requires-Dist: mypy<1.6,>=1.0; extra == "dev" +Requires-Dist: types-requests; extra == "dev" +Requires-Dist: types-protobuf; extra == "dev" +Requires-Dist: black<24.0,>=23.0; extra == "dev" +Requires-Dist: isort<5.13,>=5.11; extra == "dev" +Requires-Dist: pytest; extra == "dev" +Requires-Dist: twine>=1.11.0; extra == "dev" +Requires-Dist: build; extra == "dev" +Requires-Dist: setuptools; extra == "dev" +Requires-Dist: wheel; extra == "dev" +Requires-Dist: packaging; extra == "dev" +Dynamic: license-file + +
+# Beaker Gantry
+
+Gantry streamlines running Python experiments in Beaker by managing containers and boilerplate for you
+
+*(badges: CI · PyPI · License)*
+
+![2025-07-18 12 49 12](https://github.com/user-attachments/assets/82fa93ae-f512-4e76-ab95-95a535515e5b)
+
+⚡️*Easy to use*
+
+- **No Docker required!** 🚫 🐳
+- No writing YAML experiment specs.
+- Easy setup.
+- Simple CLI.
+
+🏎 *Fast*
+
+- Fire off Beaker experiments from your local computer instantly!
+- No local image build or upload.
+
+🪶 *Lightweight*
+
+- Pure Python (built on top of [beaker](https://github.com/allenai/beaker)'s Python client).
+- Minimal dependencies.
+
+### Who is this for?
+
+Gantry is for both new and seasoned Beaker users who need to run Python batch jobs (as opposed to interactive sessions) from a rapidly changing repository.
+Without Gantry, this workflow usually looks like this:
+
+1. Add a Dockerfile to your repository.
+2. Build the Docker image locally.
+3. Push the Docker image to Beaker.
+4. Write a YAML Beaker experiment spec that points to the image you just uploaded.
+5. Submit the experiment spec.
+6. Make changes and repeat from step 2.
+
+This requires experience with Docker, experience writing Beaker experiment specs, and a fast and reliable internet connection (a luxury that some of us don't have, especially in the WFH era 🙃).
+
+With Gantry, on the other hand, that same workflow simplifies down to this:
+
+1. Write a `pyproject.toml`/`setup.py` file, a PIP `requirements.txt` file, or a conda `environment.yml` file.
+2. Commit and push your changes.
+3. Submit and track a Beaker experiment with the `gantry run` command.
+4. Make changes and repeat from step 2.
+
+
+## In this README
+
+- 💾 **[Installing](#installing)**
+- 🚀 **[Quick start](#quick-start)**
+- ❓ **[FAQ](#faq)**
+
+### Additional info
+
+#### 👋 *Examples*
+
+- [Saving results / metrics from an experiment](./examples/metrics)
+
+#### 💻 *For developers*
+
+- [CHANGELOG](https://github.com/allenai/beaker-gantry/blob/main/CHANGELOG.md)
+- [CONTRIBUTING](https://github.com/allenai/beaker-gantry/blob/main/CONTRIBUTING.md)
+
+
+## Installing
+
+### Installing with `pip`
+
+Gantry is available [on PyPI](https://pypi.org/project/beaker-gantry/). Just run
+
+```bash
+pip install beaker-gantry
+```
+
+### Installing globally with `uv`
+
+Gantry can be installed and made available on the PATH using [uv](https://docs.astral.sh/uv/):
+
+```bash
+uv tool install beaker-gantry
+```
+
+With this command, beaker-gantry is automatically installed to an isolated virtual environment.
+
+### Installing from source
+
+To install Gantry from source, first clone [the repository](https://github.com/allenai/beaker-gantry):
+
+```bash
+git clone https://github.com/allenai/beaker-gantry.git
+cd beaker-gantry
+```
+
+Then run
+
+```bash
+pip install -e .
+```
+
+
+## Quick start
+
+### One-time setup
+
+1. **Create and clone your repository.**
+
+   If you haven't already done so, create a GitHub repository for your project and clone it locally.
+   **Every `gantry` command you run must be invoked from the root directory of your repository.**
+
+2. **Configure Gantry.**
+
+   If you've already configured the [Beaker command-line client](https://github.com/allenai/beaker/), Gantry will
+   find and use the existing configuration file (usually located at `$HOME/.beaker/config.yml`).
+   Otherwise just set the environment variable `BEAKER_TOKEN` to your Beaker [user token](https://beaker.org/user).
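+
+   For example (the token value below is only a placeholder; substitute your own):
+
+   ```bash
+   export BEAKER_TOKEN="your-beaker-user-token"
+   ```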
+
+   The first time you call `gantry run ...` you'll also be prompted to provide a [GitHub personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) with the `repo` scope if your repository is private. This allows Gantry to clone your private repository when it runs in Beaker. You don't have to do this just yet (Gantry will prompt you for it), but if you need to update this token later you can use the `gantry config set-gh-token` command.
+
+3. (Optional) **Specify your Python environment.**
+
+   Typically you'll create one of several different files to specify your Python environment. There are three widely used options:
+
+   1. A [`pyproject.toml`](https://pip.pypa.io/en/stable/reference/build-system/pyproject-toml/) or [`setup.py`](https://docs.python.org/3/distutils/introduction.html#a-simple-example) file.
+   2. A PIP [`requirements.txt`](https://pip.pypa.io/en/stable/user_guide/#requirements-files) file.
+   3. A conda [`environment.yml`](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#create-env-file-manually) file.
+
+   Gantry will automatically find and use these files to reconstruct your Python environment at runtime.
+   Alternatively you can provide a custom Python install command with the `--install` option to `gantry run`, or skip the Python setup completely with `--no-python`.
+
+### Submit your first experiment with Gantry
+
+Let's spin up a Beaker experiment that just prints "Hello, World!" from Python.
+
+First make sure you've committed *and* pushed all changes so far in your repository.
+Then (from the root of your repository) run:
+
+```bash
+gantry run --show-logs -- python -c 'print("Hello, World!")'
+```
+
+*❗Note: Everything after the `--` is the command + arguments you want to run on Beaker. It's necessary to include the `--` if any of your arguments look like options themselves (like `-c` in this example) so gantry can differentiate them from its own options.*
+
+In this case we didn't request any GPUs or a specific cluster, so this could run on any Beaker cluster.
+We can use the `--gpu-type` and `--gpus` options to get GPUs. For example:
+
+```bash
+gantry run --show-logs --gpu-type=h100 --gpus=1 -- python -c 'print("Hello, World!")'
+```
+
+Or we can use the `--cluster` option to request a cluster by name or alias. For example:
+
+```bash
+gantry run --show-logs --cluster=ai2/jupiter --gpus=1 -- python -c 'print("Hello, World!")'
+```
+
+Try `gantry run --help` to see all of the available options.
+
+
+## FAQ
+
+### Can I use my own Docker/Beaker image?
+
+You sure can! Just set the `--beaker-image TEXT` or `--docker-image TEXT` option.
+Gantry can use any image that has bash, curl, and git installed.
+
+If your image comes with a Python environment that you want gantry to use, add the flag `--system-python`.
+For example:
+
+```bash
+gantry run --show-logs --docker-image='python:3.10' --system-python -- python --version
+```
+
+### Will Gantry work for GPU experiments?
+
+Absolutely! This was the main use case Gantry was developed for. Just set the `--gpus INT` option for `gantry run` to the number of GPUs you need, and optionally `--gpu-type TEXT` (e.g. `--gpu-type=h100`).
+
+### How can I save results or metrics from an experiment?
+
+By default Gantry uses the `/results` directory on the image as the location of the results dataset.
+That means that everything your experiment writes to this directory will be persisted as a Beaker dataset when the experiment finalizes.
+You can also create Beaker metrics for your experiment by writing a JSON file called `metrics.json` in the `/results` directory.
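+
+For example, a minimal sketch (the file name and metric values here are made up):
+
+```bash
+# Anything written under /results is persisted in the results dataset;
+# metrics.json is additionally surfaced as Beaker metrics on the experiment.
+echo "done" > /results/status.txt
+echo '{"accuracy": 0.92, "loss": 0.31}' > /results/metrics.json
+```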
+
+### How can I see the Beaker experiment spec that Gantry uses?
+
+You can use the `--dry-run` option with `gantry run` to see what Gantry will submit without actually submitting an experiment.
+You can also use `--save-spec PATH` in combination with `--dry-run` to save the actual experiment spec to a YAML file.
+
+### How can I update Gantry's GitHub token?
+
+Use the command `gantry config set-gh-token`.
+
+### How can I attach Beaker datasets to an experiment?
+
+Use the `--dataset` option for `gantry run`. For example:
+
+```bash
+gantry run --show-logs --dataset='petew/squad-train:/input-data' -- ls /input-data
+```
+
+### How can I attach a WEKA bucket to an experiment?
+
+Use the `--weka` option for `gantry run`. For example:
+
+```bash
+gantry run --show-logs --weka='oe-training-default:/mount/weka' -- ls -l /mount/weka
+```
+
+### How can I run distributed batch jobs with Gantry?
+
+Used together, the options `--replicas INT`, `--leader-selection`, and `--host-networking` let you run distributed batch jobs. See the [Beaker docs](https://beaker-docs.apps.allenai.org/experiments/distributed-training.html#batch-jobs) for more information.
+Consider also setting `--propagate-failure`, `--propagate-preemption`, and `--synchronized-start-timeout TEXT` depending on your workload.
+
+For example:
+
+```bash
+gantry run \
+  --show-logs \
+  --replicas=2 \
+  --leader-selection \
+  --host-networking \
+  --propagate-failure \
+  --propagate-preemption \
+  --synchronized-start-timeout='5m' \
+  --gpu-type='h100' \
+  --gpus=8 \
+  --beaker-image='ai2/cuda12.8-ubuntu22.04-torch2.7.0' \
+  --system-python \
+  --exec-method='bash' \
+  -- torchrun \
+  '--nnodes="$BEAKER_REPLICA_COUNT:$BEAKER_REPLICA_COUNT"' \
+  '--nproc-per-node="$BEAKER_ASSIGNED_GPU_COUNT"' \
+  '--rdzv-id=12347' \
+  '--rdzv-backend=static' \
+  '--rdzv-endpoint="$BEAKER_LEADER_REPLICA_HOSTNAME:29400"' \
+  '--node-rank="$BEAKER_REPLICA_RANK"' \
+  '--rdzv-conf="read_timeout=420"' \
+  -m gantry.all_reduce_bench
+```
+
+Note that we have environment variables like `BEAKER_REPLICA_COUNT` in the arguments to our `torchrun` command that we want to have expanded *at runtime*.
+To accomplish this we do two things:
+1. We wrap those arguments in single quotes to avoid expanding them locally.
+2. We set `--exec-method=bash` to tell gantry to run our command and arguments with `bash -c`, which will do variable expansion.
+
+Alternatively you could put your whole `torchrun` command into a script, let's call it `launch-torchrun.sh`, without single quotes around the arguments.
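+For reference, such a script might look like the following sketch (the flags and environment variables simply mirror the inline example above). Remember to commit it and make it executable (`chmod +x launch-torchrun.sh`):
+
+```bash
+#!/usr/bin/env bash
+# launch-torchrun.sh: since bash runs this script directly, the Beaker
+# environment variables are expanded at runtime without any extra quoting.
+set -euo pipefail
+
+torchrun \
+  --nnodes="$BEAKER_REPLICA_COUNT:$BEAKER_REPLICA_COUNT" \
+  --nproc-per-node="$BEAKER_ASSIGNED_GPU_COUNT" \
+  --rdzv-id=12347 \
+  --rdzv-backend=static \
+  --rdzv-endpoint="$BEAKER_LEADER_REPLICA_HOSTNAME:29400" \
+  --node-rank="$BEAKER_REPLICA_RANK" \
+  --rdzv-conf="read_timeout=420" \
+  -m gantry.all_reduce_bench
+```
+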
+Then change your `gantry run` command like this:
+
+```diff
+ gantry run \
+   --show-logs \
+   --replicas=2 \
+   --leader-selection \
+   --host-networking \
+   --propagate-failure \
+   --propagate-preemption \
+   --synchronized-start-timeout='5m' \
+   --gpu-type='h100' \
+   --gpus=8 \
+   --beaker-image='ai2/cuda12.8-ubuntu22.04-torch2.7.0' \
+   --system-python \
+-  --exec-method='bash' \
+-  -- torchrun \
+-  '--nnodes="$BEAKER_REPLICA_COUNT:$BEAKER_REPLICA_COUNT"' \
+-  '--nproc-per-node="$BEAKER_ASSIGNED_GPU_COUNT"' \
+-  '--rdzv-id=12347' \
+-  '--rdzv-backend=static' \
+-  '--rdzv-endpoint="$BEAKER_LEADER_REPLICA_HOSTNAME:29400"' \
+-  '--node-rank="$BEAKER_REPLICA_RANK"' \
+-  '--rdzv-conf="read_timeout=420"' \
+-  -m gantry.all_reduce_bench
++  -- ./launch-torchrun.sh
+```
+
+### How can I customize the Python setup steps?
+
+If gantry's default Python setup steps don't work for you, you can override them through the `--install TEXT` option with a custom command or shell script.
+For example:
+
+```bash
+gantry run --show-logs --install='pip install -r custom_requirements.txt' -- echo "Hello, World!"
+```
+
+### Can I use conda like with older versions of gantry?
+
+Yes, you can still use conda if you wish by committing a conda `environment.yml` file to your repo or by simply specifying `--python-manager=conda`.
+For example:
+
+```bash
+gantry run --show-logs --python-manager=conda -- which python
+```
+
+### Can I use gantry with non-Python workloads?
+
+Absolutely! Just add the flag `--no-python` and optionally set `--install` or `--post-setup` to a custom command or shell script if you need custom setup steps.
+
+### Why "Gantry"?
+
+A gantry is a structure that's used, among other things, to lift containers off of ships. Analogously, Beaker Gantry's purpose is to lift Docker containers (or at least the *management* of Docker containers) away from users.
+ diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..efc4c8d15adfd5401c936fbb702858ae16894cdd --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/RECORD @@ -0,0 +1,52 @@ +../../../bin/gantry,sha256=W1PyGE01lnRSW_A2yglNNaiDn4x2IWsYnvuOl_9ZpLY,284 +beaker_gantry-3.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +beaker_gantry-3.0.0.dist-info/METADATA,sha256=xS8iY-iY2u87HyPFV_6dGYKSjlBrhxw8Kv7SCNFD_jg,13902 +beaker_gantry-3.0.0.dist-info/RECORD,, +beaker_gantry-3.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +beaker_gantry-3.0.0.dist-info/entry_points.txt,sha256=YqSWRra49zM3dGuinXhUZNV-XX6EuMLKsk675Y8uM1U,48 +beaker_gantry-3.0.0.dist-info/licenses/LICENSE,sha256=YvuKOpYh3COIF0yqq-nCMXtpS7mh1GyYvPVlW2j1G-M,11359 +beaker_gantry-3.0.0.dist-info/top_level.txt,sha256=gf3iVhupOZlEjiwnV0cUoxM53I072x7Yuo8Tp9fIIUY,7 +gantry/__init__.py,sha256=E-PF5cGEFlLs3H675NFTNUGXtyudZ3tia0WTA2rjk_Q,92 +gantry/__main__.py,sha256=AHw_g5NJJlDIWI_AENXB3_DwJVeXsJbrf13bXwLXoQM,66 +gantry/__pycache__/__init__.cpython-310.pyc,, +gantry/__pycache__/__main__.cpython-310.pyc,, +gantry/__pycache__/aliases.cpython-310.pyc,, +gantry/__pycache__/all_reduce_bench.cpython-310.pyc,, +gantry/__pycache__/api.cpython-310.pyc,, +gantry/__pycache__/constants.cpython-310.pyc,, +gantry/__pycache__/exceptions.cpython-310.pyc,, +gantry/__pycache__/git_utils.cpython-310.pyc,, +gantry/__pycache__/util.cpython-310.pyc,, +gantry/__pycache__/version.cpython-310.pyc,, +gantry/aliases.py,sha256=IgXZnb_QQG7Q1Ri5GaSVsdPRwnAoMmB-KQmieUo2LKw,83 +gantry/all_reduce_bench.py,sha256=ALwJi8ITT7QiWFP54H11n-IvktasR3akdx1FI0DUFrQ,2714 +gantry/api.py,sha256=HpKUPwn0CSemOXNCfYK41j5-bW1uhNePBVUTMqG6aRw,32961 +gantry/commands/__init__.py,sha256=bbx4pjiRqCVwqT-eIg32IjabsW2si14Yfyu0TSOmbDE,431 +gantry/commands/__pycache__/__init__.cpython-310.pyc,, +gantry/commands/__pycache__/completion.cpython-310.pyc,, +gantry/commands/__pycache__/config.cpython-310.pyc,, +gantry/commands/__pycache__/find_gpus.cpython-310.pyc,, +gantry/commands/__pycache__/follow.cpython-310.pyc,, +gantry/commands/__pycache__/list.cpython-310.pyc,, +gantry/commands/__pycache__/logs.cpython-310.pyc,, +gantry/commands/__pycache__/main.cpython-310.pyc,, +gantry/commands/__pycache__/open.cpython-310.pyc,, +gantry/commands/__pycache__/run.cpython-310.pyc,, +gantry/commands/__pycache__/stop.cpython-310.pyc,, +gantry/commands/completion.py,sha256=YzSSe8hvt_mpIjMDJIggu0V7hRyJbmGe9WXBtkUtWwI,1288 +gantry/commands/config.py,sha256=-orXMLvghAqytx6V00tzZ47oqIgD0ehRZjL7I8KTWIQ,1650 +gantry/commands/find_gpus.py,sha256=Pb3enRIj9Giss_-Z505YlxqmRVey4RnyIrlcf7lnAoM,2906 +gantry/commands/follow.py,sha256=i8t4wVr5-LNVXg5-1keEv03Mm6mKBsc8Skh9RXZzGz0,2360 +gantry/commands/list.py,sha256=77lT1pswO3_ncM2PNBFoaL0yCNsaICOOKeZNM2l0v6E,8766 +gantry/commands/logs.py,sha256=zhiMDQz-FXTU3KkZ3cvEU3K2g7WWushGHllpW3cBSNo,3030 +gantry/commands/main.py,sha256=hitqrFJdV6ufmlLU7Q8Ms0QvvN-5Di9GEQVfL5MXtpM,3403 +gantry/commands/open.py,sha256=mESrmDLbXx_KHBknzqkuMqgKOG613zUkaQnJZK0l7So,2310 +gantry/commands/run.py,sha256=J0mjXSWjGe5L26tyaXQwA3vY_UUxiIGB4aqPXRNwFqs,13621 +gantry/commands/stop.py,sha256=MlB6sHWYBdGl8lbae8p4UxhaJ6BIKUYJ2zip5bpzRXE,2485 +gantry/constants.py,sha256=QJV2q_wFi_D5IEKf_Mrey1JEF0fvCeBNqghtlk_toZo,231 
+gantry/entrypoint.sh,sha256=rrWvZY-Xs5iEZRcoD3BvXlatjQpFnBzDfTRxyDOWxU4,20035 +gantry/exceptions.py,sha256=PmJgiDT-wp-r7OuO3STgtHqZVBcYURJADxQr-FjUeO0,715 +gantry/git_utils.py,sha256=PJQtb1kcFX0nKumB0sdf7KAjW6WVfxgVZKxNQqvPaXI,6190 +gantry/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +gantry/util.py,sha256=Q7o99biCqC1cBBf9IsHo9EDxRN5jobAa6WM78AJQtXg,17474 +gantry/version.py,sha256=qckxTKowVHHdLYuDeVRWcIaYVKeKZEw0YwwUIm6QtNY,18 diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e7fa31b6f3f78deb1022c1f7927f07d4d16da822 --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..aa53bd6a6340c243dd0ce3cb9374842b41cf4ef0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +gantry = gantry.__main__:main diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..9b259bdfcf9022e7f4999a318ab7261300644e11 --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c992de7c86263a841797301e2269c9eb433f544 --- /dev/null +++ b/venv/lib/python3.10/site-packages/beaker_gantry-3.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +gantry diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/LICENSE b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b96dcb0480a0b0be0727976e5202a1e7b23edc3f --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Facebook, Inc. and its affiliates. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..e87cecfac3b910a804c4d5345bd4f407776c9a19 --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/METADATA @@ -0,0 +1,41 @@ +Metadata-Version: 2.1 +Name: bitsandbytes +Version: 0.43.1 +Summary: k-bit optimizers and matrix multiplication routines. 
+Home-page: https://github.com/TimDettmers/bitsandbytes
+Author: Tim Dettmers
+Author-email: dettmers@cs.washington.edu
+License: MIT
+Keywords: gpu optimizers optimization 8-bit quantization compression
+Classifier: Development Status :: 4 - Beta
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: NOTICE.md
+Requires-Dist: torch
+Requires-Dist: numpy
+Provides-Extra: benchmark
+Requires-Dist: pandas ; extra == 'benchmark'
+Requires-Dist: matplotlib ; extra == 'benchmark'
+Provides-Extra: test
+Requires-Dist: scipy ; extra == 'test'
+
+# `bitsandbytes`
+
+[![Downloads](https://static.pepy.tech/badge/bitsandbytes)](https://pepy.tech/project/bitsandbytes) [![Downloads](https://static.pepy.tech/badge/bitsandbytes/month)](https://pepy.tech/project/bitsandbytes) [![Downloads](https://static.pepy.tech/badge/bitsandbytes/week)](https://pepy.tech/project/bitsandbytes)
+
+The `bitsandbytes` library is a lightweight Python wrapper around CUDA custom functions, in particular 8-bit optimizers, matrix multiplication (LLM.int8()), and 8 & 4-bit quantization functions.
+
+The library includes quantization primitives for 8-bit & 4-bit operations through `bitsandbytes.nn.Linear8bitLt` and `bitsandbytes.nn.Linear4bit`, and 8-bit optimizers through the `bitsandbytes.optim` module.
+
+There are ongoing efforts to support further hardware backends, e.g. Intel CPU + GPU, AMD GPU, and Apple Silicon. Windows support is also well under way.
+
+**Please head to the official documentation page:**
+
+**[https://huggingface.co/docs/bitsandbytes/main](https://huggingface.co/docs/bitsandbytes/main)**
+
+## License
+
+The majority of bitsandbytes is licensed under MIT; however, small portions of the project are available under separate license terms: the parts adapted from PyTorch are licensed under the BSD license.
+
+We thank Fabio Cannizzo for his work on [FastBinarySearch](https://github.com/fabiocannizzo/FastBinarySearch), which we use for CPU quantization.
diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/NOTICE.md b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/NOTICE.md
new file mode 100644
index 0000000000000000000000000000000000000000..660658b057ad820c341d932fcbd4dd4ffe8e30f4
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/NOTICE.md
@@ -0,0 +1,3 @@
+The majority of bitsandbytes is licensed under MIT; however, portions of the project are available under separate license terms: PyTorch is licensed under the BSD license.
+
+We thank Fabio Cannizzo for his work on FastBinarySearch, which is included in this project.
diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2c584a6a9ed660e8573fbda0fdd33d634b633483 --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/RECORD @@ -0,0 +1,125 @@ +bitsandbytes-0.43.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +bitsandbytes-0.43.1.dist-info/LICENSE,sha256=UkEte8fOQVfqYou6rLiCngqcs8WPV_mRdhJryM8r_IU,1086 +bitsandbytes-0.43.1.dist-info/METADATA,sha256=rTk4YZpnCvi7Bl35P5tnw0wHiQIsIZQt8MrzsidhRHQ,2161 +bitsandbytes-0.43.1.dist-info/NOTICE.md,sha256=_4zDL2L8BqUwtmvoznR_wqhQmsP2QwdXHrAHnBMzAl8,265 +bitsandbytes-0.43.1.dist-info/RECORD,, +bitsandbytes-0.43.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bitsandbytes-0.43.1.dist-info/WHEEL,sha256=EYL3ue1qm7hrY1DQB1tBcLP-lYRHaUwBRyhfzfLAMz0,111 +bitsandbytes-0.43.1.dist-info/top_level.txt,sha256=RttH1rYsSQjh-S6_y8rqF6hmKVVQ-cigSmKp5lBdKk4,19 +bitsandbytes/__init__.py,sha256=6lGiY1O94J3a7pikUOaVV68GkVBtqkBq_kF3QOPvG_c,544 +bitsandbytes/__main__.py,sha256=A3iA9QH8yfn-60uN6w_7WUiuRIM777rZ_pf_HsZtse4,90 +bitsandbytes/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/__pycache__/__main__.cpython-310.pyc,, +bitsandbytes/__pycache__/cextension.cpython-310.pyc,, +bitsandbytes/__pycache__/consts.cpython-310.pyc,, +bitsandbytes/__pycache__/cuda_specs.cpython-310.pyc,, +bitsandbytes/__pycache__/functional.cpython-310.pyc,, +bitsandbytes/__pycache__/utils.cpython-310.pyc,, +bitsandbytes/autograd/__init__.py,sha256=4OvPhgnGmn6wE-A1TSZatqLXfngloLWv5rOreKVEYNs,67 +bitsandbytes/autograd/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/autograd/__pycache__/_functions.cpython-310.pyc,, +bitsandbytes/autograd/_functions.py,sha256=cJ9FVknLOA0sQA740btdiddA7E49BmqbCm1vgnH_r6w,22262 +bitsandbytes/cextension.py,sha256=6oBPi2JxOnDyc4xs4hcFGslsqz8zFuC9NcNkYd2ty3s,4709 +bitsandbytes/consts.py,sha256=xfDadQAax-maxqkHrVNbZ6UITJoS0XgcUfopmjLgilM,380 +bitsandbytes/cuda_specs.py,sha256=yxnhpfN_j7Rb5pwKRzk9ZxM4WChG6ndlPWF2nwZoJ98,1196 +bitsandbytes/diagnostics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bitsandbytes/diagnostics/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/diagnostics/__pycache__/cuda.cpython-310.pyc,, +bitsandbytes/diagnostics/__pycache__/main.cpython-310.pyc,, +bitsandbytes/diagnostics/__pycache__/utils.cpython-310.pyc,, +bitsandbytes/diagnostics/cuda.py,sha256=2BQCBWwLmh-y2Mkq7__tuKtZvELF476rEwYJvYqi4O4,6832 +bitsandbytes/diagnostics/main.py,sha256=4t8NIihGT6j0e_YY05_txCxt-2zQpu1tEnAzDFjPa7c,2642 +bitsandbytes/diagnostics/utils.py,sha256=3G_0NmCKSwb4weI78IMAR0MF0dZ2KRKbg-phEp-sefw,284 +bitsandbytes/functional.py,sha256=TD0rV1vxp5plZKj5h32HK2BzMV3TfLTwehTxru1hDTM,91996 +bitsandbytes/libbitsandbytes_cpu.so,sha256=F4-Ha9kD-t3tViZks8s1hIIGlKUcTktcWmr_2maC-aQ,32848 +bitsandbytes/libbitsandbytes_cuda117.so,sha256=YU-1iTGwVnIgQqIQTdOqc9-zBQ-yzZJN7H0FLoaeUpM,21533992 +bitsandbytes/libbitsandbytes_cuda117_nocublaslt.so,sha256=Md5jArv_5NB0yp1y0Wmp0mLDdLL2nYFND1cYAoZnAZo,21519880 +bitsandbytes/libbitsandbytes_cuda118.so,sha256=Jjg0lqSAN7tmlGqYFy7grNhLJ1LF-Y98_grP2ST-KRs,24352040 +bitsandbytes/libbitsandbytes_cuda118_nocublaslt.so,sha256=zxQM7BTbXv31iPHfqaUuWgwh4jNuGMiv8nIebatgVNo,24337928 +bitsandbytes/libbitsandbytes_cuda120.so,sha256=gbpGsbUIJniNK0nktLiuuu97HlCbsn6WZ0Bc2-loeCI,26485992 
+bitsandbytes/libbitsandbytes_cuda120_nocublaslt.so,sha256=LSQoDgVm-9WvRaZdmAnGahiW6afh-Lb-T0ixaKM6ilU,26471888 +bitsandbytes/libbitsandbytes_cuda121.so,sha256=-OppZiKL5c8gvDd92K7NQ6z7ZO0AIMNQCnhTU0K8tBk,26498280 +bitsandbytes/libbitsandbytes_cuda121_nocublaslt.so,sha256=u0xl19UbTmj1tjjnwOifju_iaFmdW_cVpVyey6Xq4tQ,26480080 +bitsandbytes/libbitsandbytes_cuda122.so,sha256=W1vfXS2GD7he54-WZJbEFQRjYqRSaCWxX6lP2BMP2Og,26539472 +bitsandbytes/libbitsandbytes_cuda122_nocublaslt.so,sha256=Fi1QLdFWt0HfARifoS139DOC8-d9GojsbdYTH_rHkpw,26517176 +bitsandbytes/libbitsandbytes_cuda123.so,sha256=I4Oc2xpUFHWWDIrlAxpjoLnNURAalSseBiwI6yWPMjo,26564048 +bitsandbytes/libbitsandbytes_cuda123_nocublaslt.so,sha256=6_onKXi9MEfI6yWuxnn4QmHjcrfOMkKP9Y3PD1xPE-4,26541752 +bitsandbytes/libbitsandbytes_cuda124.so,sha256=em-COkhRd0R9QEEUGN29TtgmX8UAt_umEr5d3MZ-03g,25885176 +bitsandbytes/libbitsandbytes_cuda124_nocublaslt.so,sha256=ItX6baCGWXo1yYfH6pN8j8aZeYv6k9Ft3X5btqlAmyo,25866976 +bitsandbytes/nn/__init__.py,sha256=B48TKjKNoVIAWUIVJuyIjzZqcql38h3z07WynG8lSp0,523 +bitsandbytes/nn/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/nn/__pycache__/modules.cpython-310.pyc,, +bitsandbytes/nn/__pycache__/triton_based_modules.cpython-310.pyc,, +bitsandbytes/nn/modules.py,sha256=GBzIH91TZ2YfGiSK5MCqph9s_s4Y_Zxuz7i1S_mBxWc,31845 +bitsandbytes/nn/triton_based_modules.py,sha256=4Yap4yUketfdny6YZEdlm6jZEC_Y6GFVvajMcSsDg20,9818 +bitsandbytes/optim/__init__.py,sha256=a54xjs8WfVo1lfhjtKS0YPk0np9Q5R0aYdRc3nrdPqc,767 +bitsandbytes/optim/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/adagrad.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/adam.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/adamw.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/lamb.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/lars.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/lion.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/optimizer.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/rmsprop.cpython-310.pyc,, +bitsandbytes/optim/__pycache__/sgd.cpython-310.pyc,, +bitsandbytes/optim/adagrad.py,sha256=Eb2IMjEUh-eKW8fboaqaFm_Zufci87BTFcJxMXuqKHo,7915 +bitsandbytes/optim/adam.py,sha256=3R83Pd5geirWEW_zlhQFJAmHIKWWLr9zOYms6BZRRgg,23889 +bitsandbytes/optim/adamw.py,sha256=fciFSafK2mdY63asrjIVFB-wDrz2uuFgyBYRR-pSfc8,14526 +bitsandbytes/optim/lamb.py,sha256=vmKFXKpmKUuDkwqC23Tmm96DCvWu4130eafqQMRIDZ0,7961 +bitsandbytes/optim/lars.py,sha256=IUIc7F9In8SpzeXYaOx1ryZMGR5VT5s0sPR1XwJeo9s,9390 +bitsandbytes/optim/lion.py,sha256=atjiI5bZiMuZA6M8uzk4B-L8XMIxWEhuoua6BmS6xek,11613 +bitsandbytes/optim/optimizer.py,sha256=zNKJDLtY22_-SgK66QGE1bUGnGTiOM95zgJZtdLoRiQ,28891 +bitsandbytes/optim/rmsprop.py,sha256=2REed-BmnNRr4JOL3i1r2laJx3JqjQR_85UAgcxZdjE,7786 +bitsandbytes/optim/sgd.py,sha256=qIuiu7temLkEY_sIhqOCwebGYAXpoJENpUBMdPI3Hh0,6461 +bitsandbytes/research/__init__.py,sha256=7Ynu_Jnsc9WMEvXUkRiDP0ESPw1PfjiTvnfzRt1I7ak,119 +bitsandbytes/research/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/research/autograd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bitsandbytes/research/autograd/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/research/autograd/__pycache__/_functions.cpython-310.pyc,, +bitsandbytes/research/autograd/_functions.py,sha256=QtNDVqj1uyxnUa-tOXBYYAwc6M7uhlRcmi_xPonJCj8,16048 +bitsandbytes/research/nn/__init__.py,sha256=D79oZ7lCiIi1zC-I6ZRFDGS5wHu9R6vulliABt8a8S4,53 +bitsandbytes/research/nn/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/research/nn/__pycache__/modules.cpython-310.pyc,, 
+bitsandbytes/research/nn/modules.py,sha256=1uIo6pbgv4cxYN6TZ8phpeUl_7G7AwjCZl41aOG7ilM,2328 +bitsandbytes/triton/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +bitsandbytes/triton/__pycache__/__init__.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/dequantize_rowwise.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/int8_matmul_mixed_dequantize.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/int8_matmul_rowwise_dequantize.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/quantize_columnwise_and_transpose.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/quantize_global.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/quantize_rowwise.cpython-310.pyc,, +bitsandbytes/triton/__pycache__/triton_utils.cpython-310.pyc,, +bitsandbytes/triton/dequantize_rowwise.py,sha256=_uNTYmWwM7RRkJfs3XXDx8yzhg5HiSTpG9CqzOyGAiI,2040 +bitsandbytes/triton/int8_matmul_mixed_dequantize.py,sha256=LTt-J1axryyf05BIr91Beb0cZD65xeM7xe6SQCyLeAY,8772 +bitsandbytes/triton/int8_matmul_rowwise_dequantize.py,sha256=u_fHArD8Xml26O-JEZoEGoy1C07M2ctPZpTZ-FH6lWg,8756 +bitsandbytes/triton/quantize_columnwise_and_transpose.py,sha256=wJu7iqYF2Eq4MvXafTArW3MhlCHa6ezFMe-ig6JH-08,2598 +bitsandbytes/triton/quantize_global.py,sha256=63YXd7Qf566pXPjYyCal7H4__GvWgfQ1Jbvf9JUXasQ,3921 +bitsandbytes/triton/quantize_rowwise.py,sha256=ipQO-xMJN3ismgpkba_BOtJskkd21di-ABSaw59SvNo,2175 +bitsandbytes/triton/triton_utils.py,sha256=Ud2SO1irefDIBDN2SlJj_SyLHiRGvbh1FsT30V31El4,104 +bitsandbytes/utils.py,sha256=ADQYZ_pl-0l6EYWdnHzsNfX-V6Znukt0AK9T9X9TrC8,6828 +tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +tests/__pycache__/__init__.cpython-310.pyc,, +tests/__pycache__/conftest.cpython-310.pyc,, +tests/__pycache__/helpers.cpython-310.pyc,, +tests/__pycache__/test_autograd.cpython-310.pyc,, +tests/__pycache__/test_cuda_setup_evaluator.cpython-310.pyc,, +tests/__pycache__/test_functional.cpython-310.pyc,, +tests/__pycache__/test_generation.cpython-310.pyc,, +tests/__pycache__/test_linear4bit.cpython-310.pyc,, +tests/__pycache__/test_linear8bitlt.cpython-310.pyc,, +tests/__pycache__/test_modules.cpython-310.pyc,, +tests/__pycache__/test_optim.cpython-310.pyc,, +tests/__pycache__/test_triton.cpython-310.pyc,, +tests/conftest.py,sha256=xQRiMFOiDSV5GwmZB7ObeR_12Uv0XLenVytTe_Knr2Y,1010 +tests/helpers.py,sha256=NeBk1xsVryl9Ck0nmSrqDK4Cbf6439xaqjg2NSkxFps,1611 +tests/test_autograd.py,sha256=14aOfU1G-D811AcfSOjCMjXFGA-DVYnOO1wN3rXcGGI,21376 +tests/test_cuda_setup_evaluator.py,sha256=kOgLtX0KC9Vas5oTIPRqmfeLRYbPN4QK-uyPi8pTcCc,1257 +tests/test_functional.py,sha256=JKeVdgGHN9A90rjRaSGE3ONGawYAzWVpdybkDqjuPlM,85125 +tests/test_generation.py,sha256=AUS_sLDFlOSJ4uRMSmARQoZJJoZ1jGPn_L3HYMPAS1w,4209 +tests/test_linear4bit.py,sha256=9HniiRl6ymSHCypmD5BP8-PVtaqoHiVyhnjmwd31Pyw,7014 +tests/test_linear8bitlt.py,sha256=znaeJjApipjaVxqOwN_5KY5I-1HlbEi8atcv4dtUMhw,6454 +tests/test_modules.py,sha256=i6xz9f0CuTCmy2WAOYFLrqT7-Fzw2HrhDf0XtsKWS7c,22971 +tests/test_optim.py,sha256=8qGEOIvC81MrkSIGlnfieaYxYxGoJ2IUUlrwlpU2ks8,20767 +tests/test_triton.py,sha256=cySUBLoJKVRBjhs6FkQRoR06-3CaMfQwgVXKhx2m7-k,2585 diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/WHEEL 
new file mode 100644 index 0000000000000000000000000000000000000000..2d20419c898d45a25e73456ce447a951f8f9ed3d --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: false +Tag: py3-none-manylinux_2_24_x86_64 + diff --git a/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..304243c0704cc2069cbdf569f8a873a0c77acb60 --- /dev/null +++ b/venv/lib/python3.10/site-packages/bitsandbytes-0.43.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +bitsandbytes +tests diff --git a/venv/lib/python3.10/site-packages/botocore/__init__.py b/venv/lib/python3.10/site-packages/botocore/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..46e292e5b21c2f845275a0764d5cfcd407c74f14 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/__init__.py @@ -0,0 +1,139 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging +import os +import re + +__version__ = '1.34.162' + + +class NullHandler(logging.Handler): + def emit(self, record): + pass + + +# Configure default logger to do nothing +log = logging.getLogger('botocore') +log.addHandler(NullHandler()) + +_INITIALIZERS = [] + +_first_cap_regex = re.compile('(.)([A-Z][a-z]+)') +_end_cap_regex = re.compile('([a-z0-9])([A-Z])') +# The regex below handles the special case where some acronym +# name is pluralized, e.g GatewayARNs, ListWebACLs, SomeCNAMEs. +_special_case_transform = re.compile('[A-Z]{2,}s$') +# Prepopulate the cache with special cases that don't match +# our regular transformation. 
+_xform_cache = { + ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume', + ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume', + ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes', + ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes', + ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes', + ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes', + ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume', + ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume', + ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type', + ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type', + ('ExecutePartiQLStatement', '_'): 'execute_partiql_statement', + ('ExecutePartiQLStatement', '-'): 'execute-partiql-statement', + ('ExecutePartiQLTransaction', '_'): 'execute_partiql_transaction', + ('ExecutePartiQLTransaction', '-'): 'execute-partiql-transaction', + ('ExecutePartiQLBatch', '_'): 'execute_partiql_batch', + ('ExecutePartiQLBatch', '-'): 'execute-partiql-batch', +} +# The items in this dict represent partial renames to apply globally to all +# services which might have a matching argument or operation. This way a +# common mis-translation can be fixed without having to call out each +# individual case. +ScalarTypes = ('string', 'integer', 'boolean', 'timestamp', 'float', 'double') + +BOTOCORE_ROOT = os.path.dirname(os.path.abspath(__file__)) + + +# Used to specify anonymous (unsigned) request signature +class UNSIGNED: + def __copy__(self): + return self + + def __deepcopy__(self, memodict): + return self + + +UNSIGNED = UNSIGNED() + + +def xform_name(name, sep='_', _xform_cache=_xform_cache): + """Convert camel case to a "pythonic" name. + + If the name contains the ``sep`` character, then it is + returned unchanged. + + """ + if sep in name: + # If the sep is in the name, assume that it's already + # transformed and return the string unchanged. + return name + key = (name, sep) + if key not in _xform_cache: + if _special_case_transform.search(name) is not None: + is_special = _special_case_transform.search(name) + matched = is_special.group() + # Replace something like ARNs, ACLs with _arns, _acls. + name = f"{name[: -len(matched)]}{sep}{matched.lower()}" + s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name) + transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s1).lower() + _xform_cache[key] = transformed + return _xform_cache[key] + + +def register_initializer(callback): + """Register an initializer function for session creation. + + This initializer function will be invoked whenever a new + `botocore.session.Session` is instantiated. + + :type callback: callable + :param callback: A callable that accepts a single argument + of type `botocore.session.Session`. + + """ + _INITIALIZERS.append(callback) + + +def unregister_initializer(callback): + """Unregister an initializer function. + + :type callback: callable + :param callback: A callable that was previously registered + with `botocore.register_initializer`. + + :raises ValueError: If a callback is provided that is not currently + registered as an initializer. + + """ + _INITIALIZERS.remove(callback) + + +def invoke_initializers(session): + """Invoke all initializers for a session. + + :type session: botocore.session.Session + :param session: The session to initialize. 
+ + """ + for initializer in _INITIALIZERS: + initializer(session) diff --git a/venv/lib/python3.10/site-packages/botocore/args.py b/venv/lib/python3.10/site-packages/botocore/args.py new file mode 100644 index 0000000000000000000000000000000000000000..758a3c3c92d47635788272b76912744c3068aa0b --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/args.py @@ -0,0 +1,770 @@ +# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Internal module to help with normalizing botocore client args. + +This module (and all function/classes within this module) should be +considered internal, and *not* a public API. + +""" + +import copy +import logging +import socket + +import botocore.exceptions +import botocore.parsers +import botocore.serialize +from botocore.config import Config +from botocore.endpoint import EndpointCreator +from botocore.regions import EndpointResolverBuiltins as EPRBuiltins +from botocore.regions import EndpointRulesetResolver +from botocore.signers import RequestSigner +from botocore.useragent import UserAgentString +from botocore.utils import ensure_boolean, is_s3_accelerate_url + +logger = logging.getLogger(__name__) + + +VALID_REGIONAL_ENDPOINTS_CONFIG = [ + 'legacy', + 'regional', +] +LEGACY_GLOBAL_STS_REGIONS = [ + 'ap-northeast-1', + 'ap-south-1', + 'ap-southeast-1', + 'ap-southeast-2', + 'aws-global', + 'ca-central-1', + 'eu-central-1', + 'eu-north-1', + 'eu-west-1', + 'eu-west-2', + 'eu-west-3', + 'sa-east-1', + 'us-east-1', + 'us-east-2', + 'us-west-1', + 'us-west-2', +] +# Maximum allowed length of the ``user_agent_appid`` config field. Longer +# values result in a warning-level log message. 
+USERAGENT_APPID_MAXLEN = 50 + + +class ClientArgsCreator: + def __init__( + self, + event_emitter, + user_agent, + response_parser_factory, + loader, + exceptions_factory, + config_store, + user_agent_creator=None, + ): + self._event_emitter = event_emitter + self._response_parser_factory = response_parser_factory + self._loader = loader + self._exceptions_factory = exceptions_factory + self._config_store = config_store + if user_agent_creator is None: + self._session_ua_creator = UserAgentString.from_environment() + else: + self._session_ua_creator = user_agent_creator + + def get_client_args( + self, + service_model, + region_name, + is_secure, + endpoint_url, + verify, + credentials, + scoped_config, + client_config, + endpoint_bridge, + auth_token=None, + endpoints_ruleset_data=None, + partition_data=None, + ): + final_args = self.compute_client_args( + service_model, + client_config, + endpoint_bridge, + region_name, + endpoint_url, + is_secure, + scoped_config, + ) + + service_name = final_args['service_name'] # noqa + parameter_validation = final_args['parameter_validation'] + endpoint_config = final_args['endpoint_config'] + protocol = final_args['protocol'] + config_kwargs = final_args['config_kwargs'] + s3_config = final_args['s3_config'] + partition = endpoint_config['metadata'].get('partition', None) + socket_options = final_args['socket_options'] + configured_endpoint_url = final_args['configured_endpoint_url'] + signing_region = endpoint_config['signing_region'] + endpoint_region_name = endpoint_config['region_name'] + + event_emitter = copy.copy(self._event_emitter) + signer = RequestSigner( + service_model.service_id, + signing_region, + endpoint_config['signing_name'], + endpoint_config['signature_version'], + credentials, + event_emitter, + auth_token, + ) + + config_kwargs['s3'] = s3_config + new_config = Config(**config_kwargs) + endpoint_creator = EndpointCreator(event_emitter) + + endpoint = endpoint_creator.create_endpoint( + service_model, + region_name=endpoint_region_name, + endpoint_url=endpoint_config['endpoint_url'], + verify=verify, + response_parser_factory=self._response_parser_factory, + max_pool_connections=new_config.max_pool_connections, + proxies=new_config.proxies, + timeout=(new_config.connect_timeout, new_config.read_timeout), + socket_options=socket_options, + client_cert=new_config.client_cert, + proxies_config=new_config.proxies_config, + ) + + serializer = botocore.serialize.create_serializer( + protocol, parameter_validation + ) + response_parser = botocore.parsers.create_parser(protocol) + + ruleset_resolver = self._build_endpoint_resolver( + endpoints_ruleset_data, + partition_data, + client_config, + service_model, + endpoint_region_name, + region_name, + configured_endpoint_url, + endpoint, + is_secure, + endpoint_bridge, + event_emitter, + ) + + # Copy the session's user agent factory and adds client configuration. 
+ client_ua_creator = self._session_ua_creator.with_client_config( + new_config + ) + supplied_ua = client_config.user_agent if client_config else None + new_config._supplied_user_agent = supplied_ua + + return { + 'serializer': serializer, + 'endpoint': endpoint, + 'response_parser': response_parser, + 'event_emitter': event_emitter, + 'request_signer': signer, + 'service_model': service_model, + 'loader': self._loader, + 'client_config': new_config, + 'partition': partition, + 'exceptions_factory': self._exceptions_factory, + 'endpoint_ruleset_resolver': ruleset_resolver, + 'user_agent_creator': client_ua_creator, + } + + def compute_client_args( + self, + service_model, + client_config, + endpoint_bridge, + region_name, + endpoint_url, + is_secure, + scoped_config, + ): + service_name = service_model.endpoint_prefix + protocol = service_model.metadata['protocol'] + parameter_validation = True + if client_config and not client_config.parameter_validation: + parameter_validation = False + elif scoped_config: + raw_value = scoped_config.get('parameter_validation') + if raw_value is not None: + parameter_validation = ensure_boolean(raw_value) + + s3_config = self.compute_s3_config(client_config) + + configured_endpoint_url = self._compute_configured_endpoint_url( + client_config=client_config, + endpoint_url=endpoint_url, + ) + + endpoint_config = self._compute_endpoint_config( + service_name=service_name, + region_name=region_name, + endpoint_url=configured_endpoint_url, + is_secure=is_secure, + endpoint_bridge=endpoint_bridge, + s3_config=s3_config, + ) + endpoint_variant_tags = endpoint_config['metadata'].get('tags', []) + + # Some third-party libraries expect the final user-agent string in + # ``client.meta.config.user_agent``. To maintain backwards + # compatibility, the preliminary user-agent string (before any Config + # object modifications and without request-specific user-agent + # components) is stored in the new Config object's ``user_agent`` + # property but not used by Botocore itself. + preliminary_ua_string = self._session_ua_creator.with_client_config( + client_config + ).to_string() + # Create a new client config to be passed to the client based + # on the final values. We do not want the user to be able + # to try to modify an existing client with a client config. 
+ config_kwargs = dict( + region_name=endpoint_config['region_name'], + signature_version=endpoint_config['signature_version'], + user_agent=preliminary_ua_string, + ) + if 'dualstack' in endpoint_variant_tags: + config_kwargs.update(use_dualstack_endpoint=True) + if 'fips' in endpoint_variant_tags: + config_kwargs.update(use_fips_endpoint=True) + if client_config is not None: + config_kwargs.update( + connect_timeout=client_config.connect_timeout, + read_timeout=client_config.read_timeout, + max_pool_connections=client_config.max_pool_connections, + proxies=client_config.proxies, + proxies_config=client_config.proxies_config, + retries=client_config.retries, + client_cert=client_config.client_cert, + inject_host_prefix=client_config.inject_host_prefix, + tcp_keepalive=client_config.tcp_keepalive, + user_agent_extra=client_config.user_agent_extra, + user_agent_appid=client_config.user_agent_appid, + request_min_compression_size_bytes=( + client_config.request_min_compression_size_bytes + ), + disable_request_compression=( + client_config.disable_request_compression + ), + client_context_params=client_config.client_context_params, + ) + self._compute_retry_config(config_kwargs) + self._compute_connect_timeout(config_kwargs) + self._compute_user_agent_appid_config(config_kwargs) + self._compute_request_compression_config(config_kwargs) + s3_config = self.compute_s3_config(client_config) + + is_s3_service = self._is_s3_service(service_name) + + if is_s3_service and 'dualstack' in endpoint_variant_tags: + if s3_config is None: + s3_config = {} + s3_config['use_dualstack_endpoint'] = True + + return { + 'service_name': service_name, + 'parameter_validation': parameter_validation, + 'configured_endpoint_url': configured_endpoint_url, + 'endpoint_config': endpoint_config, + 'protocol': protocol, + 'config_kwargs': config_kwargs, + 's3_config': s3_config, + 'socket_options': self._compute_socket_options( + scoped_config, client_config + ), + } + + def _compute_configured_endpoint_url(self, client_config, endpoint_url): + if endpoint_url is not None: + return endpoint_url + + if self._ignore_configured_endpoint_urls(client_config): + logger.debug("Ignoring configured endpoint URLs.") + return endpoint_url + + return self._config_store.get_config_variable('endpoint_url') + + def _ignore_configured_endpoint_urls(self, client_config): + if ( + client_config + and client_config.ignore_configured_endpoint_urls is not None + ): + return client_config.ignore_configured_endpoint_urls + + return self._config_store.get_config_variable( + 'ignore_configured_endpoint_urls' + ) + + def compute_s3_config(self, client_config): + s3_configuration = self._config_store.get_config_variable('s3') + + # Next, specific client config values take precedence over + # specific values in the scoped config. + if client_config is not None: + if client_config.s3 is not None: + if s3_configuration is None: + s3_configuration = client_config.s3 + else: + # The current s3_configuration dictionary may be + # from a source that should only be read from, so + # we want to be safe and just make a copy of it to modify + # before it actually gets updated. + s3_configuration = s3_configuration.copy() + s3_configuration.update(client_config.s3) + + return s3_configuration + + def _is_s3_service(self, service_name): + """Whether the service is S3 or S3 Control. + + Note that throughout this class, service_name refers to the endpoint + prefix, not the folder name of the service in botocore/data.
For + S3 Control, the folder name is 's3control' but the endpoint prefix is + 's3-control'. + """ + return service_name in ['s3', 's3-control'] + + def _compute_endpoint_config( + self, + service_name, + region_name, + endpoint_url, + is_secure, + endpoint_bridge, + s3_config, + ): + resolve_endpoint_kwargs = { + 'service_name': service_name, + 'region_name': region_name, + 'endpoint_url': endpoint_url, + 'is_secure': is_secure, + 'endpoint_bridge': endpoint_bridge, + } + if service_name == 's3': + return self._compute_s3_endpoint_config( + s3_config=s3_config, **resolve_endpoint_kwargs + ) + if service_name == 'sts': + return self._compute_sts_endpoint_config(**resolve_endpoint_kwargs) + return self._resolve_endpoint(**resolve_endpoint_kwargs) + + def _compute_s3_endpoint_config( + self, s3_config, **resolve_endpoint_kwargs + ): + force_s3_global = self._should_force_s3_global( + resolve_endpoint_kwargs['region_name'], s3_config + ) + if force_s3_global: + resolve_endpoint_kwargs['region_name'] = None + endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs) + self._set_region_if_custom_s3_endpoint( + endpoint_config, resolve_endpoint_kwargs['endpoint_bridge'] + ) + # For backwards compatibility reasons, we want to make sure the + # client.meta.region_name will remain us-east-1 if we forced the + # endpoint to be the global region. Specifically, if this value + # changes to aws-global, it breaks logic where a user is checking + # for us-east-1 as the global endpoint such as in creating buckets. + if force_s3_global and endpoint_config['region_name'] == 'aws-global': + endpoint_config['region_name'] = 'us-east-1' + return endpoint_config + + def _should_force_s3_global(self, region_name, s3_config): + s3_regional_config = 'legacy' + if s3_config and 'us_east_1_regional_endpoint' in s3_config: + s3_regional_config = s3_config['us_east_1_regional_endpoint'] + self._validate_s3_regional_config(s3_regional_config) + + is_global_region = region_name in ('us-east-1', None) + return s3_regional_config == 'legacy' and is_global_region + + def _validate_s3_regional_config(self, config_val): + if config_val not in VALID_REGIONAL_ENDPOINTS_CONFIG: + raise botocore.exceptions.InvalidS3UsEast1RegionalEndpointConfigError( + s3_us_east_1_regional_endpoint_config=config_val + ) + + def _set_region_if_custom_s3_endpoint( + self, endpoint_config, endpoint_bridge + ): + # If a user is providing a custom URL, the endpoint resolver will + # refuse to infer a signing region. If we want to default to s3v4, + # we have to account for this. 
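# [Editorial sketch, not part of the vendored source: the decision rule in
# _should_force_s3_global above, with the config validation step elided.
# Under the default 'legacy' us_east_1_regional_endpoint setting, a client
# for us-east-1 (or with no region at all) resolves against the global S3
# endpoint.]
def should_force_s3_global(region_name, s3_config):
    regional_config = 'legacy'
    if s3_config and 'us_east_1_regional_endpoint' in s3_config:
        regional_config = s3_config['us_east_1_regional_endpoint']
    is_global_region = region_name in ('us-east-1', None)
    return regional_config == 'legacy' and is_global_region

# should_force_s3_global('us-east-1', {}) -> True
# should_force_s3_global('us-east-1', {'us_east_1_regional_endpoint': 'regional'}) -> False
# should_force_s3_global('eu-west-1', {}) -> False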
+ if ( + endpoint_config['signing_region'] is None + and endpoint_config['region_name'] is None + ): + endpoint = endpoint_bridge.resolve('s3') + endpoint_config['signing_region'] = endpoint['signing_region'] + endpoint_config['region_name'] = endpoint['region_name'] + + def _compute_sts_endpoint_config(self, **resolve_endpoint_kwargs): + endpoint_config = self._resolve_endpoint(**resolve_endpoint_kwargs) + if self._should_set_global_sts_endpoint( + resolve_endpoint_kwargs['region_name'], + resolve_endpoint_kwargs['endpoint_url'], + endpoint_config, + ): + self._set_global_sts_endpoint( + endpoint_config, resolve_endpoint_kwargs['is_secure'] + ) + return endpoint_config + + def _should_set_global_sts_endpoint( + self, region_name, endpoint_url, endpoint_config + ): + has_variant_tags = endpoint_config and endpoint_config.get( + 'metadata', {} + ).get('tags') + if endpoint_url or has_variant_tags: + return False + return ( + self._get_sts_regional_endpoints_config() == 'legacy' + and region_name in LEGACY_GLOBAL_STS_REGIONS + ) + + def _get_sts_regional_endpoints_config(self): + sts_regional_endpoints_config = self._config_store.get_config_variable( + 'sts_regional_endpoints' + ) + if not sts_regional_endpoints_config: + sts_regional_endpoints_config = 'legacy' + if ( + sts_regional_endpoints_config + not in VALID_REGIONAL_ENDPOINTS_CONFIG + ): + raise botocore.exceptions.InvalidSTSRegionalEndpointsConfigError( + sts_regional_endpoints_config=sts_regional_endpoints_config + ) + return sts_regional_endpoints_config + + def _set_global_sts_endpoint(self, endpoint_config, is_secure): + scheme = 'https' if is_secure else 'http' + endpoint_config['endpoint_url'] = f'{scheme}://sts.amazonaws.com' + endpoint_config['signing_region'] = 'us-east-1' + + def _resolve_endpoint( + self, + service_name, + region_name, + endpoint_url, + is_secure, + endpoint_bridge, + ): + return endpoint_bridge.resolve( + service_name, region_name, endpoint_url, is_secure + ) + + def _compute_socket_options(self, scoped_config, client_config=None): + # This disables Nagle's algorithm and matches the default socket + # options in urllib3. + socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + client_keepalive = client_config and client_config.tcp_keepalive + scoped_keepalive = scoped_config and self._ensure_boolean( + scoped_config.get("tcp_keepalive", False) + ) + # Enables TCP Keepalive if specified in client config object or shared config file. + if client_keepalive or scoped_keepalive: + socket_options.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)) + return socket_options + + def _compute_retry_config(self, config_kwargs): + self._compute_retry_max_attempts(config_kwargs) + self._compute_retry_mode(config_kwargs) + + def _compute_retry_max_attempts(self, config_kwargs): + # There's a pre-existing max_attempts client config value that actually + # means max *retry* attempts. There's also a `max_attempts` we pull + # from the config store that means *total attempts*, which includes the + # initial request. We can't change what `max_attempts` means in + # client config so we try to normalize everything to a new + # "total_max_attempts" variable. We ensure that after this, the only + # configuration for "max attempts" is the 'total_max_attempts' key. + # An explicitly provided max_attempts in the client config + # overrides everything.
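# [Editorial sketch, not part of the vendored source: the normalization
# performed by _compute_retry_max_attempts, whose vendored implementation
# follows. A client-config 'max_attempts' counts retries only, so one is
# added for the initial request; 'total_max_attempts' passes through.]
def normalize_max_attempts(retries, config_store_value=None):
    if retries is not None:
        if 'total_max_attempts' in retries:
            retries.pop('max_attempts', None)
            return retries
        if 'max_attempts' in retries:
            retries['total_max_attempts'] = retries.pop('max_attempts') + 1
            return retries
    if config_store_value is not None:
        retries = retries if retries is not None else {}
        # The config store value already means *total* attempts.
        retries['total_max_attempts'] = config_store_value
    return retries

# normalize_max_attempts({'max_attempts': 4}) -> {'total_max_attempts': 5}
# normalize_max_attempts(None, config_store_value=3) -> {'total_max_attempts': 3}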
+ retries = config_kwargs.get('retries') + if retries is not None: + if 'total_max_attempts' in retries: + retries.pop('max_attempts', None) + return + if 'max_attempts' in retries: + value = retries.pop('max_attempts') + # client config max_attempts means max *retry* attempts, so we + # have to add one for 'total_max_attempts' to account + # for the initial request. + retries['total_max_attempts'] = value + 1 + return + # Otherwise we'll check the config store which checks env vars, + # config files, etc. There is no default value for max_attempts, + # so if this returns None we don't set a default value here. + max_attempts = self._config_store.get_config_variable('max_attempts') + if max_attempts is not None: + if retries is None: + retries = {} + config_kwargs['retries'] = retries + retries['total_max_attempts'] = max_attempts + + def _compute_retry_mode(self, config_kwargs): + retries = config_kwargs.get('retries') + if retries is None: + retries = {} + config_kwargs['retries'] = retries + elif 'mode' in retries: + # If there's a retry mode explicitly set in the client config, + # that overrides everything. + return + retry_mode = self._config_store.get_config_variable('retry_mode') + if retry_mode is None: + retry_mode = 'legacy' + retries['mode'] = retry_mode + + def _compute_connect_timeout(self, config_kwargs): + # Checking if connect_timeout is set on the client config. + # If it is not, we check the config_store in case a + # non-legacy default mode has been configured. + connect_timeout = config_kwargs.get('connect_timeout') + if connect_timeout is not None: + return + connect_timeout = self._config_store.get_config_variable( + 'connect_timeout' + ) + if connect_timeout: + config_kwargs['connect_timeout'] = connect_timeout + + def _compute_request_compression_config(self, config_kwargs): + min_size = config_kwargs.get('request_min_compression_size_bytes') + disabled = config_kwargs.get('disable_request_compression') + if min_size is None: + min_size = self._config_store.get_config_variable( + 'request_min_compression_size_bytes' + ) + # conversion func is skipped so input validation must be done here + # regardless of whether the value comes from the config store or the + # config object + min_size = self._validate_min_compression_size(min_size) + config_kwargs['request_min_compression_size_bytes'] = min_size + + if disabled is None: + disabled = self._config_store.get_config_variable( + 'disable_request_compression' + ) + else: + # if the user provided a value we must check if it's a boolean + disabled = ensure_boolean(disabled) + config_kwargs['disable_request_compression'] = disabled + + def _validate_min_compression_size(self, min_size): + min_allowed_min_size = 1 + max_allowed_min_size = 1048576 + if min_size is not None: + error_msg_base = ( + f'Invalid value "{min_size}" for ' + 'request_min_compression_size_bytes.' + ) + try: + min_size = int(min_size) + except (ValueError, TypeError): + msg = ( + f'{error_msg_base} Value must be an integer. ' + f'Received {type(min_size)} instead.' + ) + raise botocore.exceptions.InvalidConfigError(error_msg=msg) + if not min_allowed_min_size <= min_size <= max_allowed_min_size: + msg = ( + f'{error_msg_base} Value must be between ' + f'{min_allowed_min_size} and {max_allowed_min_size}.'
+ ) + raise botocore.exceptions.InvalidConfigError(error_msg=msg) + + return min_size + + def _ensure_boolean(self, val): + if isinstance(val, bool): + return val + else: + return val.lower() == 'true' + + def _build_endpoint_resolver( + self, + endpoints_ruleset_data, + partition_data, + client_config, + service_model, + endpoint_region_name, + region_name, + endpoint_url, + endpoint, + is_secure, + endpoint_bridge, + event_emitter, + ): + if endpoints_ruleset_data is None: + return None + + # The legacy EndpointResolver is global to the session, but + # EndpointRulesetResolver is service-specific. Builtins for + # EndpointRulesetResolver must not be derived from the legacy + # endpoint resolver's output, including final_args, s3_config, + # etc. + s3_config_raw = self.compute_s3_config(client_config) or {} + service_name_raw = service_model.endpoint_prefix + # Maintain complex logic for s3 and sts endpoints for backwards + # compatibility. + if service_name_raw in ['s3', 'sts'] or region_name is None: + eprv2_region_name = endpoint_region_name + else: + eprv2_region_name = region_name + resolver_builtins = self.compute_endpoint_resolver_builtin_defaults( + region_name=eprv2_region_name, + service_name=service_name_raw, + s3_config=s3_config_raw, + endpoint_bridge=endpoint_bridge, + client_endpoint_url=endpoint_url, + legacy_endpoint_url=endpoint.host, + ) + # Client context params for s3 conflict with the available settings + # in the `s3` parameter on the `Config` object. If the same parameter + # is set in both places, the value in the `s3` parameter takes priority. + if client_config is not None: + client_context = client_config.client_context_params or {} + else: + client_context = {} + if self._is_s3_service(service_name_raw): + client_context.update(s3_config_raw) + + sig_version = ( + client_config.signature_version + if client_config is not None + else None + ) + return EndpointRulesetResolver( + endpoint_ruleset_data=endpoints_ruleset_data, + partition_data=partition_data, + service_model=service_model, + builtins=resolver_builtins, + client_context=client_context, + event_emitter=event_emitter, + use_ssl=is_secure, + requested_auth_scheme=sig_version, + ) + + def compute_endpoint_resolver_builtin_defaults( + self, + region_name, + service_name, + s3_config, + endpoint_bridge, + client_endpoint_url, + legacy_endpoint_url, + ): + # EndpointRulesetResolver rulesets may accept an "SDK::Endpoint" as + # input. If the endpoint_url argument of create_client() is set, it + # always takes priority. + if client_endpoint_url: + given_endpoint = client_endpoint_url + # If an endpoints.json data file other than the one bundled within + # the botocore/data directory is used, the output of legacy + # endpoint resolution is provided to EndpointRulesetResolver. + elif not endpoint_bridge.resolver_uses_builtin_data(): + given_endpoint = legacy_endpoint_url + else: + given_endpoint = None + + # The endpoint rulesets differ from legacy botocore behavior in whether + # forcing path style addressing in incompatible situations raises an + # exception or silently ignores the config setting. The + # AWS_S3_FORCE_PATH_STYLE parameter is adjusted both here and for each + # operation so that the ruleset behavior is backwards compatible. 
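# [Editorial sketch, not part of the vendored source: the path-style
# decision that follows this comment, condensed. The accelerate-URL check
# on custom endpoints (is_s3_accelerate_url) is elided here.]
def compute_force_path_style(s3_config, client_endpoint_url=None):
    if s3_config.get('use_accelerate_endpoint', False):
        # Acceleration requires virtual-host addressing.
        return False
    if client_endpoint_url is not None:
        # A custom, non-accelerate endpoint defaults to path style
        # unless addressing_style is explicitly 'virtual'.
        return s3_config.get('addressing_style') != 'virtual'
    # Otherwise path style is only used when explicitly requested.
    return s3_config.get('addressing_style') == 'path'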
+ if s3_config.get('use_accelerate_endpoint', False): + force_path_style = False + elif client_endpoint_url is not None and not is_s3_accelerate_url( + client_endpoint_url + ): + force_path_style = s3_config.get('addressing_style') != 'virtual' + else: + force_path_style = s3_config.get('addressing_style') == 'path' + + return { + EPRBuiltins.AWS_REGION: region_name, + EPRBuiltins.AWS_USE_FIPS: ( + # SDK_ENDPOINT cannot be combined with AWS_USE_FIPS + given_endpoint is None + # use legacy resolver's _resolve_endpoint_variant_config_var() + # or default to False if it returns None + and endpoint_bridge._resolve_endpoint_variant_config_var( + 'use_fips_endpoint' + ) + or False + ), + EPRBuiltins.AWS_USE_DUALSTACK: ( + # SDK_ENDPOINT cannot be combined with AWS_USE_DUALSTACK + given_endpoint is None + # use legacy resolver's _resolve_use_dualstack_endpoint() and + # or default to False if it returns None + and endpoint_bridge._resolve_use_dualstack_endpoint( + service_name + ) + or False + ), + EPRBuiltins.AWS_STS_USE_GLOBAL_ENDPOINT: ( + self._should_set_global_sts_endpoint( + region_name=region_name, + endpoint_url=None, + endpoint_config=None, + ) + ), + EPRBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT: ( + self._should_force_s3_global(region_name, s3_config) + ), + EPRBuiltins.AWS_S3_ACCELERATE: s3_config.get( + 'use_accelerate_endpoint', False + ), + EPRBuiltins.AWS_S3_FORCE_PATH_STYLE: force_path_style, + EPRBuiltins.AWS_S3_USE_ARN_REGION: s3_config.get( + 'use_arn_region', True + ), + EPRBuiltins.AWS_S3CONTROL_USE_ARN_REGION: s3_config.get( + 'use_arn_region', False + ), + EPRBuiltins.AWS_S3_DISABLE_MRAP: s3_config.get( + 's3_disable_multiregion_access_points', False + ), + EPRBuiltins.SDK_ENDPOINT: given_endpoint, + } + + def _compute_user_agent_appid_config(self, config_kwargs): + user_agent_appid = config_kwargs.get('user_agent_appid') + if user_agent_appid is None: + user_agent_appid = self._config_store.get_config_variable( + 'user_agent_appid' + ) + if ( + user_agent_appid is not None + and len(user_agent_appid) > USERAGENT_APPID_MAXLEN + ): + logger.warning( + 'The configured value for user_agent_appid exceeds the ' + f'maximum length of {USERAGENT_APPID_MAXLEN} characters.' + ) + config_kwargs['user_agent_appid'] = user_agent_appid diff --git a/venv/lib/python3.10/site-packages/botocore/auth.py b/venv/lib/python3.10/site-packages/botocore/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..6b296cfaaa16c79f102d6b7a041d2b7e587ca3e1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/auth.py @@ -0,0 +1,1162 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
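# [Editorial sketch, not part of the vendored source: the app-id length
# check from _compute_user_agent_appid_config in args.py just above this
# file header. Overlong values are warned about, not truncated.]
import logging

USERAGENT_APPID_MAXLEN = 50  # same constant as in args.py above

def check_user_agent_appid(user_agent_appid):
    if (
        user_agent_appid is not None
        and len(user_agent_appid) > USERAGENT_APPID_MAXLEN
    ):
        # Advisory only; the value is still used as-is.
        logging.getLogger(__name__).warning(
            'The configured value for user_agent_appid exceeds the '
            f'maximum length of {USERAGENT_APPID_MAXLEN} characters.'
        )
    return user_agent_appid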
+import base64 +import calendar +import datetime +import functools +import hmac +import json +import logging +import time +from collections.abc import Mapping +from email.utils import formatdate +from hashlib import sha1, sha256 +from operator import itemgetter + +from botocore.compat import ( + HAS_CRT, + HTTPHeaders, + encodebytes, + ensure_unicode, + parse_qs, + quote, + unquote, + urlsplit, + urlunsplit, +) +from botocore.exceptions import NoAuthTokenError, NoCredentialsError +from botocore.utils import ( + is_valid_ipv6_endpoint_url, + normalize_url_path, + percent_encode_sequence, +) + +# Imports for backwards compatibility +from botocore.compat import MD5_AVAILABLE # noqa + + +logger = logging.getLogger(__name__) + + +EMPTY_SHA256_HASH = ( + 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' +) +# This is the buffer size used when calculating sha256 checksums. +# Experimenting with various buffer sizes showed that this value generally +# gave the best result (in terms of performance). +PAYLOAD_BUFFER = 1024 * 1024 +ISO8601 = '%Y-%m-%dT%H:%M:%SZ' +SIGV4_TIMESTAMP = '%Y%m%dT%H%M%SZ' +SIGNED_HEADERS_BLACKLIST = [ + 'expect', + 'user-agent', + 'x-amzn-trace-id', +] +UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD' +STREAMING_UNSIGNED_PAYLOAD_TRAILER = 'STREAMING-UNSIGNED-PAYLOAD-TRAILER' + + +def _host_from_url(url): + # Given URL, derive value for host header. Ensure that value: + # 1) is lowercase + # 2) excludes port, if it was the default port + # 3) excludes userinfo + url_parts = urlsplit(url) + host = url_parts.hostname # urlsplit's hostname is always lowercase + if is_valid_ipv6_endpoint_url(url): + host = f'[{host}]' + default_ports = { + 'http': 80, + 'https': 443, + } + if url_parts.port is not None: + if url_parts.port != default_ports.get(url_parts.scheme): + host = '%s:%d' % (host, url_parts.port) + return host + + +def _get_body_as_dict(request): + # For query services, request.data is form-encoded and is already a + # dict, but for other services such as rest-json it could be a json + # string or bytes. In those cases we attempt to load the data as a + # dict. + data = request.data + if isinstance(data, bytes): + data = json.loads(data.decode('utf-8')) + elif isinstance(data, str): + data = json.loads(data) + return data + + +class BaseSigner: + REQUIRES_REGION = False + REQUIRES_TOKEN = False + + def add_auth(self, request): + raise NotImplementedError("add_auth") + + +class TokenSigner(BaseSigner): + REQUIRES_TOKEN = True + """ + Signers that expect an authorization token to perform the authorization + """ + + def __init__(self, auth_token): + self.auth_token = auth_token + + +class SigV2Auth(BaseSigner): + """ + Sign a request with Signature V2. + """ + + def __init__(self, credentials): + self.credentials = credentials + + def calc_signature(self, request, params): + logger.debug("Calculating signature using v2 auth.") + split = urlsplit(request.url) + path = split.path + if len(path) == 0: + path = '/' + string_to_sign = f"{request.method}\n{split.netloc}\n{path}\n" + lhmac = hmac.new( + self.credentials.secret_key.encode("utf-8"), digestmod=sha256 + ) + pairs = [] + for key in sorted(params): + # Any previous signature should not be a part of this + # one, so we skip that particular key. This prevents + # issues during retries. 
+ if key == 'Signature': + continue + value = str(params[key]) + quoted_key = quote(key.encode('utf-8'), safe='') + quoted_value = quote(value.encode('utf-8'), safe='-_~') + pairs.append(f'{quoted_key}={quoted_value}') + qs = '&'.join(pairs) + string_to_sign += qs + logger.debug('String to sign: %s', string_to_sign) + lhmac.update(string_to_sign.encode('utf-8')) + b64 = base64.b64encode(lhmac.digest()).strip().decode('utf-8') + return (qs, b64) + + def add_auth(self, request): + # The auth handler is the last thing called in the + # preparation phase of a prepared request. + # Because of this we have to parse the query params + # from the request body so we can update them with + # the sigv2 auth params. + if self.credentials is None: + raise NoCredentialsError() + if request.data: + # POST + params = request.data + else: + # GET + params = request.params + params['AWSAccessKeyId'] = self.credentials.access_key + params['SignatureVersion'] = '2' + params['SignatureMethod'] = 'HmacSHA256' + params['Timestamp'] = time.strftime(ISO8601, time.gmtime()) + if self.credentials.token: + params['SecurityToken'] = self.credentials.token + qs, signature = self.calc_signature(request, params) + params['Signature'] = signature + return request + + +class SigV3Auth(BaseSigner): + def __init__(self, credentials): + self.credentials = credentials + + def add_auth(self, request): + if self.credentials is None: + raise NoCredentialsError() + if 'Date' in request.headers: + del request.headers['Date'] + request.headers['Date'] = formatdate(usegmt=True) + if self.credentials.token: + if 'X-Amz-Security-Token' in request.headers: + del request.headers['X-Amz-Security-Token'] + request.headers['X-Amz-Security-Token'] = self.credentials.token + new_hmac = hmac.new( + self.credentials.secret_key.encode('utf-8'), digestmod=sha256 + ) + new_hmac.update(request.headers['Date'].encode('utf-8')) + encoded_signature = encodebytes(new_hmac.digest()).strip() + signature = ( + f"AWS3-HTTPS AWSAccessKeyId={self.credentials.access_key}," + f"Algorithm=HmacSHA256,Signature={encoded_signature.decode('utf-8')}" + ) + if 'X-Amzn-Authorization' in request.headers: + del request.headers['X-Amzn-Authorization'] + request.headers['X-Amzn-Authorization'] = signature + + +class SigV4Auth(BaseSigner): + """ + Sign a request with Signature V4. + """ + + REQUIRES_REGION = True + + def __init__(self, credentials, service_name, region_name): + self.credentials = credentials + # We initialize these values here so the unit tests can have + # valid values. But these will get overridden in ``add_auth`` + # later for real requests. + self._region_name = region_name + self._service_name = service_name + + def _sign(self, key, msg, hex=False): + if hex: + sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest() + else: + sig = hmac.new(key, msg.encode('utf-8'), sha256).digest() + return sig + + def headers_to_sign(self, request): + """ + Select the headers from the request that need to be included + in the StringToSign. + """ + header_map = HTTPHeaders() + for name, value in request.headers.items(): + lname = name.lower() + if lname not in SIGNED_HEADERS_BLACKLIST: + header_map[lname] = value + if 'host' not in header_map: + # TODO: We should set the host ourselves, instead of relying on our + # HTTP client to set it for us. + header_map['host'] = _host_from_url(request.url) + return header_map + + def canonical_query_string(self, request): + # The query string can come from two parts. One is the + # params attribute of the request.
The other is from the request + # url (in which case we have to re-split the url into its components + # and parse out the query string component). + if request.params: + return self._canonical_query_string_params(request.params) + else: + return self._canonical_query_string_url(urlsplit(request.url)) + + def _canonical_query_string_params(self, params): + # [(key, value), (key2, value2)] + key_val_pairs = [] + if isinstance(params, Mapping): + params = params.items() + for key, value in params: + key_val_pairs.append( + (quote(key, safe='-_.~'), quote(str(value), safe='-_.~')) + ) + sorted_key_vals = [] + # Sort by the URI-encoded key names, and in the case of + # repeated keys, then sort by the value. + for key, value in sorted(key_val_pairs): + sorted_key_vals.append(f'{key}={value}') + canonical_query_string = '&'.join(sorted_key_vals) + return canonical_query_string + + def _canonical_query_string_url(self, parts): + canonical_query_string = '' + if parts.query: + # [(key, value), (key2, value2)] + key_val_pairs = [] + for pair in parts.query.split('&'): + key, _, value = pair.partition('=') + key_val_pairs.append((key, value)) + sorted_key_vals = [] + # Sort by the URI-encoded key names, and in the case of + # repeated keys, then sort by the value. + for key, value in sorted(key_val_pairs): + sorted_key_vals.append(f'{key}={value}') + canonical_query_string = '&'.join(sorted_key_vals) + return canonical_query_string + + def canonical_headers(self, headers_to_sign): + """ + Return the headers that need to be included in the StringToSign + in their canonical form by converting all header keys to lower + case, sorting them in alphabetical order and then joining + them into a string, separated by newlines. + """ + headers = [] + sorted_header_names = sorted(set(headers_to_sign)) + for key in sorted_header_names: + value = ','.join( + self._header_value(v) for v in headers_to_sign.get_all(key) + ) + headers.append(f'{key}:{ensure_unicode(value)}') + return '\n'.join(headers) + + def _header_value(self, value): + # From the sigv4 docs: + # Lowercase(HeaderName) + ':' + Trimall(HeaderValue) + # + # The Trimall function removes excess white space before and after + # values, and converts sequential spaces to a single space. + return ' '.join(value.split()) + + def signed_headers(self, headers_to_sign): + headers = sorted(n.lower().strip() for n in set(headers_to_sign)) + return ';'.join(headers) + + def _is_streaming_checksum_payload(self, request): + checksum_context = request.context.get('checksum', {}) + algorithm = checksum_context.get('request_algorithm') + return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' + + def payload(self, request): + if self._is_streaming_checksum_payload(request): + return STREAMING_UNSIGNED_PAYLOAD_TRAILER + elif not self._should_sha256_sign_payload(request): + # When payload signing is disabled, we use this static string in + # place of the payload checksum. + return UNSIGNED_PAYLOAD + request_body = request.body + if request_body and hasattr(request_body, 'seek'): + position = request_body.tell() + read_chunksize = functools.partial( + request_body.read, PAYLOAD_BUFFER + ) + checksum = sha256() + for chunk in iter(read_chunksize, b''): + checksum.update(chunk) + hex_checksum = checksum.hexdigest() + request_body.seek(position) + return hex_checksum + elif request_body: + # The request serialization has ensured that + # request.body is a bytes() type.
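# [Editorial sketch, not part of the vendored source: the chunked SHA-256
# hashing of a seekable body shown in payload() above, runnable on its own.]
import functools
from hashlib import sha256

PAYLOAD_BUFFER = 1024 * 1024  # same 1 MB buffer as the module constant

def hash_seekable_body(body):
    position = body.tell()
    checksum = sha256()
    # Read fixed-size chunks so large bodies never load fully into memory.
    for chunk in iter(functools.partial(body.read, PAYLOAD_BUFFER), b''):
        checksum.update(chunk)
    # Restore the position so the body can still be sent afterwards.
    body.seek(position)
    return checksum.hexdigest()

# e.g. hash_seekable_body(io.BytesIO(b'')) equals EMPTY_SHA256_HASH above.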
+ return sha256(request_body).hexdigest() + else: + return EMPTY_SHA256_HASH + + def _should_sha256_sign_payload(self, request): + # Payloads will always be signed over insecure connections. + if not request.url.startswith('https'): + return True + + # Certain operations may have payload signing disabled by default. + # Since we don't have access to the operation model, we pass in this + # bit of metadata through the request context. + return request.context.get('payload_signing_enabled', True) + + def canonical_request(self, request): + cr = [request.method.upper()] + path = self._normalize_url_path(urlsplit(request.url).path) + cr.append(path) + cr.append(self.canonical_query_string(request)) + headers_to_sign = self.headers_to_sign(request) + cr.append(self.canonical_headers(headers_to_sign) + '\n') + cr.append(self.signed_headers(headers_to_sign)) + if 'X-Amz-Content-SHA256' in request.headers: + body_checksum = request.headers['X-Amz-Content-SHA256'] + else: + body_checksum = self.payload(request) + cr.append(body_checksum) + return '\n'.join(cr) + + def _normalize_url_path(self, path): + normalized_path = quote(normalize_url_path(path), safe='/~') + return normalized_path + + def scope(self, request): + scope = [self.credentials.access_key] + scope.append(request.context['timestamp'][0:8]) + scope.append(self._region_name) + scope.append(self._service_name) + scope.append('aws4_request') + return '/'.join(scope) + + def credential_scope(self, request): + scope = [] + scope.append(request.context['timestamp'][0:8]) + scope.append(self._region_name) + scope.append(self._service_name) + scope.append('aws4_request') + return '/'.join(scope) + + def string_to_sign(self, request, canonical_request): + """ + Return the canonical StringToSign for the request, built from + the algorithm name, the request timestamp, the credential scope, + and the hash of the canonical request. + """ + sts = ['AWS4-HMAC-SHA256'] + sts.append(request.context['timestamp']) + sts.append(self.credential_scope(request)) + sts.append(sha256(canonical_request.encode('utf-8')).hexdigest()) + return '\n'.join(sts) + + def signature(self, string_to_sign, request): + key = self.credentials.secret_key + k_date = self._sign( + (f"AWS4{key}").encode(), request.context["timestamp"][0:8] + ) + k_region = self._sign(k_date, self._region_name) + k_service = self._sign(k_region, self._service_name) + k_signing = self._sign(k_service, 'aws4_request') + return self._sign(k_signing, string_to_sign, hex=True) + + def add_auth(self, request): + if self.credentials is None: + raise NoCredentialsError() + datetime_now = datetime.datetime.utcnow() + request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) + # This could be a retry. Make sure the previous + # authorization header is removed first.
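# [Editorial sketch, not part of the vendored source: the SigV4 key
# derivation chain from signature() above as one standalone function;
# date_stamp is the first eight characters of the request timestamp.]
import hmac
from hashlib import sha256

def derive_sigv4_signature(secret_key, date_stamp, region, service,
                           string_to_sign):
    def sign(key, msg):
        return hmac.new(key, msg.encode('utf-8'), sha256).digest()

    # kSecret -> kDate -> kRegion -> kService -> kSigning
    k_date = sign(f'AWS4{secret_key}'.encode('utf-8'), date_stamp)
    k_region = sign(k_date, region)
    k_service = sign(k_region, service)
    k_signing = sign(k_service, 'aws4_request')
    return hmac.new(
        k_signing, string_to_sign.encode('utf-8'), sha256
    ).hexdigest()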
+ self._modify_request_before_signing(request) + canonical_request = self.canonical_request(request) + logger.debug("Calculating signature using v4 auth.") + logger.debug('CanonicalRequest:\n%s', canonical_request) + string_to_sign = self.string_to_sign(request, canonical_request) + logger.debug('StringToSign:\n%s', string_to_sign) + signature = self.signature(string_to_sign, request) + logger.debug('Signature:\n%s', signature) + + self._inject_signature_to_request(request, signature) + + def _inject_signature_to_request(self, request, signature): + auth_str = [f'AWS4-HMAC-SHA256 Credential={self.scope(request)}'] + headers_to_sign = self.headers_to_sign(request) + auth_str.append( + f"SignedHeaders={self.signed_headers(headers_to_sign)}" + ) + auth_str.append(f'Signature={signature}') + request.headers['Authorization'] = ', '.join(auth_str) + return request + + def _modify_request_before_signing(self, request): + if 'Authorization' in request.headers: + del request.headers['Authorization'] + self._set_necessary_date_headers(request) + if self.credentials.token: + if 'X-Amz-Security-Token' in request.headers: + del request.headers['X-Amz-Security-Token'] + request.headers['X-Amz-Security-Token'] = self.credentials.token + + if not request.context.get('payload_signing_enabled', True): + if 'X-Amz-Content-SHA256' in request.headers: + del request.headers['X-Amz-Content-SHA256'] + request.headers['X-Amz-Content-SHA256'] = UNSIGNED_PAYLOAD + + def _set_necessary_date_headers(self, request): + # The spec allows for either the Date _or_ the X-Amz-Date value to be + # used so we check both. If there's a Date header, we use the date + # header. Otherwise we use the X-Amz-Date header. + if 'Date' in request.headers: + del request.headers['Date'] + datetime_timestamp = datetime.datetime.strptime( + request.context['timestamp'], SIGV4_TIMESTAMP + ) + request.headers['Date'] = formatdate( + int(calendar.timegm(datetime_timestamp.timetuple())) + ) + if 'X-Amz-Date' in request.headers: + del request.headers['X-Amz-Date'] + else: + if 'X-Amz-Date' in request.headers: + del request.headers['X-Amz-Date'] + request.headers['X-Amz-Date'] = request.context['timestamp'] + + +class S3SigV4Auth(SigV4Auth): + def _modify_request_before_signing(self, request): + super()._modify_request_before_signing(request) + if 'X-Amz-Content-SHA256' in request.headers: + del request.headers['X-Amz-Content-SHA256'] + + request.headers['X-Amz-Content-SHA256'] = self.payload(request) + + def _should_sha256_sign_payload(self, request): + # S3 allows optional body signing, so to minimize the performance + # impact, we opt to not SHA256 sign the body on streaming uploads, + # provided that we're on https. + client_config = request.context.get('client_config') + s3_config = getattr(client_config, 's3', None) + + # The config could be None if it isn't set, or if the customer sets it + # to None. + if s3_config is None: + s3_config = {} + + # The explicit configuration takes precedence over any implicit + # configuration. + sign_payload = s3_config.get('payload_signing_enabled', None) + if sign_payload is not None: + return sign_payload + + # We require that both a checksum be present and https be enabled + # to implicitly disable body signing. The combination of TLS and + # a checksum is sufficiently secure and durable for us to be + # confident in the request without body signing. 
+ checksum_header = 'Content-MD5' + checksum_context = request.context.get('checksum', {}) + algorithm = checksum_context.get('request_algorithm') + if isinstance(algorithm, dict) and algorithm.get('in') == 'header': + checksum_header = algorithm['name'] + if ( + not request.url.startswith("https") + or checksum_header not in request.headers + ): + return True + + # If the input is streaming we disable body signing by default. + if request.context.get('has_streaming_input', False): + return False + + # If the S3-specific checks had no results, delegate to the generic + # checks. + return super()._should_sha256_sign_payload(request) + + def _normalize_url_path(self, path): + # For S3, we do not normalize the path. + return path + + +class S3ExpressAuth(S3SigV4Auth): + REQUIRES_IDENTITY_CACHE = True + + def __init__( + self, credentials, service_name, region_name, *, identity_cache + ): + super().__init__(credentials, service_name, region_name) + self._identity_cache = identity_cache + + def add_auth(self, request): + super().add_auth(request) + + def _modify_request_before_signing(self, request): + super()._modify_request_before_signing(request) + if 'x-amz-s3session-token' not in request.headers: + request.headers['x-amz-s3session-token'] = self.credentials.token + # S3Express does not support STS' X-Amz-Security-Token + if 'X-Amz-Security-Token' in request.headers: + del request.headers['X-Amz-Security-Token'] + + +class S3ExpressPostAuth(S3ExpressAuth): + REQUIRES_IDENTITY_CACHE = True + + def add_auth(self, request): + datetime_now = datetime.datetime.utcnow() + request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) + + fields = {} + if request.context.get('s3-presign-post-fields', None) is not None: + fields = request.context['s3-presign-post-fields'] + + policy = {} + conditions = [] + if request.context.get('s3-presign-post-policy', None) is not None: + policy = request.context['s3-presign-post-policy'] + if policy.get('conditions', None) is not None: + conditions = policy['conditions'] + + policy['conditions'] = conditions + + fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + fields['x-amz-credential'] = self.scope(request) + fields['x-amz-date'] = request.context['timestamp'] + + conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'}) + conditions.append({'x-amz-credential': self.scope(request)}) + conditions.append({'x-amz-date': request.context['timestamp']}) + + if self.credentials.token is not None: + fields['X-Amz-S3session-Token'] = self.credentials.token + conditions.append( + {'X-Amz-S3session-Token': self.credentials.token} + ) + + # Dump the base64 encoded policy into the fields dictionary. + fields['policy'] = base64.b64encode( + json.dumps(policy).encode('utf-8') + ).decode('utf-8') + + fields['x-amz-signature'] = self.signature(fields['policy'], request) + + request.context['s3-presign-post-fields'] = fields + request.context['s3-presign-post-policy'] = policy + + +class S3ExpressQueryAuth(S3ExpressAuth): + DEFAULT_EXPIRES = 300 + REQUIRES_IDENTITY_CACHE = True + + def __init__( + self, + credentials, + service_name, + region_name, + *, + identity_cache, + expires=DEFAULT_EXPIRES, + ): + super().__init__( + credentials, + service_name, + region_name, + identity_cache=identity_cache, + ) + self._expires = expires + + def _modify_request_before_signing(self, request): + # We automatically set this header, so if it's the auto-set value we + # want to get rid of it since it doesn't make sense for presigned urls. 
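# [Editorial sketch, not part of the vendored source: the decision order in
# S3SigV4Auth._should_sha256_sign_payload above. 'explicit_setting' stands
# in for the s3 config's payload_signing_enabled value.]
def s3_should_sign_payload(url, headers, explicit_setting=None,
                           checksum_header='Content-MD5',
                           has_streaming_input=False):
    # 1. Explicit configuration always wins.
    if explicit_setting is not None:
        return explicit_setting
    # 2. Without TLS, or without a checksum header, the body is signed.
    if not url.startswith('https') or checksum_header not in headers:
        return True
    # 3. Checksummed streaming uploads over TLS skip body signing.
    if has_streaming_input:
        return False
    # 4. Otherwise fall back to the generic context default (sign).
    return True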
+ content_type = request.headers.get('content-type') + blocklisted_content_type = ( + 'application/x-www-form-urlencoded; charset=utf-8' + ) + if content_type == blocklisted_content_type: + del request.headers['content-type'] + + # Note that we're not including X-Amz-Signature. + # From the docs: "The Canonical Query String must include all the query + # parameters from the preceding table except for X-Amz-Signature." + signed_headers = self.signed_headers(self.headers_to_sign(request)) + + auth_params = { + 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', + 'X-Amz-Credential': self.scope(request), + 'X-Amz-Date': request.context['timestamp'], + 'X-Amz-Expires': self._expires, + 'X-Amz-SignedHeaders': signed_headers, + } + if self.credentials.token is not None: + auth_params['X-Amz-S3session-Token'] = self.credentials.token + # Now parse the original query string to a dict, inject our new query + # params, and serialize back to a query string. + url_parts = urlsplit(request.url) + # parse_qs makes each value a list, but in our case we know we won't + # have repeated keys so we know we have single element lists which we + # can convert back to scalar values. + query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) + query_dict = {k: v[0] for k, v in query_string_parts.items()} + + if request.params: + query_dict.update(request.params) + request.params = {} + # The spec is particular about this. It *has* to be: + # https://<endpoint>?<operation params>&<auth params> + # You can't mix the two types of params together, i.e. just keep doing + # new_query_params.update(op_params) + # new_query_params.update(auth_params) + # percent_encode_sequence(new_query_params) + operation_params = '' + if request.data: + # We also need to move the body params into the query string. To + # do this, we first have to convert it to a dict. + query_dict.update(_get_body_as_dict(request)) + request.data = '' + if query_dict: + operation_params = percent_encode_sequence(query_dict) + '&' + new_query_string = ( + f"{operation_params}{percent_encode_sequence(auth_params)}" + ) + # url_parts is a tuple (and therefore immutable) so we need to create + # a new url_parts with the new query string. + # <part> - <index> + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + p = url_parts + new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) + request.url = urlunsplit(new_url_parts) + + def _inject_signature_to_request(self, request, signature): + # Rather than calculating an "Authorization" header, for the query + # param auth, we just append an 'X-Amz-Signature' param to the end + # of the query string. + request.url += f'&X-Amz-Signature={signature}' + + def _normalize_url_path(self, path): + # For S3, we do not normalize the path. + return path + + def payload(self, request): + # From the doc link above: + # "You don't include a payload hash in the Canonical Request, because + # when you create a presigned URL, you don't know anything about the + # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". + return UNSIGNED_PAYLOAD + + +class SigV4QueryAuth(SigV4Auth): + DEFAULT_EXPIRES = 3600 + + def __init__( + self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES + ): + super().__init__(credentials, service_name, region_name) + self._expires = expires + + def _modify_request_before_signing(self, request): + # We automatically set this header, so if it's the auto-set value we + # want to get rid of it since it doesn't make sense for presigned urls.
+ content_type = request.headers.get('content-type') + blacklisted_content_type = ( + 'application/x-www-form-urlencoded; charset=utf-8' + ) + if content_type == blacklisted_content_type: + del request.headers['content-type'] + + # Note that we're not including X-Amz-Signature. + # From the docs: "The Canonical Query String must include all the query + # parameters from the preceding table except for X-Amz-Signature." + signed_headers = self.signed_headers(self.headers_to_sign(request)) + + auth_params = { + 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', + 'X-Amz-Credential': self.scope(request), + 'X-Amz-Date': request.context['timestamp'], + 'X-Amz-Expires': self._expires, + 'X-Amz-SignedHeaders': signed_headers, + } + if self.credentials.token is not None: + auth_params['X-Amz-Security-Token'] = self.credentials.token + # Now parse the original query string to a dict, inject our new query + # params, and serialize back to a query string. + url_parts = urlsplit(request.url) + # parse_qs makes each value a list, but in our case we know we won't + # have repeated keys so we know we have single element lists which we + # can convert back to scalar values. + query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) + query_dict = {k: v[0] for k, v in query_string_parts.items()} + + if request.params: + query_dict.update(request.params) + request.params = {} + # The spec is particular about this. It *has* to be: + # https://<endpoint>?<operation params>&<auth params> + # You can't mix the two types of params together, i.e. just keep doing + # new_query_params.update(op_params) + # new_query_params.update(auth_params) + # percent_encode_sequence(new_query_params) + operation_params = '' + if request.data: + # We also need to move the body params into the query string. To + # do this, we first have to convert it to a dict. + query_dict.update(_get_body_as_dict(request)) + request.data = '' + if query_dict: + operation_params = percent_encode_sequence(query_dict) + '&' + new_query_string = ( + f"{operation_params}{percent_encode_sequence(auth_params)}" + ) + # url_parts is a tuple (and therefore immutable) so we need to create + # a new url_parts with the new query string. + # <part> - <index> + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + p = url_parts + new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) + request.url = urlunsplit(new_url_parts) + + def _inject_signature_to_request(self, request, signature): + # Rather than calculating an "Authorization" header, for the query + # param auth, we just append an 'X-Amz-Signature' param to the end + # of the query string. + request.url += f'&X-Amz-Signature={signature}' + + +class S3SigV4QueryAuth(SigV4QueryAuth): + """S3 SigV4 auth using query parameters. + + This signer will sign a request using query parameters and signature + version 4, i.e. a "presigned url" signer. + + Based off of: + + http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html + + """ + + def _normalize_url_path(self, path): + # For S3, we do not normalize the path. + return path + + def payload(self, request): + # From the doc link above: + # "You don't include a payload hash in the Canonical Request, because + # when you create a presigned URL, you don't know anything about the + # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD".
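# [Editorial sketch, not part of the vendored source: how the presigned
# query string is assembled above, using stdlib urlencode in place of
# botocore's percent_encode_sequence. Operation parameters come first,
# then the X-Amz-* auth parameters, and the signature is appended last.]
from urllib.parse import urlencode, urlsplit, urlunsplit

def build_presigned_url(url, operation_params, auth_params, signature):
    parts = urlsplit(url)
    query = urlencode({**operation_params, **auth_params})
    presigned = urlunsplit(
        (parts.scheme, parts.netloc, parts.path, query, parts.fragment)
    )
    # The signature itself is excluded from the signed canonical query
    # string and only appended at the very end.
    return f'{presigned}&X-Amz-Signature={signature}'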
+ return UNSIGNED_PAYLOAD + + +class S3SigV4PostAuth(SigV4Auth): + """ + Presigns a s3 post + + Implementation doc here: + http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-UsingHTTPPOST.html + """ + + def add_auth(self, request): + datetime_now = datetime.datetime.utcnow() + request.context['timestamp'] = datetime_now.strftime(SIGV4_TIMESTAMP) + + fields = {} + if request.context.get('s3-presign-post-fields', None) is not None: + fields = request.context['s3-presign-post-fields'] + + policy = {} + conditions = [] + if request.context.get('s3-presign-post-policy', None) is not None: + policy = request.context['s3-presign-post-policy'] + if policy.get('conditions', None) is not None: + conditions = policy['conditions'] + + policy['conditions'] = conditions + + fields['x-amz-algorithm'] = 'AWS4-HMAC-SHA256' + fields['x-amz-credential'] = self.scope(request) + fields['x-amz-date'] = request.context['timestamp'] + + conditions.append({'x-amz-algorithm': 'AWS4-HMAC-SHA256'}) + conditions.append({'x-amz-credential': self.scope(request)}) + conditions.append({'x-amz-date': request.context['timestamp']}) + + if self.credentials.token is not None: + fields['x-amz-security-token'] = self.credentials.token + conditions.append({'x-amz-security-token': self.credentials.token}) + + # Dump the base64 encoded policy into the fields dictionary. + fields['policy'] = base64.b64encode( + json.dumps(policy).encode('utf-8') + ).decode('utf-8') + + fields['x-amz-signature'] = self.signature(fields['policy'], request) + + request.context['s3-presign-post-fields'] = fields + request.context['s3-presign-post-policy'] = policy + + +class HmacV1Auth(BaseSigner): + # List of Query String Arguments of Interest + QSAOfInterest = [ + 'accelerate', + 'acl', + 'cors', + 'defaultObjectAcl', + 'location', + 'logging', + 'partNumber', + 'policy', + 'requestPayment', + 'torrent', + 'versioning', + 'versionId', + 'versions', + 'website', + 'uploads', + 'uploadId', + 'response-content-type', + 'response-content-language', + 'response-expires', + 'response-cache-control', + 'response-content-disposition', + 'response-content-encoding', + 'delete', + 'lifecycle', + 'tagging', + 'restore', + 'storageClass', + 'notification', + 'replication', + 'requestPayment', + 'analytics', + 'metrics', + 'inventory', + 'select', + 'select-type', + 'object-lock', + ] + + def __init__(self, credentials, service_name=None, region_name=None): + self.credentials = credentials + + def sign_string(self, string_to_sign): + new_hmac = hmac.new( + self.credentials.secret_key.encode('utf-8'), digestmod=sha1 + ) + new_hmac.update(string_to_sign.encode('utf-8')) + return encodebytes(new_hmac.digest()).strip().decode('utf-8') + + def canonical_standard_headers(self, headers): + interesting_headers = ['content-md5', 'content-type', 'date'] + hoi = [] + if 'Date' in headers: + del headers['Date'] + headers['Date'] = self._get_date() + for ih in interesting_headers: + found = False + for key in headers: + lk = key.lower() + if headers[key] is not None and lk == ih: + hoi.append(headers[key].strip()) + found = True + if not found: + hoi.append('') + return '\n'.join(hoi) + + def canonical_custom_headers(self, headers): + hoi = [] + custom_headers = {} + for key in headers: + lk = key.lower() + if headers[key] is not None: + if lk.startswith('x-amz-'): + custom_headers[lk] = ','.join( + v.strip() for v in headers.get_all(key) + ) + sorted_header_keys = sorted(custom_headers.keys()) + for key in sorted_header_keys: + hoi.append(f"{key}:{custom_headers[key]}") + 
return '\n'.join(hoi) + + def unquote_v(self, nv): + """ + TODO: Do we need this? + """ + if len(nv) == 1: + return nv + else: + return (nv[0], unquote(nv[1])) + + def canonical_resource(self, split, auth_path=None): + # don't include anything after the first ? in the resource... + # unless it is one of the QSA of interest, defined above + # NOTE: + # The path in the canonical resource should always be the + # full path including the bucket name, even for virtual-hosting + # style addressing. The ``auth_path`` keeps track of the full + # path for the canonical resource and would be passed in if + # the client was using virtual-hosting style. + if auth_path is not None: + buf = auth_path + else: + buf = split.path + if split.query: + qsa = split.query.split('&') + qsa = [a.split('=', 1) for a in qsa] + qsa = [ + self.unquote_v(a) for a in qsa if a[0] in self.QSAOfInterest + ] + if len(qsa) > 0: + qsa.sort(key=itemgetter(0)) + qsa = ['='.join(a) for a in qsa] + buf += '?' + buf += '&'.join(qsa) + return buf + + def canonical_string( + self, method, split, headers, expires=None, auth_path=None + ): + cs = method.upper() + '\n' + cs += self.canonical_standard_headers(headers) + '\n' + custom_headers = self.canonical_custom_headers(headers) + if custom_headers: + cs += custom_headers + '\n' + cs += self.canonical_resource(split, auth_path=auth_path) + return cs + + def get_signature( + self, method, split, headers, expires=None, auth_path=None + ): + if self.credentials.token: + del headers['x-amz-security-token'] + headers['x-amz-security-token'] = self.credentials.token + string_to_sign = self.canonical_string( + method, split, headers, auth_path=auth_path + ) + logger.debug(f'StringToSign:\n{string_to_sign}') + return self.sign_string(string_to_sign) + + def add_auth(self, request): + if self.credentials is None: + raise NoCredentialsError + logger.debug("Calculating signature using hmacv1 auth.") + split = urlsplit(request.url) + logger.debug(f'HTTP request method: {request.method}') + signature = self.get_signature( + request.method, split, request.headers, auth_path=request.auth_path + ) + self._inject_signature(request, signature) + + def _get_date(self): + return formatdate(usegmt=True) + + def _inject_signature(self, request, signature): + if 'Authorization' in request.headers: + # We have to do this because request.headers is not + # normal dictionary. It has the (unintuitive) behavior + # of aggregating repeated setattr calls for the same + # key value. For example: + # headers['foo'] = 'a'; headers['foo'] = 'b' + # list(headers) will print ['foo', 'foo']. + del request.headers['Authorization'] + + auth_header = f"AWS {self.credentials.access_key}:{signature}" + request.headers['Authorization'] = auth_header + + +class HmacV1QueryAuth(HmacV1Auth): + """ + Generates a presigned request for s3. + + Spec from this document: + + http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html + #RESTAuthenticationQueryStringAuth + + """ + + DEFAULT_EXPIRES = 3600 + + def __init__(self, credentials, expires=DEFAULT_EXPIRES): + self.credentials = credentials + self._expires = expires + + def _get_date(self): + return str(int(time.time() + int(self._expires))) + + def _inject_signature(self, request, signature): + query_dict = {} + query_dict['AWSAccessKeyId'] = self.credentials.access_key + query_dict['Signature'] = signature + + for header_key in request.headers: + lk = header_key.lower() + # For query string requests, Expires is used instead of the + # Date header. 
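# [Editorial sketch, not part of the vendored source: the HMAC-SHA1
# signing shown above, condensed to the standard headers only (custom
# x-amz-* headers and query-string arguments of interest are elided).]
import base64
import hmac
from hashlib import sha1

def hmacv1_sign(secret_key, method, content_md5, content_type, date,
                canonical_resource):
    string_to_sign = (
        f'{method.upper()}\n{content_md5}\n{content_type}\n{date}\n'
        f'{canonical_resource}'
    )
    digest = hmac.new(
        secret_key.encode('utf-8'), string_to_sign.encode('utf-8'), sha1
    ).digest()
    return base64.encodebytes(digest).strip().decode('utf-8')

# Produces the value for: Authorization: AWS <access_key>:<signature>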
+ if header_key == 'Date': + query_dict['Expires'] = request.headers['Date'] + # We only want to include relevant headers in the query string. + # These can be anything that starts with x-amz, is Content-MD5, + # or is Content-Type. + elif lk.startswith('x-amz-') or lk in ( + 'content-md5', + 'content-type', + ): + query_dict[lk] = request.headers[lk] + # Combine all of the identified headers into an encoded + # query string + new_query_string = percent_encode_sequence(query_dict) + + # Create a new url with the presigned url. + p = urlsplit(request.url) + if p[3]: + # If there was a pre-existing query string, we should + # add that back before injecting the new query string. + new_query_string = f'{p[3]}&{new_query_string}' + new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) + request.url = urlunsplit(new_url_parts) + + +class HmacV1PostAuth(HmacV1Auth): + """ + Generates a presigned post for s3. + + Spec from this document: + + http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingHTTPPOST.html + """ + + def add_auth(self, request): + fields = {} + if request.context.get('s3-presign-post-fields', None) is not None: + fields = request.context['s3-presign-post-fields'] + + policy = {} + conditions = [] + if request.context.get('s3-presign-post-policy', None) is not None: + policy = request.context['s3-presign-post-policy'] + if policy.get('conditions', None) is not None: + conditions = policy['conditions'] + + policy['conditions'] = conditions + + fields['AWSAccessKeyId'] = self.credentials.access_key + + if self.credentials.token is not None: + fields['x-amz-security-token'] = self.credentials.token + conditions.append({'x-amz-security-token': self.credentials.token}) + + # Dump the base64 encoded policy into the fields dictionary. + fields['policy'] = base64.b64encode( + json.dumps(policy).encode('utf-8') + ).decode('utf-8') + + fields['signature'] = self.sign_string(fields['policy']) + + request.context['s3-presign-post-fields'] = fields + request.context['s3-presign-post-policy'] = policy + + +class BearerAuth(TokenSigner): + """ + Performs bearer token authorization by placing the bearer token in the + Authorization header as specified by Section 2.1 of RFC 6750. 
+ + https://datatracker.ietf.org/doc/html/rfc6750#section-2.1 + """ + + def add_auth(self, request): + if self.auth_token is None: + raise NoAuthTokenError() + + auth_header = f'Bearer {self.auth_token.token}' + if 'Authorization' in request.headers: + del request.headers['Authorization'] + request.headers['Authorization'] = auth_header + + +AUTH_TYPE_MAPS = { + 'v2': SigV2Auth, + 'v3': SigV3Auth, + 'v3https': SigV3Auth, + 's3': HmacV1Auth, + 's3-query': HmacV1QueryAuth, + 's3-presign-post': HmacV1PostAuth, + 's3v4-presign-post': S3SigV4PostAuth, + 'v4-s3express': S3ExpressAuth, + 'v4-s3express-query': S3ExpressQueryAuth, + 'v4-s3express-presign-post': S3ExpressPostAuth, + 'bearer': BearerAuth, +} + +# Define v4 signers depending on if CRT is present +if HAS_CRT: + from botocore.crt.auth import CRT_AUTH_TYPE_MAPS + + AUTH_TYPE_MAPS.update(CRT_AUTH_TYPE_MAPS) +else: + AUTH_TYPE_MAPS.update( + { + 'v4': SigV4Auth, + 'v4-query': SigV4QueryAuth, + 's3v4': S3SigV4Auth, + 's3v4-query': S3SigV4QueryAuth, + } + ) diff --git a/venv/lib/python3.10/site-packages/botocore/awsrequest.py b/venv/lib/python3.10/site-packages/botocore/awsrequest.py new file mode 100644 index 0000000000000000000000000000000000000000..49b4eee0d9a64096fa0f15a0e24419fe4fb99204 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/awsrequest.py @@ -0,0 +1,635 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import functools +import logging +from collections.abc import Mapping + +import urllib3.util +from urllib3.connection import HTTPConnection, VerifiedHTTPSConnection +from urllib3.connectionpool import HTTPConnectionPool, HTTPSConnectionPool + +import botocore.utils +from botocore.compat import ( + HTTPHeaders, + HTTPResponse, + MutableMapping, + urlencode, + urlparse, + urlsplit, + urlunsplit, +) +from botocore.exceptions import UnseekableStreamError + +logger = logging.getLogger(__name__) + + +class AWSHTTPResponse(HTTPResponse): + # The *args, **kwargs is used because the args are slightly + # different in py2.6 than in py2.7/py3. + def __init__(self, *args, **kwargs): + self._status_tuple = kwargs.pop('status_tuple') + HTTPResponse.__init__(self, *args, **kwargs) + + def _read_status(self): + if self._status_tuple is not None: + status_tuple = self._status_tuple + self._status_tuple = None + return status_tuple + else: + return HTTPResponse._read_status(self) + + +class AWSConnection: + """Mixin for HTTPConnection that supports Expect 100-continue. + + When mixed with a subclass of httplib.HTTPConnection (though + technically we subclass from urllib3, which subclasses + httplib.HTTPConnection), this class only overrides behavior to support + Expect 100-continue, which we need for S3. As far as I can tell, this is + general purpose enough to not be specific to S3, but I'm being + tentative and keeping it in botocore because I've only tested + this against AWS services.
+ + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._original_response_cls = self.response_class + # This variable is set when we receive an early response from the + # server. If this value is set to True, any calls to send() are noops. + # This value is reset to false every time _send_request is called. + # This is to workaround changes in urllib3 2.0 which uses separate + # send() calls in request() instead of delegating to endheaders(), + # which is where the body is sent in CPython's HTTPConnection. + self._response_received = False + self._expect_header_set = False + self._send_called = False + + def close(self): + super().close() + # Reset all of our instance state we were tracking. + self._response_received = False + self._expect_header_set = False + self._send_called = False + self.response_class = self._original_response_cls + + def request(self, method, url, body=None, headers=None, *args, **kwargs): + if headers is None: + headers = {} + self._response_received = False + if headers.get('Expect', b'') == b'100-continue': + self._expect_header_set = True + else: + self._expect_header_set = False + self.response_class = self._original_response_cls + rval = super().request(method, url, body, headers, *args, **kwargs) + self._expect_header_set = False + return rval + + def _convert_to_bytes(self, mixed_buffer): + # Take a list of mixed str/bytes and convert it + # all into a single bytestring. + # Any str will be encoded as utf-8. + bytes_buffer = [] + for chunk in mixed_buffer: + if isinstance(chunk, str): + bytes_buffer.append(chunk.encode('utf-8')) + else: + bytes_buffer.append(chunk) + msg = b"\r\n".join(bytes_buffer) + return msg + + def _send_output(self, message_body=None, *args, **kwargs): + self._buffer.extend((b"", b"")) + msg = self._convert_to_bytes(self._buffer) + del self._buffer[:] + # If msg and message_body are sent in a single send() call, + # it will avoid performance problems caused by the interaction + # between delayed ack and the Nagle algorithm. + if isinstance(message_body, bytes): + msg += message_body + message_body = None + self.send(msg) + if self._expect_header_set: + # This is our custom behavior. If the Expect header was + # set, it will trigger this custom behavior. + logger.debug("Waiting for 100 Continue response.") + # Wait for 1 second for the server to send a response. + if urllib3.util.wait_for_read(self.sock, 1): + self._handle_expect_response(message_body) + return + else: + # From the RFC: + # Because of the presence of older implementations, the + # protocol allows ambiguous situations in which a client may + # send "Expect: 100-continue" without receiving either a 417 + # (Expectation Failed) status or a 100 (Continue) status. + # Therefore, when a client sends this header field to an origin + # server (possibly via a proxy) from which it has never seen a + # 100 (Continue) status, the client SHOULD NOT wait for an + # indefinite period before sending the request body. + logger.debug( + "No response seen from server, continuing to " + "send the response body." + ) + if message_body is not None: + # message_body was not a string (i.e. it is a file), and + # we must run the risk of Nagle. + self.send(message_body) + + def _consume_headers(self, fp): + # Most servers (including S3) will just return + # the CLRF after the 100 continue response. However, + # some servers (I've specifically seen this for squid when + # used as a straight HTTP proxy) will also inject a + # Connection: keep-alive header. 
+        # To account for this, we'll read until we read '\r\n', and ignore
+        # any headers that come immediately after the 100 continue response.
+        current = None
+        while current != b'\r\n':
+            current = fp.readline()
+
+    def _handle_expect_response(self, message_body):
+        # This is called when we sent the request headers containing
+        # an Expect: 100-continue header and received a response.
+        # We now need to figure out what to do.
+        fp = self.sock.makefile('rb', 0)
+        try:
+            maybe_status_line = fp.readline()
+            parts = maybe_status_line.split(None, 2)
+            if self._is_100_continue_status(maybe_status_line):
+                self._consume_headers(fp)
+                logger.debug(
+                    "100 Continue response seen, now sending request body."
+                )
+                self._send_message_body(message_body)
+            elif len(parts) == 3 and parts[0].startswith(b'HTTP/'):
+                # From the RFC:
+                # Requirements for HTTP/1.1 origin servers:
+                #
+                # - Upon receiving a request which includes an Expect
+                #   request-header field with the "100-continue"
+                #   expectation, an origin server MUST either respond with
+                #   100 (Continue) status and continue to read from the
+                #   input stream, or respond with a final status code.
+                #
+                # So if we don't get a 100 Continue response, then
+                # whatever the server has sent back is the final response,
+                # and we don't send the message_body.
+                logger.debug(
+                    "Received a non 100 Continue response "
+                    "from the server, NOT sending request body."
+                )
+                status_tuple = (
+                    parts[0].decode('ascii'),
+                    int(parts[1]),
+                    parts[2].decode('ascii'),
+                )
+                response_class = functools.partial(
+                    AWSHTTPResponse, status_tuple=status_tuple
+                )
+                self.response_class = response_class
+                self._response_received = True
+        finally:
+            fp.close()
+
+    def _send_message_body(self, message_body):
+        if message_body is not None:
+            self.send(message_body)
+
+    def send(self, str):
+        if self._response_received:
+            if not self._send_called:
+                # urllib3 2.0 chunks and calls send potentially
+                # thousands of times inside `request` unlike the
+                # standard library. Only log this once for sanity.
+                logger.debug(
+                    "send() called, but response already received. "
+                    "Not sending data."
+                )
+            self._send_called = True
+            return
+        return super().send(str)
+
+    def _is_100_continue_status(self, maybe_status_line):
+        parts = maybe_status_line.split(None, 2)
+        # Check for HTTP/<version> 100 Continue\r\n.
+        return (
+            len(parts) >= 3
+            and parts[0].startswith(b'HTTP/')
+            and parts[1] == b'100'
+        )
+
+
+class AWSHTTPConnection(AWSConnection, HTTPConnection):
+    """An HTTPConnection that supports 100 Continue behavior."""
+
+
+class AWSHTTPSConnection(AWSConnection, VerifiedHTTPSConnection):
+    """An HTTPSConnection that supports 100 Continue behavior."""
+
+
+class AWSHTTPConnectionPool(HTTPConnectionPool):
+    ConnectionCls = AWSHTTPConnection
+
+
+class AWSHTTPSConnectionPool(HTTPSConnectionPool):
+    ConnectionCls = AWSHTTPSConnection
+
+
+def prepare_request_dict(
+    request_dict, endpoint_url, context=None, user_agent=None
+):
+    """
+    This method prepares a request dict to be created into an
+    AWSRequest object. It prepares the request dict by adding the
+    URL and the user agent to it.
+
+    :type request_dict: dict
+    :param request_dict: The request dict (created from the
+        ``serialize`` module).
+
+    :type user_agent: string
+    :param user_agent: The user agent to use for this request.
+
+    :type endpoint_url: string
+    :param endpoint_url: The full endpoint url, which contains at least
+        the scheme, the hostname, and optionally any path components.
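+
+    A minimal sketch of the expected call shape (the values here are
+    hypothetical; the dict layout mirrors what the ``serialize`` module
+    emits)::
+
+        request_dict = {
+            'url_path': '/mybucket/mykey',
+            'query_string': {},
+            'method': 'PUT',
+            'headers': {},
+            'body': b'',
+            'host_prefix': None,
+        }
+        prepare_request_dict(
+            request_dict, endpoint_url='https://s3.amazonaws.com'
+        )
+        # request_dict now carries 'url' (the joined URL) and a 'context'
+        # dict, and can be passed to create_request_object().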
+ """ + r = request_dict + if user_agent is not None: + headers = r['headers'] + headers['User-Agent'] = user_agent + host_prefix = r.get('host_prefix') + url = _urljoin(endpoint_url, r['url_path'], host_prefix) + if r['query_string']: + # NOTE: This is to avoid circular import with utils. This is being + # done to avoid moving classes to different modules as to not cause + # breaking chainges. + percent_encode_sequence = botocore.utils.percent_encode_sequence + encoded_query_string = percent_encode_sequence(r['query_string']) + if '?' not in url: + url += f'?{encoded_query_string}' + else: + url += f'&{encoded_query_string}' + r['url'] = url + r['context'] = context + if context is None: + r['context'] = {} + + +def create_request_object(request_dict): + """ + This method takes a request dict and creates an AWSRequest object + from it. + + :type request_dict: dict + :param request_dict: The request dict (created from the + ``prepare_request_dict`` method). + + :rtype: ``botocore.awsrequest.AWSRequest`` + :return: An AWSRequest object based on the request_dict. + + """ + r = request_dict + request_object = AWSRequest( + method=r['method'], + url=r['url'], + data=r['body'], + headers=r['headers'], + auth_path=r.get('auth_path'), + ) + request_object.context = r['context'] + return request_object + + +def _urljoin(endpoint_url, url_path, host_prefix): + p = urlsplit(endpoint_url) + # - + # scheme - p[0] + # netloc - p[1] + # path - p[2] + # query - p[3] + # fragment - p[4] + if not url_path or url_path == '/': + # If there's no path component, ensure the URL ends with + # a '/' for backwards compatibility. + if not p[2]: + new_path = '/' + else: + new_path = p[2] + elif p[2].endswith('/') and url_path.startswith('/'): + new_path = p[2][:-1] + url_path + else: + new_path = p[2] + url_path + + new_netloc = p[1] + if host_prefix is not None: + new_netloc = host_prefix + new_netloc + + reconstructed = urlunsplit((p[0], new_netloc, new_path, p[3], p[4])) + return reconstructed + + +class AWSRequestPreparer: + """ + This class performs preparation on AWSRequest objects similar to that of + the PreparedRequest class does in the requests library. However, the logic + has been boiled down to meet the specific use cases in botocore. Of note + there are the following differences: + This class does not heavily prepare the URL. Requests performed many + validations and corrections to ensure the URL is properly formatted. + Botocore either performs these validations elsewhere or otherwise + consistently provides well formatted URLs. + + This class does not heavily prepare the body. Body preperation is + simple and supports only the cases that we document: bytes and + file-like objects to determine the content-length. This will also + additionally prepare a body that is a dict to be url encoded params + string as some signers rely on this. Finally, this class does not + support multipart file uploads. + + This class does not prepare the method, auth or cookies. + """ + + def prepare(self, original): + method = original.method + url = self._prepare_url(original) + body = self._prepare_body(original) + headers = self._prepare_headers(original, body) + stream_output = original.stream_output + + return AWSPreparedRequest(method, url, headers, body, stream_output) + + def _prepare_url(self, original): + url = original.url + if original.params: + url_parts = urlparse(url) + delim = '&' if url_parts.query else '?' 
+            if isinstance(original.params, Mapping):
+                params_to_encode = list(original.params.items())
+            else:
+                params_to_encode = original.params
+            params = urlencode(params_to_encode, doseq=True)
+            url = delim.join((url, params))
+        return url
+
+    def _prepare_headers(self, original, prepared_body=None):
+        headers = HeadersDict(original.headers.items())
+
+        # If the transfer encoding or content length is already set, use that
+        if 'Transfer-Encoding' in headers or 'Content-Length' in headers:
+            return headers
+
+        # Ensure we set the content length when it is expected
+        if original.method not in ('GET', 'HEAD', 'OPTIONS'):
+            length = self._determine_content_length(prepared_body)
+            if length is not None:
+                headers['Content-Length'] = str(length)
+            else:
+                # Failed to determine content length, using chunked
+                # NOTE: This shouldn't ever happen in practice
+                body_type = type(prepared_body)
+                logger.debug('Failed to determine length of %s', body_type)
+                headers['Transfer-Encoding'] = 'chunked'
+
+        return headers
+
+    def _to_utf8(self, item):
+        key, value = item
+        if isinstance(key, str):
+            key = key.encode('utf-8')
+        if isinstance(value, str):
+            value = value.encode('utf-8')
+        return key, value
+
+    def _prepare_body(self, original):
+        """Prepares the given HTTP body data."""
+        body = original.data
+        if body == b'':
+            body = None
+
+        if isinstance(body, dict):
+            params = [self._to_utf8(item) for item in body.items()]
+            body = urlencode(params, doseq=True)
+
+        return body
+
+    def _determine_content_length(self, body):
+        return botocore.utils.determine_content_length(body)
+
+
+class AWSRequest:
+    """Represents the elements of an HTTP request.
+
+    This class was originally inspired by requests.models.Request, but has
+    been boiled down to meet the specific use cases in botocore. That being
+    said, this class (even in requests) is effectively a named-tuple.
+    """
+
+    _REQUEST_PREPARER_CLS = AWSRequestPreparer
+
+    def __init__(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        data=None,
+        params=None,
+        auth_path=None,
+        stream_output=False,
+    ):
+        self._request_preparer = self._REQUEST_PREPARER_CLS()
+
+        # Default empty dicts for dict params.
+        params = {} if params is None else params
+
+        self.method = method
+        self.url = url
+        self.headers = HTTPHeaders()
+        self.data = data
+        self.params = params
+        self.auth_path = auth_path
+        self.stream_output = stream_output
+
+        if headers is not None:
+            for key, value in headers.items():
+                self.headers[key] = value
+
+        # This is a dictionary to hold information that is used when
+        # processing the request. What is inside of ``context`` is open-ended.
+        # For example, it may have a timestamp key that is used for holding
+        # what the timestamp is when signing the request. Note that none
+        # of the information that is inside of ``context`` is directly
+        # sent over the wire; the information is only used to assist in
+        # creating what is sent over the wire.
+        self.context = {}
+
+    def prepare(self):
+        """Constructs a :class:`AWSPreparedRequest <AWSPreparedRequest>`."""
+        return self._request_preparer.prepare(self)
+
+    @property
+    def body(self):
+        body = self.prepare().body
+        if isinstance(body, str):
+            body = body.encode('utf-8')
+        return body
+
+
+class AWSPreparedRequest:
+    """A data class representing a finalized request to be sent over the wire.
+
+    Requests at this stage should be treated as final, and the properties of
+    the request should not be modified.
+
+    :ivar method: The HTTP Method
+    :ivar url: The full url
+    :ivar headers: The HTTP headers to send.
+    :ivar body: The HTTP body.
+    :ivar stream_output: If the response for this request should be streamed.
+    """
+
+    def __init__(self, method, url, headers, body, stream_output):
+        self.method = method
+        self.url = url
+        self.headers = headers
+        self.body = body
+        self.stream_output = stream_output
+
+    def __repr__(self):
+        fmt = (
+            '<AWSPreparedRequest stream_output=%s, method=%s, url=%s, '
+            'headers=%s>'
+        )
+        return fmt % (self.stream_output, self.method, self.url, self.headers)
+
+    def reset_stream(self):
+        """Resets the streaming body to its initial position.
+
+        If the request contains a streaming body (a streamable file-like
+        object), seek to the object's initial position to ensure the entire
+        contents of the object are sent. This is a no-op for static
+        bytes-like body types.
+        """
+        # Trying to reset a stream when there is no stream will
+        # just immediately return. It's not an error, it will produce
+        # the same result as if we had actually reset the stream (we'll send
+        # the entire body contents again if we need to).
+        # Same case if the body is a string/bytes/bytearray type.
+
+        non_seekable_types = (bytes, str, bytearray)
+        if self.body is None or isinstance(self.body, non_seekable_types):
+            return
+        try:
+            logger.debug("Rewinding stream: %s", self.body)
+            self.body.seek(0)
+        except Exception as e:
+            logger.debug("Unable to rewind stream: %s", e)
+            raise UnseekableStreamError(stream_object=self.body)
+
+
+class AWSResponse:
+    """A data class representing an HTTP response.
+
+    This class was originally inspired by requests.models.Response, but has
+    been boiled down to meet the specific use cases in botocore. This has
+    effectively been reduced to a named tuple.
+
+    :ivar url: The full url.
+    :ivar status_code: The status code of the HTTP response.
+    :ivar headers: The HTTP headers received.
+    :ivar body: The HTTP response body.
+    """
+
+    def __init__(self, url, status_code, headers, raw):
+        self.url = url
+        self.status_code = status_code
+        self.headers = HeadersDict(headers)
+        self.raw = raw
+
+        self._content = None
+
+    @property
+    def content(self):
+        """Content of the response as bytes."""
+
+        if self._content is None:
+            # Read the contents.
+            # NOTE: requests would attempt to call stream and fall back
+            # to a custom generator that would call read in a loop, but
+            # we don't rely on this behavior
+            self._content = b''.join(self.raw.stream()) or b''
+
+        return self._content
+
+    @property
+    def text(self):
+        """Content of the response as a proper text type.
+
+        Uses the encoding type provided in the response headers to decode the
+        response content into a proper text type. If the encoding is not
+        present in the headers, UTF-8 is used as a default.
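+
+        A short sketch of the intended behavior (the header values here
+        are hypothetical)::
+
+            # Content-Type: application/json; charset=utf-16
+            #   -> the body is decoded as utf-16
+            # no usable encoding derivable from the headers
+            #   -> the body is decoded with the utf-8 default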
+ """ + encoding = botocore.utils.get_encoding_from_headers(self.headers) + if encoding: + return self.content.decode(encoding) + else: + return self.content.decode('utf-8') + + +class _HeaderKey: + def __init__(self, key): + self._key = key + self._lower = key.lower() + + def __hash__(self): + return hash(self._lower) + + def __eq__(self, other): + return isinstance(other, _HeaderKey) and self._lower == other._lower + + def __str__(self): + return self._key + + def __repr__(self): + return repr(self._key) + + +class HeadersDict(MutableMapping): + """A case-insenseitive dictionary to represent HTTP headers.""" + + def __init__(self, *args, **kwargs): + self._dict = {} + self.update(*args, **kwargs) + + def __setitem__(self, key, value): + self._dict[_HeaderKey(key)] = value + + def __getitem__(self, key): + return self._dict[_HeaderKey(key)] + + def __delitem__(self, key): + del self._dict[_HeaderKey(key)] + + def __iter__(self): + return (str(key) for key in self._dict) + + def __len__(self): + return len(self._dict) + + def __repr__(self): + return repr(self._dict) + + def copy(self): + return HeadersDict(self.items()) diff --git a/venv/lib/python3.10/site-packages/botocore/cacert.pem b/venv/lib/python3.10/site-packages/botocore/cacert.pem new file mode 100644 index 0000000000000000000000000000000000000000..919478ed06ae84199e3afc23c22eec96365886c5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/cacert.pem @@ -0,0 +1,4361 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 
75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 +MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW +koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z +dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association +# Label: "Visa eCommerce Root" +# Serial: 25952180776285836048024890241505565794 +# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 +# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 +# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 +-----BEGIN CERTIFICATE----- +MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr +MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl 
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv +bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw +CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h +dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l +cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h +2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E +lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV +ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq +299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t +vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL +dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF +AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR +zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 +LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd +7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw +++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt +398znM/jra6O1I7mT1GvFpLgXPYHDw== +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority +# Label: "QuoVadis Root CA" +# Serial: 985026699 +# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 +# SHA1 Fingerprint: 
de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 +# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 +-----BEGIN CERTIFICATE----- +MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz +MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw +IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR +dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp +li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D +rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ +WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug +F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU +xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC +Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv +dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw +ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl +IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh +c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy +ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh +Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI +KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T +KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq +y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p +dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD +VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL +MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk +fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 +7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R +cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y +mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW +xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK +SnQ2+Q== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz 
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B 
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=Sonera Class2 CA O=Sonera +# Subject: CN=Sonera Class2 CA O=Sonera +# Label: "Sonera Class 2 Root CA" +# Serial: 29 +# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb +# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 +# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 +-----BEGIN CERTIFICATE----- +MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP +MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx +MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV +BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o +Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt +5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s +3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej +vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu +8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw +DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG +MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil +zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ 
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD +FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 +Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 +ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: O=Government Root Certification Authority +# Subject: O=Government Root Certification Authority +# Label: "Taiwan GRCA" +# Serial: 42023070807708724159991140556527066870 +# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e +# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 +# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 +-----BEGIN CERTIFICATE----- +MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ +MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow +PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR +IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q +gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy +yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts +F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 +jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx +ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC +VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK +YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH +EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN +Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud +DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE +MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ +TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf +qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK +ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE +JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 +hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 +EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm +nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX +udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz +ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe +LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl +pYYsfPQS +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=Class 2 Primary CA O=Certplus +# Subject: CN=Class 2 Primary CA O=Certplus +# Label: "Certplus Class 2 Primary CA" +# Serial: 177770208045934040241468760488327595043 +# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b +# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb +# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb +-----BEGIN CERTIFICATE----- +MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw +PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 +MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz +IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ +ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR +VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL +kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd +EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas +H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 +HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud +DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 +QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu +Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ +AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 +yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR +FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA +ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB +kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 +l7+ijrRU +-----END CERTIFICATE----- + +# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. +# Label: "DST Root CA X3" +# Serial: 91299735575339953335919266965803778155 +# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 +# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 +# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 +-----BEGIN CERTIFICATE----- +MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ +MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT +DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow +PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD +Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB +AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O +rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq +OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b +xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw +7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD +aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG +SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 +ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr +AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz +R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 +JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo +Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- +MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv +MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 +6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GA CA" +# Serial: 86718877871133159090080555911823548314 +# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 +# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 +# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 +-----BEGIN CERTIFICATE----- +MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB +ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly +aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl +ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w +NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G +A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD +VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX +SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR +VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 +w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF +mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg +4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 +4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw +EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx +SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 +ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 +vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa +hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi +Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ +/L7fCg0= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center +# Label: "Deutsche Telekom Root CA 2" +# Serial: 38 +# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 +# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf +# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 +-----BEGIN CERTIFICATE----- +MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc +MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj +IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB +IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE +RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl +U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 +IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU +ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC +QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr +rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S +NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc +QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH +txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP +BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC +AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp +tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa +IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl +6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ +xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU +Cm26OWMohpLzGITY+9HPBVZkVw== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB +AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM +m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz +4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj +aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G2" +# Serial: 10000012 +# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a +# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 +# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f +-----BEGIN CERTIFICATE----- +MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX +DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 +qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp +uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU +Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE +pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp +5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M +UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN +GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy +5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv +6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK +eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 +B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov +L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV +HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG +SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS +CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen +5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 +IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK +gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL ++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL +vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm +bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk +N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC +Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z +ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Chambers of Commerce Root - 2008" +# Serial: 11806822484801597146 +# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 +# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c +# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 +-----BEGIN CERTIFICATE----- +MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz +IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz +MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj +dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw +EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp +MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 +28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq +VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q +DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR +5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL +ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a +Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl +UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s ++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 +Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj +ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx +hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV +HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 ++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN +YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t +L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy +ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt +IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV +HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w +DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW +PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF +5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 +glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH +FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 +pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD +xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG +tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq +jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De +fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg +OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ +d0jQ +-----END CERTIFICATE----- + +# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. +# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
+# Label: "Global Chambersign Root - 2008" +# Serial: 14541511773111788494 +# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 +# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c +# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca +-----BEGIN CERTIFICATE----- +MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD +VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 +IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 +MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD +aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx +MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy +cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG +A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl +BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI +hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed +KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 +G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 +zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 +ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG +HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 +Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V +yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e +beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r +6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh +wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog +zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW +BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr +ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp +ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk +cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt +YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC +CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow +KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI +hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ +UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz +X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x +fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz +a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd +Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd +SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O +AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso +M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge +v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z +09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: O=Trustis Limited OU=Trustis FPS Root CA +# Subject: O=Trustis Limited OU=Trustis FPS Root CA +# Label: "Trustis FPS Root CA" +# Serial: 36053640375399034304724988975563710553 +# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d +# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 +# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d +-----BEGIN CERTIFICATE----- +MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL +ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx +MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc +MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ +AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH +iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj +vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA +0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB +OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ +BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E +FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 +GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW +zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 +1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE +f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F +jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN +ZetX2fNXlrtIzYE= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus +# Label: "EE Certification Centre Root CA" +# Serial: 112324828676200291871926431888494945866 +# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f +# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 +# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 +-----BEGIN CERTIFICATE----- +MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 +MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 +czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG +CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy +MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl +ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS +b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy +euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO +bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d +MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE +1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ +zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB +BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF +BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV +v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG +E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u +uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW +iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v +GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. +# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 +4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG 
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# 
Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga +W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c 
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: 
f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu 
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: 
af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY 
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk 
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden +# Label: "Staat der Nederlanden Root CA - G3" +# Serial: 10003001 +# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 +# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc +# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX +DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl +ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv +b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP +cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW +IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX +xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy +KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR +9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az +5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 +6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 +Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP +bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt +BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt 
+XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd +INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD +U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp +LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 +Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp +gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh +/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw +0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A +fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq +4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR +1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ +QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM +94B7IWcnMFk= +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 
13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw 
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df +WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. 
OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT 
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy +P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. +# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" +# Serial: 156233699172481 +# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e +# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb +# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 +-----BEGIN CERTIFICATE----- +MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE +BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn +aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg +QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg +SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 +MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD +VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 +dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF +bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB +IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom +/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR +Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 +4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z +5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 +hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID +AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX +SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l +VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq +URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf +peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF +Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW ++qtB4Uu2NQvAmxU= +-----END CERTIFICATE----- + +# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 +# Label: "Certinomis - Root CA" +# Serial: 1 +# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f +# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 +# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 +-----BEGIN CERTIFICATE----- +MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET 
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb +BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz +MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx +FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g +Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 +fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl +LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV +WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF +TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb +5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc +CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri +wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ +wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG +m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 +F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng +WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 +2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF +AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ +0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw +F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS +g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj +qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN +h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ +ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V +btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj +Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ +8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW +gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G1 O=Certplus +# Subject: CN=Certplus Root CA G1 O=Certplus +# Label: "Certplus Root CA G1" +# Serial: 1491911565779898356709731176965615564637713 +# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 +# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 +# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa +MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy +dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a +iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt +6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP +0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f +6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE +EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN +1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc +h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT +mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV +4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO +WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud +DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd +Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq +hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh +66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 +/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS +S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j +2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R +Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr +RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy 
+6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV +V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 +g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl +++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= +-----END CERTIFICATE----- + +# Issuer: CN=Certplus Root CA G2 O=Certplus +# Subject: CN=Certplus Root CA G2 O=Certplus +# Label: "Certplus Root CA G2" +# Serial: 1492087096131536844209563509228951875861589 +# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 +# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a +# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 +-----BEGIN CERTIFICATE----- +MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x +CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs +dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat +93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x +Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P +AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj +FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG +SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch +p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal +U5ORGpOucGpnutee5WEaXw== +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust +# Subject: CN=OpenTrust Root CA G1 O=OpenTrust +# Label: "OpenTrust Root CA G1" +# Serial: 1492036577811947013770400127034825178844775 +# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da +# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e +# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b +wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX +/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 +77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP +uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx +p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx +Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 +TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W +G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw +vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY +EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 +2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw +DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E +PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf +gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS +FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 +V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P 
+XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I +i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t +TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 +09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky +Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ +AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj +1oxx +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust +# Subject: CN=OpenTrust Root CA G2 O=OpenTrust +# Label: "OpenTrust Root CA G2" +# Serial: 1492012448042702096986875987676935573415441 +# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb +# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b +# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 +-----BEGIN CERTIFICATE----- +MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA +MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w +ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw +MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU +T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh +/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e +CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 +1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE +FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS +gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X +G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy +YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH +vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 +t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ +gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 +5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w +DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz +Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 +nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT +RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT +wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 +t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa +TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 +o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU +3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA +iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f +WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM +S1IK +-----END CERTIFICATE----- + +# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust +# Subject: CN=OpenTrust Root CA G3 O=OpenTrust +# Label: "OpenTrust Root CA G3" +# Serial: 1492104908271485653071219941864171170455615 +# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 +# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 +# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 +-----BEGIN CERTIFICATE----- +MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx +CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U +cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow 
+QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl +blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm +3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d +oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G +A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 +DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK +BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q +j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx +4nxp5V2a+EEfOzmTk51V6s2N8fvB +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc +oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx 
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj +ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- 
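(Editor's aside, not part of the vendored file: the MD5/SHA1/SHA256 fingerprint headers attached to each entry above are digests of the certificate's DER encoding, i.e. the base64 body decoded. A minimal Python sketch to recompute the SHA256 line for any PEM block in this bundle:)

# Illustrative only; `pem` is any BEGIN/END CERTIFICATE block from this file.
import base64
import hashlib
import textwrap

def sha256_fingerprint(pem: str) -> str:
    # Drop the BEGIN/END armor lines, then decode the base64 body to DER bytes.
    body = "".join(line for line in pem.splitlines() if "-----" not in line)
    der = base64.b64decode(body)
    # Render as colon-separated hex pairs, matching the header format above.
    return ":".join(textwrap.wrap(hashlib.sha256(der).hexdigest(), 2))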
+ +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: 
"Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. +# Label: "LuxTrust Global Root 2" +# Serial: 59914338225734147123941058376788110305822489521 +# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c +# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f +# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 +-----BEGIN CERTIFICATE----- +MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL +BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV +BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw +MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B +LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F +ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem +hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 +EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn +Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 +zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ +96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m +j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g +DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ +8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j +X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH +hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB +KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 +Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT ++Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL +BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 +BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO +jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 +loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c +qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ +2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ +JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre +zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf +LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ 
+x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 +oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr +N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. 
+# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
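(Editor's aside: the entries above and below belong to certifi's cacert.pem root bundle. For context, a short hedged sketch of how such a bundle is typically consumed at runtime; certifi.where() is the real helper that returns the installed path of this file, and nothing else here is specific to this diff:)

import ssl
import certifi

# Build an SSL context that validates peers against the roots in this bundle
# (Entrust, Amazon, ISRG, SSL.com, ...).
ctx = ssl.create_default_context(cafile=certifi.where())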
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- diff --git a/venv/lib/python3.10/site-packages/botocore/client.py b/venv/lib/python3.10/site-packages/botocore/client.py new file mode 100644 index 0000000000000000000000000000000000000000..e57d1ded3111eb0f4539c5cdcac29640bb97d039 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/client.py @@ -0,0 +1,1379 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). 
You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import logging + +from botocore import waiter, xform_name +from botocore.args import ClientArgsCreator +from botocore.auth import AUTH_TYPE_MAPS +from botocore.awsrequest import prepare_request_dict +from botocore.compress import maybe_compress_request +from botocore.config import Config +from botocore.credentials import RefreshableCredentials +from botocore.discovery import ( + EndpointDiscoveryHandler, + EndpointDiscoveryManager, + block_endpoint_discovery_required_operations, +) +from botocore.docs.docstring import ClientMethodDocstring, PaginatorDocstring +from botocore.exceptions import ( + DataNotFoundError, + InvalidEndpointDiscoveryConfigurationError, + OperationNotPageableError, + UnknownServiceError, + UnknownSignatureVersionError, +) +from botocore.history import get_global_history_recorder +from botocore.hooks import first_non_none_response +from botocore.httpchecksum import ( + apply_request_checksum, + resolve_checksum_context, +) +from botocore.model import ServiceModel +from botocore.paginate import Paginator +from botocore.retries import adaptive, standard +from botocore.useragent import UserAgentString +from botocore.utils import ( + CachedProperty, + EventbridgeSignerSetter, + S3ControlArnParamHandlerv2, + S3ExpressIdentityResolver, + S3RegionRedirectorv2, + ensure_boolean, + get_service_module_name, +) + +# Keep these imported. There's pre-existing code that uses: +# "from botocore.client import UNSIGNED" +# "from botocore.client import ClientError" +# etc. +from botocore.exceptions import ClientError # noqa +from botocore.utils import S3ArnParamHandler # noqa +from botocore.utils import S3ControlArnParamHandler # noqa +from botocore.utils import S3ControlEndpointSetter # noqa +from botocore.utils import S3EndpointSetter # noqa +from botocore.utils import S3RegionRedirector # noqa +from botocore import UNSIGNED # noqa + + +_LEGACY_SIGNATURE_VERSIONS = frozenset( + ( + 'v2', + 'v3', + 'v3https', + 'v4', + 's3', + 's3v4', + ) +) + + +logger = logging.getLogger(__name__) +history_recorder = get_global_history_recorder() + + +class ClientCreator: + """Creates client objects for a service.""" + + def __init__( + self, + loader, + endpoint_resolver, + user_agent, + event_emitter, + retry_handler_factory, + retry_config_translator, + response_parser_factory=None, + exceptions_factory=None, + config_store=None, + user_agent_creator=None, + ): + self._loader = loader + self._endpoint_resolver = endpoint_resolver + self._user_agent = user_agent + self._event_emitter = event_emitter + self._retry_handler_factory = retry_handler_factory + self._retry_config_translator = retry_config_translator + self._response_parser_factory = response_parser_factory + self._exceptions_factory = exceptions_factory + # TODO: Migrate things away from scoped_config in favor of the + # config_store. The config store can pull things from both the scoped + # config and environment variables (and potentially more in the + # future). 
+ self._config_store = config_store + self._user_agent_creator = user_agent_creator + + def create_client( + self, + service_name, + region_name, + is_secure=True, + endpoint_url=None, + verify=None, + credentials=None, + scoped_config=None, + api_version=None, + client_config=None, + auth_token=None, + ): + responses = self._event_emitter.emit( + 'choose-service-name', service_name=service_name + ) + service_name = first_non_none_response(responses, default=service_name) + service_model = self._load_service_model(service_name, api_version) + try: + endpoints_ruleset_data = self._load_service_endpoints_ruleset( + service_name, api_version + ) + partition_data = self._loader.load_data('partitions') + except UnknownServiceError: + endpoints_ruleset_data = None + partition_data = None + logger.info( + 'No endpoints ruleset found for service %s, falling back to ' + 'legacy endpoint routing.', + service_name, + ) + + cls = self._create_client_class(service_name, service_model) + region_name, client_config = self._normalize_fips_region( + region_name, client_config + ) + endpoint_bridge = ClientEndpointBridge( + self._endpoint_resolver, + scoped_config, + client_config, + service_signing_name=service_model.metadata.get('signingName'), + config_store=self._config_store, + service_signature_version=service_model.metadata.get( + 'signatureVersion' + ), + ) + client_args = self._get_client_args( + service_model, + region_name, + is_secure, + endpoint_url, + verify, + credentials, + scoped_config, + client_config, + endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, + ) + service_client = cls(**client_args) + self._register_retries(service_client) + self._register_s3_events( + client=service_client, + endpoint_bridge=None, + endpoint_url=None, + client_config=client_config, + scoped_config=scoped_config, + ) + self._register_s3express_events(client=service_client) + self._register_s3_control_events(client=service_client) + self._register_endpoint_discovery( + service_client, endpoint_url, client_config + ) + return service_client + + def create_client_class(self, service_name, api_version=None): + service_model = self._load_service_model(service_name, api_version) + return self._create_client_class(service_name, service_model) + + def _create_client_class(self, service_name, service_model): + class_attributes = self._create_methods(service_model) + py_name_to_operation_name = self._create_name_mapping(service_model) + class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name + bases = [BaseClient] + service_id = service_model.service_id.hyphenize() + self._event_emitter.emit( + f'creating-client-class.{service_id}', + class_attributes=class_attributes, + base_classes=bases, + ) + class_name = get_service_module_name(service_model) + cls = type(str(class_name), tuple(bases), class_attributes) + return cls + + def _normalize_fips_region(self, region_name, client_config): + if region_name is not None: + normalized_region_name = region_name.replace('fips-', '').replace( + '-fips', '' + ) + # If region has been transformed then set flag + if normalized_region_name != region_name: + config_use_fips_endpoint = Config(use_fips_endpoint=True) + if client_config: + # Keeping endpoint setting client specific + client_config = client_config.merge( + config_use_fips_endpoint + ) + else: + client_config = config_use_fips_endpoint + logger.warning( + f'transforming region from {region_name} to ' + f'{normalized_region_name} and setting ' + 'use_fips_endpoint to true. 
client should not ' + 'be configured with a fips pseudo region.' + ) + region_name = normalized_region_name + return region_name, client_config + + def _load_service_model(self, service_name, api_version=None): + json_model = self._loader.load_service_model( + service_name, 'service-2', api_version=api_version + ) + service_model = ServiceModel(json_model, service_name=service_name) + return service_model + + def _load_service_endpoints_ruleset(self, service_name, api_version=None): + return self._loader.load_service_model( + service_name, 'endpoint-rule-set-1', api_version=api_version + ) + + def _register_retries(self, client): + retry_mode = client.meta.config.retries['mode'] + if retry_mode == 'standard': + self._register_v2_standard_retries(client) + elif retry_mode == 'adaptive': + self._register_v2_standard_retries(client) + self._register_v2_adaptive_retries(client) + elif retry_mode == 'legacy': + self._register_legacy_retries(client) + + def _register_v2_standard_retries(self, client): + max_attempts = client.meta.config.retries.get('total_max_attempts') + kwargs = {'client': client} + if max_attempts is not None: + kwargs['max_attempts'] = max_attempts + standard.register_retry_handler(**kwargs) + + def _register_v2_adaptive_retries(self, client): + adaptive.register_retry_handler(client) + + def _register_legacy_retries(self, client): + endpoint_prefix = client.meta.service_model.endpoint_prefix + service_id = client.meta.service_model.service_id + service_event_name = service_id.hyphenize() + + # First, we load the entire retry config for all services, + # then pull out just the information we need. + original_config = self._loader.load_data('_retry') + if not original_config: + return + + retries = self._transform_legacy_retries(client.meta.config.retries) + retry_config = self._retry_config_translator.build_retry_config( + endpoint_prefix, + original_config.get('retry', {}), + original_config.get('definitions', {}), + retries, + ) + + logger.debug( + "Registering retry handlers for service: %s", + client.meta.service_model.service_name, + ) + handler = self._retry_handler_factory.create_retry_handler( + retry_config, endpoint_prefix + ) + unique_id = f'retry-config-{service_event_name}' + client.meta.events.register( + f"needs-retry.{service_event_name}", handler, unique_id=unique_id + ) + + def _transform_legacy_retries(self, retries): + if retries is None: + return + copied_args = retries.copy() + if 'total_max_attempts' in retries: + copied_args = retries.copy() + copied_args['max_attempts'] = ( + copied_args.pop('total_max_attempts') - 1 + ) + return copied_args + + def _get_retry_mode(self, client, config_store): + client_retries = client.meta.config.retries + if ( + client_retries is not None + and client_retries.get('mode') is not None + ): + return client_retries['mode'] + return config_store.get_config_variable('retry_mode') or 'legacy' + + def _register_endpoint_discovery(self, client, endpoint_url, config): + if endpoint_url is not None: + # Don't register any handlers in the case of a custom endpoint url + return + # Only attach handlers if the service supports discovery + if client.meta.service_model.endpoint_discovery_operation is None: + return + events = client.meta.events + service_id = client.meta.service_model.service_id.hyphenize() + enabled = False + if config and config.endpoint_discovery_enabled is not None: + enabled = config.endpoint_discovery_enabled + elif self._config_store: + enabled = self._config_store.get_config_variable( + 
'endpoint_discovery_enabled' + ) + + enabled = self._normalize_endpoint_discovery_config(enabled) + if enabled and self._requires_endpoint_discovery(client, enabled): + discover = enabled is True + manager = EndpointDiscoveryManager( + client, always_discover=discover + ) + handler = EndpointDiscoveryHandler(manager) + handler.register(events, service_id) + else: + events.register( + 'before-parameter-build', + block_endpoint_discovery_required_operations, + ) + + def _normalize_endpoint_discovery_config(self, enabled): + """Config must either be a boolean-string or string-literal 'auto'""" + if isinstance(enabled, str): + enabled = enabled.lower().strip() + if enabled == 'auto': + return enabled + elif enabled in ('true', 'false'): + return ensure_boolean(enabled) + elif isinstance(enabled, bool): + return enabled + + raise InvalidEndpointDiscoveryConfigurationError(config_value=enabled) + + def _requires_endpoint_discovery(self, client, enabled): + if enabled == "auto": + return client.meta.service_model.endpoint_discovery_required + return enabled + + def _register_eventbridge_events( + self, client, endpoint_bridge, endpoint_url + ): + if client.meta.service_model.service_name != 'events': + return + EventbridgeSignerSetter( + endpoint_resolver=self._endpoint_resolver, + region=client.meta.region_name, + endpoint_url=endpoint_url, + ).register(client.meta.events) + + def _register_s3express_events( + self, + client, + endpoint_bridge=None, + endpoint_url=None, + client_config=None, + scoped_config=None, + ): + if client.meta.service_model.service_name != 's3': + return + S3ExpressIdentityResolver(client, RefreshableCredentials).register() + + def _register_s3_events( + self, + client, + endpoint_bridge, + endpoint_url, + client_config, + scoped_config, + ): + if client.meta.service_model.service_name != 's3': + return + S3RegionRedirectorv2(None, client).register() + self._set_s3_presign_signature_version( + client.meta, client_config, scoped_config + ) + client.meta.events.register( + 'before-parameter-build.s3', self._inject_s3_input_parameters + ) + + def _register_s3_control_events( + self, + client, + endpoint_bridge=None, + endpoint_url=None, + client_config=None, + scoped_config=None, + ): + if client.meta.service_model.service_name != 's3control': + return + S3ControlArnParamHandlerv2().register(client.meta.events) + + def _set_s3_presign_signature_version( + self, client_meta, client_config, scoped_config + ): + # This will return the manually configured signature version, or None + # if none was manually set. If a customer manually sets the signature + # version, we always want to use what they set. + provided_signature_version = _get_configured_signature_version( + 's3', client_config, scoped_config + ) + if provided_signature_version is not None: + return + + # Check to see if the region is a region that we know about. If we + # don't know about a region, then we can safely assume it's a new + # region that is sigv4 only, since all new S3 regions only allow sigv4. + # The only exception is aws-global. This is a pseudo-region for the + # global endpoint, we should respect the signature versions it + # supports, which includes v2. + regions = self._endpoint_resolver.get_available_endpoints( + 's3', client_meta.partition + ) + if ( + client_meta.region_name != 'aws-global' + and client_meta.region_name not in regions + ): + return + + # If it is a region we know about, we want to default to sigv2, so here + # we check to see if it is available. 
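+        # [Editor's note] Illustrative sketch only, not part of botocore: the
+        # sigv2 presign default chosen below can be overridden by pinning a
+        # signature version on the client config (assuming an existing session):
+        #
+        #     from botocore.config import Config
+        #     client = session.create_client(
+        #         's3', config=Config(signature_version='s3v4')
+        #     )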
+ endpoint = self._endpoint_resolver.construct_endpoint( + 's3', client_meta.region_name + ) + signature_versions = endpoint['signatureVersions'] + if 's3' not in signature_versions: + return + + # We now know that we're in a known region that supports sigv2 and + # the customer hasn't set a signature version so we default the + # signature version to sigv2. + client_meta.events.register( + 'choose-signer.s3', self._default_s3_presign_to_sigv2 + ) + + def _inject_s3_input_parameters(self, params, context, **kwargs): + context['input_params'] = {} + inject_parameters = ('Bucket', 'Delete', 'Key', 'Prefix') + for inject_parameter in inject_parameters: + if inject_parameter in params: + context['input_params'][inject_parameter] = params[ + inject_parameter + ] + + def _default_s3_presign_to_sigv2(self, signature_version, **kwargs): + """ + Returns the 's3' (sigv2) signer if presigning an s3 request. This is + intended to be used to set the default signature version for the signer + to sigv2. Situations where an asymmetric signature is required are the + exception, for example MRAP needs v4a. + + :type signature_version: str + :param signature_version: The current client signature version. + + :type signing_name: str + :param signing_name: The signing name of the service. + + :return: 's3' if the request is an s3 presign request, None otherwise + """ + if signature_version.startswith('v4a'): + return + + if signature_version.startswith('v4-s3express'): + return f'{signature_version}' + + for suffix in ['-query', '-presign-post']: + if signature_version.endswith(suffix): + return f's3{suffix}' + + def _get_client_args( + self, + service_model, + region_name, + is_secure, + endpoint_url, + verify, + credentials, + scoped_config, + client_config, + endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, + ): + args_creator = ClientArgsCreator( + self._event_emitter, + self._user_agent, + self._response_parser_factory, + self._loader, + self._exceptions_factory, + config_store=self._config_store, + user_agent_creator=self._user_agent_creator, + ) + return args_creator.get_client_args( + service_model, + region_name, + is_secure, + endpoint_url, + verify, + credentials, + scoped_config, + client_config, + endpoint_bridge, + auth_token, + endpoints_ruleset_data, + partition_data, + ) + + def _create_methods(self, service_model): + op_dict = {} + for operation_name in service_model.operation_names: + py_operation_name = xform_name(operation_name) + op_dict[py_operation_name] = self._create_api_method( + py_operation_name, operation_name, service_model + ) + return op_dict + + def _create_name_mapping(self, service_model): + # py_name -> OperationName, for every operation available + # for a service. + mapping = {} + for operation_name in service_model.operation_names: + py_operation_name = xform_name(operation_name) + mapping[py_operation_name] = operation_name + return mapping + + def _create_api_method( + self, py_operation_name, operation_name, service_model + ): + def _api_call(self, *args, **kwargs): + # We're accepting *args so that we can give a more helpful + # error message than TypeError: _api_call takes exactly + # 1 argument. + if args: + raise TypeError( + f"{py_operation_name}() only accepts keyword arguments." + ) + # The "self" in this scope is referring to the BaseClient. 
+ return self._make_api_call(operation_name, kwargs) + + _api_call.__name__ = str(py_operation_name) + + # Add the docstring to the client method + operation_model = service_model.operation_model(operation_name) + docstring = ClientMethodDocstring( + operation_model=operation_model, + method_name=operation_name, + event_emitter=self._event_emitter, + method_description=operation_model.documentation, + example_prefix=f'response = client.{py_operation_name}', + include_signature=False, + ) + _api_call.__doc__ = docstring + return _api_call + + +class ClientEndpointBridge: + """Bridges endpoint data and client creation + + This class handles taking out the relevant arguments from the endpoint + resolver and determining which values to use, taking into account any + client configuration options and scope configuration options. + + This class also handles determining what, if any, region to use if no + explicit region setting is provided. For example, Amazon S3 client will + utilize "us-east-1" by default if no region can be resolved.""" + + DEFAULT_ENDPOINT = '{service}.{region}.amazonaws.com' + _DUALSTACK_CUSTOMIZED_SERVICES = ['s3', 's3-control'] + + def __init__( + self, + endpoint_resolver, + scoped_config=None, + client_config=None, + default_endpoint=None, + service_signing_name=None, + config_store=None, + service_signature_version=None, + ): + self.service_signing_name = service_signing_name + self.endpoint_resolver = endpoint_resolver + self.scoped_config = scoped_config + self.client_config = client_config + self.default_endpoint = default_endpoint or self.DEFAULT_ENDPOINT + self.config_store = config_store + self.service_signature_version = service_signature_version + + def resolve( + self, service_name, region_name=None, endpoint_url=None, is_secure=True + ): + region_name = self._check_default_region(service_name, region_name) + use_dualstack_endpoint = self._resolve_use_dualstack_endpoint( + service_name + ) + use_fips_endpoint = self._resolve_endpoint_variant_config_var( + 'use_fips_endpoint' + ) + resolved = self.endpoint_resolver.construct_endpoint( + service_name, + region_name, + use_dualstack_endpoint=use_dualstack_endpoint, + use_fips_endpoint=use_fips_endpoint, + ) + + # If we can't resolve the region, we'll attempt to get a global + # endpoint for non-regionalized services (iam, route53, etc) + if not resolved: + # TODO: fallback partition_name should be configurable in the + # future for users to define as needed. + resolved = self.endpoint_resolver.construct_endpoint( + service_name, + region_name, + partition_name='aws', + use_dualstack_endpoint=use_dualstack_endpoint, + use_fips_endpoint=use_fips_endpoint, + ) + + if resolved: + return self._create_endpoint( + resolved, service_name, region_name, endpoint_url, is_secure + ) + else: + return self._assume_endpoint( + service_name, region_name, endpoint_url, is_secure + ) + + def resolver_uses_builtin_data(self): + return self.endpoint_resolver.uses_builtin_data + + def _check_default_region(self, service_name, region_name): + if region_name is not None: + return region_name + # Use the client_config region if no explicit region was provided. 
+ if self.client_config and self.client_config.region_name is not None: + return self.client_config.region_name + + def _create_endpoint( + self, resolved, service_name, region_name, endpoint_url, is_secure + ): + region_name, signing_region = self._pick_region_values( + resolved, region_name, endpoint_url + ) + if endpoint_url is None: + endpoint_url = self._make_url( + resolved.get('hostname'), + is_secure, + resolved.get('protocols', []), + ) + signature_version = self._resolve_signature_version( + service_name, resolved + ) + signing_name = self._resolve_signing_name(service_name, resolved) + return self._create_result( + service_name=service_name, + region_name=region_name, + signing_region=signing_region, + signing_name=signing_name, + endpoint_url=endpoint_url, + metadata=resolved, + signature_version=signature_version, + ) + + def _resolve_endpoint_variant_config_var(self, config_var): + client_config = self.client_config + config_val = False + + # Client configuration arg has precedence + if client_config and getattr(client_config, config_var) is not None: + return getattr(client_config, config_var) + elif self.config_store is not None: + # Check config store + config_val = self.config_store.get_config_variable(config_var) + return config_val + + def _resolve_use_dualstack_endpoint(self, service_name): + s3_dualstack_mode = self._is_s3_dualstack_mode(service_name) + if s3_dualstack_mode is not None: + return s3_dualstack_mode + return self._resolve_endpoint_variant_config_var( + 'use_dualstack_endpoint' + ) + + def _is_s3_dualstack_mode(self, service_name): + if service_name not in self._DUALSTACK_CUSTOMIZED_SERVICES: + return None + # TODO: This normalization logic is duplicated from the + # ClientArgsCreator class. Consolidate everything to + # ClientArgsCreator. _resolve_signature_version also has similarly + # duplicated logic. + client_config = self.client_config + if ( + client_config is not None + and client_config.s3 is not None + and 'use_dualstack_endpoint' in client_config.s3 + ): + # Client config trumps scoped config. + return client_config.s3['use_dualstack_endpoint'] + if self.scoped_config is not None: + enabled = self.scoped_config.get('s3', {}).get( + 'use_dualstack_endpoint' + ) + if enabled in [True, 'True', 'true']: + return True + + def _assume_endpoint( + self, service_name, region_name, endpoint_url, is_secure + ): + if endpoint_url is None: + # Expand the default hostname URI template. + hostname = self.default_endpoint.format( + service=service_name, region=region_name + ) + endpoint_url = self._make_url( + hostname, is_secure, ['http', 'https'] + ) + logger.debug( + f'Assuming an endpoint for {service_name}, {region_name}: {endpoint_url}' + ) + # We still want to allow the user to provide an explicit version. 
+ signature_version = self._resolve_signature_version( + service_name, {'signatureVersions': ['v4']} + ) + signing_name = self._resolve_signing_name(service_name, resolved={}) + return self._create_result( + service_name=service_name, + region_name=region_name, + signing_region=region_name, + signing_name=signing_name, + signature_version=signature_version, + endpoint_url=endpoint_url, + metadata={}, + ) + + def _create_result( + self, + service_name, + region_name, + signing_region, + signing_name, + endpoint_url, + signature_version, + metadata, + ): + return { + 'service_name': service_name, + 'region_name': region_name, + 'signing_region': signing_region, + 'signing_name': signing_name, + 'endpoint_url': endpoint_url, + 'signature_version': signature_version, + 'metadata': metadata, + } + + def _make_url(self, hostname, is_secure, supported_protocols): + if is_secure and 'https' in supported_protocols: + scheme = 'https' + else: + scheme = 'http' + return f'{scheme}://{hostname}' + + def _resolve_signing_name(self, service_name, resolved): + # CredentialScope overrides everything else. + if ( + 'credentialScope' in resolved + and 'service' in resolved['credentialScope'] + ): + return resolved['credentialScope']['service'] + # Use the signingName from the model if present. + if self.service_signing_name: + return self.service_signing_name + # Just assume it is the same as the service name. + return service_name + + def _pick_region_values(self, resolved, region_name, endpoint_url): + signing_region = region_name + if endpoint_url is None: + # Do not use the region name or signing name from the resolved + # endpoint if the user explicitly provides an endpoint_url. This + # would happen if we resolve to an endpoint where the service has + # a "defaults" section that overrides all endpoints with a single + # hostname and credentialScope. This has been the case historically + # for how STS has worked. The only way to resolve an STS endpoint + # was to provide a region_name and an endpoint_url. In that case, + # we would still resolve an endpoint, but we would not use the + # resolved endpointName or signingRegion because we want to allow + # custom endpoints. + region_name = resolved['endpointName'] + signing_region = region_name + if ( + 'credentialScope' in resolved + and 'region' in resolved['credentialScope'] + ): + signing_region = resolved['credentialScope']['region'] + return region_name, signing_region + + def _resolve_signature_version(self, service_name, resolved): + configured_version = _get_configured_signature_version( + service_name, self.client_config, self.scoped_config + ) + if configured_version is not None: + return configured_version + + potential_versions = resolved.get('signatureVersions', []) + if ( + self.service_signature_version is not None + and self.service_signature_version + not in _LEGACY_SIGNATURE_VERSIONS + ): + # Prefer the service model as most specific + # source of truth for new signature versions. + potential_versions = [self.service_signature_version] + + # Pick a signature version from the endpoint metadata if present. + if 'signatureVersions' in resolved: + if service_name == 's3': + return 's3v4' + if 'v4' in potential_versions: + return 'v4' + # Now just iterate over the signature versions in order until we + # find the first one that is known to Botocore. 
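+        # [Editor's note] Illustrative summary, not part of botocore: the
+        # resolution order implemented above is (1) a signature_version set
+        # on the client or scoped config, (2) a non-legacy version from the
+        # service model, (3) endpoint metadata, preferring 's3v4'/'v4', e.g.:
+        #
+        #     Config(signature_version='v4')  # step (1): always wins if set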
+ for known in potential_versions: + if known in AUTH_TYPE_MAPS: + return known + raise UnknownSignatureVersionError( + signature_version=potential_versions + ) + + +class BaseClient: + # This is actually reassigned with the py->op_name mapping + # when the client creator creates the subclass. This value is used + # because calls such as client.get_paginator('list_objects') use the + # snake_case name, but we need to know the ListObjects form. + # xform_name() does the ListObjects->list_objects conversion, but + # we need the reverse mapping here. + _PY_TO_OP_NAME = {} + + def __init__( + self, + serializer, + endpoint, + response_parser, + event_emitter, + request_signer, + service_model, + loader, + client_config, + partition, + exceptions_factory, + endpoint_ruleset_resolver=None, + user_agent_creator=None, + ): + self._serializer = serializer + self._endpoint = endpoint + self._ruleset_resolver = endpoint_ruleset_resolver + self._response_parser = response_parser + self._request_signer = request_signer + self._cache = {} + self._loader = loader + self._client_config = client_config + self.meta = ClientMeta( + event_emitter, + self._client_config, + endpoint.host, + service_model, + self._PY_TO_OP_NAME, + partition, + ) + self._exceptions_factory = exceptions_factory + self._exceptions = None + self._user_agent_creator = user_agent_creator + if self._user_agent_creator is None: + self._user_agent_creator = ( + UserAgentString.from_environment().with_client_config( + self._client_config + ) + ) + self._register_handlers() + + def __getattr__(self, item): + service_id = self._service_model.service_id.hyphenize() + event_name = f'getattr.{service_id}.{item}' + + handler, event_response = self.meta.events.emit_until_response( + event_name, client=self + ) + + if event_response is not None: + return event_response + + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{item}'" + ) + + def close(self): + """Closes underlying endpoint connections.""" + self._endpoint.close() + + def _register_handlers(self): + # Register the handler required to sign requests. + service_id = self.meta.service_model.service_id.hyphenize() + self.meta.events.register( + f"request-created.{service_id}", self._request_signer.handler + ) + + @property + def _service_model(self): + return self.meta.service_model + + def _make_api_call(self, operation_name, api_params): + operation_model = self._service_model.operation_model(operation_name) + service_name = self._service_model.service_name + history_recorder.record( + 'API_CALL', + { + 'service': service_name, + 'operation': operation_name, + 'params': api_params, + }, + ) + if operation_model.deprecated: + logger.debug( + 'Warning: %s.%s() is deprecated', service_name, operation_name + ) + request_context = { + 'client_region': self.meta.region_name, + 'client_config': self.meta.config, + 'has_streaming_input': operation_model.has_streaming_input, + 'auth_type': operation_model.auth_type, + } + api_params = self._emit_api_params( + api_params=api_params, + operation_model=operation_model, + context=request_context, + ) + ( + endpoint_url, + additional_headers, + properties, + ) = self._resolve_endpoint_ruleset( + operation_model, api_params, request_context + ) + if properties: + # Pass arbitrary endpoint info with the Request + # for use during construction. 
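+            # [Editor's note] Illustrative sketch only, not part of botocore:
+            # the 'before-call' event emitted below is a public extension
+            # point; a handler can inspect or amend the serialized request,
+            # e.g. (the header name is a made-up example):
+            #
+            #     def add_trace_header(params, **kwargs):
+            #         params['headers']['x-example-trace'] = 'on'
+            #
+            #     client.meta.events.register(
+            #         'before-call.s3.ListObjects', add_trace_header
+            #     )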
+ request_context['endpoint_properties'] = properties + request_dict = self._convert_to_request_dict( + api_params=api_params, + operation_model=operation_model, + endpoint_url=endpoint_url, + context=request_context, + headers=additional_headers, + ) + resolve_checksum_context(request_dict, operation_model, api_params) + + service_id = self._service_model.service_id.hyphenize() + handler, event_response = self.meta.events.emit_until_response( + f'before-call.{service_id}.{operation_name}', + model=operation_model, + params=request_dict, + request_signer=self._request_signer, + context=request_context, + ) + + if event_response is not None: + http, parsed_response = event_response + else: + maybe_compress_request( + self.meta.config, request_dict, operation_model + ) + apply_request_checksum(request_dict) + http, parsed_response = self._make_request( + operation_model, request_dict, request_context + ) + + self.meta.events.emit( + f'after-call.{service_id}.{operation_name}', + http_response=http, + parsed=parsed_response, + model=operation_model, + context=request_context, + ) + + if http.status_code >= 300: + error_info = parsed_response.get("Error", {}) + error_code = error_info.get("QueryErrorCode") or error_info.get( + "Code" + ) + error_class = self.exceptions.from_code(error_code) + raise error_class(parsed_response, operation_name) + else: + return parsed_response + + def _make_request(self, operation_model, request_dict, request_context): + try: + return self._endpoint.make_request(operation_model, request_dict) + except Exception as e: + self.meta.events.emit( + f'after-call-error.{self._service_model.service_id.hyphenize()}.{operation_model.name}', + exception=e, + context=request_context, + ) + raise + + def _convert_to_request_dict( + self, + api_params, + operation_model, + endpoint_url, + context=None, + headers=None, + set_user_agent_header=True, + ): + request_dict = self._serializer.serialize_to_request( + api_params, operation_model + ) + if not self._client_config.inject_host_prefix: + request_dict.pop('host_prefix', None) + if headers is not None: + request_dict['headers'].update(headers) + if set_user_agent_header: + user_agent = self._user_agent_creator.to_string() + else: + user_agent = None + prepare_request_dict( + request_dict, + endpoint_url=endpoint_url, + user_agent=user_agent, + context=context, + ) + return request_dict + + def _emit_api_params(self, api_params, operation_model, context): + # Given the API params provided by the user and the operation_model + # we can serialize the request to a request_dict. + operation_name = operation_model.name + + # Emit an event that allows users to modify the parameters at the + # beginning of the method. It allows handlers to modify existing + # parameters or return a new set of parameters to use. + service_id = self._service_model.service_id.hyphenize() + responses = self.meta.events.emit( + f'provide-client-params.{service_id}.{operation_name}', + params=api_params, + model=operation_model, + context=context, + ) + api_params = first_non_none_response(responses, default=api_params) + + self.meta.events.emit( + f'before-parameter-build.{service_id}.{operation_name}', + params=api_params, + model=operation_model, + context=context, + ) + return api_params + + def _resolve_endpoint_ruleset( + self, + operation_model, + params, + request_context, + ignore_signing_region=False, + ): + """Returns endpoint URL and list of additional headers returned from + EndpointRulesetResolver for the given operation and params. 
If the + ruleset resolver is not available, for example because the service has + no endpoints ruleset file, the legacy endpoint resolver's value is + returned. + + Use ignore_signing_region for generating presigned URLs or any other + situation where the signing region information from the ruleset + resolver should be ignored. + + Returns tuple of URL and headers dictionary. Additionally, the + request_context dict is modified in place with any signing information + returned from the ruleset resolver. + """ + if self._ruleset_resolver is None: + endpoint_url = self.meta.endpoint_url + additional_headers = {} + endpoint_properties = {} + else: + endpoint_info = self._ruleset_resolver.construct_endpoint( + operation_model=operation_model, + call_args=params, + request_context=request_context, + ) + endpoint_url = endpoint_info.url + additional_headers = endpoint_info.headers + endpoint_properties = endpoint_info.properties + # If authSchemes is present, overwrite default auth type and + # signing context derived from service model. + auth_schemes = endpoint_info.properties.get('authSchemes') + if auth_schemes is not None: + auth_info = self._ruleset_resolver.auth_schemes_to_signing_ctx( + auth_schemes + ) + auth_type, signing_context = auth_info + request_context['auth_type'] = auth_type + if 'region' in signing_context and ignore_signing_region: + del signing_context['region'] + if 'signing' in request_context: + request_context['signing'].update(signing_context) + else: + request_context['signing'] = signing_context + + return endpoint_url, additional_headers, endpoint_properties + + def get_paginator(self, operation_name): + """Create a paginator for an operation. + + :type operation_name: string + :param operation_name: The operation name. This is the same name + as the method name on the client. For example, if the + method name is ``create_foo``, and you'd normally invoke the + operation as ``client.create_foo(**kwargs)``, if the + ``create_foo`` operation can be paginated, you can use the + call ``client.get_paginator("create_foo")``. + + :raise OperationNotPageableError: Raised if the operation is not + pageable. You can use the ``client.can_paginate`` method to + check if an operation is pageable. + + :rtype: ``botocore.paginate.Paginator`` + :return: A paginator object. + + """ + if not self.can_paginate(operation_name): + raise OperationNotPageableError(operation_name=operation_name) + else: + actual_operation_name = self._PY_TO_OP_NAME[operation_name] + + # Create a new paginate method that will serve as a proxy to + # the underlying Paginator.paginate method. This is needed to + # attach a docstring to the method. + def paginate(self, **kwargs): + return Paginator.paginate(self, **kwargs) + + paginator_config = self._cache['page_config'][ + actual_operation_name + ] + # Add the docstring for the paginate method. + paginate.__doc__ = PaginatorDocstring( + paginator_name=actual_operation_name, + event_emitter=self.meta.events, + service_model=self.meta.service_model, + paginator_config=paginator_config, + include_signature=False, + ) + + # Rename the paginator class based on the type of paginator. 
+ service_module_name = get_service_module_name( + self.meta.service_model + ) + paginator_class_name = ( + f"{service_module_name}.Paginator.{actual_operation_name}" + ) + + # Create the new paginator class + documented_paginator_cls = type( + paginator_class_name, (Paginator,), {'paginate': paginate} + ) + + operation_model = self._service_model.operation_model( + actual_operation_name + ) + paginator = documented_paginator_cls( + getattr(self, operation_name), + paginator_config, + operation_model, + ) + return paginator + + def can_paginate(self, operation_name): + """Check if an operation can be paginated. + + :type operation_name: string + :param operation_name: The operation name. This is the same name + as the method name on the client. For example, if the + method name is ``create_foo``, and you'd normally invoke the + operation as ``client.create_foo(**kwargs)``, if the + ``create_foo`` operation can be paginated, you can use the + call ``client.get_paginator("create_foo")``. + + :return: ``True`` if the operation can be paginated, + ``False`` otherwise. + + """ + if 'page_config' not in self._cache: + try: + page_config = self._loader.load_service_model( + self._service_model.service_name, + 'paginators-1', + self._service_model.api_version, + )['pagination'] + self._cache['page_config'] = page_config + except DataNotFoundError: + self._cache['page_config'] = {} + actual_operation_name = self._PY_TO_OP_NAME[operation_name] + return actual_operation_name in self._cache['page_config'] + + def _get_waiter_config(self): + if 'waiter_config' not in self._cache: + try: + waiter_config = self._loader.load_service_model( + self._service_model.service_name, + 'waiters-2', + self._service_model.api_version, + ) + self._cache['waiter_config'] = waiter_config + except DataNotFoundError: + self._cache['waiter_config'] = {} + return self._cache['waiter_config'] + + def get_waiter(self, waiter_name): + """Returns an object that can wait for some condition. + + :type waiter_name: str + :param waiter_name: The name of the waiter to get. See the waiters + section of the service docs for a list of available waiters. + + :returns: The specified waiter object. + :rtype: ``botocore.waiter.Waiter`` + """ + config = self._get_waiter_config() + if not config: + raise ValueError(f"Waiter does not exist: {waiter_name}") + model = waiter.WaiterModel(config) + mapping = {} + for name in model.waiter_names: + mapping[xform_name(name)] = name + if waiter_name not in mapping: + raise ValueError(f"Waiter does not exist: {waiter_name}") + + return waiter.create_waiter_with_client( + mapping[waiter_name], model, self + ) + + @CachedProperty + def waiter_names(self): + """Returns a list of all available waiters.""" + config = self._get_waiter_config() + if not config: + return [] + model = waiter.WaiterModel(config) + # Waiter configs is a dict, we just want the waiter names + # which are the keys in the dict. + return [xform_name(name) for name in model.waiter_names] + + @property + def exceptions(self): + if self._exceptions is None: + self._exceptions = self._load_exceptions() + return self._exceptions + + def _load_exceptions(self): + return self._exceptions_factory.create_client_exceptions( + self._service_model + ) + + def _get_credentials(self): + """ + This private interface is subject to abrupt breaking changes, including + removal, in any botocore release. + """ + return self._request_signer._credentials + + +class ClientMeta: + """Holds additional client methods. 
+ + This class holds additional information for clients. It exists for + two reasons: + + * To give advanced functionality to clients + * To namespace additional client attributes from the operation + names which are mapped to methods at runtime. This avoids + ever running into collisions with operation names. + + """ + + def __init__( + self, + events, + client_config, + endpoint_url, + service_model, + method_to_api_mapping, + partition, + ): + self.events = events + self._client_config = client_config + self._endpoint_url = endpoint_url + self._service_model = service_model + self._method_to_api_mapping = method_to_api_mapping + self._partition = partition + + @property + def service_model(self): + return self._service_model + + @property + def region_name(self): + return self._client_config.region_name + + @property + def endpoint_url(self): + return self._endpoint_url + + @property + def config(self): + return self._client_config + + @property + def method_to_api_mapping(self): + return self._method_to_api_mapping + + @property + def partition(self): + return self._partition + + +def _get_configured_signature_version( + service_name, client_config, scoped_config +): + """ + Gets the manually configured signature version. + + :returns: the customer configured signature version, or None if no + signature version was configured. + """ + # Client config overrides everything. + if client_config and client_config.signature_version is not None: + return client_config.signature_version + + # Scoped config overrides picking from the endpoint metadata. + if scoped_config is not None: + # A given service may have service specific configuration in the + # config file, so we need to check there as well. + service_config = scoped_config.get(service_name) + if service_config is not None and isinstance(service_config, dict): + version = service_config.get('signature_version') + if version: + logger.debug( + "Switching signature version for service %s " + "to version %s based on config file override.", + service_name, + version, + ) + return version + return None diff --git a/venv/lib/python3.10/site-packages/botocore/compat.py b/venv/lib/python3.10/site-packages/botocore/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..6f79d43e3faf27960e421bf88447d2ba97b0c4e5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/compat.py @@ -0,0 +1,347 @@ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +import copy +import datetime +import sys +import inspect +import warnings +import hashlib +from http.client import HTTPMessage +import logging +import shlex +import re +import os +from collections import OrderedDict +from collections.abc import MutableMapping +from math import floor + +from botocore.vendored import six +from botocore.exceptions import MD5UnavailableError +from dateutil.tz import tzlocal +from urllib3 import exceptions + +logger = logging.getLogger(__name__) + + +class HTTPHeaders(HTTPMessage): + pass + +from urllib.parse import ( + quote, + urlencode, + unquote, + unquote_plus, + urlparse, + urlsplit, + urlunsplit, + urljoin, + parse_qsl, + parse_qs, +) +from http.client import HTTPResponse +from io import IOBase as _IOBase +from base64 import encodebytes +from email.utils import formatdate +from itertools import zip_longest +file_type = _IOBase +zip = zip + +# In python3, unquote takes a str() object, url decodes it, +# then takes the bytestring and decodes it to utf-8. +unquote_str = unquote_plus + +def set_socket_timeout(http_response, timeout): + """Set the timeout of the socket from an HTTPResponse. + + :param http_response: An instance of ``httplib.HTTPResponse`` + + """ + http_response._fp.fp.raw._sock.settimeout(timeout) + +def accepts_kwargs(func): + # In python3.4.1, there are backwards-incompatible + # changes when using getargspec with functools.partials. + return inspect.getfullargspec(func)[2] + +def ensure_unicode(s, encoding=None, errors=None): + # NOOP in Python 3, because every string is already unicode + return s + +def ensure_bytes(s, encoding='utf-8', errors='strict'): + if isinstance(s, str): + return s.encode(encoding, errors) + if isinstance(s, bytes): + return s + raise ValueError(f"Expected str or bytes, received {type(s)}.") + + +try: + import xml.etree.cElementTree as ETree +except ImportError: + # cElementTree does not exist from Python3.9+ + import xml.etree.ElementTree as ETree +XMLParseError = ETree.ParseError +import json + + +def filter_ssl_warnings(): + # Ignore warnings related to SNI as it is not being used in validations. + warnings.filterwarnings( + 'ignore', + message="A true SSLContext object is not available.*", + category=exceptions.InsecurePlatformWarning, + module=r".*urllib3\.util\.ssl_", + ) + + +@classmethod +def from_dict(cls, d): + new_instance = cls() + for key, value in d.items(): + new_instance[key] = value + return new_instance + + +@classmethod +def from_pairs(cls, pairs): + new_instance = cls() + for key, value in pairs: + new_instance[key] = value + return new_instance + + +HTTPHeaders.from_dict = from_dict +HTTPHeaders.from_pairs = from_pairs + + +def copy_kwargs(kwargs): + """ + This used to be a compat shim for 2.6 but is now just an alias. + """ + copy_kwargs = copy.copy(kwargs) + return copy_kwargs + + +def total_seconds(delta): + """ + Returns the total seconds in a ``datetime.timedelta``. + + This used to be a compat shim for 2.6 but is now just an alias. + + :param delta: The timedelta object + :type delta: ``datetime.timedelta`` + """ + return delta.total_seconds() + + +# Checks to see if md5 is available on this system. A given system might not +# have access to it for various reasons, such as FIPS mode being enabled. +try: + hashlib.md5() + MD5_AVAILABLE = True +except ValueError: + MD5_AVAILABLE = False + + +def get_md5(*args, **kwargs): + """ + Attempts to get an md5 hashing object. 
+ + :param args: Args to pass to the MD5 constructor + :param kwargs: Keyword arguments to pass to the MD5 constructor + :return: An MD5 hashing object if available. If MD5 is unavailable, + an MD5UnavailableError is raised. + """ + if MD5_AVAILABLE: + return hashlib.md5(*args, **kwargs) + else: + raise MD5UnavailableError() + + +def compat_shell_split(s, platform=None): + if platform is None: + platform = sys.platform + + if platform == "win32": + return _windows_shell_split(s) + else: + return shlex.split(s) + + +def _windows_shell_split(s): + """Splits up a windows command as the built-in command parser would. + + Windows has potentially bizarre rules depending on where you look. When + spawning a process via the Windows C runtime (which is what python does + when you call popen) the rules are as follows: + + https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments + + To summarize: + + * Only space and tab are valid delimiters + * Double quotes are the only valid quotes + * Backslash is interpreted literally unless it is part of a chain that + leads up to a double quote. Then the backslashes escape the backslashes, + and if there is an odd number the final backslash escapes the quote. + + :param s: The command string to split up into parts. + :return: A list of command components. + """ + if not s: + return [] + + components = [] + buff = [] + is_quoted = False + num_backslashes = 0 + for character in s: + if character == '\\': + # We can't simply append backslashes because we don't know if + # they are being used as escape characters or not. Instead we + # keep track of how many we've encountered and handle them when + # we encounter a different character. + num_backslashes += 1 + elif character == '"': + if num_backslashes > 0: + # The backslashes are in a chain leading up to a double + # quote, so they are escaping each other. + buff.append('\\' * int(floor(num_backslashes / 2))) + remainder = num_backslashes % 2 + num_backslashes = 0 + if remainder == 1: + # The number of backslashes is uneven, so they are also + # escaping the double quote, so it needs to be added to + # the current component buffer. + buff.append('"') + continue + + # We've encountered a double quote that is not escaped, + # so we toggle is_quoted. + is_quoted = not is_quoted + + # If there are quotes, then we may want an empty string. To be + # safe, we add an empty string to the buffer so that we make + # sure it sticks around if there's nothing else between quotes. + # If there is other stuff between quotes, the empty string will + # disappear during the joining process. + buff.append('') + elif character in [' ', '\t'] and not is_quoted: + # Since the backslashes aren't leading up to a quote, we put in + # the exact number of backslashes. + if num_backslashes > 0: + buff.append('\\' * num_backslashes) + num_backslashes = 0 + + # Excess whitespace is ignored, so only add to the components list + # if there is anything in the buffer. + if buff: + components.append(''.join(buff)) + buff = [] + else: + # Since the backslashes aren't leading up to a quote, we put in + # the exact number of backslashes. + if num_backslashes > 0: + buff.append('\\' * num_backslashes) + num_backslashes = 0 + buff.append(character) + + # Quotes must be terminated. + if is_quoted: + raise ValueError(f"No closing quotation in string: {s}") + + # There may be some leftover backslashes, so we need to add them in. + # There's no quote so we add the exact number. 
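+    # [Editor's note] Illustrative examples only, not part of botocore,
+    # showing the quoting rules documented above:
+    #
+    #     compat_shell_split('a "b c" d', platform='win32')  # ['a', 'b c', 'd']
+    #     compat_shell_split('a\\"b', platform='win32')  # ['a"b']
+    #     compat_shell_split('a "b', platform='win32')  # raises ValueError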
+ if num_backslashes > 0: + buff.append('\\' * num_backslashes) + + # Add the final component in if there is anything in the buffer. + if buff: + components.append(''.join(buff)) + + return components + + +def get_tzinfo_options(): + # Due to dateutil/dateutil#197, Windows may fail to parse times in the past + # with the system clock. We can alternatively fall back to tzwinlocal when + # this happens, which will get time info from the Windows registry. + if sys.platform == 'win32': + from dateutil.tz import tzwinlocal + + return (tzlocal, tzwinlocal) + else: + return (tzlocal,) + + +# Detect if CRT is available for use +try: + import awscrt.auth + + # Allow user opt-out if needed + disabled = os.environ.get('BOTO_DISABLE_CRT', "false") + HAS_CRT = not disabled.lower() == 'true' +except ImportError: + HAS_CRT = False + + +######################################################## +# urllib3 compat backports # +######################################################## + +# Vendoring IPv6 validation regex patterns from urllib3 +# https://github.com/urllib3/urllib3/blob/7e856c0/src/urllib3/util/url.py +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = ( + r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +) +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") + +# These are the characters that are stripped by post-bpo-43882 urlparse(). +UNSAFE_URL_CHARS = frozenset('\t\r\n') + +# Detect if gzip is available for use +try: + import gzip + HAS_GZIP = True +except ImportError: + HAS_GZIP = False diff --git a/venv/lib/python3.10/site-packages/botocore/compress.py b/venv/lib/python3.10/site-packages/botocore/compress.py new file mode 100644 index 0000000000000000000000000000000000000000..1f8577e84b32f6030b8d54b4c63bbef7473ce700 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/compress.py @@ -0,0 +1,126 @@ +# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. 
See the License for the specific +# language governing permissions and limitations under the License. +""" +NOTE: All functions in this module are considered private and are +subject to abrupt breaking changes. Please do not use them directly. + +""" + +import io +import logging +from gzip import GzipFile +from gzip import compress as gzip_compress + +from botocore.compat import urlencode +from botocore.utils import determine_content_length + +logger = logging.getLogger(__name__) + + +def maybe_compress_request(config, request_dict, operation_model): + """Attempt to compress the request body using the modeled encodings.""" + if _should_compress_request(config, request_dict, operation_model): + for encoding in operation_model.request_compression['encodings']: + encoder = COMPRESSION_MAPPING.get(encoding) + if encoder is not None: + logger.debug('Compressing request with %s encoding.', encoding) + request_dict['body'] = encoder(request_dict['body']) + _set_compression_header(request_dict['headers'], encoding) + return + else: + logger.debug('Unsupported compression encoding: %s', encoding) + + +def _should_compress_request(config, request_dict, operation_model): + if ( + config.disable_request_compression is not True + and config.signature_version != 'v2' + and operation_model.request_compression is not None + ): + if not _is_compressible_type(request_dict): + body_type = type(request_dict['body']) + log_msg = 'Body type %s does not support compression.' + logger.debug(log_msg, body_type) + return False + + if operation_model.has_streaming_input: + streaming_input = operation_model.get_streaming_input() + streaming_metadata = streaming_input.metadata + return 'requiresLength' not in streaming_metadata + + body_size = _get_body_size(request_dict['body']) + min_size = config.request_min_compression_size_bytes + return min_size <= body_size + + return False + + +def _is_compressible_type(request_dict): + body = request_dict['body'] + # Coerce dict to a format compatible with compression. + if isinstance(body, dict): + body = urlencode(body, doseq=True, encoding='utf-8').encode('utf-8') + request_dict['body'] = body + is_supported_type = isinstance(body, (str, bytes, bytearray)) + return is_supported_type or hasattr(body, 'read') + + +def _get_body_size(body): + size = determine_content_length(body) + if size is None: + logger.debug( + 'Unable to get length of the request body: %s. 
' + 'Skipping compression.', + body, + ) + size = 0 + return size + + +def _gzip_compress_body(body): + if isinstance(body, str): + return gzip_compress(body.encode('utf-8')) + elif isinstance(body, (bytes, bytearray)): + return gzip_compress(body) + elif hasattr(body, 'read'): + if hasattr(body, 'seek') and hasattr(body, 'tell'): + current_position = body.tell() + compressed_obj = _gzip_compress_fileobj(body) + body.seek(current_position) + return compressed_obj + return _gzip_compress_fileobj(body) + + +def _gzip_compress_fileobj(body): + compressed_obj = io.BytesIO() + with GzipFile(fileobj=compressed_obj, mode='wb') as gz: + while True: + chunk = body.read(8192) + if not chunk: + break + if isinstance(chunk, str): + chunk = chunk.encode('utf-8') + gz.write(chunk) + compressed_obj.seek(0) + return compressed_obj + + +def _set_compression_header(headers, encoding): + ce_header = headers.get('Content-Encoding') + if ce_header is None: + headers['Content-Encoding'] = encoding + else: + headers['Content-Encoding'] = f'{ce_header},{encoding}' + + +COMPRESSION_MAPPING = {'gzip': _gzip_compress_body} diff --git a/venv/lib/python3.10/site-packages/botocore/config.py b/venv/lib/python3.10/site-packages/botocore/config.py new file mode 100644 index 0000000000000000000000000000000000000000..87b52b6f1a312a313f834d676bec9d90107b1672 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/config.py @@ -0,0 +1,376 @@ +# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import copy + +from botocore.compat import OrderedDict +from botocore.endpoint import DEFAULT_TIMEOUT, MAX_POOL_CONNECTIONS +from botocore.exceptions import ( + InvalidMaxRetryAttemptsError, + InvalidRetryConfigurationError, + InvalidRetryModeError, + InvalidS3AddressingStyleError, +) + + +class Config: + """Advanced configuration for Botocore clients. + + :type region_name: str + :param region_name: The region to use in instantiating the client + + :type signature_version: str + :param signature_version: The signature version when signing requests. + + :type user_agent: str + :param user_agent: The value to use in the User-Agent header. + + :type user_agent_extra: str + :param user_agent_extra: The value to append to the current User-Agent + header value. + + :type user_agent_appid: str + :param user_agent_appid: A value that gets included in the User-Agent + string in the format "app/<user_agent_appid>". Allowed characters are + ASCII alphanumerics and ``!$%&'*+-.^_`|~``. All other characters will + be replaced by a ``-``. + + :type connect_timeout: float or int + :param connect_timeout: The time in seconds till a timeout exception is + thrown when attempting to make a connection. The default is 60 + seconds. + + :type read_timeout: float or int + :param read_timeout: The time in seconds till a timeout exception is + thrown when attempting to read from a connection. The default is + 60 seconds. + + :type parameter_validation: bool + :param parameter_validation: Whether parameter validation should occur + when serializing requests. 
The default is True. You can disable + parameter validation for performance reasons. Otherwise, it's + recommended to leave parameter validation enabled. + + :type max_pool_connections: int + :param max_pool_connections: The maximum number of connections to + keep in a connection pool. If this value is not set, the default + value of 10 is used. + + :type proxies: dict + :param proxies: A dictionary of proxy servers to use by protocol or + endpoint, e.g.: + ``{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}``. + The proxies are used on each request. + + :type proxies_config: dict + :param proxies_config: A dictionary of additional proxy configurations. + Valid keys are: + + * ``proxy_ca_bundle`` -- The path to a custom certificate bundle to use + when establishing SSL/TLS connections with proxy. + + * ``proxy_client_cert`` -- The path to a certificate for proxy + TLS client authentication. + + When a string is provided it is treated as a path to a proxy client + certificate. When a two element tuple is provided, it will be + interpreted as the path to the client certificate, and the path + to the certificate key. + + * ``proxy_use_forwarding_for_https`` -- For HTTPS proxies, + forward your requests to HTTPS destinations with an absolute + URI. We strongly recommend you only use this option with + trusted or corporate proxies. Value must be boolean. + + :type s3: dict + :param s3: A dictionary of S3 specific configurations. + Valid keys are: + + * ``use_accelerate_endpoint`` -- Refers to whether to use the S3 + Accelerate endpoint. The value must be a boolean. If True, the + client will use the S3 Accelerate endpoint. If the S3 Accelerate + endpoint is being used then the addressing style will always + be virtual. + + * ``payload_signing_enabled`` -- Refers to whether or not to SHA256 + sign sigv4 payloads. By default, this is disabled for streaming + uploads (UploadPart and PutObject). + + * ``addressing_style`` -- Refers to the style in which to address + s3 endpoints. Values must be a string that equals one of: + + * ``auto`` -- Addressing style is chosen for user. Depending + on the configuration of client, the endpoint may be addressed in + the virtual or the path style. Note that this is the default + behavior if no style is specified. + + * ``virtual`` -- Addressing style is always virtual. The name of the + bucket must be DNS compatible or an exception will be thrown. + Endpoints will be addressed as such: ``mybucket.s3.amazonaws.com`` + + * ``path`` -- Addressing style is always by path. Endpoints will be + addressed as such: ``s3.amazonaws.com/mybucket`` + + * ``us_east_1_regional_endpoint`` -- Refers to what S3 endpoint to use + when the region is configured to be us-east-1. Values must be a + string that equals: + + * ``regional`` -- Use the us-east-1.amazonaws.com endpoint if the + client is configured to use the us-east-1 region. + + * ``legacy`` -- Use the s3.amazonaws.com endpoint if the client is + configured to use the us-east-1 region. This is the default if + the configuration option is not specified. + + + :type retries: dict + :param retries: A dictionary for configuration related to retry behavior. + Valid keys are: + + * ``total_max_attempts`` -- An integer representing the maximum number of + total attempts that will be made on a single request. This includes + the initial request, so a value of 1 indicates that no requests + will be retried. If ``total_max_attempts`` and ``max_attempts`` + are both provided, ``total_max_attempts`` takes precedence. 
+ ``total_max_attempts`` is preferred over ``max_attempts`` because + it maps to the ``AWS_MAX_ATTEMPTS`` environment variable and + the ``max_attempts`` config file value. + * ``max_attempts`` -- An integer representing the maximum number of + retry attempts that will be made on a single request. For + example, setting this value to 2 will result in the request + being retried at most two times after the initial request. Setting + this value to 0 will result in no retries ever being attempted after + the initial request. If not provided, the number of retries will + default to the value specified in the service model, which is + typically four retries. + * ``mode`` -- A string representing the type of retry mode botocore + should use. Valid values are: + + * ``legacy`` - The pre-existing retry behavior. + + * ``standard`` - The standardized set of retry rules. This will also + default to 3 max attempts unless overridden. + + * ``adaptive`` - Retries with additional client side throttling. + + :type client_cert: str, (str, str) + :param client_cert: The path to a certificate for TLS client authentication. + + When a string is provided it is treated as a path to a client + certificate to be used when creating a TLS connection. + + If a client key is to be provided alongside the client certificate the + client_cert should be set to a tuple of length two where the first + element is the path to the client certificate and the second element is + the path to the certificate key. + + :type inject_host_prefix: bool + :param inject_host_prefix: Whether host prefix injection should occur. + + Defaults to True. + + Setting this to False disables the injection of operation parameters + into the prefix of the hostname. This is useful for clients providing + custom endpoints that should not have their host prefix modified. + + :type use_dualstack_endpoint: bool + :param use_dualstack_endpoint: Setting to True enables dualstack + endpoint resolution. + + Defaults to None. + + :type use_fips_endpoint: bool + :param use_fips_endpoint: Setting to True enables fips + endpoint resolution. + + Defaults to None. + + :type ignore_configured_endpoint_urls: bool + :param ignore_configured_endpoint_urls: Setting to True disables use + of endpoint URLs provided via environment variables and + the shared configuration file. + + Defaults to None. + + :type tcp_keepalive: bool + :param tcp_keepalive: Enables the TCP Keep-Alive socket option used when + creating new connections if set to True. + + Defaults to False. + + :type request_min_compression_size_bytes: int + :param request_min_compression_size_bytes: The minimum size in bytes that a + request body should be to trigger compression. All requests with + streaming input that don't contain the ``requiresLength`` trait will be + compressed regardless of this setting. + + Defaults to None. + + :type disable_request_compression: bool + :param disable_request_compression: Disables request body compression if + set to True. + + Defaults to None. + + :type client_context_params: dict + :param client_context_params: A dictionary of parameters specific to + individual services. If available, valid parameters can be found in + the ``Client Context Parameters`` section of the service client's + documentation. Invalid parameters or ones that are not used by the + specified service will be ignored. + + Defaults to None. 
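A minimal usage sketch of the options documented above (the specific values are illustrative only, not recommendations):

    from botocore.config import Config

    # Build a base configuration and a second one with S3-specific options.
    base = Config(
        region_name='us-west-2',
        connect_timeout=5,
        read_timeout=60,
        retries={'max_attempts': 5, 'mode': 'standard'},
    )
    s3_overrides = Config(s3={'addressing_style': 'virtual'})

    # merge() returns a new Config; non-default values from the argument win.
    combined = base.merge(s3_overrides)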
+ """ + + OPTION_DEFAULTS = OrderedDict( + [ + ('region_name', None), + ('signature_version', None), + ('user_agent', None), + ('user_agent_extra', None), + ('user_agent_appid', None), + ('connect_timeout', DEFAULT_TIMEOUT), + ('read_timeout', DEFAULT_TIMEOUT), + ('parameter_validation', True), + ('max_pool_connections', MAX_POOL_CONNECTIONS), + ('proxies', None), + ('proxies_config', None), + ('s3', None), + ('retries', None), + ('client_cert', None), + ('inject_host_prefix', True), + ('endpoint_discovery_enabled', None), + ('use_dualstack_endpoint', None), + ('use_fips_endpoint', None), + ('ignore_configured_endpoint_urls', None), + ('defaults_mode', None), + ('tcp_keepalive', None), + ('request_min_compression_size_bytes', None), + ('disable_request_compression', None), + ('client_context_params', None), + ] + ) + + NON_LEGACY_OPTION_DEFAULTS = { + 'connect_timeout': None, + } + + def __init__(self, *args, **kwargs): + self._user_provided_options = self._record_user_provided_options( + args, kwargs + ) + + # Merge the user_provided options onto the default options + config_vars = copy.copy(self.OPTION_DEFAULTS) + defaults_mode = self._user_provided_options.get( + 'defaults_mode', 'legacy' + ) + if defaults_mode != 'legacy': + config_vars.update(self.NON_LEGACY_OPTION_DEFAULTS) + config_vars.update(self._user_provided_options) + + # Set the attributes based on the config_vars + for key, value in config_vars.items(): + setattr(self, key, value) + + # Validate the s3 options + self._validate_s3_configuration(self.s3) + + self._validate_retry_configuration(self.retries) + + def _record_user_provided_options(self, args, kwargs): + option_order = list(self.OPTION_DEFAULTS) + user_provided_options = {} + + # Iterate through the kwargs passed through to the constructor and + # map valid keys to the dictionary + for key, value in kwargs.items(): + if key in self.OPTION_DEFAULTS: + user_provided_options[key] = value + # The key must exist in the available options + else: + raise TypeError(f"Got unexpected keyword argument '{key}'") + + # The number of args should not be longer than the allowed + # options + if len(args) > len(option_order): + raise TypeError( + f"Takes at most {len(option_order)} arguments ({len(args)} given)" + ) + + # Iterate through the args passed through to the constructor and map + # them to appropriate keys. 
+ for i, arg in enumerate(args): + # If a kwarg was specified for the arg, then error out + if option_order[i] in user_provided_options: + raise TypeError( + f"Got multiple values for keyword argument '{option_order[i]}'" + ) + user_provided_options[option_order[i]] = arg + + return user_provided_options + + def _validate_s3_configuration(self, s3): + if s3 is not None: + addressing_style = s3.get('addressing_style') + if addressing_style not in ['virtual', 'auto', 'path', None]: + raise InvalidS3AddressingStyleError( + s3_addressing_style=addressing_style + ) + + def _validate_retry_configuration(self, retries): + valid_options = ('max_attempts', 'mode', 'total_max_attempts') + valid_modes = ('legacy', 'standard', 'adaptive') + if retries is not None: + for key, value in retries.items(): + if key not in valid_options: + raise InvalidRetryConfigurationError( + retry_config_option=key, + valid_options=valid_options, + ) + if key == 'max_attempts' and value < 0: + raise InvalidMaxRetryAttemptsError( + provided_max_attempts=value, + min_value=0, + ) + if key == 'total_max_attempts' and value < 1: + raise InvalidMaxRetryAttemptsError( + provided_max_attempts=value, + min_value=1, + ) + if key == 'mode' and value not in valid_modes: + raise InvalidRetryModeError( + provided_retry_mode=value, + valid_modes=valid_modes, + ) + + def merge(self, other_config): + """Merges the config object with another config object. + + This will merge in all non-default values from the provided config + and return a new config object. + + :type other_config: botocore.config.Config + :param other_config: Another config object to merge with. The values + in the provided config object will take precedence in the merging. + + :returns: A config object built from the merged values of both + config objects. + """ + # Make a copy of the current attributes in the config object. + config_options = copy.copy(self._user_provided_options) + + # Merge in the user provided options from the other config + config_options.update(other_config._user_provided_options) + + # Return a new config object with the merged properties. + return Config(**config_options) diff --git a/venv/lib/python3.10/site-packages/botocore/configloader.py b/venv/lib/python3.10/site-packages/botocore/configloader.py new file mode 100644 index 0000000000000000000000000000000000000000..0b6c82bcad6061a82cb39926141734a03dc3bd8c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/configloader.py @@ -0,0 +1,287 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import configparser +import copy +import os +import shlex +import sys + +import botocore.exceptions + + +def multi_file_load_config(*filenames): + """Load and combine multiple INI configs with profiles. + + This function will take a list of filenames and return + a single dictionary that represents the merging of the loaded + config files.
+ + If any of the provided filenames does not exist, then that file + is ignored. It is therefore ok to provide a list of filenames, + some of which may not exist. + + Configuration files are **not** deep merged, only the top level + keys are merged. The filenames should be passed in order of + precedence. The first config file has precedence over the + second config file, which has precedence over the third config file, + etc. The only exception to this is that the "profiles" key is + merged to combine profiles from multiple config files into a + single profiles mapping. However, if a profile is defined in + multiple config files, then the config file with the highest + precedence is used. Profile values themselves are not merged. + For example:: + + FileA FileB FileC + [foo] [foo] [bar] + a=1 a=2 a=3 + b=2 + + [bar] [baz] [profile a] + a=2 a=3 region=e + + [profile a] [profile b] [profile c] + region=c region=d region=f + + The final result of ``multi_file_load_config(FileA, FileB, FileC)`` + would be:: + + {"foo": {"a": 1}, "bar": {"a": 2}, "baz": {"a": 3}, + "profiles": {"a": {"region": "c"}, "b": {"region": "d"}, + "c": {"region": "f"}}} + + Note that the "foo" key comes from FileA, even though it's defined in both + FileA and FileB. Because "foo" was defined in FileA first, the values + for "foo" from FileA are used and the values for "foo" from FileB are + ignored. Also note where the profiles originate from. Profile "a" + comes from FileA, profile "b" comes from FileB, and profile "c" comes + from FileC. + + """ + configs = [] + profiles = [] + for filename in filenames: + try: + loaded = load_config(filename) + except botocore.exceptions.ConfigNotFound: + continue + profiles.append(loaded.pop('profiles')) + configs.append(loaded) + merged_config = _merge_list_of_dicts(configs) + merged_profiles = _merge_list_of_dicts(profiles) + merged_config['profiles'] = merged_profiles + return merged_config + + +def _merge_list_of_dicts(list_of_dicts): + merged_dicts = {} + for single_dict in list_of_dicts: + for key, value in single_dict.items(): + if key not in merged_dicts: + merged_dicts[key] = value + return merged_dicts + + +def load_config(config_filename): + """Parse an INI config with profiles. + + This will parse an INI config file and map top level profiles + into a top level "profiles" key. + + If you want to parse an INI file and map all section names to + top level keys, use ``raw_config_parse`` instead. + + """ + parsed = raw_config_parse(config_filename) + return build_profile_map(parsed) + + +def raw_config_parse(config_filename, parse_subsections=True): + """Returns the parsed INI config contents. + + Each section name is a top level key. + + :param config_filename: The name of the INI file to parse + + :param parse_subsections: If True, parse indented blocks as + subsections that represent their own configuration dictionary. + For example, if the config file had the contents:: + + s3 = + signature_version = s3v4 + addressing_style = path + + The resulting ``raw_config_parse`` would be:: + + {'s3': {'signature_version': 's3v4', 'addressing_style': 'path'}} + + If False, do not try to parse subsections and return the indented + block as its literal value:: + + {'s3': '\nsignature_version = s3v4\naddressing_style = path'} + + :returns: A dict with keys for each profile found in the config + file and the value of each key being a dict containing name + value pairs found in that profile.
+ + :raises: ConfigNotFound, ConfigParseError + """ + config = {} + path = config_filename + if path is not None: + path = os.path.expandvars(path) + path = os.path.expanduser(path) + if not os.path.isfile(path): + raise botocore.exceptions.ConfigNotFound(path=_unicode_path(path)) + cp = configparser.RawConfigParser() + try: + cp.read([path]) + except (configparser.Error, UnicodeDecodeError) as e: + raise botocore.exceptions.ConfigParseError( + path=_unicode_path(path), error=e + ) from None + else: + for section in cp.sections(): + config[section] = {} + for option in cp.options(section): + config_value = cp.get(section, option) + if parse_subsections and config_value.startswith('\n'): + # Then we need to parse the inner contents as + # hierarchical. We support a single level + # of nesting for now. + try: + config_value = _parse_nested(config_value) + except ValueError as e: + raise botocore.exceptions.ConfigParseError( + path=_unicode_path(path), error=e + ) from None + config[section][option] = config_value + return config + + +def _unicode_path(path): + if isinstance(path, str): + return path + # According to the documentation getfilesystemencoding can return None + # on unix in which case the default encoding is used instead. + filesystem_encoding = sys.getfilesystemencoding() + if filesystem_encoding is None: + filesystem_encoding = sys.getdefaultencoding() + return path.decode(filesystem_encoding, 'replace') + + +def _parse_nested(config_value): + # Given a value like this: + # \n + # foo = bar + # bar = baz + # We need to parse this into + # {'foo': 'bar', 'bar': 'baz'} + parsed = {} + for line in config_value.splitlines(): + line = line.strip() + if not line: + continue + # The caller will catch ValueError + # and raise an appropriate error + # if this fails. + key, value = line.split('=', 1) + parsed[key.strip()] = value.strip() + return parsed + + +def _parse_section(key, values): + result = {} + try: + parts = shlex.split(key) + except ValueError: + return result + if len(parts) == 2: + result[parts[1]] = values + return result + + +def build_profile_map(parsed_ini_config): + """Convert the parsed INI config into a profile map. + + The config file format requires that every profile except the + default be prepended with "profile", e.g.:: + + [profile test] + aws_... = foo + aws_... = bar + + [profile bar] + aws_... = foo + aws_... = bar + + # This is *not* a profile + [preview] + otherstuff = 1 + + # Neither is this + [foobar] + morestuff = 2 + + The build_profile_map function will take a parsed INI config file where each top + level key represents a section name, and convert it into a format where all + the profiles are under a single top level "profiles" key, and each key in + the sub dictionary is a profile name. For example, the above config file + would be converted from:: + + {"profile test": {"aws_...": "foo", "aws...": "bar"}, + "profile bar": {"aws...": "foo", "aws...": "bar"}, + "preview": {"otherstuff": ...}, + "foobar": {"morestuff": ...}, + } + + into:: + + {"profiles": {"test": {"aws_...": "foo", "aws...": "bar"}, + "bar": {"aws...": "foo", "aws...": "bar"}, + "preview": {"otherstuff": ...}, + "foobar": {"morestuff": ...}, + } + + If there are no profiles in the provided parsed INI contents, then + an empty dict will be the value associated with the ``profiles`` key. + + .. note:: + + This will not mutate the passed in parsed_ini_config. Instead it will + make a deepcopy and return that value.
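A quick sketch of the loaders above; it assumes a standard shared config file exists at ``~/.aws/config``:

    import botocore.configloader

    # load_config() runs raw_config_parse() and then build_profile_map(),
    # so profile sections land under a single 'profiles' key.
    config = botocore.configloader.load_config('~/.aws/config')
    print(list(config['profiles']))   # profile names, 'profile ' prefix stripped

    # raw_config_parse() keeps section names exactly as written.
    raw = botocore.configloader.raw_config_parse('~/.aws/config')
    print(list(raw))                  # e.g. 'default', 'profile dev'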
+ + """ + parsed_config = copy.deepcopy(parsed_ini_config) + profiles = {} + sso_sessions = {} + services = {} + final_config = {} + for key, values in parsed_config.items(): + if key.startswith("profile"): + profiles.update(_parse_section(key, values)) + elif key.startswith("sso-session"): + sso_sessions.update(_parse_section(key, values)) + elif key.startswith("services"): + services.update(_parse_section(key, values)) + elif key == 'default': + # default section is special and is considered a profile + # name but we don't require you use 'profile "default"' + # as a section. + profiles[key] = values + else: + final_config[key] = values + final_config['profiles'] = profiles + final_config['sso_sessions'] = sso_sessions + final_config['services'] = services + return final_config diff --git a/venv/lib/python3.10/site-packages/botocore/configprovider.py b/venv/lib/python3.10/site-packages/botocore/configprovider.py new file mode 100644 index 0000000000000000000000000000000000000000..5ed2dc63ce3ae787b6c59e0a60b775c1f8fb94e5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/configprovider.py @@ -0,0 +1,1015 @@ +# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""This module contains the interface for controlling how configuration +is loaded. +""" + +import copy +import logging +import os + +from botocore import utils +from botocore.exceptions import InvalidConfigError + +logger = logging.getLogger(__name__) + + +#: A default dictionary that maps the logical names for session variables +#: to the specific environment variables and configuration file names +#: that contain the values for these variables. +#: When creating a new Session object, you can pass in your own dictionary +#: to remap the logical names or to add new logical names. You can then +#: get the current value for these variables by using the +#: ``get_config_variable`` method of the :class:`botocore.session.Session` +#: class. +#: These form the keys of the dictionary. The values in the dictionary +#: are tuples of (, , , +#: ). +#: The conversion func is a function that takes the configuration value +#: as an argument and returns the converted value. If this value is +#: None, then the configuration value is returned unmodified. This +#: conversion function can be used to type convert config values to +#: values other than the default values of strings. +#: The ``profile`` and ``config_file`` variables should always have a +#: None value for the first entry in the tuple because it doesn't make +#: sense to look inside the config file for the location of the config +#: file or for the default profile to use. +#: The ``config_name`` is the name to look for in the configuration file, +#: the ``env var`` is the OS environment variable (``os.environ``) to +#: use, and ``default_value`` is the value to use if no value is otherwise +#: found. +#: NOTE: Fixing the spelling of this variable would be a breaking change. +#: Please leave as is. 
+BOTOCORE_DEFAUT_SESSION_VARIABLES = { + # logical: config_file, env_var, default_value, conversion_func + 'profile': (None, ['AWS_DEFAULT_PROFILE', 'AWS_PROFILE'], None, None), + 'region': ('region', 'AWS_DEFAULT_REGION', None, None), + 'data_path': ('data_path', 'AWS_DATA_PATH', None, None), + 'config_file': (None, 'AWS_CONFIG_FILE', '~/.aws/config', None), + 'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE', None, None), + 'api_versions': ('api_versions', None, {}, None), + # This is the shared credentials file amongst sdks. + 'credentials_file': ( + None, + 'AWS_SHARED_CREDENTIALS_FILE', + '~/.aws/credentials', + None, + ), + # These variables only exist in the config file. + # This is the number of seconds until we time out a request to + # the instance metadata service. + 'metadata_service_timeout': ( + 'metadata_service_timeout', + 'AWS_METADATA_SERVICE_TIMEOUT', + 1, + int, + ), + # This is the number of request attempts we make until we give + # up trying to retrieve data from the instance metadata service. + 'metadata_service_num_attempts': ( + 'metadata_service_num_attempts', + 'AWS_METADATA_SERVICE_NUM_ATTEMPTS', + 1, + int, + ), + 'ec2_metadata_service_endpoint': ( + 'ec2_metadata_service_endpoint', + 'AWS_EC2_METADATA_SERVICE_ENDPOINT', + None, + None, + ), + 'ec2_metadata_service_endpoint_mode': ( + 'ec2_metadata_service_endpoint_mode', + 'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE', + None, + None, + ), + 'ec2_metadata_v1_disabled': ( + 'ec2_metadata_v1_disabled', + 'AWS_EC2_METADATA_V1_DISABLED', + False, + utils.ensure_boolean, + ), + 'imds_use_ipv6': ( + 'imds_use_ipv6', + 'AWS_IMDS_USE_IPV6', + False, + utils.ensure_boolean, + ), + 'use_dualstack_endpoint': ( + 'use_dualstack_endpoint', + 'AWS_USE_DUALSTACK_ENDPOINT', + None, + utils.ensure_boolean, + ), + 'use_fips_endpoint': ( + 'use_fips_endpoint', + 'AWS_USE_FIPS_ENDPOINT', + None, + utils.ensure_boolean, + ), + 'ignore_configured_endpoint_urls': ( + 'ignore_configured_endpoint_urls', + 'AWS_IGNORE_CONFIGURED_ENDPOINT_URLS', + None, + utils.ensure_boolean, + ), + 'parameter_validation': ('parameter_validation', None, True, None), + # Client side monitoring configurations. + # Note: These configurations are considered internal to botocore. + # Do not use them until publicly documented. + 'csm_enabled': ( + 'csm_enabled', + 'AWS_CSM_ENABLED', + False, + utils.ensure_boolean, + ), + 'csm_host': ('csm_host', 'AWS_CSM_HOST', '127.0.0.1', None), + 'csm_port': ('csm_port', 'AWS_CSM_PORT', 31000, int), + 'csm_client_id': ('csm_client_id', 'AWS_CSM_CLIENT_ID', '', None), + # Endpoint discovery configuration + 'endpoint_discovery_enabled': ( + 'endpoint_discovery_enabled', + 'AWS_ENDPOINT_DISCOVERY_ENABLED', + 'auto', + None, + ), + 'sts_regional_endpoints': ( + 'sts_regional_endpoints', + 'AWS_STS_REGIONAL_ENDPOINTS', + 'legacy', + None, + ), + 'retry_mode': ('retry_mode', 'AWS_RETRY_MODE', 'legacy', None), + 'defaults_mode': ('defaults_mode', 'AWS_DEFAULTS_MODE', 'legacy', None), + # We can't have a default here for v1 because we need to defer to + # whatever the defaults are in _retry.json. 
+ 'max_attempts': ('max_attempts', 'AWS_MAX_ATTEMPTS', None, int), + 'user_agent_appid': ('sdk_ua_app_id', 'AWS_SDK_UA_APP_ID', None, None), + 'request_min_compression_size_bytes': ( + 'request_min_compression_size_bytes', + 'AWS_REQUEST_MIN_COMPRESSION_SIZE_BYTES', + 10240, + None, + ), + 'disable_request_compression': ( + 'disable_request_compression', + 'AWS_DISABLE_REQUEST_COMPRESSION', + False, + utils.ensure_boolean, + ), +} +# A mapping for the s3 specific configuration vars. These are the configuration +# vars that typically go in the s3 section of the config file. This mapping +# follows the same schema as the previous session variable mapping. +DEFAULT_S3_CONFIG_VARS = { + 'addressing_style': (('s3', 'addressing_style'), None, None, None), + 'use_accelerate_endpoint': ( + ('s3', 'use_accelerate_endpoint'), + None, + None, + utils.ensure_boolean, + ), + 'use_dualstack_endpoint': ( + ('s3', 'use_dualstack_endpoint'), + None, + None, + utils.ensure_boolean, + ), + 'payload_signing_enabled': ( + ('s3', 'payload_signing_enabled'), + None, + None, + utils.ensure_boolean, + ), + 'use_arn_region': ( + ['s3_use_arn_region', ('s3', 'use_arn_region')], + 'AWS_S3_USE_ARN_REGION', + None, + utils.ensure_boolean, + ), + 'us_east_1_regional_endpoint': ( + [ + 's3_us_east_1_regional_endpoint', + ('s3', 'us_east_1_regional_endpoint'), + ], + 'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT', + None, + None, + ), + 's3_disable_multiregion_access_points': ( + ('s3', 's3_disable_multiregion_access_points'), + 'AWS_S3_DISABLE_MULTIREGION_ACCESS_POINTS', + None, + utils.ensure_boolean, + ), +} +# A mapping for the proxy specific configuration vars. These are +# used to configure how botocore interacts with proxy setups while +# sending requests. +DEFAULT_PROXIES_CONFIG_VARS = { + 'proxy_ca_bundle': ('proxy_ca_bundle', None, None, None), + 'proxy_client_cert': ('proxy_client_cert', None, None, None), + 'proxy_use_forwarding_for_https': ( + 'proxy_use_forwarding_for_https', + None, + None, + utils.normalize_boolean, + ), +} + + +def create_botocore_default_config_mapping(session): + chain_builder = ConfigChainFactory(session=session) + config_mapping = _create_config_chain_mapping( + chain_builder, BOTOCORE_DEFAUT_SESSION_VARIABLES + ) + config_mapping['s3'] = SectionConfigProvider( + 's3', + session, + _create_config_chain_mapping(chain_builder, DEFAULT_S3_CONFIG_VARS), + ) + config_mapping['proxies_config'] = SectionConfigProvider( + 'proxies_config', + session, + _create_config_chain_mapping( + chain_builder, DEFAULT_PROXIES_CONFIG_VARS + ), + ) + return config_mapping + + +def _create_config_chain_mapping(chain_builder, config_variables): + mapping = {} + for logical_name, config in config_variables.items(): + mapping[logical_name] = chain_builder.create_config_chain( + instance_name=logical_name, + env_var_names=config[1], + config_property_names=config[0], + default=config[2], + conversion_func=config[3], + ) + return mapping + + +class DefaultConfigResolver: + def __init__(self, default_config_data): + self._base_default_config = default_config_data['base'] + self._modes = default_config_data['modes'] + self._resolved_default_configurations = {} + + def _resolve_default_values_by_mode(self, mode): + default_config = self._base_default_config.copy() + modifications = self._modes.get(mode) + + for config_var in modifications: + default_value = default_config[config_var] + modification_dict = modifications[config_var] + modification = list(modification_dict.keys())[0] + modification_value = 
modification_dict[modification] + if modification == 'multiply': + default_value *= modification_value + elif modification == 'add': + default_value += modification_value + elif modification == 'override': + default_value = modification_value + default_config[config_var] = default_value + return default_config + + def get_default_modes(self): + default_modes = ['legacy', 'auto'] + default_modes.extend(self._modes.keys()) + return default_modes + + def get_default_config_values(self, mode): + if mode not in self._resolved_default_configurations: + defaults = self._resolve_default_values_by_mode(mode) + self._resolved_default_configurations[mode] = defaults + return self._resolved_default_configurations[mode] + + +class ConfigChainFactory: + """Factory class to create our most common configuration chain case. + + This is a convenience class to construct configuration chains that follow + our most common pattern. This is to prevent ordering them incorrectly, + and to make the config chain construction more readable. + """ + + def __init__(self, session, environ=None): + """Initialize a ConfigChainFactory. + + :type session: :class:`botocore.session.Session` + :param session: This is the session that should be used to look up + values from the config file. + + :type environ: dict + :param environ: A mapping to use for environment variables. If this + is not provided it will default to use os.environ. + """ + self._session = session + if environ is None: + environ = os.environ + self._environ = environ + + def create_config_chain( + self, + instance_name=None, + env_var_names=None, + config_property_names=None, + default=None, + conversion_func=None, + ): + """Build a config chain following the standard botocore pattern. + + In botocore most of our config chains follow the precedence: + session_instance_variables, environment, config_file, default_value. + + This is a convenience function for creating a chain that follows + that precedence. + + :type instance_name: str + :param instance_name: This indicates what session instance variable + corresponds to this config value. If it is None it will not be + added to the chain. + + :type env_var_names: str or list of str or None + :param env_var_names: One or more environment variable names to + search for this value. They are searched in order. If it is None + it will not be added to the chain. + + :type config_property_names: str/tuple or list of str/tuple or None + :param config_property_names: One or more strings or tuples + representing the name of the key in the config file for this + config option. They are searched in order. If it is None it will + not be added to the chain. + + :type default: Any + :param default: Any constant value to be returned. + + :type conversion_func: None or callable + :param conversion_func: If this value is None then it has no effect on + the return type. Otherwise, it is treated as a function that will + be used to convert the provided value. + + :rtype: ChainProvider + :returns: A ChainProvider that resolves in the order env_var_names -> + config_property_name -> default. Any arguments that were None are + omitted from the chain.
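A sketch of the precedence just described, using an illustrative environment variable name and an explicit environ mapping so the result is deterministic:

    import botocore.session
    from botocore.configprovider import ConfigChainFactory

    session = botocore.session.Session()
    factory = ConfigChainFactory(
        session=session, environ={'MY_REGION_VAR': 'eu-west-1'}
    )
    chain = factory.create_config_chain(
        env_var_names='MY_REGION_VAR',     # illustrative env var name
        config_property_names='region',
        default='us-east-1',
    )
    print(chain.provide())  # 'eu-west-1': the environment provider wins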
+ """ + providers = [] + if instance_name is not None: + providers.append( + InstanceVarProvider( + instance_var=instance_name, session=self._session + ) + ) + if env_var_names is not None: + providers.extend(self._get_env_providers(env_var_names)) + if config_property_names is not None: + providers.extend( + self._get_scoped_config_providers(config_property_names) + ) + if default is not None: + providers.append(ConstantProvider(value=default)) + + return ChainProvider( + providers=providers, + conversion_func=conversion_func, + ) + + def _get_env_providers(self, env_var_names): + env_var_providers = [] + if not isinstance(env_var_names, list): + env_var_names = [env_var_names] + for env_var_name in env_var_names: + env_var_providers.append( + EnvironmentProvider(name=env_var_name, env=self._environ) + ) + return env_var_providers + + def _get_scoped_config_providers(self, config_property_names): + scoped_config_providers = [] + if not isinstance(config_property_names, list): + config_property_names = [config_property_names] + for config_property_name in config_property_names: + scoped_config_providers.append( + ScopedConfigProvider( + config_var_name=config_property_name, + session=self._session, + ) + ) + return scoped_config_providers + + +class ConfigValueStore: + """The ConfigValueStore object stores configuration values.""" + + def __init__(self, mapping=None): + """Initialize a ConfigValueStore. + + :type mapping: dict + :param mapping: The mapping parameter is a map of string to a subclass + of BaseProvider. When a config variable is asked for via the + get_config_variable method, the corresponding provider will be + invoked to load the value. + """ + self._overrides = {} + self._mapping = {} + if mapping is not None: + for logical_name, provider in mapping.items(): + self.set_config_provider(logical_name, provider) + + def __deepcopy__(self, memo): + config_store = ConfigValueStore(copy.deepcopy(self._mapping, memo)) + for logical_name, override_value in self._overrides.items(): + config_store.set_config_variable(logical_name, override_value) + + return config_store + + def __copy__(self): + config_store = ConfigValueStore(copy.copy(self._mapping)) + for logical_name, override_value in self._overrides.items(): + config_store.set_config_variable(logical_name, override_value) + + return config_store + + def get_config_variable(self, logical_name): + """ + Retrieve the value associeated with the specified logical_name + from the corresponding provider. If no value is found None will + be returned. + + :type logical_name: str + :param logical_name: The logical name of the session variable + you want to retrieve. This name will be mapped to the + appropriate environment variable name for this session as + well as the appropriate config file entry. + + :returns: value of variable or None if not defined. + """ + if logical_name in self._overrides: + return self._overrides[logical_name] + if logical_name not in self._mapping: + return None + provider = self._mapping[logical_name] + return provider.provide() + + def get_config_provider(self, logical_name): + """ + Retrieve the provider associated with the specified logical_name. + If no provider is found None will be returned. + + :type logical_name: str + :param logical_name: The logical name of the session variable + you want to retrieve. This name will be mapped to the + appropriate environment variable name for this session as + well as the appropriate config file entry. + + :returns: configuration provider or None if not defined. 
+ """ + if ( + logical_name in self._overrides + or logical_name not in self._mapping + ): + return None + provider = self._mapping[logical_name] + return provider + + def set_config_variable(self, logical_name, value): + """Set a configuration variable to a specific value. + + By using this method, you can override the normal lookup + process used in ``get_config_variable`` by explicitly setting + a value. Subsequent calls to ``get_config_variable`` will + use the ``value``. This gives you per-session specific + configuration values. + + :: + >>> # Assume logical name 'foo' maps to env var 'FOO' + >>> os.environ['FOO'] = 'myvalue' + >>> s.get_config_variable('foo') + 'myvalue' + >>> s.set_config_variable('foo', 'othervalue') + >>> s.get_config_variable('foo') + 'othervalue' + + :type logical_name: str + :param logical_name: The logical name of the session variable + you want to set. These are the keys in ``SESSION_VARIABLES``. + + :param value: The value to associate with the config variable. + """ + self._overrides[logical_name] = value + + def clear_config_variable(self, logical_name): + """Remove an override config variable from the session. + + :type logical_name: str + :param logical_name: The name of the parameter to clear the override + value from. + """ + self._overrides.pop(logical_name, None) + + def set_config_provider(self, logical_name, provider): + """Set the provider for a config value. + + This provides control over how a particular configuration value is + loaded. This replaces the provider for ``logical_name`` with the new + ``provider``. + + :type logical_name: str + :param logical_name: The name of the config value to change the config + provider for. + + :type provider: :class:`botocore.configprovider.BaseProvider` + :param provider: The new provider that should be responsible for + providing a value for the config named ``logical_name``. + """ + self._mapping[logical_name] = provider + + +class SmartDefaultsConfigStoreFactory: + def __init__(self, default_config_resolver, imds_region_provider): + self._default_config_resolver = default_config_resolver + self._imds_region_provider = imds_region_provider + # Initializing _instance_metadata_region as None so we + # can fetch region in a lazy fashion only when needed. 
+ self._instance_metadata_region = None + + def merge_smart_defaults(self, config_store, mode, region_name): + if mode == 'auto': + mode = self.resolve_auto_mode(region_name) + default_configs = ( + self._default_config_resolver.get_default_config_values(mode) + ) + for config_var in default_configs: + config_value = default_configs[config_var] + method = getattr(self, f'_set_{config_var}', None) + if method: + method(config_store, config_value) + + def resolve_auto_mode(self, region_name): + current_region = None + if os.environ.get('AWS_EXECUTION_ENV'): + default_region = os.environ.get('AWS_DEFAULT_REGION') + current_region = os.environ.get('AWS_REGION', default_region) + if not current_region: + if self._instance_metadata_region: + current_region = self._instance_metadata_region + else: + try: + current_region = self._imds_region_provider.provide() + self._instance_metadata_region = current_region + except Exception: + pass + + if current_region: + if region_name == current_region: + return 'in-region' + else: + return 'cross-region' + return 'standard' + + def _update_provider(self, config_store, variable, value): + original_provider = config_store.get_config_provider(variable) + default_provider = ConstantProvider(value) + if isinstance(original_provider, ChainProvider): + chain_provider_copy = copy.deepcopy(original_provider) + chain_provider_copy.set_default_provider(default_provider) + default_provider = chain_provider_copy + elif isinstance(original_provider, BaseProvider): + default_provider = ChainProvider( + providers=[original_provider, default_provider] + ) + config_store.set_config_provider(variable, default_provider) + + def _update_section_provider( + self, config_store, section_name, variable, value + ): + section_provider_copy = copy.deepcopy( + config_store.get_config_provider(section_name) + ) + section_provider_copy.set_default_provider( + variable, ConstantProvider(value) + ) + config_store.set_config_provider(section_name, section_provider_copy) + + def _set_retryMode(self, config_store, value): + self._update_provider(config_store, 'retry_mode', value) + + def _set_stsRegionalEndpoints(self, config_store, value): + self._update_provider(config_store, 'sts_regional_endpoints', value) + + def _set_s3UsEast1RegionalEndpoints(self, config_store, value): + self._update_section_provider( + config_store, 's3', 'us_east_1_regional_endpoint', value + ) + + def _set_connectTimeoutInMillis(self, config_store, value): + self._update_provider(config_store, 'connect_timeout', value / 1000) + + +class BaseProvider: + """Base class for configuration value providers. + + A configuration provider has some method of providing a configuration + value. + """ + + def provide(self): + """Provide a config value.""" + raise NotImplementedError('provide') + + +class ChainProvider(BaseProvider): + """This provider wraps one or more other providers. + + Each provider in the chain is called; the first one returning a non-None + value is then returned. + """ + + def __init__(self, providers=None, conversion_func=None): + """Initialize a ChainProvider. + + :type providers: list + :param providers: The initial list of providers to check for values + when invoked. + + :type conversion_func: None or callable + :param conversion_func: If this value is None then it has no effect on + the return type. Otherwise, it is treated as a function that will + transform the provided value.
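A minimal sketch of the chain semantics: the first non-None provider wins, and the conversion_func is applied to the winning value. The ``MY_MAX_ATTEMPTS`` name is illustrative:

    from botocore.configprovider import (
        ChainProvider,
        ConstantProvider,
        EnvironmentProvider,
    )

    chain = ChainProvider(
        providers=[
            EnvironmentProvider(name='MY_MAX_ATTEMPTS', env={}),  # yields None
            ConstantProvider(value='3'),                          # yields '3'
        ],
        conversion_func=int,
    )
    print(chain.provide())  # 3 (an int, converted from the constant's string)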
+ """ + if providers is None: + providers = [] + self._providers = providers + self._conversion_func = conversion_func + + def __deepcopy__(self, memo): + return ChainProvider( + copy.deepcopy(self._providers, memo), self._conversion_func + ) + + def provide(self): + """Provide the value from the first provider to return non-None. + + Each provider in the chain has its provide method called. The first + one in the chain to return a non-None value is the returned from the + ChainProvider. When no non-None value is found, None is returned. + """ + for provider in self._providers: + value = provider.provide() + if value is not None: + return self._convert_type(value) + return None + + def set_default_provider(self, default_provider): + if self._providers and isinstance( + self._providers[-1], ConstantProvider + ): + self._providers[-1] = default_provider + else: + self._providers.append(default_provider) + + num_of_constants = sum( + isinstance(provider, ConstantProvider) + for provider in self._providers + ) + if num_of_constants > 1: + logger.info( + 'ChainProvider object contains multiple ' + 'instances of ConstantProvider objects' + ) + + def _convert_type(self, value): + if self._conversion_func is not None: + return self._conversion_func(value) + return value + + def __repr__(self): + return '[{}]'.format(', '.join([str(p) for p in self._providers])) + + +class InstanceVarProvider(BaseProvider): + """This class loads config values from the session instance vars.""" + + def __init__(self, instance_var, session): + """Initialize InstanceVarProvider. + + :type instance_var: str + :param instance_var: The instance variable to load from the session. + + :type session: :class:`botocore.session.Session` + :param session: The botocore session to get the loaded configuration + file variables from. + """ + self._instance_var = instance_var + self._session = session + + def __deepcopy__(self, memo): + return InstanceVarProvider( + copy.deepcopy(self._instance_var, memo), self._session + ) + + def provide(self): + """Provide a config value from the session instance vars.""" + instance_vars = self._session.instance_variables() + value = instance_vars.get(self._instance_var) + return value + + def __repr__(self): + return f'InstanceVarProvider(instance_var={self._instance_var}, session={self._session})' + + +class ScopedConfigProvider(BaseProvider): + def __init__(self, config_var_name, session): + """Initialize ScopedConfigProvider. + + :type config_var_name: str or tuple + :param config_var_name: The name of the config variable to load from + the configuration file. If the value is a tuple, it must only + consist of two items, where the first item represents the section + and the second item represents the config var name in the section. + + :type session: :class:`botocore.session.Session` + :param session: The botocore session to get the loaded configuration + file variables from. 
+ """ + self._config_var_name = config_var_name + self._session = session + + def __deepcopy__(self, memo): + return ScopedConfigProvider( + copy.deepcopy(self._config_var_name, memo), self._session + ) + + def provide(self): + """Provide a value from a config file property.""" + scoped_config = self._session.get_scoped_config() + if isinstance(self._config_var_name, tuple): + section_config = scoped_config.get(self._config_var_name[0]) + if not isinstance(section_config, dict): + return None + return section_config.get(self._config_var_name[1]) + return scoped_config.get(self._config_var_name) + + def __repr__(self): + return f'ScopedConfigProvider(config_var_name={self._config_var_name}, session={self._session})' + + +class EnvironmentProvider(BaseProvider): + """This class loads config values from environment variables.""" + + def __init__(self, name, env): + """Initialize with the keys in the dictionary to check. + + :type name: str + :param name: The key with that name will be loaded and returned. + + :type env: dict + :param env: Environment variables dictionary to get variables from. + """ + self._name = name + self._env = env + + def __deepcopy__(self, memo): + return EnvironmentProvider( + copy.deepcopy(self._name, memo), copy.deepcopy(self._env, memo) + ) + + def provide(self): + """Provide a config value from a source dictionary.""" + if self._name in self._env: + return self._env[self._name] + return None + + def __repr__(self): + return f'EnvironmentProvider(name={self._name}, env={self._env})' + + +class SectionConfigProvider(BaseProvider): + """Provides a dictionary from a section in the scoped config + + This is useful for retrieving scoped config variables (i.e. s3) that have + their own set of config variables and resolving logic. 
+ """ + + def __init__(self, section_name, session, override_providers=None): + self._section_name = section_name + self._session = session + self._scoped_config_provider = ScopedConfigProvider( + self._section_name, self._session + ) + self._override_providers = override_providers + if self._override_providers is None: + self._override_providers = {} + + def __deepcopy__(self, memo): + return SectionConfigProvider( + copy.deepcopy(self._section_name, memo), + self._session, + copy.deepcopy(self._override_providers, memo), + ) + + def provide(self): + section_config = self._scoped_config_provider.provide() + if section_config and not isinstance(section_config, dict): + logger.debug( + "The %s config key is not a dictionary type, " + "ignoring its value of: %s", + self._section_name, + section_config, + ) + return None + for section_config_var, provider in self._override_providers.items(): + provider_val = provider.provide() + if provider_val is not None: + if section_config is None: + section_config = {} + section_config[section_config_var] = provider_val + return section_config + + def set_default_provider(self, key, default_provider): + provider = self._override_providers.get(key) + if isinstance(provider, ChainProvider): + provider.set_default_provider(default_provider) + return + elif isinstance(provider, BaseProvider): + default_provider = ChainProvider( + providers=[provider, default_provider] + ) + self._override_providers[key] = default_provider + + def __repr__(self): + return ( + f'SectionConfigProvider(section_name={self._section_name}, ' + f'session={self._session}, ' + f'override_providers={self._override_providers})' + ) + + +class ConstantProvider(BaseProvider): + """This provider provides a constant value.""" + + def __init__(self, value): + self._value = value + + def __deepcopy__(self, memo): + return ConstantProvider(copy.deepcopy(self._value, memo)) + + def provide(self): + """Provide the constant value given during initialization.""" + return self._value + + def __repr__(self): + return f'ConstantProvider(value={self._value})' + + +class ConfiguredEndpointProvider(BaseProvider): + """Lookup an endpoint URL from environment variable or shared config file. + + NOTE: This class is considered private and is subject to abrupt breaking + changes or removal without prior announcement. Please do not use it + directly. + """ + + _ENDPOINT_URL_LOOKUP_ORDER = [ + 'environment_service', + 'environment_global', + 'config_service', + 'config_global', + ] + + def __init__( + self, + full_config, + scoped_config, + client_name, + environ=None, + ): + """Initialize a ConfiguredEndpointProviderChain. + + :type full_config: dict + :param full_config: This is the dict representing the full + configuration file. + + :type scoped_config: dict + :param scoped_config: This is the dict representing the configuration + for the current profile for the session. + + :type client_name: str + :param client_name: The name used to instantiate a client using + botocore.session.Session.create_client. + + :type environ: dict + :param environ: A mapping to use for environment variables. If this + is not provided it will default to use os.environ. + """ + self._full_config = full_config + self._scoped_config = scoped_config + self._client_name = client_name + self._transformed_service_id = self._get_snake_case_service_id( + self._client_name + ) + if environ is None: + environ = os.environ + self._environ = environ + + def provide(self): + """Lookup the configured endpoint URL. + + The order is: + + 1. 
The value provided by a service-specific environment variable. + 2. The value provided by the global endpoint environment variable + (AWS_ENDPOINT_URL). + 3. The value provided by a service-specific parameter from a services + definition section in the shared configuration file. + 4. The value provided by the global parameter from a services + definition section in the shared configuration file. + """ + for location in self._ENDPOINT_URL_LOOKUP_ORDER: + logger.debug( + 'Looking for endpoint for %s via: %s', + self._client_name, + location, + ) + + endpoint_url = getattr(self, f'_get_endpoint_url_{location}')() + + if endpoint_url: + logger.info( + 'Found endpoint for %s via: %s.', + self._client_name, + location, + ) + return endpoint_url + + logger.debug('No configured endpoint found.') + return None + + def _get_snake_case_service_id(self, client_name): + # Get the service ID without loading the service data file, accounting + # for any aliases and standardizing the names with hyphens. + client_name = utils.SERVICE_NAME_ALIASES.get(client_name, client_name) + hyphenized_service_id = ( + utils.CLIENT_NAME_TO_HYPHENIZED_SERVICE_ID_OVERRIDES.get( + client_name, client_name + ) + ) + return hyphenized_service_id.replace('-', '_') + + def _get_service_env_var_name(self): + transformed_service_id_env = self._transformed_service_id.upper() + return f'AWS_ENDPOINT_URL_{transformed_service_id_env}' + + def _get_services_config(self): + if 'services' not in self._scoped_config: + return {} + + section_name = self._scoped_config['services'] + services_section = self._full_config.get('services', {}).get( + section_name + ) + + if not services_section: + error_msg = ( + f'The profile is configured to use the services ' + f'section but the "{section_name}" services ' + f'configuration does not exist.' + ) + raise InvalidConfigError(error_msg=error_msg) + + return services_section + + def _get_endpoint_url_config_service(self): + snakecase_service_id = self._transformed_service_id.lower() + return ( + self._get_services_config() + .get(snakecase_service_id, {}) + .get('endpoint_url') + ) + + def _get_endpoint_url_config_global(self): + return self._scoped_config.get('endpoint_url') + + def _get_endpoint_url_environment_service(self): + return EnvironmentProvider( + name=self._get_service_env_var_name(), env=self._environ + ).provide() + + def _get_endpoint_url_environment_global(self): + return EnvironmentProvider( + name='AWS_ENDPOINT_URL', env=self._environ + ).provide() diff --git a/venv/lib/python3.10/site-packages/botocore/credentials.py b/venv/lib/python3.10/site-packages/botocore/credentials.py new file mode 100644 index 0000000000000000000000000000000000000000..dd7e718255b91f7f1b20ca147201e78851f7987e --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/credentials.py @@ -0,0 +1,2297 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
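Referring back to the ConfiguredEndpointProvider lookup order above, the two configuration sources it consults look roughly like this; the profile name, section name, and URLs are illustrative only:

    # Environment (service-specific variable, then the global one):
    #   export AWS_ENDPOINT_URL_DYNAMODB=http://localhost:8000
    #   export AWS_ENDPOINT_URL=http://localhost:4566
    #
    # Shared config file (~/.aws/config):
    #   [profile dev]
    #   services = local
    #
    #   [services local]
    #   dynamodb =
    #     endpoint_url = http://localhost:8000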
+import datetime +import getpass +import json +import logging +import os +import subprocess +import threading +import time +from collections import namedtuple +from copy import deepcopy +from hashlib import sha1 + +from dateutil.parser import parse +from dateutil.tz import tzlocal, tzutc + +import botocore.compat +import botocore.configloader +from botocore import UNSIGNED +from botocore.compat import compat_shell_split, total_seconds +from botocore.config import Config +from botocore.exceptions import ( + ConfigNotFound, + CredentialRetrievalError, + InfiniteLoopConfigError, + InvalidConfigError, + MetadataRetrievalError, + PartialCredentialsError, + RefreshWithMFAUnsupportedError, + UnauthorizedSSOTokenError, + UnknownCredentialError, +) +from botocore.tokens import SSOTokenProvider +from botocore.utils import ( + ContainerMetadataFetcher, + FileWebIdentityTokenLoader, + InstanceMetadataFetcher, + JSONFileCache, + SSOTokenLoader, + parse_key_val_file, + resolve_imds_endpoint_mode, +) + +logger = logging.getLogger(__name__) +ReadOnlyCredentials = namedtuple( + 'ReadOnlyCredentials', ['access_key', 'secret_key', 'token'] +) + +_DEFAULT_MANDATORY_REFRESH_TIMEOUT = 10 * 60 # 10 min +_DEFAULT_ADVISORY_REFRESH_TIMEOUT = 15 * 60 # 15 min + + +def create_credential_resolver(session, cache=None, region_name=None): + """Create a default credential resolver. + + This creates a pre-configured credential resolver + that includes the default lookup chain for + credentials. + + """ + profile_name = session.get_config_variable('profile') or 'default' + metadata_timeout = session.get_config_variable('metadata_service_timeout') + num_attempts = session.get_config_variable('metadata_service_num_attempts') + disable_env_vars = session.instance_variables().get('profile') is not None + + imds_config = { + 'ec2_metadata_service_endpoint': session.get_config_variable( + 'ec2_metadata_service_endpoint' + ), + 'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode( + session + ), + 'ec2_credential_refresh_window': _DEFAULT_ADVISORY_REFRESH_TIMEOUT, + 'ec2_metadata_v1_disabled': session.get_config_variable( + 'ec2_metadata_v1_disabled' + ), + } + + if cache is None: + cache = {} + + env_provider = EnvProvider() + container_provider = ContainerProvider() + instance_metadata_provider = InstanceMetadataProvider( + iam_role_fetcher=InstanceMetadataFetcher( + timeout=metadata_timeout, + num_attempts=num_attempts, + user_agent=session.user_agent(), + config=imds_config, + ) + ) + + profile_provider_builder = ProfileProviderBuilder( + session, cache=cache, region_name=region_name + ) + assume_role_provider = AssumeRoleProvider( + load_config=lambda: session.full_config, + client_creator=_get_client_creator(session, region_name), + cache=cache, + profile_name=profile_name, + credential_sourcer=CanonicalNameCredentialSourcer( + [env_provider, container_provider, instance_metadata_provider] + ), + profile_provider_builder=profile_provider_builder, + ) + + pre_profile = [ + env_provider, + assume_role_provider, + ] + profile_providers = profile_provider_builder.providers( + profile_name=profile_name, + disable_env_vars=disable_env_vars, + ) + post_profile = [ + OriginalEC2Provider(), + BotoProvider(), + container_provider, + instance_metadata_provider, + ] + providers = pre_profile + profile_providers + post_profile + + if disable_env_vars: + # An explicitly provided profile will negate an EnvProvider. + # We will defer to providers that understand the "profile" + # concept to retrieve credentials. 
+ # The one edge case is if all three values are provided via + # env vars: + # export AWS_ACCESS_KEY_ID=foo + # export AWS_SECRET_ACCESS_KEY=bar + # export AWS_PROFILE=baz + # Then, just like our client() calls, the explicit credentials + # will take precedence. + # + # This precedence is enforced by leaving the EnvProvider in the chain. + # This means that the only way a "profile" would win is if the + # EnvProvider does not return credentials, which is what we want + # in this scenario. + providers.remove(env_provider) + logger.debug( + 'Skipping environment variable credential check' + ' because profile name was explicitly set.' + ) + + resolver = CredentialResolver(providers=providers) + return resolver + + +class ProfileProviderBuilder: + """This class handles the creation of profile based providers. + + NOTE: This class is only intended for internal use. + + This class handles the creation and ordering of the various credential + providers that primarily source their configuration from the shared config. + This is needed to enable sharing between the default credential chain and + the source profile chain created by the assume role provider. + """ + + def __init__( + self, session, cache=None, region_name=None, sso_token_cache=None + ): + self._session = session + self._cache = cache + self._region_name = region_name + self._sso_token_cache = sso_token_cache + + def providers(self, profile_name, disable_env_vars=False): + return [ + self._create_web_identity_provider( + profile_name, + disable_env_vars, + ), + self._create_sso_provider(profile_name), + self._create_shared_credential_provider(profile_name), + self._create_process_provider(profile_name), + self._create_config_provider(profile_name), + ] + + def _create_process_provider(self, profile_name): + return ProcessProvider( + profile_name=profile_name, + load_config=lambda: self._session.full_config, + ) + + def _create_shared_credential_provider(self, profile_name): + credential_file = self._session.get_config_variable('credentials_file') + return SharedCredentialProvider( + profile_name=profile_name, + creds_filename=credential_file, + ) + + def _create_config_provider(self, profile_name): + config_file = self._session.get_config_variable('config_file') + return ConfigProvider( + profile_name=profile_name, + config_filename=config_file, + ) + + def _create_web_identity_provider(self, profile_name, disable_env_vars): + return AssumeRoleWithWebIdentityProvider( + load_config=lambda: self._session.full_config, + client_creator=_get_client_creator( + self._session, self._region_name + ), + cache=self._cache, + profile_name=profile_name, + disable_env_vars=disable_env_vars, + ) + + def _create_sso_provider(self, profile_name): + return SSOProvider( + load_config=lambda: self._session.full_config, + client_creator=self._session.create_client, + profile_name=profile_name, + cache=self._cache, + token_cache=self._sso_token_cache, + token_provider=SSOTokenProvider( + self._session, + cache=self._sso_token_cache, + profile_name=profile_name, + ), + ) + + +def get_credentials(session): + resolver = create_credential_resolver(session) + return resolver.load_credentials() + + +def _local_now(): + return datetime.datetime.now(tzlocal()) + + +def _parse_if_needed(value): + if isinstance(value, datetime.datetime): + return value + return parse(value) + + +def _serialize_if_needed(value, iso=False): + if isinstance(value, datetime.datetime): + if iso: + return value.isoformat() + return value.strftime('%Y-%m-%dT%H:%M:%S%Z') + return value + +
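A sketch of driving the resolver chain assembled above; it assumes normal AWS configuration is present (environment variables, shared config, or instance metadata), and ``load_credentials()`` is defined on CredentialResolver later in this module:

    import botocore.session
    from botocore.credentials import create_credential_resolver

    session = botocore.session.Session()
    resolver = create_credential_resolver(session)
    credentials = resolver.load_credentials()  # walks the provider chain in order
    if credentials is not None:
        frozen = credentials.get_frozen_credentials()
        print(frozen.access_key[:4], '...')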
+def _get_client_creator(session, region_name): + def client_creator(service_name, **kwargs): + create_client_kwargs = {'region_name': region_name} + create_client_kwargs.update(**kwargs) + return session.create_client(service_name, **create_client_kwargs) + + return client_creator + + +def create_assume_role_refresher(client, params): + def refresh(): + response = client.assume_role(**params) + credentials = response['Credentials'] + # We need to normalize the credential names to + # the values expected by the refresh creds. + return { + 'access_key': credentials['AccessKeyId'], + 'secret_key': credentials['SecretAccessKey'], + 'token': credentials['SessionToken'], + 'expiry_time': _serialize_if_needed(credentials['Expiration']), + } + + return refresh + + +def create_mfa_serial_refresher(actual_refresh): + class _Refresher: + def __init__(self, refresh): + self._refresh = refresh + self._has_been_called = False + + def __call__(self): + if self._has_been_called: + # We can explore an option in the future to support + # reprompting for MFA, but for now we just error out + # when the temp creds expire. + raise RefreshWithMFAUnsupportedError() + self._has_been_called = True + return self._refresh() + + return _Refresher(actual_refresh) + + +class Credentials: + """ + Holds the credentials needed to authenticate requests. + + :param str access_key: The access key part of the credentials. + :param str secret_key: The secret key part of the credentials. + :param str token: The security token, valid only for session credentials. + :param str method: A string which identifies where the credentials + were found. + """ + + def __init__(self, access_key, secret_key, token=None, method=None): + self.access_key = access_key + self.secret_key = secret_key + self.token = token + + if method is None: + method = 'explicit' + self.method = method + + self._normalize() + + def _normalize(self): + # Keys would sometimes (accidentally) contain non-ascii characters. + # It would cause a confusing UnicodeDecodeError in Python 2. + # We explicitly convert them into unicode to avoid such error. + # + # Eventually the service will decide whether to accept the credential. + # This also complies with the behavior in Python 3. + self.access_key = botocore.compat.ensure_unicode(self.access_key) + self.secret_key = botocore.compat.ensure_unicode(self.secret_key) + + def get_frozen_credentials(self): + return ReadOnlyCredentials( + self.access_key, self.secret_key, self.token + ) + + +class RefreshableCredentials(Credentials): + """ + Holds the credentials needed to authenticate requests. In addition, it + knows how to refresh itself. + + :param str access_key: The access key part of the credentials. + :param str secret_key: The secret key part of the credentials. + :param str token: The security token, valid only for session credentials. + :param datetime expiry_time: The expiration time of the credentials. + :param function refresh_using: Callback function to refresh the credentials. + :param str method: A string which identifies where the credentials + were found. + :param function time_fetcher: Callback function to retrieve current time. + """ + + # The time at which we'll attempt to refresh, but not + # block if someone else is refreshing. + _advisory_refresh_timeout = _DEFAULT_ADVISORY_REFRESH_TIMEOUT + # The time at which all threads will block waiting for + # refreshed credentials. 
+    _mandatory_refresh_timeout = _DEFAULT_MANDATORY_REFRESH_TIMEOUT
+
+    def __init__(
+        self,
+        access_key,
+        secret_key,
+        token,
+        expiry_time,
+        refresh_using,
+        method,
+        time_fetcher=_local_now,
+        advisory_timeout=None,
+        mandatory_timeout=None,
+    ):
+        self._refresh_using = refresh_using
+        self._access_key = access_key
+        self._secret_key = secret_key
+        self._token = token
+        self._expiry_time = expiry_time
+        self._time_fetcher = time_fetcher
+        self._refresh_lock = threading.Lock()
+        self.method = method
+        self._frozen_credentials = ReadOnlyCredentials(
+            access_key, secret_key, token
+        )
+        self._normalize()
+        if advisory_timeout is not None:
+            self._advisory_refresh_timeout = advisory_timeout
+        if mandatory_timeout is not None:
+            self._mandatory_refresh_timeout = mandatory_timeout
+
+    def _normalize(self):
+        self._access_key = botocore.compat.ensure_unicode(self._access_key)
+        self._secret_key = botocore.compat.ensure_unicode(self._secret_key)
+
+    @classmethod
+    def create_from_metadata(
+        cls,
+        metadata,
+        refresh_using,
+        method,
+        advisory_timeout=None,
+        mandatory_timeout=None,
+    ):
+        kwargs = {}
+        if advisory_timeout is not None:
+            kwargs['advisory_timeout'] = advisory_timeout
+        if mandatory_timeout is not None:
+            kwargs['mandatory_timeout'] = mandatory_timeout
+
+        instance = cls(
+            access_key=metadata['access_key'],
+            secret_key=metadata['secret_key'],
+            token=metadata['token'],
+            expiry_time=cls._expiry_datetime(metadata['expiry_time']),
+            method=method,
+            refresh_using=refresh_using,
+            **kwargs,
+        )
+        return instance
+
+    @property
+    def access_key(self):
+        """Warning: Using this property can lead to race conditions if you
+        access another property subsequently along the refresh boundary.
+        Please use get_frozen_credentials instead.
+        """
+        self._refresh()
+        return self._access_key
+
+    @access_key.setter
+    def access_key(self, value):
+        self._access_key = value
+
+    @property
+    def secret_key(self):
+        """Warning: Using this property can lead to race conditions if you
+        access another property subsequently along the refresh boundary.
+        Please use get_frozen_credentials instead.
+        """
+        self._refresh()
+        return self._secret_key
+
+    @secret_key.setter
+    def secret_key(self, value):
+        self._secret_key = value
+
+    @property
+    def token(self):
+        """Warning: Using this property can lead to race conditions if you
+        access another property subsequently along the refresh boundary.
+        Please use get_frozen_credentials instead.
+        """
+        self._refresh()
+        return self._token
+
+    @token.setter
+    def token(self, value):
+        self._token = value
+
+    def _seconds_remaining(self):
+        delta = self._expiry_time - self._time_fetcher()
+        return total_seconds(delta)
+
+    def refresh_needed(self, refresh_in=None):
+        """Check if a refresh is needed.
+
+        A refresh is needed if the expiry time associated
+        with the temporary credentials is less than the
+        provided ``refresh_in``.  If ``refresh_in`` is not
+        provided, ``self._advisory_refresh_timeout`` will be used.
+
+        For example, if your temporary credentials expire
+        in 10 minutes and the provided ``refresh_in`` is
+        ``15 * 60``, then this function will return ``True``.
+
+        :type refresh_in: int
+        :param refresh_in: The number of seconds before the
+            credentials expire in which refresh attempts should
+            be made.
+
+        :return: True if refresh needed, False otherwise.
+
+        """
+        if self._expiry_time is None:
+            # No expiration, so assume we don't need to refresh.
+            return False
+
+        if refresh_in is None:
+            refresh_in = self._advisory_refresh_timeout
+        # The credentials should be refreshed if they're going to expire
+        # within the refresh_in window.
+        if self._seconds_remaining() >= refresh_in:
+            # There's enough time left. Don't refresh.
+            return False
+        logger.debug("Credentials need to be refreshed.")
+        return True
+
+    def _is_expired(self):
+        # Checks if the current credentials are expired.
+        return self.refresh_needed(refresh_in=0)
+
+    def _refresh(self):
+        # In the common case where we don't need a refresh, we
+        # can immediately exit and not require acquiring the
+        # refresh lock.
+        if not self.refresh_needed(self._advisory_refresh_timeout):
+            return
+
+        # acquire() doesn't accept kwargs; passing False indicates
+        # that we should not block if we can't acquire the lock.
+        # If we aren't able to acquire the lock, we'll trigger
+        # the else clause.
+        if self._refresh_lock.acquire(False):
+            try:
+                if not self.refresh_needed(self._advisory_refresh_timeout):
+                    return
+                is_mandatory_refresh = self.refresh_needed(
+                    self._mandatory_refresh_timeout
+                )
+                self._protected_refresh(is_mandatory=is_mandatory_refresh)
+                return
+            finally:
+                self._refresh_lock.release()
+        elif self.refresh_needed(self._mandatory_refresh_timeout):
+            # If we're within the mandatory refresh window,
+            # we must block until we get refreshed credentials.
+            with self._refresh_lock:
+                if not self.refresh_needed(self._mandatory_refresh_timeout):
+                    return
+                self._protected_refresh(is_mandatory=True)
+
+    def _protected_refresh(self, is_mandatory):
+        # precondition: this method should only be called if you've acquired
+        # the self._refresh_lock.
+        try:
+            metadata = self._refresh_using()
+        except Exception:
+            period_name = 'mandatory' if is_mandatory else 'advisory'
+            logger.warning(
+                "Refreshing temporary credentials failed "
+                "during %s refresh period.",
+                period_name,
+                exc_info=True,
+            )
+            if is_mandatory:
+                # If this is a mandatory refresh, then
+                # all errors that occur when we attempt to refresh
+                # credentials are propagated back to the user.
+                raise
+            # Otherwise we'll just return.
+            # The end result will be that we'll use the current
+            # set of temporary credentials we have.
+            return
+        self._set_from_data(metadata)
+        self._frozen_credentials = ReadOnlyCredentials(
+            self._access_key, self._secret_key, self._token
+        )
+        if self._is_expired():
+            # We successfully refreshed credentials but for whatever
+            # reason, our refreshing function returned credentials
+            # that are still expired. In this scenario, the only
+            # thing we can do is let the user know and raise
+            # an exception.
+            msg = (
+                "Credentials were refreshed, but the "
+                "refreshed credentials are still expired."
+            )
+            logger.warning(msg)
+            raise RuntimeError(msg)
+
+    @staticmethod
+    def _expiry_datetime(time_str):
+        return parse(time_str)
+
+    def _set_from_data(self, data):
+        expected_keys = ['access_key', 'secret_key', 'token', 'expiry_time']
+        if not data:
+            missing_keys = expected_keys
+        else:
+            missing_keys = [k for k in expected_keys if k not in data]
+
+        if missing_keys:
+            message = "Credential refresh failed, response did not contain: %s"
+            raise CredentialRetrievalError(
+                provider=self.method,
+                error_msg=message % ', '.join(missing_keys),
+            )
+
+        self.access_key = data['access_key']
+        self.secret_key = data['secret_key']
+        self.token = data['token']
+        self._expiry_time = parse(data['expiry_time'])
+        logger.debug(
+            "Retrieved credentials will expire at: %s", self._expiry_time
+        )
+        self._normalize()
+
+    def get_frozen_credentials(self):
+        """Return immutable credentials.
+
+        The ``access_key``, ``secret_key``, and ``token`` properties
+        on this class will always check and refresh credentials if
+        needed before returning the particular credentials.
+
+        This has an edge case where you can get inconsistent
+        credentials.  Imagine this:
+
+            # Current creds are "t1"
+            tmp.access_key  --->  expired? no, so return t1.access_key
+            # ---- time is now expired, creds need refreshing to "t2" ----
+            tmp.secret_key  --->  expired? yes, refresh and return t2.secret_key
+
+        This means we're using the access key from t1 with the secret key
+        from t2.  To fix this issue, you can request a frozen credential
+        object which is guaranteed not to change.
+
+        The frozen credentials returned from this method should be used
+        immediately and then discarded.  The typical usage pattern would
+        be::
+
+            creds = RefreshableCredentials(...)
+            some_code = SomeSignerObject()
+            # I'm about to sign the request.
+            # The frozen credentials are only used for the
+            # duration of generate_presigned_url and will be
+            # immediately thrown away.
+            request = some_code.sign_some_request(
+                with_credentials=creds.get_frozen_credentials())
+            print("Signed request:", request)
+
+        """
+        self._refresh()
+        return self._frozen_credentials
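[Editor's note] To make the advisory/mandatory split above concrete, here is a small self-contained sketch; the fixed clock, the timeouts, and the refresh function are illustrative values, not botocore defaults:

    # Illustrative demo of the refresh windows: credentials that expire in
    # 5 minutes, with an advisory window of 10 minutes, so the first access
    # triggers a non-blocking refresh. All values here are made up.
    import datetime
    from dateutil.tz import tzutc
    from botocore.credentials import RefreshableCredentials

    now = datetime.datetime.now(tzutc())

    def refresh():
        # Pretend we fetched new temporary credentials valid for one hour.
        return {
            'access_key': 'AKID2',
            'secret_key': 'secret2',
            'token': 'token2',
            'expiry_time': (now + datetime.timedelta(hours=1)).isoformat(),
        }

    creds = RefreshableCredentials(
        access_key='AKID1',
        secret_key='secret1',
        token='token1',
        expiry_time=now + datetime.timedelta(minutes=5),
        refresh_using=refresh,
        method='manual',
        time_fetcher=lambda: now,
        advisory_timeout=10 * 60,  # start refreshing 10 minutes early
        mandatory_timeout=5 * 60,  # block on refresh 5 minutes early
    )

    assert creds.refresh_needed()            # inside the advisory window
    frozen = creds.get_frozen_credentials()  # triggers _refresh()
    print(frozen.access_key)                 # 'AKID2' after the refresh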
+
+
+class DeferredRefreshableCredentials(RefreshableCredentials):
+    """Refreshable credentials that don't require initial credentials.
+
+    refresh_using will be called upon first access.
+    """
+
+    def __init__(self, refresh_using, method, time_fetcher=_local_now):
+        self._refresh_using = refresh_using
+        self._access_key = None
+        self._secret_key = None
+        self._token = None
+        self._expiry_time = None
+        self._time_fetcher = time_fetcher
+        self._refresh_lock = threading.Lock()
+        self.method = method
+        self._frozen_credentials = None
+
+    def refresh_needed(self, refresh_in=None):
+        if self._frozen_credentials is None:
+            return True
+        return super().refresh_needed(refresh_in)
+
+
+class CachedCredentialFetcher:
+    DEFAULT_EXPIRY_WINDOW_SECONDS = 60 * 15
+
+    def __init__(self, cache=None, expiry_window_seconds=None):
+        if cache is None:
+            cache = {}
+        self._cache = cache
+        self._cache_key = self._create_cache_key()
+        if expiry_window_seconds is None:
+            expiry_window_seconds = self.DEFAULT_EXPIRY_WINDOW_SECONDS
+        self._expiry_window_seconds = expiry_window_seconds
+
+    def _create_cache_key(self):
+        raise NotImplementedError('_create_cache_key()')
+
+    def _make_file_safe(self, filename):
+        # Replace ':', the OS path separator, and '/' to make the
+        # string safe to use as a filename.
+ filename = filename.replace(':', '_').replace(os.sep, '_') + return filename.replace('/', '_') + + def _get_credentials(self): + raise NotImplementedError('_get_credentials()') + + def fetch_credentials(self): + return self._get_cached_credentials() + + def _get_cached_credentials(self): + """Get up-to-date credentials. + + This will check the cache for up-to-date credentials, calling assume + role if none are available. + """ + response = self._load_from_cache() + if response is None: + response = self._get_credentials() + self._write_to_cache(response) + else: + logger.debug("Credentials for role retrieved from cache.") + + creds = response['Credentials'] + expiration = _serialize_if_needed(creds['Expiration'], iso=True) + return { + 'access_key': creds['AccessKeyId'], + 'secret_key': creds['SecretAccessKey'], + 'token': creds['SessionToken'], + 'expiry_time': expiration, + } + + def _load_from_cache(self): + if self._cache_key in self._cache: + creds = deepcopy(self._cache[self._cache_key]) + if not self._is_expired(creds): + return creds + else: + logger.debug( + "Credentials were found in cache, but they are expired." + ) + return None + + def _write_to_cache(self, response): + self._cache[self._cache_key] = deepcopy(response) + + def _is_expired(self, credentials): + """Check if credentials are expired.""" + end_time = _parse_if_needed(credentials['Credentials']['Expiration']) + seconds = total_seconds(end_time - _local_now()) + return seconds < self._expiry_window_seconds + + +class BaseAssumeRoleCredentialFetcher(CachedCredentialFetcher): + def __init__( + self, + client_creator, + role_arn, + extra_args=None, + cache=None, + expiry_window_seconds=None, + ): + self._client_creator = client_creator + self._role_arn = role_arn + + if extra_args is None: + self._assume_kwargs = {} + else: + self._assume_kwargs = deepcopy(extra_args) + self._assume_kwargs['RoleArn'] = self._role_arn + + self._role_session_name = self._assume_kwargs.get('RoleSessionName') + self._using_default_session_name = False + if not self._role_session_name: + self._generate_assume_role_name() + + super().__init__(cache, expiry_window_seconds) + + def _generate_assume_role_name(self): + self._role_session_name = f'botocore-session-{int(time.time())}' + self._assume_kwargs['RoleSessionName'] = self._role_session_name + self._using_default_session_name = True + + def _create_cache_key(self): + """Create a predictable cache key for the current configuration. + + The cache key is intended to be compatible with file names. + """ + args = deepcopy(self._assume_kwargs) + + # The role session name gets randomly generated, so we don't want it + # in the hash. + if self._using_default_session_name: + del args['RoleSessionName'] + + if 'Policy' in args: + # To have a predictable hash, the keys of the policy must be + # sorted, so we have to load it here to make sure it gets sorted + # later on. + args['Policy'] = json.loads(args['Policy']) + + args = json.dumps(args, sort_keys=True) + argument_hash = sha1(args.encode('utf-8')).hexdigest() + return self._make_file_safe(argument_hash) + + +class AssumeRoleCredentialFetcher(BaseAssumeRoleCredentialFetcher): + def __init__( + self, + client_creator, + source_credentials, + role_arn, + extra_args=None, + mfa_prompter=None, + cache=None, + expiry_window_seconds=None, + ): + """ + :type client_creator: callable + :param client_creator: A callable that creates a client taking + arguments like ``Session.create_client``. 
+
+        :type source_credentials: Credentials
+        :param source_credentials: The credentials to use to create the
+            client for the call to AssumeRole.
+
+        :type role_arn: str
+        :param role_arn: The ARN of the role to be assumed.
+
+        :type extra_args: dict
+        :param extra_args: Any additional arguments to add to the assume
+            role request using the format of the botocore operation.
+            Possible keys include, but may not be limited to,
+            DurationSeconds, Policy, SerialNumber, ExternalId and
+            RoleSessionName.
+
+        :type mfa_prompter: callable
+        :param mfa_prompter: A callable that returns input provided by the
+            user (i.e. raw_input, getpass.getpass, etc.).
+
+        :type cache: dict
+        :param cache: An object that supports ``__getitem__``,
+            ``__setitem__``, and ``__contains__``.  An example of this is
+            the ``JSONFileCache`` class in aws-cli.
+
+        :type expiry_window_seconds: int
+        :param expiry_window_seconds: The amount of time, in seconds,
+            before the credentials' stated expiration at which they are
+            treated as expired and refreshed.
+        """
+        self._source_credentials = source_credentials
+        self._mfa_prompter = mfa_prompter
+        if self._mfa_prompter is None:
+            self._mfa_prompter = getpass.getpass
+
+        super().__init__(
+            client_creator,
+            role_arn,
+            extra_args=extra_args,
+            cache=cache,
+            expiry_window_seconds=expiry_window_seconds,
+        )
+
+    def _get_credentials(self):
+        """Get credentials by calling assume role."""
+        kwargs = self._assume_role_kwargs()
+        client = self._create_client()
+        return client.assume_role(**kwargs)
+
+    def _assume_role_kwargs(self):
+        """Get the arguments for assume role based on current configuration."""
+        assume_role_kwargs = deepcopy(self._assume_kwargs)
+
+        mfa_serial = assume_role_kwargs.get('SerialNumber')
+
+        if mfa_serial is not None:
+            prompt = f'Enter MFA code for {mfa_serial}: '
+            token_code = self._mfa_prompter(prompt)
+            assume_role_kwargs['TokenCode'] = token_code
+
+        duration_seconds = assume_role_kwargs.get('DurationSeconds')
+
+        if duration_seconds is not None:
+            assume_role_kwargs['DurationSeconds'] = duration_seconds
+
+        return assume_role_kwargs
+
+    def _create_client(self):
+        """Create an STS client using the source credentials."""
+        frozen_credentials = self._source_credentials.get_frozen_credentials()
+        return self._client_creator(
+            'sts',
+            aws_access_key_id=frozen_credentials.access_key,
+            aws_secret_access_key=frozen_credentials.secret_key,
+            aws_session_token=frozen_credentials.token,
+        )
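[Editor's note] The cache keys used by these fetchers come from `BaseAssumeRoleCredentialFetcher._create_cache_key` above. A self-contained re-derivation of that scheme (sorted-JSON SHA-1, made filename-safe), useful for predicting cache file names; the helper name is ours, not botocore's:

    # Re-derivation of the assume-role cache key scheme shown above.
    import json
    import os
    from hashlib import sha1

    def assume_role_cache_key(assume_kwargs):
        # Sorting keys makes the JSON, and therefore the hash, predictable.
        args = json.dumps(assume_kwargs, sort_keys=True)
        digest = sha1(args.encode('utf-8')).hexdigest()
        # Mirror _make_file_safe: strip characters unsafe in filenames.
        return digest.replace(':', '_').replace(os.sep, '_').replace('/', '_')

    key = assume_role_cache_key(
        {'RoleArn': 'arn:aws:iam::123456789012:role/demo'}
    )
    print(key)  # stable hex digest, safe to use as a cache filename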
+
+
+class AssumeRoleWithWebIdentityCredentialFetcher(
+    BaseAssumeRoleCredentialFetcher
+):
+    def __init__(
+        self,
+        client_creator,
+        web_identity_token_loader,
+        role_arn,
+        extra_args=None,
+        cache=None,
+        expiry_window_seconds=None,
+    ):
+        """
+        :type client_creator: callable
+        :param client_creator: A callable that creates a client taking
+            arguments like ``Session.create_client``.
+
+        :type web_identity_token_loader: callable
+        :param web_identity_token_loader: A callable that takes no arguments
+            and returns a web identity token str.
+
+        :type role_arn: str
+        :param role_arn: The ARN of the role to be assumed.
+
+        :type extra_args: dict
+        :param extra_args: Any additional arguments to add to the assume
+            role request using the format of the botocore operation.
+            Possible keys include, but may not be limited to,
+            DurationSeconds, Policy, SerialNumber, ExternalId and
+            RoleSessionName.
+
+        :type cache: dict
+        :param cache: An object that supports ``__getitem__``,
+            ``__setitem__``, and ``__contains__``.  An example of this is
+            the ``JSONFileCache`` class in aws-cli.
+
+        :type expiry_window_seconds: int
+        :param expiry_window_seconds: The amount of time, in seconds,
+            before the credentials' stated expiration at which they are
+            treated as expired and refreshed.
+        """
+        self._web_identity_token_loader = web_identity_token_loader
+
+        super().__init__(
+            client_creator,
+            role_arn,
+            extra_args=extra_args,
+            cache=cache,
+            expiry_window_seconds=expiry_window_seconds,
+        )
+
+    def _get_credentials(self):
+        """Get credentials by calling assume role."""
+        kwargs = self._assume_role_kwargs()
+        # Assume role with web identity does not require credentials other
+        # than the token, so we explicitly configure the client to not
+        # sign requests.
+        config = Config(signature_version=UNSIGNED)
+        client = self._client_creator('sts', config=config)
+        return client.assume_role_with_web_identity(**kwargs)
+
+    def _assume_role_kwargs(self):
+        """Get the arguments for assume role based on current configuration."""
+        assume_role_kwargs = deepcopy(self._assume_kwargs)
+        identity_token = self._web_identity_token_loader()
+        assume_role_kwargs['WebIdentityToken'] = identity_token
+
+        return assume_role_kwargs
+
+
+class CredentialProvider:
+    # A short name to identify the provider within botocore.
+    METHOD = None
+
+    # A name to identify the provider for use in cross-sdk features like
+    # assume role's `credential_source` configuration option. These names
+    # are to be treated in a case-insensitive way. NOTE: any providers not
+    # implemented in botocore MUST prefix their canonical names with
+    # 'custom' or we DO NOT guarantee that it will work with any features
+    # that this provides.
+    CANONICAL_NAME = None
+
+    def __init__(self, session=None):
+        self.session = session
+
+    def load(self):
+        """
+        Loads the credentials from their source & sets them on the object.
+
+        Subclasses should implement this method (by reading from disk, the
+        environment, the network or wherever), returning ``True`` if they
+        were found & loaded.
+
+        If not found, this method should return ``False``, indicating that
+        the ``CredentialResolver`` should fall back to the next available
+        method.
+
+        The default implementation does nothing, assuming the user has set
+        the ``access_key/secret_key/token`` themselves.
+
+        :returns: Whether credentials were found & set
+        :rtype: Credentials
+        """
+        return True
+
+    def _extract_creds_from_mapping(self, mapping, *key_names):
+        found = []
+        for key_name in key_names:
+            try:
+                found.append(mapping[key_name])
+            except KeyError:
+                raise PartialCredentialsError(
+                    provider=self.METHOD, cred_var=key_name
+                )
+        return found
+
+
+class ProcessProvider(CredentialProvider):
+    METHOD = 'custom-process'
+
+    def __init__(self, profile_name, load_config, popen=subprocess.Popen):
+        self._profile_name = profile_name
+        self._load_config = load_config
+        self._loaded_config = None
+        self._popen = popen
+
+    def load(self):
+        credential_process = self._credential_process
+        if credential_process is None:
+            return
+
+        creds_dict = self._retrieve_credentials_using(credential_process)
+        if creds_dict.get('expiry_time') is not None:
+            return RefreshableCredentials.create_from_metadata(
+                creds_dict,
+                lambda: self._retrieve_credentials_using(credential_process),
+                self.METHOD,
+            )
+
+        return Credentials(
+            access_key=creds_dict['access_key'],
+            secret_key=creds_dict['secret_key'],
+            token=creds_dict.get('token'),
+            method=self.METHOD,
+        )
+
+    def _retrieve_credentials_using(self, credential_process):
+        # We're not using shell=True, so we need to pass the
+        # command and all arguments as a list.
+        process_list = compat_shell_split(credential_process)
+        p = self._popen(
+            process_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
+        stdout, stderr = p.communicate()
+        if p.returncode != 0:
+            raise CredentialRetrievalError(
+                provider=self.METHOD, error_msg=stderr.decode('utf-8')
+            )
+        parsed = botocore.compat.json.loads(stdout.decode('utf-8'))
+        version = parsed.get('Version', '')
+        if version != 1:
+            raise CredentialRetrievalError(
+                provider=self.METHOD,
+                error_msg=(
+                    f"Unsupported version '{version}' for credential process "
+                    f"provider, supported versions: 1"
+                ),
+            )
+        try:
+            return {
+                'access_key': parsed['AccessKeyId'],
+                'secret_key': parsed['SecretAccessKey'],
+                'token': parsed.get('SessionToken'),
+                'expiry_time': parsed.get('Expiration'),
+            }
+        except KeyError as e:
+            raise CredentialRetrievalError(
+                provider=self.METHOD,
+                error_msg=f"Missing required key in response: {e}",
+            )
+
+    @property
+    def _credential_process(self):
+        if self._loaded_config is None:
+            self._loaded_config = self._load_config()
+        profile_config = self._loaded_config.get('profiles', {}).get(
+            self._profile_name, {}
+        )
+        return profile_config.get('credential_process')
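[Editor's note] The contract `ProcessProvider` enforces is easiest to see from the process side. A stand-in `credential_process` script (the file name and all values are illustrative) that emits exactly the keys `_retrieve_credentials_using` parses:

    #!/usr/bin/env python
    # my_cred_process.py -- illustrative stand-in for a credential_process
    # command. It must print a Version 1 JSON document on stdout.
    import json

    print(json.dumps({
        'Version': 1,                             # required, must be 1
        'AccessKeyId': 'AKIDEXAMPLE',             # required
        'SecretAccessKey': 'example-secret-key',  # required
        'SessionToken': 'example-session-token',  # optional
        'Expiration': '2030-01-01T00:00:00Z',     # optional; enables refresh
    }))

A profile opts in by setting `credential_process` (the config key read by the `_credential_process` property above) to the command line that runs such a script; when `Expiration` is present, the provider wraps the result in `RefreshableCredentials` so the process is re-run as expiry approaches.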
+
+
+class InstanceMetadataProvider(CredentialProvider):
+    METHOD = 'iam-role'
+    CANONICAL_NAME = 'Ec2InstanceMetadata'
+
+    def __init__(self, iam_role_fetcher):
+        self._role_fetcher = iam_role_fetcher
+
+    def load(self):
+        fetcher = self._role_fetcher
+        # We do the first request, to see if we get useful data back.
+        # If not, we'll pass & move on to whatever's next in the credential
+        # chain.
+        metadata = fetcher.retrieve_iam_role_credentials()
+        if not metadata:
+            return None
+        logger.info(
+            'Found credentials from IAM Role: %s', metadata['role_name']
+        )
+        # We manually set the data here, since we already made the request &
+        # have it. When the expiry is hit, the credentials will auto-refresh
+        # themselves.
+        creds = RefreshableCredentials.create_from_metadata(
+            metadata,
+            method=self.METHOD,
+            refresh_using=fetcher.retrieve_iam_role_credentials,
+        )
+        return creds
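[Editor's note] A sketch of wiring this provider by hand; `InstanceMetadataFetcher` lives in `botocore.utils` in a standard install, and the timeout/retry values here are illustrative:

    # On EC2 this yields refreshable role credentials; elsewhere load()
    # returns None and a resolver would move on to the next provider.
    from botocore.credentials import InstanceMetadataProvider
    from botocore.utils import InstanceMetadataFetcher

    provider = InstanceMetadataProvider(
        iam_role_fetcher=InstanceMetadataFetcher(timeout=1.0, num_attempts=2)
    )
    creds = provider.load()
    print('found credentials' if creds is not None else 'no instance role')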
+
+
+class EnvProvider(CredentialProvider):
+    METHOD = 'env'
+    CANONICAL_NAME = 'Environment'
+    ACCESS_KEY = 'AWS_ACCESS_KEY_ID'
+    SECRET_KEY = 'AWS_SECRET_ACCESS_KEY'
+    # The token can come from either of these env vars.
+    # AWS_SESSION_TOKEN is what other AWS SDKs have standardized on.
+    TOKENS = ['AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']
+    EXPIRY_TIME = 'AWS_CREDENTIAL_EXPIRATION'
+
+    def __init__(self, environ=None, mapping=None):
+        """
+        :param environ: The environment variables (defaults to
+            ``os.environ`` if no value is provided).
+        :param mapping: An optional mapping of variable names to
+            environment variable names.  Use this if you want to
+            change the mapping of access_key->AWS_ACCESS_KEY_ID, etc.
+            The dict can have up to 3 keys: ``access_key``, ``secret_key``,
+            ``session_token``.
+        """
+        if environ is None:
+            environ = os.environ
+        self.environ = environ
+        self._mapping = self._build_mapping(mapping)
+
+    def _build_mapping(self, mapping):
+        # Mapping of variable name to env var name.
+        var_mapping = {}
+        if mapping is None:
+            # Use the class var default.
+            var_mapping['access_key'] = self.ACCESS_KEY
+            var_mapping['secret_key'] = self.SECRET_KEY
+            var_mapping['token'] = self.TOKENS
+            var_mapping['expiry_time'] = self.EXPIRY_TIME
+        else:
+            var_mapping['access_key'] = mapping.get(
+                'access_key', self.ACCESS_KEY
+            )
+            var_mapping['secret_key'] = mapping.get(
+                'secret_key', self.SECRET_KEY
+            )
+            var_mapping['token'] = mapping.get('token', self.TOKENS)
+            if not isinstance(var_mapping['token'], list):
+                var_mapping['token'] = [var_mapping['token']]
+            var_mapping['expiry_time'] = mapping.get(
+                'expiry_time', self.EXPIRY_TIME
+            )
+        return var_mapping
+
+    def load(self):
+        """
+        Search for credentials in explicit environment variables.
+        """
+
+        access_key = self.environ.get(self._mapping['access_key'], '')
+
+        if access_key:
+            logger.info('Found credentials in environment variables.')
+            fetcher = self._create_credentials_fetcher()
+            credentials = fetcher(require_expiry=False)
+
+            expiry_time = credentials['expiry_time']
+            if expiry_time is not None:
+                expiry_time = parse(expiry_time)
+                return RefreshableCredentials(
+                    credentials['access_key'],
+                    credentials['secret_key'],
+                    credentials['token'],
+                    expiry_time,
+                    refresh_using=fetcher,
+                    method=self.METHOD,
+                )
+
+            return Credentials(
+                credentials['access_key'],
+                credentials['secret_key'],
+                credentials['token'],
+                method=self.METHOD,
+            )
+        else:
+            return None
+
+    def _create_credentials_fetcher(self):
+        mapping = self._mapping
+        method = self.METHOD
+        environ = self.environ
+
+        def fetch_credentials(require_expiry=True):
+            credentials = {}
+
+            access_key = environ.get(mapping['access_key'], '')
+            if not access_key:
+                raise PartialCredentialsError(
+                    provider=method, cred_var=mapping['access_key']
+                )
+            credentials['access_key'] = access_key
+
+            secret_key = environ.get(mapping['secret_key'], '')
+            if not secret_key:
+                raise PartialCredentialsError(
+                    provider=method, cred_var=mapping['secret_key']
+                )
+            credentials['secret_key'] = secret_key
+
+            credentials['token'] = None
+            for token_env_var in mapping['token']:
+                token = environ.get(token_env_var, '')
+                if token:
+                    credentials['token'] = token
+                    break
+
+            credentials['expiry_time'] = None
+            expiry_time = environ.get(mapping['expiry_time'], '')
+            if expiry_time:
+                credentials['expiry_time'] = expiry_time
+            if require_expiry and not expiry_time:
+                raise PartialCredentialsError(
+                    provider=method, cred_var=mapping['expiry_time']
+                )
+
+            return credentials
+
+        return fetch_credentials
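[Editor's note] Because `EnvProvider` accepts an injected `environ` mapping, its behavior is easy to exercise without touching the real process environment; the values below are obviously fake:

    from botocore.credentials import EnvProvider

    fake_environ = {
        'AWS_ACCESS_KEY_ID': 'AKIDEXAMPLE',
        'AWS_SECRET_ACCESS_KEY': 'example-secret',
        'AWS_SESSION_TOKEN': 'example-token',
    }
    creds = EnvProvider(environ=fake_environ).load()
    print(creds.method)  # 'env'
    print(creds.token)   # 'example-token'

If `AWS_CREDENTIAL_EXPIRATION` were also set, `load()` would instead return `RefreshableCredentials` that re-reads the environment on each refresh, per the code above.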
+
+
+class OriginalEC2Provider(CredentialProvider):
+    METHOD = 'ec2-credentials-file'
+    CANONICAL_NAME = 'Ec2Config'
+
+    CRED_FILE_ENV = 'AWS_CREDENTIAL_FILE'
+    ACCESS_KEY = 'AWSAccessKeyId'
+    SECRET_KEY = 'AWSSecretKey'
+
+    def __init__(self, environ=None, parser=None):
+        if environ is None:
+            environ = os.environ
+        if parser is None:
+            parser = parse_key_val_file
+        self._environ = environ
+        self._parser = parser
+
+    def load(self):
+        """
+        Search for a credential file used by original EC2 CLI tools.
+        """
+        if 'AWS_CREDENTIAL_FILE' in self._environ:
+            full_path = os.path.expanduser(
+                self._environ['AWS_CREDENTIAL_FILE']
+            )
+            creds = self._parser(full_path)
+            if self.ACCESS_KEY in creds:
+                logger.info('Found credentials in AWS_CREDENTIAL_FILE.')
+                access_key = creds[self.ACCESS_KEY]
+                secret_key = creds[self.SECRET_KEY]
+                # EC2 creds file doesn't support session tokens.
+                return Credentials(access_key, secret_key, method=self.METHOD)
+        else:
+            return None
+
+
+class SharedCredentialProvider(CredentialProvider):
+    METHOD = 'shared-credentials-file'
+    CANONICAL_NAME = 'SharedCredentials'
+
+    ACCESS_KEY = 'aws_access_key_id'
+    SECRET_KEY = 'aws_secret_access_key'
+    # Same deal as the EnvProvider above.  Botocore originally supported
+    # aws_security_token, but the SDKs are standardizing on aws_session_token
+    # so we support both.
+    TOKENS = ['aws_security_token', 'aws_session_token']
+
+    def __init__(self, creds_filename, profile_name=None, ini_parser=None):
+        self._creds_filename = creds_filename
+        if profile_name is None:
+            profile_name = 'default'
+        self._profile_name = profile_name
+        if ini_parser is None:
+            ini_parser = botocore.configloader.raw_config_parse
+        self._ini_parser = ini_parser
+
+    def load(self):
+        try:
+            available_creds = self._ini_parser(self._creds_filename)
+        except ConfigNotFound:
+            return None
+        if self._profile_name in available_creds:
+            config = available_creds[self._profile_name]
+            if self.ACCESS_KEY in config:
+                logger.info(
+                    "Found credentials in shared credentials file: %s",
+                    self._creds_filename,
+                )
+                access_key, secret_key = self._extract_creds_from_mapping(
+                    config, self.ACCESS_KEY, self.SECRET_KEY
+                )
+                token = self._get_session_token(config)
+                return Credentials(
+                    access_key, secret_key, token, method=self.METHOD
+                )
+
+    def _get_session_token(self, config):
+        for token_envvar in self.TOKENS:
+            if token_envvar in config:
+                return config[token_envvar]
+
+
+class ConfigProvider(CredentialProvider):
+    """INI based config provider with profile sections."""
+
+    METHOD = 'config-file'
+    CANONICAL_NAME = 'SharedConfig'
+
+    ACCESS_KEY = 'aws_access_key_id'
+    SECRET_KEY = 'aws_secret_access_key'
+    # Same deal as the EnvProvider above.  Botocore originally supported
+    # aws_security_token, but the SDKs are standardizing on aws_session_token
+    # so we support both.
+    TOKENS = ['aws_security_token', 'aws_session_token']
+
+    def __init__(self, config_filename, profile_name, config_parser=None):
+        """
+        :param config_filename: The session configuration scoped to the
+            current profile.  This is available via ``session.config``.
+        :param profile_name: The name of the current profile.
+        :param config_parser: A config parser callable.
+        """
+        self._config_filename = config_filename
+        self._profile_name = profile_name
+        if config_parser is None:
+            config_parser = botocore.configloader.load_config
+        self._config_parser = config_parser
+
+    def load(self):
+        """
+        If there are credentials in the configuration associated with
+        the session, use those.
+ """ + try: + full_config = self._config_parser(self._config_filename) + except ConfigNotFound: + return None + if self._profile_name in full_config['profiles']: + profile_config = full_config['profiles'][self._profile_name] + if self.ACCESS_KEY in profile_config: + logger.info( + "Credentials found in config file: %s", + self._config_filename, + ) + access_key, secret_key = self._extract_creds_from_mapping( + profile_config, self.ACCESS_KEY, self.SECRET_KEY + ) + token = self._get_session_token(profile_config) + return Credentials( + access_key, secret_key, token, method=self.METHOD + ) + else: + return None + + def _get_session_token(self, profile_config): + for token_name in self.TOKENS: + if token_name in profile_config: + return profile_config[token_name] + + +class BotoProvider(CredentialProvider): + METHOD = 'boto-config' + CANONICAL_NAME = 'Boto2Config' + + BOTO_CONFIG_ENV = 'BOTO_CONFIG' + DEFAULT_CONFIG_FILENAMES = ['/etc/boto.cfg', '~/.boto'] + ACCESS_KEY = 'aws_access_key_id' + SECRET_KEY = 'aws_secret_access_key' + + def __init__(self, environ=None, ini_parser=None): + if environ is None: + environ = os.environ + if ini_parser is None: + ini_parser = botocore.configloader.raw_config_parse + self._environ = environ + self._ini_parser = ini_parser + + def load(self): + """ + Look for credentials in boto config file. + """ + if self.BOTO_CONFIG_ENV in self._environ: + potential_locations = [self._environ[self.BOTO_CONFIG_ENV]] + else: + potential_locations = self.DEFAULT_CONFIG_FILENAMES + for filename in potential_locations: + try: + config = self._ini_parser(filename) + except ConfigNotFound: + # Move on to the next potential config file name. + continue + if 'Credentials' in config: + credentials = config['Credentials'] + if self.ACCESS_KEY in credentials: + logger.info( + "Found credentials in boto config file: %s", filename + ) + access_key, secret_key = self._extract_creds_from_mapping( + credentials, self.ACCESS_KEY, self.SECRET_KEY + ) + return Credentials( + access_key, secret_key, method=self.METHOD + ) + + +class AssumeRoleProvider(CredentialProvider): + METHOD = 'assume-role' + # The AssumeRole provider is logically part of the SharedConfig and + # SharedCredentials providers. Since the purpose of the canonical name + # is to provide cross-sdk compatibility, calling code will need to be + # aware that either of those providers should be tied to the AssumeRole + # provider as much as possible. + CANONICAL_NAME = None + ROLE_CONFIG_VAR = 'role_arn' + WEB_IDENTITY_TOKE_FILE_VAR = 'web_identity_token_file' + # Credentials are considered expired (and will be refreshed) once the total + # remaining time left until the credentials expires is less than the + # EXPIRY_WINDOW. + EXPIRY_WINDOW_SECONDS = 60 * 15 + + def __init__( + self, + load_config, + client_creator, + cache, + profile_name, + prompter=getpass.getpass, + credential_sourcer=None, + profile_provider_builder=None, + ): + """ + :type load_config: callable + :param load_config: A function that accepts no arguments, and + when called, will return the full configuration dictionary + for the session (``session.full_config``). + + :type client_creator: callable + :param client_creator: A factory function that will create + a client when called. Has the same interface as + ``botocore.session.Session.create_client``. + + :type cache: dict + :param cache: An object that supports ``__getitem__``, + ``__setitem__``, and ``__contains__``. An example + of this is the ``JSONFileCache`` class in the CLI. 
+
+        :type profile_name: str
+        :param profile_name: The name of the profile.
+
+        :type prompter: callable
+        :param prompter: A callable that returns input provided
+            by the user (i.e. raw_input, getpass.getpass, etc.).
+
+        :type credential_sourcer: CanonicalNameCredentialSourcer
+        :param credential_sourcer: A credential provider that takes a
+            configuration, which is used to provide the source credentials
+            for the STS call.
+        """
+        #: The cache used to first check for assumed credentials.
+        #: This is checked before making the AssumeRole API
+        #: calls and can be useful if you have short lived
+        #: scripts and you'd like to avoid calling AssumeRole
+        #: until the credentials are expired.
+        self.cache = cache
+        self._load_config = load_config
+        # client_creator is a callable that creates a client.
+        # It's basically session.create_client
+        self._client_creator = client_creator
+        self._profile_name = profile_name
+        self._prompter = prompter
+        # The _loaded_config attribute will be populated from the
+        # load_config() function once the configuration is actually
+        # loaded.  The reason we go through all this instead of just
+        # requiring that the loaded_config be passed to us is so that
+        # we can defer configuration loading until we actually try
+        # to load credentials (as opposed to when the object is
+        # instantiated).
+        self._loaded_config = {}
+        self._credential_sourcer = credential_sourcer
+        self._profile_provider_builder = profile_provider_builder
+        self._visited_profiles = [self._profile_name]
+
+    def load(self):
+        self._loaded_config = self._load_config()
+        profiles = self._loaded_config.get('profiles', {})
+        profile = profiles.get(self._profile_name, {})
+        if self._has_assume_role_config_vars(profile):
+            return self._load_creds_via_assume_role(self._profile_name)
+
+    def _has_assume_role_config_vars(self, profile):
+        return (
+            self.ROLE_CONFIG_VAR in profile
+            and
+            # We need to ensure this provider doesn't look at a profile when
+            # the profile has configuration for web identity. Simply relying
+            # on the order in the credential chain is insufficient as it
+            # doesn't prevent the case when we're doing an assume role chain.
+            self.WEB_IDENTITY_TOKE_FILE_VAR not in profile
+        )
+
+    def _load_creds_via_assume_role(self, profile_name):
+        role_config = self._get_role_config(profile_name)
+        source_credentials = self._resolve_source_credentials(
+            role_config, profile_name
+        )
+
+        extra_args = {}
+        role_session_name = role_config.get('role_session_name')
+        if role_session_name is not None:
+            extra_args['RoleSessionName'] = role_session_name
+
+        external_id = role_config.get('external_id')
+        if external_id is not None:
+            extra_args['ExternalId'] = external_id
+
+        mfa_serial = role_config.get('mfa_serial')
+        if mfa_serial is not None:
+            extra_args['SerialNumber'] = mfa_serial
+
+        duration_seconds = role_config.get('duration_seconds')
+        if duration_seconds is not None:
+            extra_args['DurationSeconds'] = duration_seconds
+
+        fetcher = AssumeRoleCredentialFetcher(
+            client_creator=self._client_creator,
+            source_credentials=source_credentials,
+            role_arn=role_config['role_arn'],
+            extra_args=extra_args,
+            mfa_prompter=self._prompter,
+            cache=self.cache,
+        )
+        refresher = fetcher.fetch_credentials
+        if mfa_serial is not None:
+            refresher = create_mfa_serial_refresher(refresher)
+
+        # The initial credentials are empty and the expiration time is set
+        # to now so that we can delay the call to assume role until it is
+        # strictly needed.
+ return DeferredRefreshableCredentials( + method=self.METHOD, + refresh_using=refresher, + time_fetcher=_local_now, + ) + + def _get_role_config(self, profile_name): + """Retrieves and validates the role configuration for the profile.""" + profiles = self._loaded_config.get('profiles', {}) + + profile = profiles[profile_name] + source_profile = profile.get('source_profile') + role_arn = profile['role_arn'] + credential_source = profile.get('credential_source') + mfa_serial = profile.get('mfa_serial') + external_id = profile.get('external_id') + role_session_name = profile.get('role_session_name') + duration_seconds = profile.get('duration_seconds') + + role_config = { + 'role_arn': role_arn, + 'external_id': external_id, + 'mfa_serial': mfa_serial, + 'role_session_name': role_session_name, + 'source_profile': source_profile, + 'credential_source': credential_source, + } + + if duration_seconds is not None: + try: + role_config['duration_seconds'] = int(duration_seconds) + except ValueError: + pass + + # Either the credential source or the source profile must be + # specified, but not both. + if credential_source is not None and source_profile is not None: + raise InvalidConfigError( + error_msg=( + f'The profile "{profile_name}" contains both ' + 'source_profile and credential_source.' + ) + ) + elif credential_source is None and source_profile is None: + raise PartialCredentialsError( + provider=self.METHOD, + cred_var='source_profile or credential_source', + ) + elif credential_source is not None: + self._validate_credential_source(profile_name, credential_source) + else: + self._validate_source_profile(profile_name, source_profile) + + return role_config + + def _validate_credential_source(self, parent_profile, credential_source): + if self._credential_sourcer is None: + raise InvalidConfigError( + error_msg=( + f"The credential_source \"{credential_source}\" is specified " + f"in profile \"{parent_profile}\", " + f"but no source provider was configured." + ) + ) + if not self._credential_sourcer.is_supported(credential_source): + raise InvalidConfigError( + error_msg=( + f"The credential source \"{credential_source}\" referenced " + f"in profile \"{parent_profile}\" is not valid." + ) + ) + + def _source_profile_has_credentials(self, profile): + return any( + [ + self._has_static_credentials(profile), + self._has_assume_role_config_vars(profile), + ] + ) + + def _validate_source_profile( + self, parent_profile_name, source_profile_name + ): + profiles = self._loaded_config.get('profiles', {}) + if source_profile_name not in profiles: + raise InvalidConfigError( + error_msg=( + f"The source_profile \"{source_profile_name}\" referenced in " + f"the profile \"{parent_profile_name}\" does not exist." + ) + ) + + source_profile = profiles[source_profile_name] + + # Make sure we aren't going into an infinite loop. If we haven't + # visited the profile yet, we're good. + if source_profile_name not in self._visited_profiles: + return + + # If we have visited the profile and the profile isn't simply + # referencing itself, that's an infinite loop. + if source_profile_name != parent_profile_name: + raise InfiniteLoopConfigError( + source_profile=source_profile_name, + visited_profiles=self._visited_profiles, + ) + + # A profile is allowed to reference itself so that it can source + # static credentials and have configuration all in the same + # profile. This will only ever work for the top level assume + # role because the static credentials will otherwise take + # precedence. 
+        if not self._has_static_credentials(source_profile):
+            raise InfiniteLoopConfigError(
+                source_profile=source_profile_name,
+                visited_profiles=self._visited_profiles,
+            )
+
+    def _has_static_credentials(self, profile):
+        static_keys = ['aws_secret_access_key', 'aws_access_key_id']
+        return any(static_key in profile for static_key in static_keys)
+
+    def _resolve_source_credentials(self, role_config, profile_name):
+        credential_source = role_config.get('credential_source')
+        if credential_source is not None:
+            return self._resolve_credentials_from_source(
+                credential_source, profile_name
+            )
+
+        source_profile = role_config['source_profile']
+        self._visited_profiles.append(source_profile)
+        return self._resolve_credentials_from_profile(source_profile)
+
+    def _resolve_credentials_from_profile(self, profile_name):
+        profiles = self._loaded_config.get('profiles', {})
+        profile = profiles[profile_name]
+
+        if (
+            self._has_static_credentials(profile)
+            and not self._profile_provider_builder
+        ):
+            # This is only here for backwards compatibility. If this
+            # provider isn't given a profile provider builder we still want
+            # to be able to handle the basic static credential case as we
+            # would before the profile provider builder parameter was added.
+            return self._resolve_static_credentials_from_profile(profile)
+        elif self._has_static_credentials(
+            profile
+        ) or not self._has_assume_role_config_vars(profile):
+            profile_providers = self._profile_provider_builder.providers(
+                profile_name=profile_name,
+                disable_env_vars=True,
+            )
+            profile_chain = CredentialResolver(profile_providers)
+            credentials = profile_chain.load_credentials()
+            if credentials is None:
+                error_message = (
+                    'The source profile "%s" must have credentials.'
+                )
+                raise InvalidConfigError(
+                    error_msg=error_message % profile_name,
+                )
+            return credentials
+
+        return self._load_creds_via_assume_role(profile_name)
+
+    def _resolve_static_credentials_from_profile(self, profile):
+        try:
+            return Credentials(
+                access_key=profile['aws_access_key_id'],
+                secret_key=profile['aws_secret_access_key'],
+                token=profile.get('aws_session_token'),
+            )
+        except KeyError as e:
+            raise PartialCredentialsError(
+                provider=self.METHOD, cred_var=str(e)
+            )
+
+    def _resolve_credentials_from_source(
+        self, credential_source, profile_name
+    ):
+        credentials = self._credential_sourcer.source_credentials(
+            credential_source
+        )
+        if credentials is None:
+            raise CredentialRetrievalError(
+                provider=credential_source,
+                error_msg=(
+                    'No credentials found in credential_source referenced '
+                    f'in profile {profile_name}'
+                ),
+            )
+        return credentials
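[Editor's note] To see the source-profile resolution end to end, a sketch that feeds `AssumeRoleProvider` an in-memory config; the profile names and values are made up. Because the result is `DeferredRefreshableCredentials`, no STS call is made until the credentials are first accessed, so this runs offline:

    from botocore.credentials import AssumeRoleProvider

    fake_config = {
        'profiles': {
            'dev': {
                'role_arn': 'arn:aws:iam::123456789012:role/dev',
                'source_profile': 'base',
            },
            'base': {
                'aws_access_key_id': 'AKIDEXAMPLE',
                'aws_secret_access_key': 'example-secret',
            },
        }
    }
    provider = AssumeRoleProvider(
        load_config=lambda: fake_config,
        client_creator=None,  # only exercised once credentials are accessed
        cache={},
        profile_name='dev',
    )
    creds = provider.load()
    print(type(creds).__name__)  # DeferredRefreshableCredentials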
+
+
+class AssumeRoleWithWebIdentityProvider(CredentialProvider):
+    METHOD = 'assume-role-with-web-identity'
+    CANONICAL_NAME = None
+    _CONFIG_TO_ENV_VAR = {
+        'web_identity_token_file': 'AWS_WEB_IDENTITY_TOKEN_FILE',
+        'role_session_name': 'AWS_ROLE_SESSION_NAME',
+        'role_arn': 'AWS_ROLE_ARN',
+    }
+
+    def __init__(
+        self,
+        load_config,
+        client_creator,
+        profile_name,
+        cache=None,
+        disable_env_vars=False,
+        token_loader_cls=None,
+    ):
+        self.cache = cache
+        self._load_config = load_config
+        self._client_creator = client_creator
+        self._profile_name = profile_name
+        self._profile_config = None
+        self._disable_env_vars = disable_env_vars
+        if token_loader_cls is None:
+            token_loader_cls = FileWebIdentityTokenLoader
+        self._token_loader_cls = token_loader_cls
+
+    def load(self):
+        return self._assume_role_with_web_identity()
+
+    def _get_profile_config(self, key):
+        if self._profile_config is None:
+            loaded_config = self._load_config()
+            profiles = loaded_config.get('profiles', {})
+            self._profile_config = profiles.get(self._profile_name, {})
+        return self._profile_config.get(key)
+
+    def _get_env_config(self, key):
+        if self._disable_env_vars:
+            return None
+        env_key = self._CONFIG_TO_ENV_VAR.get(key)
+        if env_key and env_key in os.environ:
+            return os.environ[env_key]
+        return None
+
+    def _get_config(self, key):
+        env_value = self._get_env_config(key)
+        if env_value is not None:
+            return env_value
+        return self._get_profile_config(key)
+
+    def _assume_role_with_web_identity(self):
+        token_path = self._get_config('web_identity_token_file')
+        if not token_path:
+            return None
+        token_loader = self._token_loader_cls(token_path)
+
+        role_arn = self._get_config('role_arn')
+        if not role_arn:
+            error_msg = (
+                'The provided profile or the current environment is '
+                'configured to assume role with web identity but has no '
+                'role ARN configured. Ensure that the profile has the '
+                'role_arn configuration set or the AWS_ROLE_ARN env var '
+                'is set.'
+            )
+            raise InvalidConfigError(error_msg=error_msg)
+
+        extra_args = {}
+        role_session_name = self._get_config('role_session_name')
+        if role_session_name is not None:
+            extra_args['RoleSessionName'] = role_session_name
+
+        fetcher = AssumeRoleWithWebIdentityCredentialFetcher(
+            client_creator=self._client_creator,
+            web_identity_token_loader=token_loader,
+            role_arn=role_arn,
+            extra_args=extra_args,
+            cache=self.cache,
+        )
+        # The initial credentials are empty and the expiration time is set
+        # to now so that we can delay the call to assume role until it is
+        # strictly needed.
+        return DeferredRefreshableCredentials(
+            method=self.METHOD,
+            refresh_using=fetcher.fetch_credentials,
+        )
+
+
+class CanonicalNameCredentialSourcer:
+    def __init__(self, providers):
+        self._providers = providers
+
+    def is_supported(self, source_name):
+        """Validates a given source name.
+
+        :type source_name: str
+        :param source_name: The value of credential_source in the config
+            file.  This is the canonical name of the credential provider.
+
+        :rtype: bool
+        :returns: True if the credential provider is supported,
+            False otherwise.
+        """
+        return source_name in [p.CANONICAL_NAME for p in self._providers]
+
+    def source_credentials(self, source_name):
+        """Loads source credentials based on the provided configuration.
+
+        :type source_name: str
+        :param source_name: The value of credential_source in the config
+            file.  This is the canonical name of the credential provider.
+
+        :rtype: Credentials
+        """
+        source = self._get_provider(source_name)
+        if isinstance(source, CredentialResolver):
+            return source.load_credentials()
+        return source.load()
+
+    def _get_provider(self, canonical_name):
+        """Return a credential provider by its canonical name.
+
+        :type canonical_name: str
+        :param canonical_name: The canonical name of the provider.
+
+        :raises UnknownCredentialError: Raised if no
+            credential provider by the provided name
+            is found.
+        """
+        provider = self._get_provider_by_canonical_name(canonical_name)
+
+        # The AssumeRole provider should really be part of the SharedConfig
+        # provider rather than being its own thing, but it is not. It is
+        # effectively part of both the SharedConfig provider and the
+        # SharedCredentials provider now due to the way it behaves.
+        # Therefore if we want either of those providers we should return
+        # the AssumeRole provider with it.
+ if canonical_name.lower() in ['sharedconfig', 'sharedcredentials']: + assume_role_provider = self._get_provider_by_method('assume-role') + if assume_role_provider is not None: + # The SharedConfig or SharedCredentials provider may not be + # present if it was removed for some reason, but the + # AssumeRole provider could still be present. In that case, + # return the assume role provider by itself. + if provider is None: + return assume_role_provider + + # If both are present, return them both as a + # CredentialResolver so that calling code can treat them as + # a single entity. + return CredentialResolver([assume_role_provider, provider]) + + if provider is None: + raise UnknownCredentialError(name=canonical_name) + + return provider + + def _get_provider_by_canonical_name(self, canonical_name): + """Return a credential provider by its canonical name. + + This function is strict, it does not attempt to address + compatibility issues. + """ + for provider in self._providers: + name = provider.CANONICAL_NAME + # Canonical names are case-insensitive + if name and name.lower() == canonical_name.lower(): + return provider + + def _get_provider_by_method(self, method): + """Return a credential provider by its METHOD name.""" + for provider in self._providers: + if provider.METHOD == method: + return provider + + +class ContainerProvider(CredentialProvider): + METHOD = 'container-role' + CANONICAL_NAME = 'EcsContainer' + ENV_VAR = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI' + ENV_VAR_FULL = 'AWS_CONTAINER_CREDENTIALS_FULL_URI' + ENV_VAR_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN' + ENV_VAR_AUTH_TOKEN_FILE = 'AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE' + + def __init__(self, environ=None, fetcher=None): + if environ is None: + environ = os.environ + if fetcher is None: + fetcher = ContainerMetadataFetcher() + self._environ = environ + self._fetcher = fetcher + + def load(self): + # This cred provider is only triggered if the self.ENV_VAR is set, + # which only happens if you opt into this feature. 
+ if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ: + return self._retrieve_or_fail() + + def _retrieve_or_fail(self): + if self._provided_relative_uri(): + full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR]) + else: + full_uri = self._environ[self.ENV_VAR_FULL] + fetcher = self._create_fetcher(full_uri) + creds = fetcher() + return RefreshableCredentials( + access_key=creds['access_key'], + secret_key=creds['secret_key'], + token=creds['token'], + method=self.METHOD, + expiry_time=_parse_if_needed(creds['expiry_time']), + refresh_using=fetcher, + ) + + def _build_headers(self): + auth_token = None + if self.ENV_VAR_AUTH_TOKEN_FILE in self._environ: + auth_token_file_path = self._environ[self.ENV_VAR_AUTH_TOKEN_FILE] + with open(auth_token_file_path) as token_file: + auth_token = token_file.read() + elif self.ENV_VAR_AUTH_TOKEN in self._environ: + auth_token = self._environ[self.ENV_VAR_AUTH_TOKEN] + if auth_token is not None: + self._validate_auth_token(auth_token) + return {'Authorization': auth_token} + + def _validate_auth_token(self, auth_token): + if "\r" in auth_token or "\n" in auth_token: + raise ValueError("Auth token value is not a legal header value") + + def _create_fetcher(self, full_uri, *args, **kwargs): + def fetch_creds(): + try: + headers = self._build_headers() + response = self._fetcher.retrieve_full_uri( + full_uri, headers=headers + ) + except MetadataRetrievalError as e: + logger.debug( + "Error retrieving container metadata: %s", e, exc_info=True + ) + raise CredentialRetrievalError( + provider=self.METHOD, error_msg=str(e) + ) + return { + 'access_key': response['AccessKeyId'], + 'secret_key': response['SecretAccessKey'], + 'token': response['Token'], + 'expiry_time': response['Expiration'], + } + + return fetch_creds + + def _provided_relative_uri(self): + return self.ENV_VAR in self._environ + + +class CredentialResolver: + def __init__(self, providers): + """ + + :param providers: A list of ``CredentialProvider`` instances. + + """ + self.providers = providers + + def insert_before(self, name, credential_provider): + """ + Inserts a new instance of ``CredentialProvider`` into the chain that + will be tried before an existing one. + + :param name: The short name of the credentials you'd like to insert the + new credentials before. (ex. ``env`` or ``config``). Existing names + & ordering can be discovered via ``self.available_methods``. + :type name: string + + :param cred_instance: An instance of the new ``Credentials`` object + you'd like to add to the chain. + :type cred_instance: A subclass of ``Credentials`` + """ + try: + offset = [p.METHOD for p in self.providers].index(name) + except ValueError: + raise UnknownCredentialError(name=name) + self.providers.insert(offset, credential_provider) + + def insert_after(self, name, credential_provider): + """ + Inserts a new type of ``Credentials`` instance into the chain that will + be tried after an existing one. + + :param name: The short name of the credentials you'd like to insert the + new credentials after. (ex. ``env`` or ``config``). Existing names + & ordering can be discovered via ``self.available_methods``. + :type name: string + + :param cred_instance: An instance of the new ``Credentials`` object + you'd like to add to the chain. 
+ :type cred_instance: A subclass of ``Credentials`` + """ + offset = self._get_provider_offset(name) + self.providers.insert(offset + 1, credential_provider) + + def remove(self, name): + """ + Removes a given ``Credentials`` instance from the chain. + + :param name: The short name of the credentials instance to remove. + :type name: string + """ + available_methods = [p.METHOD for p in self.providers] + if name not in available_methods: + # It's not present. Fail silently. + return + + offset = available_methods.index(name) + self.providers.pop(offset) + + def get_provider(self, name): + """Return a credential provider by name. + + :type name: str + :param name: The name of the provider. + + :raises UnknownCredentialError: Raised if no + credential provider by the provided name + is found. + """ + return self.providers[self._get_provider_offset(name)] + + def _get_provider_offset(self, name): + try: + return [p.METHOD for p in self.providers].index(name) + except ValueError: + raise UnknownCredentialError(name=name) + + def load_credentials(self): + """ + Goes through the credentials chain, returning the first ``Credentials`` + that could be loaded. + """ + # First provider to return a non-None response wins. + for provider in self.providers: + logger.debug("Looking for credentials via: %s", provider.METHOD) + creds = provider.load() + if creds is not None: + return creds + + # If we got here, no credentials could be found. + # This feels like it should be an exception, but historically, ``None`` + # is returned. + # + # +1 + # -js + return None + + +class SSOCredentialFetcher(CachedCredentialFetcher): + _UTC_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' + + def __init__( + self, + start_url, + sso_region, + role_name, + account_id, + client_creator, + token_loader=None, + cache=None, + expiry_window_seconds=None, + token_provider=None, + sso_session_name=None, + ): + self._client_creator = client_creator + self._sso_region = sso_region + self._role_name = role_name + self._account_id = account_id + self._start_url = start_url + self._token_loader = token_loader + self._token_provider = token_provider + self._sso_session_name = sso_session_name + super().__init__(cache, expiry_window_seconds) + + def _create_cache_key(self): + """Create a predictable cache key for the current configuration. + + The cache key is intended to be compatible with file names. + """ + args = { + 'roleName': self._role_name, + 'accountId': self._account_id, + } + if self._sso_session_name: + args['sessionName'] = self._sso_session_name + else: + args['startUrl'] = self._start_url + # NOTE: It would be good to hoist this cache key construction logic + # into the CachedCredentialFetcher class as we should be consistent. + # Unfortunately, the current assume role fetchers that sub class don't + # pass separators resulting in non-minified JSON. In the long term, + # all fetchers should use the below caching scheme. 
+ args = json.dumps(args, sort_keys=True, separators=(',', ':')) + argument_hash = sha1(args.encode('utf-8')).hexdigest() + return self._make_file_safe(argument_hash) + + def _parse_timestamp(self, timestamp_ms): + # fromtimestamp expects seconds so: milliseconds / 1000 = seconds + timestamp_seconds = timestamp_ms / 1000.0 + timestamp = datetime.datetime.fromtimestamp(timestamp_seconds, tzutc()) + return timestamp.strftime(self._UTC_DATE_FORMAT) + + def _get_credentials(self): + """Get credentials by calling SSO get role credentials.""" + config = Config( + signature_version=UNSIGNED, + region_name=self._sso_region, + ) + client = self._client_creator('sso', config=config) + if self._token_provider: + initial_token_data = self._token_provider.load_token() + token = initial_token_data.get_frozen_token().token + else: + token = self._token_loader(self._start_url)['accessToken'] + + kwargs = { + 'roleName': self._role_name, + 'accountId': self._account_id, + 'accessToken': token, + } + try: + response = client.get_role_credentials(**kwargs) + except client.exceptions.UnauthorizedException: + raise UnauthorizedSSOTokenError() + credentials = response['roleCredentials'] + + credentials = { + 'ProviderType': 'sso', + 'Credentials': { + 'AccessKeyId': credentials['accessKeyId'], + 'SecretAccessKey': credentials['secretAccessKey'], + 'SessionToken': credentials['sessionToken'], + 'Expiration': self._parse_timestamp(credentials['expiration']), + }, + } + return credentials + + +class SSOProvider(CredentialProvider): + METHOD = 'sso' + + _SSO_TOKEN_CACHE_DIR = os.path.expanduser( + os.path.join('~', '.aws', 'sso', 'cache') + ) + _PROFILE_REQUIRED_CONFIG_VARS = ( + 'sso_role_name', + 'sso_account_id', + ) + _SSO_REQUIRED_CONFIG_VARS = ( + 'sso_start_url', + 'sso_region', + ) + _ALL_REQUIRED_CONFIG_VARS = ( + _PROFILE_REQUIRED_CONFIG_VARS + _SSO_REQUIRED_CONFIG_VARS + ) + + def __init__( + self, + load_config, + client_creator, + profile_name, + cache=None, + token_cache=None, + token_provider=None, + ): + if token_cache is None: + token_cache = JSONFileCache(self._SSO_TOKEN_CACHE_DIR) + self._token_cache = token_cache + self._token_provider = token_provider + if cache is None: + cache = {} + self.cache = cache + self._load_config = load_config + self._client_creator = client_creator + self._profile_name = profile_name + + def _load_sso_config(self): + loaded_config = self._load_config() + profiles = loaded_config.get('profiles', {}) + profile_name = self._profile_name + profile_config = profiles.get(self._profile_name, {}) + sso_sessions = loaded_config.get('sso_sessions', {}) + + # Role name & Account ID indicate the cred provider should be used + if all( + c not in profile_config for c in self._PROFILE_REQUIRED_CONFIG_VARS + ): + return None + + resolved_config, extra_reqs = self._resolve_sso_session_reference( + profile_config, sso_sessions + ) + + config = {} + missing_config_vars = [] + all_required_configs = self._ALL_REQUIRED_CONFIG_VARS + extra_reqs + for config_var in all_required_configs: + if config_var in resolved_config: + config[config_var] = resolved_config[config_var] + else: + missing_config_vars.append(config_var) + + if missing_config_vars: + missing = ', '.join(missing_config_vars) + raise InvalidConfigError( + error_msg=( + f'The profile "{profile_name}" is configured to use SSO ' + f'but is missing required configuration: {missing}' + ) + ) + return config + + def _resolve_sso_session_reference(self, profile_config, sso_sessions): + sso_session_name = 
profile_config.get('sso_session') + if sso_session_name is None: + # No reference to resolve, proceed with legacy flow + return profile_config, () + + if sso_session_name not in sso_sessions: + error_msg = f'The specified sso-session does not exist: "{sso_session_name}"' + raise InvalidConfigError(error_msg=error_msg) + + config = profile_config.copy() + session = sso_sessions[sso_session_name] + for config_var, val in session.items(): + # Validate any keys referenced in both profile and sso_session match + if config.get(config_var, val) != val: + error_msg = ( + f"The value for {config_var} is inconsistent between " + f"profile ({config[config_var]}) and sso-session ({val})." + ) + raise InvalidConfigError(error_msg=error_msg) + config[config_var] = val + return config, ('sso_session',) + + def load(self): + sso_config = self._load_sso_config() + if not sso_config: + return None + + fetcher_kwargs = { + 'start_url': sso_config['sso_start_url'], + 'sso_region': sso_config['sso_region'], + 'role_name': sso_config['sso_role_name'], + 'account_id': sso_config['sso_account_id'], + 'client_creator': self._client_creator, + 'token_loader': SSOTokenLoader(cache=self._token_cache), + 'cache': self.cache, + } + if 'sso_session' in sso_config: + fetcher_kwargs['sso_session_name'] = sso_config['sso_session'] + fetcher_kwargs['token_provider'] = self._token_provider + + sso_fetcher = SSOCredentialFetcher(**fetcher_kwargs) + + return DeferredRefreshableCredentials( + method=self.METHOD, + refresh_using=sso_fetcher.fetch_credentials, + ) diff --git a/venv/lib/python3.10/site-packages/botocore/crt/__init__.py b/venv/lib/python3.10/site-packages/botocore/crt/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..952ebf34cc37bde64e7fcd14a9b252a205429f47 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/crt/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +# A list of auth types supported by the signers in botocore/crt/auth.py. This +# should always match the keys of botocore.crt.auth.CRT_AUTH_TYPE_MAPS. The +# information is duplicated here so that it can be accessed in environments +# where `awscrt` is not present and any import from botocore.crt.auth would +# fail. 
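+# Illustrative usage sketch (editorial, not part of upstream botocore):
+# a caller holding a hypothetical `signature_version` string can gate
+# CRT-specific behavior without importing awscrt, e.g.
+#
+#     from botocore.crt import CRT_SUPPORTED_AUTH_TYPES
+#     if signature_version in CRT_SUPPORTED_AUTH_TYPES:
+#         ...  # safe to import and use botocore.crt.auth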
+CRT_SUPPORTED_AUTH_TYPES = ( + 'v4', + 'v4-query', + 'v4a', + 's3v4', + 's3v4-query', + 's3v4a', + 's3v4a-query', +) diff --git a/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a6d645c89df27e13c7f16d1532ef249fb24f36c Binary files /dev/null and b/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/auth.cpython-310.pyc b/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/auth.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b7dce060054067caa93614098415b588c28a870 Binary files /dev/null and b/venv/lib/python3.10/site-packages/botocore/crt/__pycache__/auth.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/botocore/crt/auth.py b/venv/lib/python3.10/site-packages/botocore/crt/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..0d1a81def49fa2dda6673a648f57b5a5d9d23770 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/crt/auth.py @@ -0,0 +1,629 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import datetime +from io import BytesIO + +from botocore.auth import ( + SIGNED_HEADERS_BLACKLIST, + STREAMING_UNSIGNED_PAYLOAD_TRAILER, + UNSIGNED_PAYLOAD, + BaseSigner, + _get_body_as_dict, + _host_from_url, +) +from botocore.compat import HTTPHeaders, awscrt, parse_qs, urlsplit, urlunsplit +from botocore.exceptions import NoCredentialsError +from botocore.utils import percent_encode_sequence + + +class CrtSigV4Auth(BaseSigner): + REQUIRES_REGION = True + _PRESIGNED_HEADERS_BLOCKLIST = [ + 'Authorization', + 'X-Amz-Date', + 'X-Amz-Content-SHA256', + 'X-Amz-Security-Token', + ] + _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS + _USE_DOUBLE_URI_ENCODE = True + _SHOULD_NORMALIZE_URI_PATH = True + + def __init__(self, credentials, service_name, region_name): + self.credentials = credentials + self._service_name = service_name + self._region_name = region_name + self._expiration_in_seconds = None + + def _is_streaming_checksum_payload(self, request): + checksum_context = request.context.get('checksum', {}) + algorithm = checksum_context.get('request_algorithm') + return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' + + def add_auth(self, request): + if self.credentials is None: + raise NoCredentialsError() + + # Use utcnow() because that's what gets mocked by tests, but set + # timezone because CRT assumes naive datetime is local time. 
+ datetime_now = datetime.datetime.utcnow().replace( + tzinfo=datetime.timezone.utc + ) + + # Use existing 'X-Amz-Content-SHA256' header if able + existing_sha256 = self._get_existing_sha256(request) + + self._modify_request_before_signing(request) + + credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static( + access_key_id=self.credentials.access_key, + secret_access_key=self.credentials.secret_key, + session_token=self.credentials.token, + ) + + if self._is_streaming_checksum_payload(request): + explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER + elif self._should_sha256_sign_payload(request): + if existing_sha256: + explicit_payload = existing_sha256 + else: + explicit_payload = None # to be calculated during signing + else: + explicit_payload = UNSIGNED_PAYLOAD + + if self._should_add_content_sha256_header(explicit_payload): + body_header = ( + awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256 + ) + else: + body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE + + signing_config = awscrt.auth.AwsSigningConfig( + algorithm=awscrt.auth.AwsSigningAlgorithm.V4, + signature_type=self._SIGNATURE_TYPE, + credentials_provider=credentials_provider, + region=self._region_name, + service=self._service_name, + date=datetime_now, + should_sign_header=self._should_sign_header, + use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE, + should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH, + signed_body_value=explicit_payload, + signed_body_header_type=body_header, + expiration_in_seconds=self._expiration_in_seconds, + ) + crt_request = self._crt_request_from_aws_request(request) + future = awscrt.auth.aws_sign_request(crt_request, signing_config) + future.result() + self._apply_signing_changes(request, crt_request) + + def _crt_request_from_aws_request(self, aws_request): + url_parts = urlsplit(aws_request.url) + crt_path = url_parts.path if url_parts.path else '/' + if aws_request.params: + array = [] + for param, value in aws_request.params.items(): + value = str(value) + array.append(f'{param}={value}') + crt_path = crt_path + '?' + '&'.join(array) + elif url_parts.query: + crt_path = f'{crt_path}?{url_parts.query}' + + crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items()) + + # CRT requires body (if it exists) to be an I/O stream. + crt_body_stream = None + if aws_request.body: + if hasattr(aws_request.body, 'seek'): + crt_body_stream = aws_request.body + else: + crt_body_stream = BytesIO(aws_request.body) + + crt_request = awscrt.http.HttpRequest( + method=aws_request.method, + path=crt_path, + headers=crt_headers, + body_stream=crt_body_stream, + ) + return crt_request + + def _apply_signing_changes(self, aws_request, signed_crt_request): + # Apply changes from signed CRT request to the AWSRequest + aws_request.headers = HTTPHeaders.from_pairs( + list(signed_crt_request.headers) + ) + + def _should_sign_header(self, name, **kwargs): + return name.lower() not in SIGNED_HEADERS_BLACKLIST + + def _modify_request_before_signing(self, request): + # This could be a retry. Make sure the previous + # authorization headers are removed first. 
+ for h in self._PRESIGNED_HEADERS_BLOCKLIST: + if h in request.headers: + del request.headers[h] + # If necessary, add the host header + if 'host' not in request.headers: + request.headers['host'] = _host_from_url(request.url) + + def _get_existing_sha256(self, request): + return request.headers.get('X-Amz-Content-SHA256') + + def _should_sha256_sign_payload(self, request): + # Payloads will always be signed over insecure connections. + if not request.url.startswith('https'): + return True + + # Certain operations may have payload signing disabled by default. + # Since we don't have access to the operation model, we pass in this + # bit of metadata through the request context. + return request.context.get('payload_signing_enabled', True) + + def _should_add_content_sha256_header(self, explicit_payload): + # only add X-Amz-Content-SHA256 header if payload is explicitly set + return explicit_payload is not None + + +class CrtS3SigV4Auth(CrtSigV4Auth): + # For S3, we do not normalize the path. + _USE_DOUBLE_URI_ENCODE = False + _SHOULD_NORMALIZE_URI_PATH = False + + def _get_existing_sha256(self, request): + # always recalculate + return None + + def _should_sha256_sign_payload(self, request): + # S3 allows optional body signing, so to minimize the performance + # impact, we opt to not SHA256 sign the body on streaming uploads, + # provided that we're on https. + client_config = request.context.get('client_config') + s3_config = getattr(client_config, 's3', None) + + # The config could be None if it isn't set, or if the customer sets it + # to None. + if s3_config is None: + s3_config = {} + + # The explicit configuration takes precedence over any implicit + # configuration. + sign_payload = s3_config.get('payload_signing_enabled', None) + if sign_payload is not None: + return sign_payload + + # We require that both a checksum be present and https be enabled + # to implicitly disable body signing. The combination of TLS and + # a checksum is sufficiently secure and durable for us to be + # confident in the request without body signing. + checksum_header = 'Content-MD5' + checksum_context = request.context.get('checksum', {}) + algorithm = checksum_context.get('request_algorithm') + if isinstance(algorithm, dict) and algorithm.get('in') == 'header': + checksum_header = algorithm['name'] + if ( + not request.url.startswith('https') + or checksum_header not in request.headers + ): + return True + + # If the input is streaming we disable body signing by default. + if request.context.get('has_streaming_input', False): + return False + + # If the S3-specific checks had no results, delegate to the generic + # checks. 
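+ # Editorial summary of the precedence above (not upstream botocore):
+ # 1) an explicit payload_signing_enabled in the client's s3 config wins;
+ # 2) otherwise, https plus a header-borne checksum (Content-MD5 or a
+ #    flexible checksum header) is required to skip body signing;
+ # 3) streaming inputs then default to unsigned payloads;
+ # 4) anything else falls through to the generic check below.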
+ return super()._should_sha256_sign_payload(request) + + def _should_add_content_sha256_header(self, explicit_payload): + # Always add X-Amz-Content-SHA256 header + return True + + +class CrtSigV4AsymAuth(BaseSigner): + REQUIRES_REGION = True + _PRESIGNED_HEADERS_BLOCKLIST = [ + 'Authorization', + 'X-Amz-Date', + 'X-Amz-Content-SHA256', + 'X-Amz-Security-Token', + ] + _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_HEADERS + _USE_DOUBLE_URI_ENCODE = True + _SHOULD_NORMALIZE_URI_PATH = True + + def __init__(self, credentials, service_name, region_name): + self.credentials = credentials + self._service_name = service_name + self._region_name = region_name + self._expiration_in_seconds = None + + def add_auth(self, request): + if self.credentials is None: + raise NoCredentialsError() + + # Use utcnow() because that's what gets mocked by tests, but set + # timezone because CRT assumes naive datetime is local time. + datetime_now = datetime.datetime.utcnow().replace( + tzinfo=datetime.timezone.utc + ) + + # Use existing 'X-Amz-Content-SHA256' header if able + existing_sha256 = self._get_existing_sha256(request) + + self._modify_request_before_signing(request) + + credentials_provider = awscrt.auth.AwsCredentialsProvider.new_static( + access_key_id=self.credentials.access_key, + secret_access_key=self.credentials.secret_key, + session_token=self.credentials.token, + ) + + if self._is_streaming_checksum_payload(request): + explicit_payload = STREAMING_UNSIGNED_PAYLOAD_TRAILER + elif self._should_sha256_sign_payload(request): + if existing_sha256: + explicit_payload = existing_sha256 + else: + explicit_payload = None # to be calculated during signing + else: + explicit_payload = UNSIGNED_PAYLOAD + + if self._should_add_content_sha256_header(explicit_payload): + body_header = ( + awscrt.auth.AwsSignedBodyHeaderType.X_AMZ_CONTENT_SHA_256 + ) + else: + body_header = awscrt.auth.AwsSignedBodyHeaderType.NONE + + signing_config = awscrt.auth.AwsSigningConfig( + algorithm=awscrt.auth.AwsSigningAlgorithm.V4_ASYMMETRIC, + signature_type=self._SIGNATURE_TYPE, + credentials_provider=credentials_provider, + region=self._region_name, + service=self._service_name, + date=datetime_now, + should_sign_header=self._should_sign_header, + use_double_uri_encode=self._USE_DOUBLE_URI_ENCODE, + should_normalize_uri_path=self._SHOULD_NORMALIZE_URI_PATH, + signed_body_value=explicit_payload, + signed_body_header_type=body_header, + expiration_in_seconds=self._expiration_in_seconds, + ) + crt_request = self._crt_request_from_aws_request(request) + future = awscrt.auth.aws_sign_request(crt_request, signing_config) + future.result() + self._apply_signing_changes(request, crt_request) + + def _crt_request_from_aws_request(self, aws_request): + url_parts = urlsplit(aws_request.url) + crt_path = url_parts.path if url_parts.path else '/' + if aws_request.params: + array = [] + for param, value in aws_request.params.items(): + value = str(value) + array.append(f'{param}={value}') + crt_path = crt_path + '?' + '&'.join(array) + elif url_parts.query: + crt_path = f'{crt_path}?{url_parts.query}' + + crt_headers = awscrt.http.HttpHeaders(aws_request.headers.items()) + + # CRT requires body (if it exists) to be an I/O stream. 
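+ # For example (editorial note, not upstream botocore): a bytes body such
+ # as b'Action=ListUsers' is wrapped in BytesIO below, while a file-like
+ # body that already exposes .seek() is passed through unchanged.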
+ crt_body_stream = None + if aws_request.body: + if hasattr(aws_request.body, 'seek'): + crt_body_stream = aws_request.body + else: + crt_body_stream = BytesIO(aws_request.body) + + crt_request = awscrt.http.HttpRequest( + method=aws_request.method, + path=crt_path, + headers=crt_headers, + body_stream=crt_body_stream, + ) + return crt_request + + def _apply_signing_changes(self, aws_request, signed_crt_request): + # Apply changes from signed CRT request to the AWSRequest + aws_request.headers = HTTPHeaders.from_pairs( + list(signed_crt_request.headers) + ) + + def _should_sign_header(self, name, **kwargs): + return name.lower() not in SIGNED_HEADERS_BLACKLIST + + def _modify_request_before_signing(self, request): + # This could be a retry. Make sure the previous + # authorization headers are removed first. + for h in self._PRESIGNED_HEADERS_BLOCKLIST: + if h in request.headers: + del request.headers[h] + # If necessary, add the host header + if 'host' not in request.headers: + request.headers['host'] = _host_from_url(request.url) + + def _get_existing_sha256(self, request): + return request.headers.get('X-Amz-Content-SHA256') + + def _is_streaming_checksum_payload(self, request): + checksum_context = request.context.get('checksum', {}) + algorithm = checksum_context.get('request_algorithm') + return isinstance(algorithm, dict) and algorithm.get('in') == 'trailer' + + def _should_sha256_sign_payload(self, request): + # Payloads will always be signed over insecure connections. + if not request.url.startswith('https'): + return True + + # Certain operations may have payload signing disabled by default. + # Since we don't have access to the operation model, we pass in this + # bit of metadata through the request context. + return request.context.get('payload_signing_enabled', True) + + def _should_add_content_sha256_header(self, explicit_payload): + # only add X-Amz-Content-SHA256 header if payload is explicitly set + return explicit_payload is not None + + +class CrtS3SigV4AsymAuth(CrtSigV4AsymAuth): + # For S3, we do not normalize the path. + _USE_DOUBLE_URI_ENCODE = False + _SHOULD_NORMALIZE_URI_PATH = False + + def _get_existing_sha256(self, request): + # always recalculate + return None + + def _should_sha256_sign_payload(self, request): + # S3 allows optional body signing, so to minimize the performance + # impact, we opt to not SHA256 sign the body on streaming uploads, + # provided that we're on https. + client_config = request.context.get('client_config') + s3_config = getattr(client_config, 's3', None) + + # The config could be None if it isn't set, or if the customer sets it + # to None. + if s3_config is None: + s3_config = {} + + # The explicit configuration takes precedence over any implicit + # configuration. + sign_payload = s3_config.get('payload_signing_enabled', None) + if sign_payload is not None: + return sign_payload + + # We require that both content-md5 be present and https be enabled + # to implicitly disable body signing. The combination of TLS and + # content-md5 is sufficiently secure and durable for us to be + # confident in the request without body signing. + if ( + not request.url.startswith('https') + or 'Content-MD5' not in request.headers + ): + return True + + # If the input is streaming we disable body signing by default. + if request.context.get('has_streaming_input', False): + return False + + # If the S3-specific checks had no results, delegate to the generic + # checks. 
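+ # Editorial note, not upstream botocore: unlike CrtS3SigV4Auth above,
+ # this SigV4A variant consults only Content-MD5, not the flexible
+ # checksum headers, before implicitly disabling body signing.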
+ return super()._should_sha256_sign_payload(request) + + def _should_add_content_sha256_header(self, explicit_payload): + # Always add X-Amz-Content-SHA256 header + return True + + +class CrtSigV4AsymQueryAuth(CrtSigV4AsymAuth): + DEFAULT_EXPIRES = 3600 + _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS + + def __init__( + self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES + ): + super().__init__(credentials, service_name, region_name) + self._expiration_in_seconds = expires + + def _modify_request_before_signing(self, request): + super()._modify_request_before_signing(request) + + # We automatically set this header, so if it's the auto-set value we + # want to get rid of it since it doesn't make sense for presigned urls. + content_type = request.headers.get('content-type') + if content_type == 'application/x-www-form-urlencoded; charset=utf-8': + del request.headers['content-type'] + + # Now parse the original query string to a dict, inject our new query + # params, and serialize back to a query string. + url_parts = urlsplit(request.url) + # parse_qs makes each value a list, but in our case we know we won't + # have repeated keys so we know we have single element lists which we + # can convert back to scalar values. + query_string_parts = parse_qs(url_parts.query, keep_blank_values=True) + query_dict = {k: v[0] for k, v in query_string_parts.items()} + + # The spec is particular about this. It *has* to be: + # https://?& + # You can't mix the two types of params together, i.e just keep doing + # new_query_params.update(op_params) + # new_query_params.update(auth_params) + # percent_encode_sequence(new_query_params) + if request.data: + # We also need to move the body params into the query string. To + # do this, we first have to convert it to a dict. + query_dict.update(_get_body_as_dict(request)) + request.data = '' + new_query_string = percent_encode_sequence(query_dict) + # url_parts is a tuple (and therefore immutable) so we need to create + # a new url_parts with the new query string. + # - + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + p = url_parts + new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) + request.url = urlunsplit(new_url_parts) + + def _apply_signing_changes(self, aws_request, signed_crt_request): + # Apply changes from signed CRT request to the AWSRequest + super()._apply_signing_changes(aws_request, signed_crt_request) + + signed_query = urlsplit(signed_crt_request.path).query + p = urlsplit(aws_request.url) + # urlsplit() returns a tuple (and therefore immutable) so we + # need to create new url with the new query string. + # - + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4])) + + +class CrtS3SigV4AsymQueryAuth(CrtSigV4AsymQueryAuth): + """S3 SigV4A auth using query parameters. + This signer will sign a request using query parameters and signature + version 4A, i.e a "presigned url" signer. + """ + + # For S3, we do not normalize the path. + _USE_DOUBLE_URI_ENCODE = False + _SHOULD_NORMALIZE_URI_PATH = False + + def _should_sha256_sign_payload(self, request): + # From the doc link above: + # "You don't include a payload hash in the Canonical Request, because + # when you create a presigned URL, you don't know anything about the + # payload. Instead, you use a constant string "UNSIGNED-PAYLOAD". 
+ return False + + def _should_add_content_sha256_header(self, explicit_payload): + # Never add X-Amz-Content-SHA256 header + return False + + +class CrtSigV4QueryAuth(CrtSigV4Auth): + DEFAULT_EXPIRES = 3600 + _SIGNATURE_TYPE = awscrt.auth.AwsSignatureType.HTTP_REQUEST_QUERY_PARAMS + + def __init__( + self, credentials, service_name, region_name, expires=DEFAULT_EXPIRES + ): + super().__init__(credentials, service_name, region_name) + self._expiration_in_seconds = expires + + def _modify_request_before_signing(self, request): + super()._modify_request_before_signing(request) + + # We automatically set this header, so if it's the auto-set value we + # want to get rid of it since it doesn't make sense for presigned urls. + content_type = request.headers.get('content-type') + if content_type == 'application/x-www-form-urlencoded; charset=utf-8': + del request.headers['content-type'] + + # Now parse the original query string to a dict, inject our new query + # params, and serialize back to a query string. + url_parts = urlsplit(request.url) + # parse_qs makes each value a list, but in our case we know we won't + # have repeated keys so we know we have single element lists which we + # can convert back to scalar values. + query_dict = { + k: v[0] + for k, v in parse_qs( + url_parts.query, keep_blank_values=True + ).items() + } + if request.params: + query_dict.update(request.params) + request.params = {} + # The spec is particular about this. It *has* to be: + # https://?& + # You can't mix the two types of params together, i.e just keep doing + # new_query_params.update(op_params) + # new_query_params.update(auth_params) + # percent_encode_sequence(new_query_params) + if request.data: + # We also need to move the body params into the query string. To + # do this, we first have to convert it to a dict. + query_dict.update(_get_body_as_dict(request)) + request.data = '' + new_query_string = percent_encode_sequence(query_dict) + # url_parts is a tuple (and therefore immutable) so we need to create + # a new url_parts with the new query string. + # - + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + p = url_parts + new_url_parts = (p[0], p[1], p[2], new_query_string, p[4]) + request.url = urlunsplit(new_url_parts) + + def _apply_signing_changes(self, aws_request, signed_crt_request): + # Apply changes from signed CRT request to the AWSRequest + super()._apply_signing_changes(aws_request, signed_crt_request) + + signed_query = urlsplit(signed_crt_request.path).query + p = urlsplit(aws_request.url) + # urlsplit() returns a tuple (and therefore immutable) so we + # need to create new url with the new query string. + # - + # scheme - 0 + # netloc - 1 + # path - 2 + # query - 3 <-- we're replacing this. + # fragment - 4 + aws_request.url = urlunsplit((p[0], p[1], p[2], signed_query, p[4])) + + +class CrtS3SigV4QueryAuth(CrtSigV4QueryAuth): + """S3 SigV4 auth using query parameters. + This signer will sign a request using query parameters and signature + version 4, i.e a "presigned url" signer. + Based off of: + http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html + """ + + # For S3, we do not normalize the path. + _USE_DOUBLE_URI_ENCODE = False + _SHOULD_NORMALIZE_URI_PATH = False + + def _should_sha256_sign_payload(self, request): + # From the doc link above: + # "You don't include a payload hash in the Canonical Request, because + # when you create a presigned URL, you don't know anything about the + # payload. 
Instead, you use a constant string "UNSIGNED-PAYLOAD". + return False + + def _should_add_content_sha256_header(self, explicit_payload): + # Never add X-Amz-Content-SHA256 header + return False + + +# Defined at the bottom of module to ensure all Auth +# classes are defined. +CRT_AUTH_TYPE_MAPS = { + 'v4': CrtSigV4Auth, + 'v4-query': CrtSigV4QueryAuth, + 'v4a': CrtSigV4AsymAuth, + 's3v4': CrtS3SigV4Auth, + 's3v4-query': CrtS3SigV4QueryAuth, + 's3v4a': CrtS3SigV4AsymAuth, + 's3v4a-query': CrtS3SigV4AsymQueryAuth, +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/_retry.json b/venv/lib/python3.10/site-packages/botocore/data/_retry.json new file mode 100644 index 0000000000000000000000000000000000000000..bfdd2641f3c3215e6f60af7b416a04088e1e0815 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/_retry.json @@ -0,0 +1,292 @@ +{ + "definitions": { + "throttling": { + "applies_when": { + "response": { + "service_error_code": "Throttling", + "http_status_code": 400 + } + } + }, + "throttling_exception": { + "applies_when": { + "response": { + "service_error_code": "ThrottlingException", + "http_status_code": 400 + } + } + }, + "throttled_exception": { + "applies_when": { + "response": { + "service_error_code": "ThrottledException", + "http_status_code": 400 + } + } + }, + "request_throttled_exception": { + "applies_when": { + "response": { + "service_error_code": "RequestThrottledException", + "http_status_code": 400 + } + } + }, + "too_many_requests": { + "applies_when": { + "response": { + "http_status_code": 429 + } + } + }, + "general_socket_errors": { + "applies_when": { + "socket_errors": ["GENERAL_CONNECTION_ERROR"] + } + }, + "general_server_error": { + "applies_when": { + "response": { + "http_status_code": 500 + } + } + }, + "bad_gateway": { + "applies_when": { + "response": { + "http_status_code": 502 + } + } + }, + "service_unavailable": { + "applies_when": { + "response": { + "http_status_code": 503 + } + } + }, + "gateway_timeout": { + "applies_when": { + "response": { + "http_status_code": 504 + } + } + }, + "limit_exceeded": { + "applies_when": { + "response": { + "http_status_code": 509 + } + } + }, + "throughput_exceeded": { + "applies_when": { + "response": { + "service_error_code": "ProvisionedThroughputExceededException", + "http_status_code": 400 + } + } + } + }, + "retry": { + "__default__": { + "max_attempts": 5, + "delay": { + "type": "exponential", + "base": "rand", + "growth_factor": 2 + }, + "policies": { + "general_socket_errors": {"$ref": "general_socket_errors"}, + "general_server_error": {"$ref": "general_server_error"}, + "bad_gateway": {"$ref": "bad_gateway"}, + "service_unavailable": {"$ref": "service_unavailable"}, + "gateway_timeout": {"$ref": "gateway_timeout"}, + "limit_exceeded": {"$ref": "limit_exceeded"}, + "throttling_exception": {"$ref": "throttling_exception"}, + "throttled_exception": {"$ref": "throttled_exception"}, + "request_throttled_exception": {"$ref": "request_throttled_exception"}, + "throttling": {"$ref": "throttling"}, + "too_many_requests": {"$ref": "too_many_requests"}, + "throughput_exceeded": {"$ref": "throughput_exceeded"} + } + }, + "organizations": { + "__default__": { + "policies": { + "too_many_requests": { + "applies_when": { + "response": { + "service_error_code": "TooManyRequestsException", + "http_status_code": 400 + } + } + } + } + } + }, + "dynamodb": { + "__default__": { + "max_attempts": 10, + "delay": { + "type": "exponential", + "base": 0.05, + "growth_factor": 2 + }, + "policies": { + 
"still_processing": { + "applies_when": { + "response": { + "service_error_code": "TransactionInProgressException", + "http_status_code": 400 + } + } + }, + "crc32": { + "applies_when": { + "response": { + "crc32body": "x-amz-crc32" + } + } + } + } + } + }, + "ec2": { + "__default__": { + "policies": { + "request_limit_exceeded": { + "applies_when": { + "response": { + "service_error_code": "RequestLimitExceeded", + "http_status_code": 503 + } + } + }, + "ec2_throttled_exception": { + "applies_when": { + "response": { + "service_error_code": "EC2ThrottledException", + "http_status_code": 503 + } + } + } + } + } + }, + "cloudsearch": { + "__default__": { + "policies": { + "request_limit_exceeded": { + "applies_when": { + "response": { + "service_error_code": "BandwidthLimitExceeded", + "http_status_code": 509 + } + } + } + } + } + }, + "kinesis": { + "__default__": { + "policies": { + "request_limit_exceeded": { + "applies_when": { + "response": { + "service_error_code": "LimitExceededException", + "http_status_code": 400 + } + } + } + } + } + }, + "sqs": { + "__default__": { + "policies": { + "request_limit_exceeded": { + "applies_when": { + "response": { + "service_error_code": "RequestThrottled", + "http_status_code": 403 + } + } + } + } + } + }, + "s3": { + "__default__": { + "policies": { + "timeouts": { + "applies_when": { + "response": { + "http_status_code": 400, + "service_error_code": "RequestTimeout" + } + } + }, + "contentmd5": { + "applies_when": { + "response": { + "http_status_code": 400, + "service_error_code": "BadDigest" + } + } + } + } + } + }, + "glacier": { + "__default__": { + "policies": { + "timeouts": { + "applies_when": { + "response": { + "http_status_code": 408, + "service_error_code": "RequestTimeoutException" + } + } + } + } + } + }, + "route53": { + "__default__": { + "policies": { + "request_limit_exceeded": { + "applies_when": { + "response": { + "service_error_code": "Throttling", + "http_status_code": 400 + } + } + }, + "still_processing": { + "applies_when": { + "response": { + "service_error_code": "PriorRequestNotComplete", + "http_status_code": 400 + } + } + } + } + } + }, + "sts": { + "__default__": { + "policies": { + "idp_unreachable_error": { + "applies_when": { + "response": { + "service_error_code": "IDPCommunicationError", + "http_status_code": 400 + } + } + } + } + } + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..5e75ec80cb62d43cdd9680765eb1ae77eda9de0d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/account/2021-02-01/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListRegions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Regions" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/examples-1.json 
b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..2e2e4f9ae48783555da7be60ce4ff84bb56f0d55 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListCertificates": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxItems", + "result_key": "CertificateSummaryList" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..1fba453d0c17be467dc2a16ad4f9c09fed017104 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/acm/2015-12-08/waiters-2.json @@ -0,0 +1,35 @@ +{ + "version": 2, + "waiters": { + "CertificateValidated": { + "delay": 60, + "maxAttempts": 40, + "operation": "DescribeCertificate", + "acceptors": [ + { + "matcher": "pathAll", + "expected": "SUCCESS", + "argument": "Certificate.DomainValidationOptions[].ValidationStatus", + "state": "success" + }, + { + "matcher": "pathAny", + "expected": "PENDING_VALIDATION", + "argument": "Certificate.DomainValidationOptions[].ValidationStatus", + "state": "retry" + }, + { + "matcher": "path", + "expected": "FAILED", + "argument": "Certificate.Status", + "state": "failure" + }, + { + "matcher": "error", + "expected": "ResourceNotFoundException", + "state": "failure" + } + ] + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..c1bf32abe252af5e7dd54151627d565e4284e873 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/paginators-1.json @@ -0,0 +1,22 @@ +{ + "pagination": { + "ListWorkspaces": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "workspaces" + }, + "ListRuleGroupsNamespaces": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "ruleGroupsNamespaces" + }, + "ListScrapers": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "scrapers" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/waiters-2.json new file mode 100644 index 
0000000000000000000000000000000000000000..93d8cd640adeb1056e647b2d2991b7e150f3b8df --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/amp/2020-08-01/waiters-2.json @@ -0,0 +1,76 @@ +{ + "version" : 2, + "waiters" : { + "ScraperActive" : { + "description" : "Wait until a scraper reaches ACTIVE status", + "delay" : 2, + "maxAttempts" : 60, + "operation" : "DescribeScraper", + "acceptors" : [ { + "matcher" : "path", + "argument" : "scraper.status.statusCode", + "state" : "success", + "expected" : "ACTIVE" + }, { + "matcher" : "path", + "argument" : "scraper.status.statusCode", + "state" : "failure", + "expected" : "CREATION_FAILED" + } ] + }, + "ScraperDeleted" : { + "description" : "Wait until a scraper reaches DELETED status", + "delay" : 2, + "maxAttempts" : 60, + "operation" : "DescribeScraper", + "acceptors" : [ { + "matcher" : "error", + "state" : "success", + "expected" : "ResourceNotFoundException" + }, { + "matcher" : "path", + "argument" : "scraper.status.statusCode", + "state" : "failure", + "expected" : "DELETION_FAILED" + } ] + }, + "WorkspaceActive" : { + "description" : "Wait until a workspace reaches ACTIVE status", + "delay" : 2, + "maxAttempts" : 60, + "operation" : "DescribeWorkspace", + "acceptors" : [ { + "matcher" : "path", + "argument" : "workspace.status.statusCode", + "state" : "success", + "expected" : "ACTIVE" + }, { + "matcher" : "path", + "argument" : "workspace.status.statusCode", + "state" : "retry", + "expected" : "UPDATING" + }, { + "matcher" : "path", + "argument" : "workspace.status.statusCode", + "state" : "retry", + "expected" : "CREATING" + } ] + }, + "WorkspaceDeleted" : { + "description" : "Wait until a workspace reaches DELETED status", + "delay" : 2, + "maxAttempts" : 60, + "operation" : "DescribeWorkspace", + "acceptors" : [ { + "matcher" : "error", + "state" : "success", + "expected" : "ResourceNotFoundException" + }, { + "matcher" : "path", + "argument" : "workspace.status.statusCode", + "state" : "retry", + "expected" : "DELETING" + } ] + } + } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..f84208e94728c81c745ece3c87c87bffffffc371 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/amplify/2017-07-25/paginators-1.json @@ -0,0 +1,28 @@ +{ + "pagination": { + "ListApps": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "apps" + }, + "ListBranches": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "branches" + }, + "ListDomainAssociations": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "domainAssociations" + }, + "ListJobs": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "jobSummaries" + } + } +} diff 
--git a/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..2a875c551e8792022e65bcf9b7835422d9db3946 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apigateway/2015-07-09/paginators-1.json @@ -0,0 +1,117 @@ +{ + "pagination": { + "GetApiKeys": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetBasePathMappings": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetClientCertificates": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetDeployments": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetDomainNames": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetModels": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetResources": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetRestApis": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetUsage": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items", + "non_aggregate_keys": [ + "usagePlanId", + "startDate", + "endDate" + ] + }, + "GetUsagePlans": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetUsagePlanKeys": { + "input_token": "position", + "output_token": "position", + "limit_key": "limit", + "result_key": "items" + }, + "GetVpcLinks": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetAuthorizers": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetDocumentationParts": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetDocumentationVersions": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetGatewayResponses": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetRequestValidators": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + }, + "GetSdkTypes": { + "input_token": "position", + "limit_key": "limit", + "output_token": "position", + "result_key": "items" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json 
b/venv/lib/python3.10/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apigatewaymanagementapi/2018-11-29/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..664e05eff098bae1d70d512201bcab268ab0d87d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/examples-1.json @@ -0,0 +1,720 @@ +{ + "version": "1.0", + "examples": { + "CreateApplication": [ + { + "input": { + "Description": "An application used for creating an example.", + "Name": "example-application" + }, + "output": { + "Description": "An application used for creating an example.", + "Id": "339ohji", + "Name": "example-application" + }, + "comments": { + }, + "description": "The following create-application example creates an application in AWS AppConfig.", + "id": "to-create-an-application-1632264511615", + "title": "To create an application" + } + ], + "CreateConfigurationProfile": [ + { + "input": { + "ApplicationId": "339ohji", + "LocationUri": "ssm-parameter://Example-Parameter", + "Name": "Example-Configuration-Profile", + "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" + }, + "output": { + "ApplicationId": "339ohji", + "Id": "ur8hx2f", + "LocationUri": "ssm-parameter://Example-Parameter", + "Name": "Example-Configuration-Profile", + "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" + }, + "comments": { + }, + "description": "The following create-configuration-profile example creates a configuration profile using a configuration stored in Parameter Store, a capability of Systems Manager.", + "id": "to-create-a-configuration-profile-1632264580336", + "title": "To create a configuration profile" + } + ], + "CreateDeploymentStrategy": [ + { + "input": { + "DeploymentDurationInMinutes": 15, + "GrowthFactor": 25, + "Name": "Example-Deployment", + "ReplicateTo": "SSM_DOCUMENT" + }, + "output": { + "DeploymentDurationInMinutes": 15, + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "Id": "1225qzk", + "Name": "Example-Deployment", + "ReplicateTo": "SSM_DOCUMENT" + }, + "comments": { + }, + "description": "The following create-deployment-strategy example creates a deployment strategy called Example-Deployment that takes 15 minutes and deploys the configuration to 25% of the application at a time. 
The strategy is also copied to an SSM Document.", + "id": "to-create-a-deployment-strategy-1632264783812", + "title": "To create a deployment strategy" + } + ], + "CreateEnvironment": [ + { + "input": { + "ApplicationId": "339ohji", + "Name": "Example-Environment" + }, + "output": { + "ApplicationId": "339ohji", + "Id": "54j1r29", + "Name": "Example-Environment", + "State": "READY_FOR_DEPLOYMENT" + }, + "comments": { + }, + "description": "The following create-environment example creates an AWS AppConfig environment named Example-Environment using the application you created using create-application", + "id": "to-create-an-environment-1632265124975", + "title": "To create an environment" + } + ], + "CreateHostedConfigurationVersion": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "Content": "eyAiTmFtZSI6ICJFeGFtcGxlQXBwbGljYXRpb24iLCAiSWQiOiBFeGFtcGxlSUQsICJSYW5rIjogNyB9", + "ContentType": "text", + "LatestVersionNumber": 1 + }, + "output": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "ContentType": "text", + "VersionNumber": 1 + }, + "comments": { + }, + "description": "The following create-hosted-configuration-version example creates a new configuration in the AWS AppConfig configuration store.", + "id": "to-create-a-hosted-configuration-version-1632265196980", + "title": "To create a hosted configuration version" + } + ], + "DeleteApplication": [ + { + "input": { + "ApplicationId": "339ohji" + }, + "comments": { + }, + "description": "The following delete-application example deletes the specified application. \n", + "id": "to-delete-an-application-1632265343951", + "title": "To delete an application" + } + ], + "DeleteConfigurationProfile": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f" + }, + "comments": { + }, + "description": "The following delete-configuration-profile example deletes the specified configuration profile.", + "id": "to-delete-a-configuration-profile-1632265401308", + "title": "To delete a configuration profile" + } + ], + "DeleteDeploymentStrategy": [ + { + "input": { + "DeploymentStrategyId": "1225qzk" + }, + "comments": { + }, + "description": "The following delete-deployment-strategy example deletes the specified deployment strategy.", + "id": "to-delete-a-deployment-strategy-1632265473708", + "title": "To delete a deployment strategy" + } + ], + "DeleteEnvironment": [ + { + "input": { + "ApplicationId": "339ohji", + "EnvironmentId": "54j1r29" + }, + "comments": { + }, + "description": "The following delete-environment example deletes the specified application environment.", + "id": "to-delete-an-environment-1632265641044", + "title": "To delete an environment" + } + ], + "DeleteHostedConfigurationVersion": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "VersionNumber": 1 + }, + "comments": { + }, + "description": "The following delete-hosted-configuration-version example deletes a configuration version hosted in the AWS AppConfig configuration store.", + "id": "to-delete-a-hosted-configuration-version-1632265720740", + "title": "To delete a hosted configuration version" + } + ], + "GetApplication": [ + { + "input": { + "ApplicationId": "339ohji" + }, + "output": { + "Id": "339ohji", + "Name": "example-application" + }, + "comments": { + }, + "description": "The following get-application example lists the details of the specified application.", + "id": "to-list-details-of-an-application-1632265864702", + "title": 
"To list details of an application" + } + ], + "GetConfiguration": [ + { + "input": { + "Application": "example-application", + "ClientId": "example-id", + "Configuration": "Example-Configuration-Profile", + "Environment": "Example-Environment" + }, + "output": { + "ConfigurationVersion": "1", + "ContentType": "application/octet-stream" + }, + "comments": { + }, + "description": "The following get-configuration example returns the configuration details of the example application. On subsequent calls to get-configuration, use the client-configuration-version parameter to only update the configuration of your application if the version has changed. Only updating the configuration when the version has changed avoids excess charges incurred by calling get-configuration.", + "id": "to-retrieve-configuration-details-1632265954314", + "title": "To retrieve configuration details" + } + ], + "GetConfigurationProfile": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f" + }, + "output": { + "ApplicationId": "339ohji", + "Id": "ur8hx2f", + "LocationUri": "ssm-parameter://Example-Parameter", + "Name": "Example-Configuration-Profile", + "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" + }, + "comments": { + }, + "description": "The following get-configuration-profile example returns the details of the specified configuration profile.", + "id": "to-retrieve-configuration-profile-details-1632266081013", + "title": "To retrieve configuration profile details" + } + ], + "GetDeployment": [ + { + "input": { + "ApplicationId": "339ohji", + "DeploymentNumber": 1, + "EnvironmentId": "54j1r29" + }, + "output": { + "ApplicationId": "339ohji", + "CompletedAt": "2021-09-17T21:59:03.888000+00:00", + "ConfigurationLocationUri": "ssm-parameter://Example-Parameter", + "ConfigurationName": "Example-Configuration-Profile", + "ConfigurationProfileId": "ur8hx2f", + "ConfigurationVersion": "1", + "DeploymentDurationInMinutes": 15, + "DeploymentNumber": 1, + "DeploymentStrategyId": "1225qzk", + "EnvironmentId": "54j1r29", + "EventLog": [ + { + "Description": "Deployment completed", + "EventType": "DEPLOYMENT_COMPLETED", + "OccurredAt": "2021-09-17T21:59:03.888000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Deployment bake time started", + "EventType": "BAKE_TIME_STARTED", + "OccurredAt": "2021-09-17T21:58:57.722000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Configuration available to 100.00% of clients", + "EventType": "PERCENTAGE_UPDATED", + "OccurredAt": "2021-09-17T21:55:56.816000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Configuration available to 75.00% of clients", + "EventType": "PERCENTAGE_UPDATED", + "OccurredAt": "2021-09-17T21:52:56.567000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Configuration available to 50.00% of clients", + "EventType": "PERCENTAGE_UPDATED", + "OccurredAt": "2021-09-17T21:49:55.737000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Configuration available to 25.00% of clients", + "EventType": "PERCENTAGE_UPDATED", + "OccurredAt": "2021-09-17T21:46:55.187000+00:00", + "TriggeredBy": "APPCONFIG" + }, + { + "Description": "Deployment started", + "EventType": "DEPLOYMENT_STARTED", + "OccurredAt": "2021-09-17T21:43:54.205000+00:00", + "TriggeredBy": "USER" + } + ], + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "PercentageComplete": 100, + "StartedAt": "2021-09-17T21:43:54.205000+00:00", + "State": 
"COMPLETE" + }, + "comments": { + }, + "description": "The following get-deployment example lists details of the deployment to the application in the specified environment and deployment.", + "id": "to-retrieve-deployment-details-1633976766883", + "title": "To retrieve deployment details" + } + ], + "GetDeploymentStrategy": [ + { + "input": { + "DeploymentStrategyId": "1225qzk" + }, + "output": { + "DeploymentDurationInMinutes": 15, + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "Id": "1225qzk", + "Name": "Example-Deployment", + "ReplicateTo": "SSM_DOCUMENT" + }, + "comments": { + }, + "description": "The following get-deployment-strategy example lists the details of the specified deployment strategy.", + "id": "to-retrieve-details-of-a-deployment-strategy-1632266385805", + "title": "To retrieve details of a deployment strategy" + } + ], + "GetEnvironment": [ + { + "input": { + "ApplicationId": "339ohji", + "EnvironmentId": "54j1r29" + }, + "output": { + "ApplicationId": "339ohji", + "Id": "54j1r29", + "Name": "Example-Environment", + "State": "READY_FOR_DEPLOYMENT" + }, + "comments": { + }, + "description": "The following get-environment example returns the details and state of the specified environment.", + "id": "to-retrieve-environment-details-1632266924806", + "title": "To retrieve environment details" + } + ], + "GetHostedConfigurationVersion": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "VersionNumber": 1 + }, + "output": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "ContentType": "application/json", + "VersionNumber": 1 + }, + "comments": { + }, + "description": "The following get-hosted-configuration-version example retrieves the configuration details of the AWS AppConfig hosted configuration.", + "id": "to-retrieve-hosted-configuration-details-1632267003527", + "title": "To retrieve hosted configuration details" + } + ], + "ListApplications": [ + { + "input": { + }, + "output": { + "Items": [ + { + "Description": "An application used for creating an example.", + "Id": "339ohji", + "Name": "test-application" + }, + { + "Id": "rwalwu7", + "Name": "Test-Application" + } + ] + }, + "comments": { + }, + "description": "The following list-applications example lists the available applications in your AWS account.", + "id": "to-list-the-available-applications-1632267111131", + "title": "To list the available applications" + } + ], + "ListConfigurationProfiles": [ + { + "input": { + "ApplicationId": "339ohji" + }, + "output": { + "Items": [ + { + "ApplicationId": "339ohji", + "Id": "ur8hx2f", + "LocationUri": "ssm-parameter://Example-Parameter", + "Name": "Example-Configuration-Profile" + } + ] + }, + "comments": { + }, + "description": "The following list-configuration-profiles example lists the available configuration profiles for the specified application.", + "id": "to-list-the-available-configuration-profiles-1632267193265", + "title": "To list the available configuration profiles" + } + ], + "ListDeploymentStrategies": [ + { + "input": { + }, + "output": { + "Items": [ + { + "DeploymentDurationInMinutes": 15, + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "Id": "1225qzk", + "Name": "Example-Deployment", + "ReplicateTo": "SSM_DOCUMENT" + } + ] + }, + "comments": { + }, + "description": "The following list-deployment-strategies example lists the available deployment strategies in your AWS account.", + "id": 
"to-list-the-available-deployment-strategies-1632267364180", + "title": "To list the available deployment strategies" + } + ], + "ListDeployments": [ + { + "input": { + "ApplicationId": "339ohji", + "EnvironmentId": "54j1r29" + }, + "output": { + "Items": [ + { + "CompletedAt": "2021-09-17T21:59:03.888000+00:00", + "ConfigurationName": "Example-Configuration-Profile", + "ConfigurationVersion": "1", + "DeploymentDurationInMinutes": 15, + "DeploymentNumber": 1, + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "PercentageComplete": 100, + "StartedAt": "2021-09-17T21:43:54.205000+00:00", + "State": "COMPLETE" + } + ] + }, + "comments": { + }, + "description": "The following list-deployments example lists the available deployments in your AWS account for the specified application and environment.", + "id": "to-list-the-available-deployments-1632267282025", + "title": "To list the available deployments" + } + ], + "ListEnvironments": [ + { + "input": { + "ApplicationId": "339ohji" + }, + "output": { + "Items": [ + { + "ApplicationId": "339ohji", + "Id": "54j1r29", + "Name": "Example-Environment", + "State": "READY_FOR_DEPLOYMENT" + } + ] + }, + "comments": { + }, + "description": "The following list-environments example lists the available environments in your AWS account for the specified application.", + "id": "to-list-the-available-environments-1632267474389", + "title": "To list the available environments" + } + ], + "ListHostedConfigurationVersions": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f" + }, + "output": { + "Items": [ + { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "ContentType": "application/json", + "VersionNumber": 1 + } + ] + }, + "comments": { + }, + "description": "The following list-hosted-configuration-versions example lists the configurations versions hosted in the AWS AppConfig hosted configuration store for the specified application and configuration profile.", + "id": "to-list-the-available-hosted-configuration-versions-1632267647667", + "title": "To list the available hosted configuration versions" + } + ], + "ListTagsForResource": [ + { + "input": { + "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji" + }, + "output": { + "Tags": { + "group1": "1" + } + }, + "comments": { + }, + "description": "The following list-tags-for-resource example lists the tags of a specified application.", + "id": "to-list-the-tags-of-an-application-1632328796560", + "title": "To list the tags of an application" + } + ], + "StartDeployment": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "ConfigurationVersion": "1", + "DeploymentStrategyId": "1225qzk", + "Description": "", + "EnvironmentId": "54j1r29", + "Tags": { + } + }, + "output": { + "ApplicationId": "339ohji", + "ConfigurationLocationUri": "ssm-parameter://Example-Parameter", + "ConfigurationName": "Example-Configuration-Profile", + "ConfigurationProfileId": "ur8hx2f", + "ConfigurationVersion": "1", + "DeploymentDurationInMinutes": 15, + "DeploymentNumber": 1, + "DeploymentStrategyId": "1225qzk", + "EnvironmentId": "54j1r29", + "EventLog": [ + { + "Description": "Deployment started", + "EventType": "DEPLOYMENT_STARTED", + "OccurredAt": "2021-09-17T21:43:54.205000+00:00", + "TriggeredBy": "USER" + } + ], + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "PercentageComplete": 1.0, + "StartedAt": "2021-09-17T21:43:54.205000+00:00", + 
"State": "DEPLOYING" + }, + "comments": { + }, + "description": "The following start-deployment example starts a deployment to the application using the specified environment, deployment strategy, and configuration profile.", + "id": "to-start-a-configuration-deployment-1632328956790", + "title": "To start a configuration deployment" + } + ], + "StopDeployment": [ + { + "input": { + "ApplicationId": "339ohji", + "DeploymentNumber": 2, + "EnvironmentId": "54j1r29" + }, + "output": { + "DeploymentDurationInMinutes": 15, + "DeploymentNumber": 2, + "FinalBakeTimeInMinutes": 0, + "GrowthFactor": 25.0, + "PercentageComplete": 1.0 + }, + "comments": { + }, + "description": "The following stop-deployment example stops the deployment of an application configuration to the specified environment.", + "id": "to-stop-configuration-deployment-1632329139126", + "title": "To stop configuration deployment" + } + ], + "TagResource": [ + { + "input": { + "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji", + "Tags": { + "group1": "1" + } + }, + "comments": { + }, + "description": "The following tag-resource example tags an application resource.", + "id": "to-tag-an-application-1632330350645", + "title": "To tag an application" + } + ], + "UntagResource": [ + { + "input": { + "ResourceArn": "arn:aws:appconfig:us-east-1:111122223333:application/339ohji", + "TagKeys": [ + "group1" + ] + }, + "comments": { + }, + "description": "The following untag-resource example removes the group1 tag from the specified application.", + "id": "to-remove-a-tag-from-an-application-1632330429881", + "title": "To remove a tag from an application" + } + ], + "UpdateApplication": [ + { + "input": { + "ApplicationId": "339ohji", + "Description": "", + "Name": "Example-Application" + }, + "output": { + "Description": "An application used for creating an example.", + "Id": "339ohji", + "Name": "Example-Application" + }, + "comments": { + }, + "description": "The following update-application example updates the name of the specified application.", + "id": "to-update-an-application-1632330585893", + "title": "To update an application" + } + ], + "UpdateConfigurationProfile": [ + { + "input": { + "ApplicationId": "339ohji", + "ConfigurationProfileId": "ur8hx2f", + "Description": "Configuration profile used for examples." + }, + "output": { + "ApplicationId": "339ohji", + "Description": "Configuration profile used for examples.", + "Id": "ur8hx2f", + "LocationUri": "ssm-parameter://Example-Parameter", + "Name": "Example-Configuration-Profile", + "RetrievalRoleArn": "arn:aws:iam::111122223333:role/Example-App-Config-Role" + }, + "comments": { + }, + "description": "The following update-configuration-profile example updates the description of the specified configuration profile.", + "id": "to-update-a-configuration-profile-1632330721974", + "title": "To update a configuration profile" + } + ], + "UpdateDeploymentStrategy": [ + { + "input": { + "DeploymentStrategyId": "1225qzk", + "FinalBakeTimeInMinutes": 20 + }, + "output": { + "DeploymentDurationInMinutes": 15, + "FinalBakeTimeInMinutes": 20, + "GrowthFactor": 25, + "GrowthType": "LINEAR", + "Id": "1225qzk", + "Name": "Example-Deployment", + "ReplicateTo": "SSM_DOCUMENT" + }, + "comments": { + }, + "description": "The following update-deployment-strategy example updates final bake time to 20 minutes in the specified deployment strategy. 
::\n", + "id": "to-update-a-deployment-strategy-1632330896602", + "title": "To update a deployment strategy" + } + ], + "UpdateEnvironment": [ + { + "input": { + "ApplicationId": "339ohji", + "Description": "An environment for examples.", + "EnvironmentId": "54j1r29" + }, + "output": { + "ApplicationId": "339ohji", + "Description": "An environment for examples.", + "Id": "54j1r29", + "Name": "Example-Environment", + "State": "ROLLED_BACK" + }, + "comments": { + }, + "description": "The following update-environment example updates an environment's description.", + "id": "to-update-an-environment-1632331382428", + "title": "To update an environment" + } + ], + "ValidateConfiguration": [ + { + "input": { + "ApplicationId": "abc1234", + "ConfigurationProfileId": "ur8hx2f", + "ConfigurationVersion": "1" + }, + "comments": { + }, + "description": "The following validate-configuration example uses the validators in a configuration profile to validate a configuration.", + "id": "to-validate-a-configuration-1632331491365", + "title": "To validate a configuration" + } + ] + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..f176babae353275fff4b04888a03239323faaf62 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appconfig/2019-10-09/paginators-1.json @@ -0,0 +1,52 @@ +{ + "pagination": { + "ListApplications": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListConfigurationProfiles": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListDeploymentStrategies": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListDeployments": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListEnvironments": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListExtensionAssociations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListExtensions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + }, + "ListHostedConfigurationVersions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/data/appconfigdata/2021-11-11/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appflow/2020-08-23/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/application-insights/2018-11-25/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..f549f15d7f1b89e171d7517e6fd2da4621c58082 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.json @@ -0,0 +1,34 @@ +{ + "pagination": { + "ListServiceDependencies": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ServiceDependencies" + }, + "ListServiceDependents": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ServiceDependents" + }, + "ListServiceLevelObjectives": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "SloSummaries" + }, + "ListServiceOperations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ServiceOperations" + }, + "ListServices": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ServiceSummaries" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json b/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json new file mode 100644 index 
0000000000000000000000000000000000000000..d6fcd51478dd47f902aed040472d4e59ce9ca643 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/application-signals/2024-04-15/paginators-1.sdk-extras.json @@ -0,0 +1,31 @@ +{ + "version": 1.0, + "merge": { + "pagination": { + "ListServiceDependencies": { + "non_aggregate_keys": [ + "StartTime", + "EndTime" + ] + }, + "ListServiceDependents": { + "non_aggregate_keys": [ + "StartTime", + "EndTime" + ] + }, + "ListServiceOperations": { + "non_aggregate_keys": [ + "StartTime", + "EndTime" + ] + }, + "ListServices": { + "non_aggregate_keys": [ + "StartTime", + "EndTime" + ] + } + } + } + } diff --git a/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..752e89e032ee0f8e1efd4e7098313703bf934dbe --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/examples-1.json @@ -0,0 +1,4 @@ +{ + "version": "1.0", + "examples": { } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..162b8b96ddf663f1cc5e4d501e4661f55a8c9f17 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2018-10-01/paginators-1.json @@ -0,0 +1,28 @@ +{ + "pagination": { + "ListMeshes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "meshes" + }, + "ListRoutes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "routes" + }, + "ListVirtualNodes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "virtualNodes" + }, + "ListVirtualRouters": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "virtualRouters" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..5a79b5b5a3a700a1dcc2bf029ed557fd2590a737 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appmesh/2019-01-25/paginators-1.json @@ -0,0 +1,52 @@ +{ + "pagination": { + "ListMeshes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "meshes" + }, + "ListRoutes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "routes" + }, + "ListVirtualNodes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "virtualNodes" + }, + "ListVirtualRouters": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + 
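
Each entry in these paginators-1.json models tells botocore how to page one operation: input_token and output_token name the continuation token, limit_key names the page-size parameter, and result_key names the list to aggregate. A minimal sketch of consuming such an entry through boto3, using the App Mesh ListMeshes model above (credentials and an existing mesh are assumed):

import boto3

# boto3 builds this paginator from the ListMeshes entry: it forwards
# nextToken between pages and maps PageSize onto the "limit" limit_key.
appmesh = boto3.client("appmesh")
paginator = appmesh.get_paginator("list_meshes")
for page in paginator.paginate(PaginationConfig={"PageSize": 10}):
    for mesh in page["meshes"]:  # "meshes" is the result_key
        print(mesh["meshName"])
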
"result_key": "virtualRouters" + }, + "ListVirtualServices": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "virtualServices" + }, + "ListTagsForResource": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "tags" + }, + "ListGatewayRoutes": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "gatewayRoutes" + }, + "ListVirtualGateways": { + "input_token": "nextToken", + "limit_key": "limit", + "output_token": "nextToken", + "result_key": "virtualGateways" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apprunner/2020-05-15/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..40cbf4ba97e7a85792f57504a6e91c4a680ab991 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/paginators-1.json @@ -0,0 +1,60 @@ +{ + "pagination": { + "DescribeDirectoryConfigs": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "DirectoryConfigs" + }, + "DescribeFleets": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Fleets" + }, + "DescribeImageBuilders": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ImageBuilders" + }, + "DescribeImages": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Images" + }, + "DescribeSessions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "Limit", + "result_key": "Sessions" + }, + "DescribeStacks": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Stacks" + }, + "DescribeUserStackAssociations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "UserStackAssociations" + }, + "DescribeUsers": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": 
"MaxResults", + "result_key": "Users" + }, + "ListAssociatedFleets": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Names" + }, + "ListAssociatedStacks": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Names" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..1c8dea0ded53105cb2069019548c8fc776f598a9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appstream/2016-12-01/waiters-2.json @@ -0,0 +1,55 @@ +{ + "version": 2, + "waiters": { + "FleetStarted": { + "delay": 30, + "maxAttempts": 40, + "operation": "DescribeFleets", + "acceptors": [ + { + "state": "success", + "matcher": "pathAll", + "argument": "Fleets[].State", + "expected": "RUNNING" + }, + { + "state": "failure", + "matcher": "pathAny", + "argument": "Fleets[].State", + "expected": "STOPPING" + }, + { + "state": "failure", + "matcher": "pathAny", + "argument": "Fleets[].State", + "expected": "STOPPED" + } + ] + }, + "FleetStopped": { + "delay": 30, + "maxAttempts": 40, + "operation": "DescribeFleets", + "acceptors": [ + { + "state": "success", + "matcher": "pathAll", + "argument": "Fleets[].State", + "expected": "STOPPED" + }, + { + "state": "failure", + "matcher": "pathAny", + "argument": "Fleets[].State", + "expected": "STARTING" + }, + { + "state": "failure", + "matcher": "pathAny", + "argument": "Fleets[].State", + "expected": "RUNNING" + } + ] + } + } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0da53b21f4184b837792bbd316720aa128bd2cfc --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/appsync/2017-07-25/paginators-1.json @@ -0,0 +1,64 @@ +{ + "pagination": { + "ListApiKeys": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "apiKeys" + }, + "ListDataSources": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "dataSources" + }, + "ListFunctions": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "functions" + }, + "ListGraphqlApis": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "graphqlApis" + }, + "ListResolvers": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "resolvers" + }, + "ListResolversByFunction": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "resolvers" + }, + "ListTypes": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + 
"result_key": "types" + }, + "ListDomainNames": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "domainNameConfigs" + }, + "ListSourceApiAssociations": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "sourceApiAssociationSummaries" + }, + "ListTypesByAssociation": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "types" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..56c715f68df75cd119bf17395252782844f2221a --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/paginators-1.json @@ -0,0 +1,40 @@ +{ + "pagination": { + "ListTestCases": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testCases" + }, + "ListTestConfigurations": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testConfigurations" + }, + "ListTestRunSteps": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testRunSteps" + }, + "ListTestRunTestCases": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testRunTestCases" + }, + "ListTestRuns": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testRuns" + }, + "ListTestSuites": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "testSuites" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..13f60ee66be6a3d75208653e2bd7b563561adacc --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/apptest/2022-12-06/waiters-2.json @@ -0,0 +1,5 @@ +{ + "version": 2, + "waiters": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..9fcfc565410f26b59b2e85578af16510db6541b6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/arc-zonal-shift/2022-10-30/paginators-1.json @@ -0,0 +1,22 @@ +{ + "pagination": { + "ListManagedResources": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "items" + }, + "ListZonalShifts": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "items" + }, + "ListAutoshifts": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "items" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..f8c851442588998514b0f42e5b4fc89748918d4b --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListReports": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "reports" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..13f60ee66be6a3d75208653e2bd7b563561adacc --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/artifact/2018-05-10/waiters-2.json @@ -0,0 +1,5 @@ +{ + "version": 2, + "waiters": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..af6929b4232176213db8cd80882b8954d185f62d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/examples-1.json @@ -0,0 +1,1696 @@ +{ + "version": "1.0", + "examples": { + "AttachInstances": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example attaches the specified instance to the specified Auto Scaling group.", + "id": "autoscaling-attach-instances-1", + "title": "To attach an instance to an Auto Scaling group" + } + ], + "AttachLoadBalancerTargetGroups": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "TargetGroupARNs": [ + "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example attaches the specified target group to the specified Auto Scaling group.", + "id": "autoscaling-attach-load-balancer-target-groups-1", + "title": "To attach a target group to an Auto Scaling group" + } + ], + "AttachLoadBalancers": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LoadBalancerNames": [ + "my-load-balancer" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example attaches the specified load balancer to the specified Auto Scaling group.", + "id": "autoscaling-attach-load-balancers-1", + "title": "To attach a load balancer to an Auto Scaling group" + } + ], + "AttachTrafficSources": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "TrafficSources": [ + { + "Identifier": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" + } + ] + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example attaches the specified target group to the specified Auto Scaling group.", + "id": "to-attach-a-target-group-to-an-auto-scaling-group-1680036570089", + "title": "To attach a target group to an Auto Scaling group" + } + ], + "CancelInstanceRefresh": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example cancels an instance refresh operation in progress.", + "id": "to-cancel-an-instance-refresh-1592960979817", + "title": "To cancel an instance refresh" + } + 
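
The CancelInstanceRefresh example above likewise corresponds to a single call. A sketch, assuming a refresh is in progress on the placeholder group:

import boto3

# Cancels the in-progress refresh and returns its ID, matching the
# InstanceRefreshId shown in the example output.
autoscaling = boto3.client("autoscaling")
resp = autoscaling.cancel_instance_refresh(
    AutoScalingGroupName="my-auto-scaling-group"
)
print(resp["InstanceRefreshId"])
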
], + "CompleteLifecycleAction": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LifecycleActionResult": "CONTINUE", + "LifecycleActionToken": "bcd2f1b8-9a78-44d3-8a7a-4dd07d7cf635", + "LifecycleHookName": "my-lifecycle-hook" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example notifies Auto Scaling that the specified lifecycle action is complete so that it can finish launching or terminating the instance.", + "id": "autoscaling-complete-lifecycle-action-1", + "title": "To complete the lifecycle action" + } + ], + "CreateAutoScalingGroup": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "DefaultInstanceWarmup": 120, + "LaunchTemplate": { + "LaunchTemplateName": "my-template-for-auto-scaling", + "Version": "$Default" + }, + "MaxInstanceLifetime": 2592000, + "MaxSize": 3, + "MinSize": 1, + "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates an Auto Scaling group.", + "id": "autoscaling-create-auto-scaling-group-1", + "title": "To create an Auto Scaling group" + }, + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "HealthCheckGracePeriod": 300, + "HealthCheckType": "ELB", + "LaunchTemplate": { + "LaunchTemplateName": "my-template-for-auto-scaling", + "Version": "$Default" + }, + "MaxSize": 3, + "MinSize": 1, + "TargetGroupARNs": [ + "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" + ], + "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates an Auto Scaling group and attaches the specified target group.", + "id": "autoscaling-create-auto-scaling-group-2", + "title": "To create an Auto Scaling group with an attached target group" + }, + { + "input": { + "AutoScalingGroupName": "my-asg", + "DesiredCapacity": 3, + "MaxSize": 5, + "MinSize": 1, + "MixedInstancesPolicy": { + "InstancesDistribution": { + "OnDemandBaseCapacity": 1, + "OnDemandPercentageAboveBaseCapacity": 50, + "SpotAllocationStrategy": "price-capacity-optimized" + }, + "LaunchTemplate": { + "LaunchTemplateSpecification": { + "LaunchTemplateName": "my-launch-template-for-x86", + "Version": "$Default" + }, + "Overrides": [ + { + "InstanceType": "c6g.large", + "LaunchTemplateSpecification": { + "LaunchTemplateName": "my-launch-template-for-arm", + "Version": "$Default" + } + }, + { + "InstanceType": "c5.large" + }, + { + "InstanceType": "c5a.large" + } + ] + } + }, + "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates an Auto Scaling group with a mixed instances policy. 
It specifies the c5.large, c5a.large, and c6g.large instance types and defines a different launch template for the c6g.large instance type.", + "id": "autoscaling-create-auto-scaling-group-3", + "title": "To create an Auto Scaling group with a mixed instances policy" + }, + { + "input": { + "AutoScalingGroupName": "my-asg", + "DesiredCapacity": 4, + "DesiredCapacityType": "units", + "MaxSize": 100, + "MinSize": 0, + "MixedInstancesPolicy": { + "InstancesDistribution": { + "OnDemandPercentageAboveBaseCapacity": 50, + "SpotAllocationStrategy": "price-capacity-optimized" + }, + "LaunchTemplate": { + "LaunchTemplateSpecification": { + "LaunchTemplateName": "my-template-for-auto-scaling", + "Version": "$Default" + }, + "Overrides": [ + { + "InstanceRequirements": { + "CpuManufacturers": [ + "intel" + ], + "MemoryMiB": { + "Min": 16384 + }, + "VCpuCount": { + "Max": 8, + "Min": 4 + } + } + } + ] + } + }, + "VPCZoneIdentifier": "subnet-057fa0918fEXAMPLE, subnet-610acd08EXAMPLE" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates an Auto Scaling group using attribute-based instance type selection. It requires the instance types to have a minimum of four vCPUs and a maximum of eight vCPUs, a minimum of 16,384 MiB of memory, and an Intel manufactured CPU.", + "id": "autoscaling-create-auto-scaling-group-4", + "title": "To create an Auto Scaling group using attribute-based instance type selection" + } + ], + "CreateLaunchConfiguration": [ + { + "input": { + "IamInstanceProfile": "my-iam-role", + "ImageId": "ami-12345678", + "InstanceType": "m3.medium", + "LaunchConfigurationName": "my-launch-config", + "SecurityGroups": [ + "sg-eb2af88e" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a launch configuration.", + "id": "autoscaling-create-launch-configuration-1", + "title": "To create a launch configuration" + } + ], + "CreateOrUpdateTags": [ + { + "input": { + "Tags": [ + { + "Key": "Role", + "PropagateAtLaunch": true, + "ResourceId": "my-auto-scaling-group", + "ResourceType": "auto-scaling-group", + "Value": "WebServer" + }, + { + "Key": "Dept", + "PropagateAtLaunch": true, + "ResourceId": "my-auto-scaling-group", + "ResourceType": "auto-scaling-group", + "Value": "Research" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example adds two tags to the specified Auto Scaling group.", + "id": "autoscaling-create-or-update-tags-1", + "title": "To create or update tags for an Auto Scaling group" + } + ], + "DeleteAutoScalingGroup": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified Auto Scaling group.", + "id": "autoscaling-delete-auto-scaling-group-1", + "title": "To delete an Auto Scaling group" + }, + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "ForceDelete": true + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified Auto Scaling group and all its instances.", + "id": "autoscaling-delete-auto-scaling-group-2", + "title": "To delete an Auto Scaling group and all its instances" + } + ], + "DeleteLaunchConfiguration": [ + { + "input": { + "LaunchConfigurationName": "my-launch-config" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified launch configuration.", + "id": 
"autoscaling-delete-launch-configuration-1", + "title": "To delete a launch configuration" + } + ], + "DeleteLifecycleHook": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LifecycleHookName": "my-lifecycle-hook" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified lifecycle hook.", + "id": "autoscaling-delete-lifecycle-hook-1", + "title": "To delete a lifecycle hook" + } + ], + "DeleteNotificationConfiguration": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified notification from the specified Auto Scaling group.", + "id": "autoscaling-delete-notification-configuration-1", + "title": "To delete an Auto Scaling notification" + } + ], + "DeletePolicy": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "PolicyName": "my-step-scale-out-policy" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified Auto Scaling policy.", + "id": "autoscaling-delete-policy-1", + "title": "To delete an Auto Scaling policy" + } + ], + "DeleteScheduledAction": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "ScheduledActionName": "my-scheduled-action" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified scheduled action from the specified Auto Scaling group.", + "id": "autoscaling-delete-scheduled-action-1", + "title": "To delete a scheduled action from an Auto Scaling group" + } + ], + "DeleteTags": [ + { + "input": { + "Tags": [ + { + "Key": "Dept", + "ResourceId": "my-auto-scaling-group", + "ResourceType": "auto-scaling-group", + "Value": "Research" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the specified tag from the specified Auto Scaling group.", + "id": "autoscaling-delete-tags-1", + "title": "To delete a tag from an Auto Scaling group" + } + ], + "DescribeAccountLimits": [ + { + "output": { + "MaxNumberOfAutoScalingGroups": 20, + "MaxNumberOfLaunchConfigurations": 100, + "NumberOfAutoScalingGroups": 3, + "NumberOfLaunchConfigurations": 5 + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the Amazon EC2 Auto Scaling service quotas for your account.", + "id": "autoscaling-describe-account-limits-1", + "title": "To describe your Auto Scaling account limits" + } + ], + "DescribeAdjustmentTypes": [ + { + "output": { + "AdjustmentTypes": [ + { + "AdjustmentType": "ChangeInCapacity" + }, + { + "AdjustmentType": "ExactCapcity" + }, + { + "AdjustmentType": "PercentChangeInCapacity" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the available adjustment types.", + "id": "autoscaling-describe-adjustment-types-1", + "title": "To describe the Amazon EC2 Auto Scaling adjustment types" + } + ], + "DescribeAutoScalingGroups": [ + { + "input": { + "AutoScalingGroupNames": [ + "my-auto-scaling-group" + ] + }, + "output": { + "AutoScalingGroups": [ + { + "AutoScalingGroupARN": "arn:aws:autoscaling:us-west-1:123456789012:autoScalingGroup:12345678-1234-1234-1234-123456789012:autoScalingGroupName/my-auto-scaling-group", + "AutoScalingGroupName": "my-auto-scaling-group", + "AvailabilityZones": [ + 
"us-west-2a", + "us-west-2b", + "us-west-2c" + ], + "CreatedTime": "2023-03-09T22:15:11.611Z", + "DefaultCooldown": 300, + "DesiredCapacity": 2, + "EnabledMetrics": [ + + ], + "HealthCheckGracePeriod": 300, + "HealthCheckType": "EC2", + "Instances": [ + { + "AvailabilityZone": "us-west-2c", + "HealthStatus": "Healthy", + "InstanceId": "i-05b4f7d5be44822a6", + "InstanceType": "t3.micro", + "LaunchConfigurationName": "my-launch-config", + "LifecycleState": "InService", + "ProtectedFromScaleIn": false + }, + { + "AvailabilityZone": "us-west-2b", + "HealthStatus": "Healthy", + "InstanceId": "i-0c20ac468fa3049e8", + "InstanceType": "t3.micro", + "LaunchConfigurationName": "my-launch-config", + "LifecycleState": "InService", + "ProtectedFromScaleIn": false + } + ], + "LaunchConfigurationName": "my-launch-config", + "LoadBalancerNames": [ + + ], + "MaxSize": 5, + "MinSize": 1, + "NewInstancesProtectedFromScaleIn": false, + "ServiceLinkedRoleARN": "arn:aws:iam::123456789012:role/aws-service-role/autoscaling.amazonaws.com/AWSServiceRoleForAutoScaling", + "SuspendedProcesses": [ + + ], + "Tags": [ + + ], + "TargetGroupARNs": [ + + ], + "TerminationPolicies": [ + "Default" + ], + "TrafficSources": [ + + ], + "VPCZoneIdentifier": "subnet-5ea0c127,subnet-6194ea3b,subnet-c934b782" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the specified Auto Scaling group.", + "id": "autoscaling-describe-auto-scaling-groups-1", + "title": "To describe an Auto Scaling group" + } + ], + "DescribeAutoScalingInstances": [ + { + "input": { + "InstanceIds": [ + "i-05b4f7d5be44822a6" + ] + }, + "output": { + "AutoScalingInstances": [ + { + "AutoScalingGroupName": "my-auto-scaling-group", + "AvailabilityZone": "us-west-2c", + "HealthStatus": "HEALTHY", + "InstanceId": "i-05b4f7d5be44822a6", + "InstanceType": "t3.micro", + "LaunchConfigurationName": "my-launch-config", + "LifecycleState": "InService", + "ProtectedFromScaleIn": false + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the specified Auto Scaling instance.", + "id": "autoscaling-describe-auto-scaling-instances-1", + "title": "To describe one or more Auto Scaling instances" + } + ], + "DescribeAutoScalingNotificationTypes": [ + { + "output": { + "AutoScalingNotificationTypes": [ + "autoscaling:EC2_INSTANCE_LAUNCH", + "autoscaling:EC2_INSTANCE_LAUNCH_ERROR", + "autoscaling:EC2_INSTANCE_TERMINATE", + "autoscaling:EC2_INSTANCE_TERMINATE_ERROR", + "autoscaling:TEST_NOTIFICATION" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the available notification types.", + "id": "autoscaling-describe-auto-scaling-notification-types-1", + "title": "To describe the Auto Scaling notification types" + } + ], + "DescribeInstanceRefreshes": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "InstanceRefreshes": [ + { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b", + "InstancesToUpdate": 0, + "PercentageComplete": 50, + "Preferences": { + "AlarmSpecification": { + "Alarms": [ + "my-alarm" + ] + }, + "AutoRollback": true, + "InstanceWarmup": 200, + "MinHealthyPercentage": 90, + "ScaleInProtectedInstances": "Ignore", + "SkipMatching": false, + "StandbyInstances": "Ignore" + }, + "StartTime": "2023-06-13T16:46:52+00:00", + "Status": "InProgress", + "StatusReason": "Waiting for instances to warm up before 
continuing. For example: i-0645704820a8e83ff is warming up." + }, + { + "AutoScalingGroupName": "my-auto-scaling-group", + "EndTime": "2023-06-02T13:59:45+00:00", + "InstanceRefreshId": "0e151305-1e57-4a32-a256-1fd14157c5ec", + "InstancesToUpdate": 0, + "PercentageComplete": 100, + "Preferences": { + "AlarmSpecification": { + "Alarms": [ + "my-alarm" + ] + }, + "AutoRollback": true, + "InstanceWarmup": 200, + "MinHealthyPercentage": 90, + "ScaleInProtectedInstances": "Ignore", + "SkipMatching": false, + "StandbyInstances": "Ignore" + }, + "StartTime": "2023-06-02T13:53:37+00:00", + "Status": "Successful" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the instance refreshes for the specified Auto Scaling group.", + "id": "to-list-instance-refreshes-1592959593746", + "title": "To list instance refreshes" + } + ], + "DescribeLaunchConfigurations": [ + { + "input": { + "LaunchConfigurationNames": [ + "my-launch-config" + ] + }, + "output": { + "LaunchConfigurations": [ + { + "AssociatePublicIpAddress": true, + "BlockDeviceMappings": [ + + ], + "CreatedTime": "2014-05-07T17:39:28.599Z", + "EbsOptimized": false, + "ImageId": "ami-043a5034", + "InstanceMonitoring": { + "Enabled": true + }, + "InstanceType": "t1.micro", + "LaunchConfigurationARN": "arn:aws:autoscaling:us-west-2:123456789012:launchConfiguration:98d3b196-4cf9-4e88-8ca1-8547c24ced8b:launchConfigurationName/my-launch-config", + "LaunchConfigurationName": "my-launch-config", + "SecurityGroups": [ + "sg-67ef0308" + ] + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the specified launch configuration.", + "id": "autoscaling-describe-launch-configurations-1", + "title": "To describe Auto Scaling launch configurations" + } + ], + "DescribeLifecycleHookTypes": [ + { + "output": { + "LifecycleHookTypes": [ + "autoscaling:EC2_INSTANCE_LAUNCHING", + "autoscaling:EC2_INSTANCE_TERMINATING" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the available lifecycle hook types.", + "id": "autoscaling-describe-lifecycle-hook-types-1", + "title": "To describe the available types of lifecycle hooks" + } + ], + "DescribeLifecycleHooks": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "LifecycleHooks": [ + { + "AutoScalingGroupName": "my-auto-scaling-group", + "DefaultResult": "ABANDON", + "GlobalTimeout": 172800, + "HeartbeatTimeout": 3600, + "LifecycleHookName": "my-lifecycle-hook", + "LifecycleTransition": "autoscaling:EC2_INSTANCE_LAUNCHING", + "NotificationTargetARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic", + "RoleARN": "arn:aws:iam::123456789012:role/my-auto-scaling-role" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the lifecycle hooks for the specified Auto Scaling group.", + "id": "autoscaling-describe-lifecycle-hooks-1", + "title": "To describe your lifecycle hooks" + } + ], + "DescribeLoadBalancerTargetGroups": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "LoadBalancerTargetGroups": [ + { + "LoadBalancerTargetGroupARN": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067", + "State": "Added" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the target groups attached to the specified Auto Scaling 
group.", + "id": "autoscaling-describe-load-balancer-target-groups-1", + "title": "To describe the target groups for an Auto Scaling group" + } + ], + "DescribeLoadBalancers": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "LoadBalancers": [ + { + "LoadBalancerName": "my-load-balancer", + "State": "Added" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the load balancers attached to the specified Auto Scaling group.", + "id": "autoscaling-describe-load-balancers-1", + "title": "To describe the load balancers for an Auto Scaling group" + } + ], + "DescribeMetricCollectionTypes": [ + { + "output": { + "Granularities": [ + { + "Granularity": "1Minute" + } + ], + "Metrics": [ + { + "Metric": "GroupMinSize" + }, + { + "Metric": "GroupMaxSize" + }, + { + "Metric": "GroupDesiredCapacity" + }, + { + "Metric": "GroupInServiceInstances" + }, + { + "Metric": "GroupPendingInstances" + }, + { + "Metric": "GroupTerminatingInstances" + }, + { + "Metric": "GroupStandbyInstances" + }, + { + "Metric": "GroupTotalInstances" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the available metric collection types.", + "id": "autoscaling-describe-metric-collection-types-1", + "title": "To describe the Auto Scaling metric collection types" + } + ], + "DescribeNotificationConfigurations": [ + { + "input": { + "AutoScalingGroupNames": [ + "my-auto-scaling-group" + ] + }, + "output": { + "NotificationConfigurations": [ + { + "AutoScalingGroupName": "my-auto-scaling-group", + "NotificationType": "autoscaling:TEST_NOTIFICATION", + "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic-2" + }, + { + "AutoScalingGroupName": "my-auto-scaling-group", + "NotificationType": "autoscaling:TEST_NOTIFICATION", + "TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the notification configurations for the specified Auto Scaling group.", + "id": "autoscaling-describe-notification-configurations-1", + "title": "To describe Auto Scaling notification configurations" + } + ], + "DescribePolicies": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "ScalingPolicies": [ + { + "AdjustmentType": "ChangeInCapacity", + "Alarms": [ + + ], + "AutoScalingGroupName": "my-auto-scaling-group", + "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:2233f3d7-6290-403b-b632-93c553560106:autoScalingGroupName/my-auto-scaling-group:policyName/ScaleIn", + "PolicyName": "ScaleIn", + "ScalingAdjustment": -1 + }, + { + "AdjustmentType": "PercentChangeInCapacity", + "Alarms": [ + + ], + "AutoScalingGroupName": "my-auto-scaling-group", + "Cooldown": 60, + "MinAdjustmentStep": 2, + "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:2b435159-cf77-4e89-8c0e-d63b497baad7:autoScalingGroupName/my-auto-scaling-group:policyName/ScalePercentChange", + "PolicyName": "ScalePercentChange", + "ScalingAdjustment": 25 + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the policies for the specified Auto Scaling group.", + "id": "autoscaling-describe-policies-1", + "title": "To describe scaling policies" + } + ], + "DescribeScalingActivities": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "Activities": [ + { 
+ "ActivityId": "f9f2d65b-f1f2-43e7-b46d-d86756459699", + "AutoScalingGroupARN": "arn:aws:autoscaling:us-east-1:123456789012:autoScalingGroup:12345678-1234-1234-1234-123456789012:autoScalingGroupName/my-auto-scaling-group", + "AutoScalingGroupName": "my-auto-scaling-group", + "Cause": "At 2013-08-19T20:53:25Z a user request created an AutoScalingGroup changing the desired capacity from 0 to 1. At 2013-08-19T20:53:29Z an instance was started in response to a difference between desired and actual capacity, increasing the capacity from 0 to 1.", + "Description": "Launching a new EC2 instance: i-4ba0837f", + "Details": "details", + "EndTime": "2013-08-19T20:54:02Z", + "Progress": 100, + "StartTime": "2013-08-19T20:53:29.930Z", + "StatusCode": "Successful" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the scaling activities for the specified Auto Scaling group.", + "id": "autoscaling-describe-scaling-activities-1", + "title": "To describe the scaling activities for an Auto Scaling group" + } + ], + "DescribeScalingProcessTypes": [ + { + "output": { + "Processes": [ + { + "ProcessName": "AZRebalance" + }, + { + "ProcessName": "AddToLoadBalancer" + }, + { + "ProcessName": "AlarmNotification" + }, + { + "ProcessName": "HealthCheck" + }, + { + "ProcessName": "Launch" + }, + { + "ProcessName": "ReplaceUnhealthy" + }, + { + "ProcessName": "ScheduledActions" + }, + { + "ProcessName": "Terminate" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the Auto Scaling process types.", + "id": "autoscaling-describe-scaling-process-types-1", + "title": "To describe the Auto Scaling process types" + } + ], + "DescribeScheduledActions": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "ScheduledUpdateGroupActions": [ + { + "AutoScalingGroupName": "my-auto-scaling-group", + "DesiredCapacity": 4, + "MaxSize": 6, + "MinSize": 2, + "Recurrence": "30 0 1 12 0", + "ScheduledActionARN": "arn:aws:autoscaling:us-west-2:123456789012:scheduledUpdateGroupAction:8e86b655-b2e6-4410-8f29-b4f094d6871c:autoScalingGroupName/my-auto-scaling-group:scheduledActionName/my-scheduled-action", + "ScheduledActionName": "my-scheduled-action", + "StartTime": "2016-12-01T00:30:00Z", + "Time": "2016-12-01T00:30:00Z" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the scheduled actions for the specified Auto Scaling group.", + "id": "autoscaling-describe-scheduled-actions-1", + "title": "To describe scheduled actions" + } + ], + "DescribeTags": [ + { + "input": { + "Filters": [ + { + "Name": "auto-scaling-group", + "Values": [ + "my-auto-scaling-group" + ] + } + ] + }, + "output": { + "Tags": [ + { + "Key": "Dept", + "PropagateAtLaunch": true, + "ResourceId": "my-auto-scaling-group", + "ResourceType": "auto-scaling-group", + "Value": "Research" + }, + { + "Key": "Role", + "PropagateAtLaunch": true, + "ResourceId": "my-auto-scaling-group", + "ResourceType": "auto-scaling-group", + "Value": "WebServer" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the tags for the specified Auto Scaling group.", + "id": "autoscaling-describe-tags-1", + "title": "To describe tags" + } + ], + "DescribeTerminationPolicyTypes": [ + { + "output": { + "TerminationPolicyTypes": [ + "ClosestToNextInstanceHour", + "Default", + "NewestInstance", + "OldestInstance", + 
"OldestLaunchConfiguration" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the available termination policy types.", + "id": "autoscaling-describe-termination-policy-types-1", + "title": "To describe termination policy types" + } + ], + "DescribeTrafficSources": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group" + }, + "output": { + "NextToken": "", + "TrafficSources": [ + { + "Identifier": "arn:aws:vpc-lattice:us-west-2:123456789012:targetgroup/tg-0e2f2665eEXAMPLE", + "State": "InService", + "Type": "vpc-lattice" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the target groups attached to the specified Auto Scaling group.", + "id": "to-describe-the-target-groups-for-an-auto-scaling-group-1680040714521", + "title": "To describe the target groups for an Auto Scaling group" + } + ], + "DetachInstances": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ], + "ShouldDecrementDesiredCapacity": true + }, + "output": { + "Activities": [ + { + "ActivityId": "5091cb52-547a-47ce-a236-c9ccbc2cb2c9", + "AutoScalingGroupName": "my-auto-scaling-group", + "Cause": "At 2015-04-12T15:02:16Z instance i-93633f9b was detached in response to a user request, shrinking the capacity from 2 to 1.", + "Description": "Detaching EC2 instance: i-93633f9b", + "Details": "details", + "Progress": 50, + "StartTime": "2015-04-12T15:02:16.179Z", + "StatusCode": "InProgress" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example detaches the specified instance from the specified Auto Scaling group.", + "id": "autoscaling-detach-instances-1", + "title": "To detach an instance from an Auto Scaling group" + } + ], + "DetachLoadBalancerTargetGroups": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "TargetGroupARNs": [ + "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example detaches the specified target group from the specified Auto Scaling group", + "id": "autoscaling-detach-load-balancer-target-groups-1", + "title": "To detach a target group from an Auto Scaling group" + } + ], + "DetachLoadBalancers": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LoadBalancerNames": [ + "my-load-balancer" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example detaches the specified load balancer from the specified Auto Scaling group.", + "id": "autoscaling-detach-load-balancers-1", + "title": "To detach a load balancer from an Auto Scaling group" + } + ], + "DetachTrafficSources": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "TrafficSources": [ + { + "Identifier": "arn:aws:elasticloadbalancing:us-west-2:123456789012:targetgroup/my-targets/73e2d6bc24d8a067" + } + ] + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example detaches the specified target group from the specified Auto Scaling group.", + "id": "to-detach-a-target-group-from-an-auto-scaling-group-1680040404169", + "title": "To detach a target group from an Auto Scaling group" + } + ], + "DisableMetricsCollection": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "Metrics": [ + "GroupDesiredCapacity" + ] + }, + 
"comments": { + "input": { + }, + "output": { + } + }, + "description": "This example disables collecting data for the GroupDesiredCapacity metric for the specified Auto Scaling group.", + "id": "autoscaling-disable-metrics-collection-1", + "title": "To disable metrics collection for an Auto Scaling group" + } + ], + "EnableMetricsCollection": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "Granularity": "1Minute" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example enables data collection for the specified Auto Scaling group.", + "id": "autoscaling-enable-metrics-collection-1", + "title": "To enable metrics collection for an Auto Scaling group" + } + ], + "EnterStandby": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ], + "ShouldDecrementDesiredCapacity": true + }, + "output": { + "Activities": [ + { + "ActivityId": "ffa056b4-6ed3-41ba-ae7c-249dfae6eba1", + "AutoScalingGroupName": "my-auto-scaling-group", + "Cause": "At 2015-04-12T15:10:23Z instance i-93633f9b was moved to standby in response to a user request, shrinking the capacity from 2 to 1.", + "Description": "Moving EC2 instance to Standby: i-93633f9b", + "Details": "details", + "Progress": 50, + "StartTime": "2015-04-12T15:10:23.640Z", + "StatusCode": "InProgress" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example puts the specified instance into standby mode.", + "id": "autoscaling-enter-standby-1", + "title": "To move instances into standby mode" + } + ], + "ExecutePolicy": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "BreachThreshold": 50.0, + "MetricValue": 59.0, + "PolicyName": "my-step-scale-out-policy" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example executes the specified policy.", + "id": "autoscaling-execute-policy-1", + "title": "To execute a scaling policy" + } + ], + "ExitStandby": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ] + }, + "output": { + "Activities": [ + { + "ActivityId": "142928e1-a2dc-453a-9b24-b85ad6735928", + "AutoScalingGroupName": "my-auto-scaling-group", + "Cause": "At 2015-04-12T15:14:29Z instance i-93633f9b was moved out of standby in response to a user request, increasing the capacity from 1 to 2.", + "Description": "Moving EC2 instance out of Standby: i-93633f9b", + "Details": "details", + "Progress": 30, + "StartTime": "2015-04-12T15:14:29.886Z", + "StatusCode": "PreInService" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example moves the specified instance out of standby mode.", + "id": "autoscaling-exit-standby-1", + "title": "To move instances out of standby mode" + } + ], + "PutLifecycleHook": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "DefaultResult": "CONTINUE", + "HeartbeatTimeout": 300, + "LifecycleHookName": "my-launch-lifecycle-hook", + "LifecycleTransition": "autoscaling:EC2_INSTANCE_LAUNCHING" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a lifecycle hook for instance launch.", + "id": "autoscaling-put-lifecycle-hook-1", + "title": "To create a launch lifecycle hook" + } + ], + "PutNotificationConfiguration": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "NotificationTypes": [ + "autoscaling:TEST_NOTIFICATION" + ], + 
"TopicARN": "arn:aws:sns:us-west-2:123456789012:my-sns-topic" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example adds the specified notification to the specified Auto Scaling group.", + "id": "autoscaling-put-notification-configuration-1", + "title": "To add an Auto Scaling notification" + } + ], + "PutScalingPolicy": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "PolicyName": "alb1000-target-tracking-scaling-policy", + "PolicyType": "TargetTrackingScaling", + "TargetTrackingConfiguration": { + "PredefinedMetricSpecification": { + "PredefinedMetricType": "ALBRequestCountPerTarget", + "ResourceLabel": "app/my-alb/778d41231b141a0f/targetgroup/my-alb-target-group/943f017f100becff" + }, + "TargetValue": 1000.0 + } + }, + "output": { + "Alarms": [ + { + "AlarmARN": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:TargetTracking-my-asg-AlarmHigh-fc0e4183-23ac-497e-9992-691c9980c38e", + "AlarmName": "TargetTracking-my-asg-AlarmHigh-fc0e4183-23ac-497e-9992-691c9980c38e" + }, + { + "AlarmARN": "arn:aws:cloudwatch:us-west-2:123456789012:alarm:TargetTracking-my-asg-AlarmLow-61a39305-ed0c-47af-bd9e-471a352ee1a2", + "AlarmName": "TargetTracking-my-asg-AlarmLow-61a39305-ed0c-47af-bd9e-471a352ee1a2" + } + ], + "PolicyARN": "arn:aws:autoscaling:us-west-2:123456789012:scalingPolicy:228f02c2-c665-4bfd-aaac-8b04080bea3c:autoScalingGroupName/my-auto-scaling-group:policyName/alb1000-target-tracking-scaling-policy" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example adds the specified policy to the specified Auto Scaling group.", + "id": "autoscaling-put-scaling-policy-1", + "title": "To add a scaling policy to an Auto Scaling group" + } + ], + "PutScheduledUpdateGroupAction": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "DesiredCapacity": 4, + "EndTime": "2014-05-12T08:00:00Z", + "MaxSize": 6, + "MinSize": 2, + "ScheduledActionName": "my-scheduled-action", + "StartTime": "2014-05-12T08:00:00Z" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example adds the specified scheduled action to the specified Auto Scaling group.", + "id": "autoscaling-put-scheduled-update-group-action-1", + "title": "To add a scheduled action to an Auto Scaling group" + } + ], + "PutWarmPool": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceReusePolicy": { + "ReuseOnScaleIn": true + }, + "MinSize": 30, + "PoolState": "Hibernated" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a warm pool for the specified Auto Scaling group.", + "id": "to-add-a-warm-pool-to-an-auto-scaling-group-1617818810383", + "title": "To create a warm pool for an Auto Scaling group" + } + ], + "RecordLifecycleActionHeartbeat": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LifecycleActionToken": "bcd2f1b8-9a78-44d3-8a7a-4dd07d7cf635", + "LifecycleHookName": "my-lifecycle-hook" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example records a lifecycle action heartbeat to keep the instance in a pending state.", + "id": "autoscaling-record-lifecycle-action-heartbeat-1", + "title": "To record a lifecycle action heartbeat" + } + ], + "ResumeProcesses": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "ScalingProcesses": [ + "AlarmNotification" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": 
"This example resumes the specified suspended scaling process for the specified Auto Scaling group.", + "id": "autoscaling-resume-processes-1", + "title": "To resume Auto Scaling processes" + } + ], + "SetDesiredCapacity": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "DesiredCapacity": 2, + "HonorCooldown": true + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example sets the desired capacity for the specified Auto Scaling group.", + "id": "autoscaling-set-desired-capacity-1", + "title": "To set the desired capacity for an Auto Scaling group" + } + ], + "SetInstanceHealth": [ + { + "input": { + "HealthStatus": "Unhealthy", + "InstanceId": "i-93633f9b" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example sets the health status of the specified instance to Unhealthy.", + "id": "autoscaling-set-instance-health-1", + "title": "To set the health status of an instance" + } + ], + "SetInstanceProtection": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ], + "ProtectedFromScaleIn": true + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example enables instance protection for the specified instance.", + "id": "autoscaling-set-instance-protection-1", + "title": "To enable instance protection for an instance" + }, + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "InstanceIds": [ + "i-93633f9b" + ], + "ProtectedFromScaleIn": false + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example disables instance protection for the specified instance.", + "id": "autoscaling-set-instance-protection-2", + "title": "To disable instance protection for an instance" + } + ], + "StartInstanceRefresh": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "DesiredConfiguration": { + "LaunchTemplate": { + "LaunchTemplateName": "my-template-for-auto-scaling", + "Version": "$Latest" + } + }, + "Preferences": { + "AlarmSpecification": { + "Alarms": [ + "my-alarm" + ] + }, + "AutoRollback": true, + "InstanceWarmup": 200, + "MinHealthyPercentage": 90 + } + }, + "output": { + "InstanceRefreshId": "08b91cf7-8fa6-48af-b6a6-d227f40f1b9b" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example starts an instance refresh for the specified Auto Scaling group.", + "id": "to-start-an-instance-refresh-1592957271522", + "title": "To start an instance refresh" + } + ], + "SuspendProcesses": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "ScalingProcesses": [ + "AlarmNotification" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example suspends the specified scaling process for the specified Auto Scaling group.", + "id": "autoscaling-suspend-processes-1", + "title": "To suspend Auto Scaling processes" + } + ], + "TerminateInstanceInAutoScalingGroup": [ + { + "input": { + "InstanceId": "i-93633f9b", + "ShouldDecrementDesiredCapacity": false + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example terminates the specified instance from the specified Auto Scaling group without updating the size of the group. 
Auto Scaling launches a replacement instance after the specified instance terminates.", + "id": "autoscaling-terminate-instance-in-auto-scaling-group-1", + "title": "To terminate an instance in an Auto Scaling group" + } + ], + "UpdateAutoScalingGroup": [ + { + "input": { + "AutoScalingGroupName": "my-auto-scaling-group", + "LaunchTemplate": { + "LaunchTemplateName": "my-template-for-auto-scaling", + "Version": "2" + }, + "MaxSize": 5, + "MinSize": 1, + "NewInstancesProtectedFromScaleIn": true + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example updates multiple properties at the same time.", + "id": "autoscaling-update-auto-scaling-group-1", + "title": "To update an Auto Scaling group" + } + ] + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ac5939dfe545f889052afd10d93200aea559264e --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.json @@ -0,0 +1,70 @@ +{ + "pagination": { + "DescribeAutoScalingGroups": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "AutoScalingGroups" + }, + "DescribeAutoScalingInstances": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "AutoScalingInstances" + }, + "DescribeLaunchConfigurations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "LaunchConfigurations" + }, + "DescribeNotificationConfigurations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "NotificationConfigurations" + }, + "DescribePolicies": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "ScalingPolicies" + }, + "DescribeScalingActivities": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "Activities" + }, + "DescribeScheduledActions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "ScheduledUpdateGroupActions" + }, + "DescribeTags": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxRecords", + "result_key": "Tags" + }, + "DescribeLoadBalancerTargetGroups": { + "input_token": "NextToken", + "limit_key": "MaxRecords", + "output_token": "NextToken", + "result_key": "LoadBalancerTargetGroups" + }, + "DescribeLoadBalancers": { + "input_token": "NextToken", + "limit_key": "MaxRecords", + "output_token": "NextToken", + "result_key": "LoadBalancers" + }, + "DescribeWarmPool": { + "input_token": "NextToken", + "limit_key": "MaxRecords", + "output_token": "NextToken", + "result_key": "Instances" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json new file mode 100644 index 0000000000000000000000000000000000000000..1c63499294318cbbceca74373d91a13403dd4535 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/autoscaling/2011-01-01/paginators-1.sdk-extras.json @@ -0,0 +1,12 @@ +{ + "version": 1.0, + "merge": { + "pagination": { + "DescribeWarmPool": { + 
"non_aggregate_keys": [ + "WarmPoolConfiguration" + ] + } + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..5f3b0d242ce73407c92ab9e21d74d5586f7a0b1c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/b2bi/2022-06-23/paginators-1.json @@ -0,0 +1,28 @@ +{ + "pagination": { + "ListCapabilities": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "capabilities" + }, + "ListPartnerships": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "partnerships" + }, + "ListProfiles": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "profiles" + }, + "ListTransformers": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "transformers" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..1720297a2e07e0644cca4e88f1964b4475ce7b8c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/backup/2018-11-15/paginators-1.json @@ -0,0 +1,106 @@ +{ + "pagination": { + "ListBackupJobs": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupJobs" + }, + "ListBackupPlanTemplates": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupPlanTemplatesList" + }, + "ListBackupPlanVersions": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupPlanVersionsList" + }, + "ListBackupPlans": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupPlansList" + }, + "ListBackupSelections": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupSelectionsList" + }, + "ListBackupVaults": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "BackupVaultList" + }, + "ListCopyJobs": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "CopyJobs" + }, + "ListProtectedResources": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Results" + }, + "ListRecoveryPointsByBackupVault": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RecoveryPoints" + }, + "ListRecoveryPointsByResource": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RecoveryPoints" + 
}, + "ListRestoreJobs": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RestoreJobs" + }, + "ListLegalHolds": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "LegalHolds" + }, + "ListRecoveryPointsByLegalHold": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RecoveryPoints" + }, + "ListProtectedResourcesByBackupVault": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Results" + }, + "ListRestoreJobsByProtectedResource": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RestoreJobs" + }, + "ListRestoreTestingPlans": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RestoreTestingPlans" + }, + "ListRestoreTestingSelections": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "RestoreTestingSelections" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..18203dc88698f0c53165d58c973cd6e2f824cfea --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/examples-1.json @@ -0,0 +1,711 @@ +{ + "version": "1.0", + "examples": { + "CancelJob": [ + { + "input": { + "jobId": "1d828f65-7a4d-42e8-996d-3b900ed59dc4", + "reason": "Cancelling job." + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example cancels a job with the specified job ID.", + "id": "to-cancel-a-job-1481152314733", + "title": "To cancel a job" + } + ], + "CreateComputeEnvironment": [ + { + "input": { + "type": "MANAGED", + "computeEnvironmentName": "C4OnDemand", + "computeResources": { + "type": "EC2", + "desiredvCpus": 48, + "ec2KeyPair": "id_rsa", + "instanceRole": "ecsInstanceRole", + "instanceTypes": [ + "c4.large", + "c4.xlarge", + "c4.2xlarge", + "c4.4xlarge", + "c4.8xlarge" + ], + "maxvCpus": 128, + "minvCpus": 0, + "securityGroupIds": [ + "sg-cf5093b2" + ], + "subnets": [ + "subnet-220c0e0a", + "subnet-1a95556d", + "subnet-978f6dce" + ], + "tags": { + "Name": "Batch Instance - C4OnDemand" + } + }, + "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", + "state": "ENABLED" + }, + "output": { + "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/C4OnDemand", + "computeEnvironmentName": "C4OnDemand" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a managed compute environment with specific C4 instance types that are launched on demand. 
The compute environment is called C4OnDemand.", + "id": "to-create-a-managed-ec2-compute-environment-1481152600017", + "title": "To create a managed EC2 compute environment" + }, + { + "input": { + "type": "MANAGED", + "computeEnvironmentName": "M4Spot", + "computeResources": { + "type": "SPOT", + "bidPercentage": 20, + "desiredvCpus": 4, + "ec2KeyPair": "id_rsa", + "instanceRole": "ecsInstanceRole", + "instanceTypes": [ + "m4" + ], + "maxvCpus": 128, + "minvCpus": 0, + "securityGroupIds": [ + "sg-cf5093b2" + ], + "spotIamFleetRole": "arn:aws:iam::012345678910:role/aws-ec2-spot-fleet-role", + "subnets": [ + "subnet-220c0e0a", + "subnet-1a95556d", + "subnet-978f6dce" + ], + "tags": { + "Name": "Batch Instance - M4Spot" + } + }, + "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", + "state": "ENABLED" + }, + "output": { + "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/M4Spot", + "computeEnvironmentName": "M4Spot" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a managed compute environment with the M4 instance type that is launched when the Spot bid price is at or below 20% of the On-Demand price for the instance type. The compute environment is called M4Spot.", + "id": "to-create-a-managed-ec2-spot-compute-environment-1481152844190", + "title": "To create a managed EC2 Spot compute environment" + } + ], + "CreateJobQueue": [ + { + "input": { + "computeEnvironmentOrder": [ + { + "computeEnvironment": "M4Spot", + "order": 1 + } + ], + "jobQueueName": "LowPriority", + "priority": 1, + "state": "ENABLED" + }, + "output": { + "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/LowPriority", + "jobQueueName": "LowPriority" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a job queue called LowPriority that uses the M4Spot compute environment.", + "id": "to-create-a-job-queue-with-a-single-compute-environment-1481152967946", + "title": "To create a job queue with a single compute environment" + }, + { + "input": { + "computeEnvironmentOrder": [ + { + "computeEnvironment": "C4OnDemand", + "order": 1 + }, + { + "computeEnvironment": "M4Spot", + "order": 2 + } + ], + "jobQueueName": "HighPriority", + "priority": 10, + "state": "ENABLED" + }, + "output": { + "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", + "jobQueueName": "HighPriority" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example creates a job queue called HighPriority that uses the C4OnDemand compute environment with an order of 1 and the M4Spot compute environment with an order of 2.", + "id": "to-create-a-job-queue-with-multiple-compute-environments-1481153027051", + "title": "To create a job queue with multiple compute environments" + } + ], + "DeleteComputeEnvironment": [ + { + "input": { + "computeEnvironment": "P2OnDemand" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the P2OnDemand compute environment.", + "id": "to-delete-a-compute-environment-1481153105644", + "title": "To delete a compute environment" + } + ], + "DeleteJobQueue": [ + { + "input": { + "jobQueue": "GPGPU" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deletes the GPGPU job queue.", + "id": "to-delete-a-job-queue-1481153508134", + "title": "To delete a job queue" + } + ], + 
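An aside on what these examples-1.json files are for: botocore never executes them at runtime; they are the data source for the "Examples" sections of the generated SDK and API-reference documentation. A minimal sketch of reading them through botocore's own data loader — the service name ("batch") and type name ("examples-1") match the file in this diff; the loop body is purely illustrative:

```python
import botocore.session

# The session's data loader is the same component botocore uses to
# resolve service-2.json, paginators-1.json, and waiters-2.json.
loader = botocore.session.get_session().get_component("data_loader")
examples = loader.load_service_model("batch", "examples-1")

for example in examples["examples"]["CreateJobQueue"]:
    print(example["title"])
    print("  ", example["description"])
```

The `id` and `title` fields act as stable keys for the docs generators; nothing in these files affects request serialization.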
"DeregisterJobDefinition": [ + { + "input": { + "jobDefinition": "sleep10" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example deregisters a job definition called sleep10.", + "id": "to-deregister-a-job-definition-1481153579565", + "title": "To deregister a job definition" + } + ], + "DescribeComputeEnvironments": [ + { + "input": { + "computeEnvironments": [ + "P2OnDemand" + ] + }, + "output": { + "computeEnvironments": [ + { + "type": "MANAGED", + "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/P2OnDemand", + "computeEnvironmentName": "P2OnDemand", + "computeResources": { + "type": "EC2", + "desiredvCpus": 48, + "ec2KeyPair": "id_rsa", + "instanceRole": "ecsInstanceRole", + "instanceTypes": [ + "p2" + ], + "maxvCpus": 128, + "minvCpus": 0, + "securityGroupIds": [ + "sg-cf5093b2" + ], + "subnets": [ + "subnet-220c0e0a", + "subnet-1a95556d", + "subnet-978f6dce" + ], + "tags": { + "Name": "Batch Instance - P2OnDemand" + } + }, + "ecsClusterArn": "arn:aws:ecs:us-east-1:012345678910:cluster/P2OnDemand_Batch_2c06f29d-d1fe-3a49-879d-42394c86effc", + "serviceRole": "arn:aws:iam::012345678910:role/AWSBatchServiceRole", + "state": "ENABLED", + "status": "VALID", + "statusReason": "ComputeEnvironment Healthy" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the P2OnDemand compute environment.", + "id": "to-describe-a-compute-environment-1481153713334", + "title": "To describe a compute environment" + } + ], + "DescribeJobDefinitions": [ + { + "input": { + "status": "ACTIVE" + }, + "output": { + "jobDefinitions": [ + { + "type": "container", + "containerProperties": { + "command": [ + "sleep", + "60" + ], + "environment": [ + + ], + "image": "busybox", + "mountPoints": [ + + ], + "resourceRequirements": [ + { + "type": "MEMORY", + "value": "128" + }, + { + "type": "VCPU", + "value": "1" + } + ], + "ulimits": [ + + ], + "volumes": [ + + ] + }, + "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep60:1", + "jobDefinitionName": "sleep60", + "revision": 1, + "status": "ACTIVE" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes all of your active job definitions.", + "id": "to-describe-active-job-definitions-1481153895831", + "title": "To describe active job definitions" + } + ], + "DescribeJobQueues": [ + { + "input": { + "jobQueues": [ + "HighPriority" + ] + }, + "output": { + "jobQueues": [ + { + "computeEnvironmentOrder": [ + { + "computeEnvironment": "arn:aws:batch:us-east-1:012345678910:compute-environment/C4OnDemand", + "order": 1 + } + ], + "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", + "jobQueueName": "HighPriority", + "priority": 1, + "state": "ENABLED", + "status": "VALID", + "statusReason": "JobQueue Healthy" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes the HighPriority job queue.", + "id": "to-describe-a-job-queue-1481153995804", + "title": "To describe a job queue" + } + ], + "DescribeJobs": [ + { + "input": { + "jobs": [ + "24fa2d7a-64c4-49d2-8b47-f8da4fbde8e9" + ] + }, + "output": { + "jobs": [ + { + "container": { + "command": [ + "sleep", + "60" + ], + "containerInstanceArn": "arn:aws:ecs:us-east-1:012345678910:container-instance/5406d7cd-58bd-4b8f-9936-48d7c6b1526c", + "environment": [ + + ], + "exitCode": 0, + "image": "busybox", + "memory": 128, + 
"mountPoints": [ + + ], + "ulimits": [ + + ], + "vcpus": 1, + "volumes": [ + + ] + }, + "createdAt": 1480460782010, + "dependsOn": [ + + ], + "jobDefinition": "sleep60", + "jobId": "24fa2d7a-64c4-49d2-8b47-f8da4fbde8e9", + "jobName": "example", + "jobQueue": "arn:aws:batch:us-east-1:012345678910:job-queue/HighPriority", + "parameters": { + }, + "startedAt": 1480460816500, + "status": "SUCCEEDED", + "stoppedAt": 1480460880699 + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example describes a job with the specified job ID.", + "id": "to-describe-a-specific-job-1481154090490", + "title": "To describe a specific job" + } + ], + "ListJobs": [ + { + "input": { + "jobQueue": "HighPriority" + }, + "output": { + "jobSummaryList": [ + { + "jobId": "e66ff5fd-a1ff-4640-b1a2-0b0a142f49bb", + "jobName": "example" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example lists the running jobs in the HighPriority job queue.", + "id": "to-list-running-jobs-1481154202164", + "title": "To list running jobs" + }, + { + "input": { + "jobQueue": "HighPriority", + "jobStatus": "SUBMITTED" + }, + "output": { + "jobSummaryList": [ + { + "jobId": "68f0c163-fbd4-44e6-9fd1-25b14a434786", + "jobName": "example" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example lists jobs in the HighPriority job queue that are in the SUBMITTED job status.", + "id": "to-list-submitted-jobs-1481154251623", + "title": "To list submitted jobs" + } + ], + "ListTagsForResource": [ + { + "input": { + "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1" + }, + "output": { + "tags": { + "Department": "Engineering", + "Stage": "Alpha", + "User": "JaneDoe" + } + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This demonstrates calling the ListTagsForResource action.", + "id": "listtagsforresource-example-1591293003710", + "title": "ListTagsForResource Example" + } + ], + "RegisterJobDefinition": [ + { + "input": { + "type": "container", + "containerProperties": { + "command": [ + "sleep", + "10" + ], + "image": "busybox", + "resourceRequirements": [ + { + "type": "MEMORY", + "value": "128" + }, + { + "type": "VCPU", + "value": "1" + } + ] + }, + "jobDefinitionName": "sleep10" + }, + "output": { + "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep10:1", + "jobDefinitionName": "sleep10", + "revision": 1 + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example registers a job definition for a simple container job.", + "id": "to-register-a-job-definition-1481154325325", + "title": "To register a job definition" + }, + { + "input": { + "type": "container", + "containerProperties": { + "command": [ + "sleep", + "30" + ], + "image": "busybox", + "resourceRequirements": [ + { + "type": "MEMORY", + "value": "128" + }, + { + "type": "VCPU", + "value": "1" + } + ] + }, + "jobDefinitionName": "sleep30", + "tags": { + "Department": "Engineering", + "User": "JaneDoe" + } + }, + "output": { + "jobDefinitionArn": "arn:aws:batch:us-east-1:012345678910:job-definition/sleep30:1", + "jobDefinitionName": "sleep30", + "revision": 1 + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This demonstrates calling the RegisterJobDefinition action, including tags.", + "id": "registerjobdefinition-with-tags-1591290509028", + "title": "RegisterJobDefinition with tags" + } + ], + "SubmitJob": [ 
+ { + "input": { + "jobDefinition": "sleep60", + "jobName": "example", + "jobQueue": "HighPriority" + }, + "output": { + "jobId": "876da822-4198-45f2-a252-6cea32512ea8", + "jobName": "example" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example submits a simple container job called example to the HighPriority job queue.", + "id": "to-submit-a-job-to-a-queue-1481154481673", + "title": "To submit a job to a queue" + } + ], + "TagResource": [ + { + "input": { + "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1", + "tags": { + "Stage": "Alpha" + } + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This demonstrates calling the TagResource action.", + "id": "tagresource-example-1591291959952", + "title": "TagResource Example" + } + ], + "TerminateJob": [ + { + "input": { + "jobId": "61e743ed-35e4-48da-b2de-5c8333821c84", + "reason": "Terminating job." + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example terminates a job with the specified job ID.", + "id": "to-terminate-a-job-1481154558276", + "title": "To terminate a job" + } + ], + "UntagResource": [ + { + "input": { + "resourceArn": "arn:aws:batch:us-east-1:123456789012:job-definition/sleep30:1", + "tagKeys": [ + "Stage" + ] + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This demonstrates calling the UntagResource action.", + "id": "untagresource-example-1591292811042", + "title": "UntagResource Example" + } + ], + "UpdateComputeEnvironment": [ + { + "input": { + "computeEnvironment": "P2OnDemand", + "state": "DISABLED" + }, + "output": { + "computeEnvironmentArn": "arn:aws:batch:us-east-1:012345678910:compute-environment/P2OnDemand", + "computeEnvironmentName": "P2OnDemand" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example disables the P2OnDemand compute environment so it can be deleted.", + "id": "to-update-a-compute-environment-1481154702731", + "title": "To update a compute environment" + } + ], + "UpdateJobQueue": [ + { + "input": { + "jobQueue": "GPGPU", + "state": "DISABLED" + }, + "output": { + "jobQueueArn": "arn:aws:batch:us-east-1:012345678910:job-queue/GPGPU", + "jobQueueName": "GPGPU" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "This example disables a job queue so that it can be deleted.", + "id": "to-update-a-job-queue-1481154806981", + "title": "To update a job queue" + } + ] + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..166f3efa7cfa97debb1ab611e5c2120a2db84afe --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/batch/2016-08-10/paginators-1.json @@ -0,0 +1,34 @@ +{ + "pagination": { + "DescribeComputeEnvironments": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "computeEnvironments" + }, + "DescribeJobDefinitions": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "jobDefinitions" + }, + "DescribeJobQueues": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "jobQueues" + }, + "ListJobs": { + "input_token": "nextToken", + 
"limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "jobSummaryList" + }, + "ListSchedulingPolicies": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "schedulingPolicies" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..6d64542de38612a4d6f8e157a4402b16a3ecaeda --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/bedrock-agent-runtime/2023-07-26/paginators-1.json @@ -0,0 +1,15 @@ +{ + "pagination": { + "Retrieve": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "retrievalResults" + }, + "GetAgentMemory": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxItems", + "result_key": "memoryContents" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..4b20636aa4c8b334eca901959d698c0b98cba6d6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/bedrock-runtime/2023-09-30/waiters-2.json @@ -0,0 +1,5 @@ +{ + "version": 2, + "waiters": { + } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..15f7a63e05c9dbc1b31cc66f75bb3836263142d0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/budgets/2016-10-20/paginators-1.json @@ -0,0 +1,52 @@ +{ + "pagination": { + "DescribeBudgets": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Budgets" + }, + "DescribeNotificationsForBudget": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Notifications" + }, + "DescribeSubscribersForNotification": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Subscribers" + }, + "DescribeBudgetPerformanceHistory": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "BudgetPerformanceHistory" + }, + 
"DescribeBudgetActionHistories": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ActionHistories" + }, + "DescribeBudgetActionsForAccount": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Actions" + }, + "DescribeBudgetActionsForBudget": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Actions" + }, + "DescribeBudgetNotificationsForAccount": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "BudgetNotificationsForAccount" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/chatbot/2017-10-11/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..fdef2700951e2c4d263df0b0d0279ea667678299 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/examples-1.json @@ -0,0 +1,315 @@ +{ + "version": "1.0", + "examples": { + "CreateEnvironmentEC2": [ + { + "input": { + "name": "my-demo-environment", + "automaticStopTimeMinutes": 60, + "description": "This is my demonstration environment.", + "instanceType": "t2.micro", + "ownerArn": "arn:aws:iam::123456789012:user/MyDemoUser", + "subnetId": "subnet-6300cd1b" + }, + "output": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "createenvironmentec2-1516821730547", + "title": "CreateEnvironmentEC2" + } + ], + "CreateEnvironmentMembership": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "read-write", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" + }, + "output": { + "membership": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "read-write", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", + "userId": "AIDAJ3BA6O2FMJWCWXHEX" + } + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "createenvironmentmembership-1516822583452", + "title": "CreateEnvironmentMembership" + } + ], + "DeleteEnvironment": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "deleteenvironment-1516822903149", + "title": "DeleteEnvironment" + } + ], + "DeleteEnvironmentMembership": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "deleteenvironmentmembership-1516822975655", + "title": "DeleteEnvironmentMembership" + } + ], + "DescribeEnvironmentMemberships": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" + }, + "output": { + "memberships": [ + { + "environmentId": 
"8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "read-write", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", + "userId": "AIDAJ3BA6O2FMJWCWXHEX" + }, + { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "owner", + "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", + "userId": "AIDAJNUEDQAQWFELJDLEX" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "The following example gets information about all of the environment members for the specified development environment.", + "id": "describeenvironmentmemberships1-1516823070453", + "title": "DescribeEnvironmentMemberships1" + }, + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": [ + "owner" + ] + }, + "output": { + "memberships": [ + { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "owner", + "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", + "userId": "AIDAJNUEDQAQWFELJDLEX" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "The following example gets information about the owner of the specified development environment.", + "id": "describeenvironmentmemberships2-1516823191355", + "title": "DescribeEnvironmentMemberships2" + }, + { + "input": { + "userArn": "arn:aws:iam::123456789012:user/MyDemoUser" + }, + "output": { + "memberships": [ + { + "environmentId": "10a75714bd494714929e7f5ec4125aEX", + "lastAccess": "2018-01-19T11:06:13Z", + "permissions": "owner", + "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", + "userId": "AIDAJNUEDQAQWFELJDLEX" + }, + { + "environmentId": "12bfc3cd537f41cb9776f8af5525c9EX", + "lastAccess": "2018-01-19T11:39:19Z", + "permissions": "owner", + "userArn": "arn:aws:iam::123456789012:user/MyDemoUser", + "userId": "AIDAJNUEDQAQWFELJDLEX" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "The following example gets development environment membership information for the specified user.", + "id": "describeenvironmentmemberships3-1516823268793", + "title": "DescribeEnvironmentMemberships3" + } + ], + "DescribeEnvironmentStatus": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" + }, + "output": { + "message": "Environment is ready to use", + "status": "ready" + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "describeenvironmentstatus-1516823462133", + "title": "DescribeEnvironmentStatus" + } + ], + "DescribeEnvironments": [ + { + "input": { + "environmentIds": [ + "8d9967e2f0624182b74e7690ad69ebEX", + "349c86d4579e4e7298d500ff57a6b2EX" + ] + }, + "output": { + "environments": [ + { + "name": "my-demo-environment", + "type": "ec2", + "arn": "arn:aws:cloud9:us-east-2:123456789012:environment:8d9967e2f0624182b74e7690ad69ebEX", + "description": "This is my demonstration environment.", + "id": "8d9967e2f0624182b74e7690ad69ebEX", + "lifecycle": { + "status": "CREATED" + }, + "ownerArn": "arn:aws:iam::123456789012:user/MyDemoUser" + }, + { + "name": "another-demo-environment", + "type": "ssh", + "arn": "arn:aws:cloud9:us-east-2:123456789012:environment:349c86d4579e4e7298d500ff57a6b2EX", + "description": "", + "id": "349c86d4579e4e7298d500ff57a6b2EX", + "lifecycle": { + "status": "CREATED" + }, + "ownerArn": "arn:aws:sts::123456789012:assumed-role/AnotherDemoUser/AnotherDemoUser" + } + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "describeenvironments-1516823568291", + "title": 
"DescribeEnvironments" + } + ], + "ListEnvironments": [ + { + "input": { + }, + "output": { + "environmentIds": [ + "349c86d4579e4e7298d500ff57a6b2EX", + "45a3da47af0840f2b0c0824f5ee232EX" + ] + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "listenvironments-1516823687205", + "title": "ListEnvironments" + } + ], + "UpdateEnvironment": [ + { + "input": { + "name": "my-changed-demo-environment", + "description": "This is my changed demonstration environment.", + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX" + }, + "output": { + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "updateenvironment-1516823781910", + "title": "UpdateEnvironment" + } + ], + "UpdateEnvironmentMembership": [ + { + "input": { + "environmentId": "8d9967e2f0624182b74e7690ad69ebEX", + "permissions": "read-only", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser" + }, + "output": { + "membership": { + "environmentId": "8d9967e2f0624182b74e7690ad69eb31", + "permissions": "read-only", + "userArn": "arn:aws:iam::123456789012:user/AnotherDemoUser", + "userId": "AIDAJ3BA6O2FMJWCWXHEX" + } + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "", + "id": "updateenvironmentmembership-1516823876645", + "title": "UpdateEnvironmentMembership" + } + ] + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..1c4c2ff54160bddb62705348dbf10c2145735532 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloud9/2017-09-23/paginators-1.json @@ -0,0 +1,16 @@ +{ + "pagination": { + "DescribeEnvironmentMemberships": { + "result_key": "memberships", + "output_token": "nextToken", + "input_token": "nextToken", + "limit_key": "maxResults" + }, + "ListEnvironments": { + "result_key": "environmentIds", + "output_token": "nextToken", + "input_token": "nextToken", + "limit_key": "maxResults" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..14380b07447f80f178786eded893e6eb2d726afa --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.json @@ -0,0 +1,16 @@ +{ + "pagination": { + "ListResourceRequests": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ResourceRequestStatusSummaries" + }, + "ListResources": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "ResourceDescriptions" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json 
b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json new file mode 100644 index 0000000000000000000000000000000000000000..d0d47fb7972ee2ebb4fe4c659b928baa1de7decf --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/paginators-1.sdk-extras.json @@ -0,0 +1,12 @@ +{ + "version": 1.0, + "merge": { + "pagination": { + "ListResources": { + "non_aggregate_keys": [ + "TypeName" + ] + } + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..e5f82acb29941a16a86198c5699d2aaa0faf36cd --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudcontrol/2021-09-30/waiters-2.json @@ -0,0 +1,27 @@ +{ + "version" : 2, + "waiters" : { + "ResourceRequestSuccess" : { + "description" : "Wait until resource operation request is successful", + "delay" : 5, + "maxAttempts" : 24, + "operation" : "GetResourceRequestStatus", + "acceptors" : [ { + "matcher" : "path", + "argument" : "ProgressEvent.OperationStatus", + "state" : "success", + "expected" : "SUCCESS" + }, { + "matcher" : "path", + "argument" : "ProgressEvent.OperationStatus", + "state" : "failure", + "expected" : "FAILED" + }, { + "matcher" : "path", + "argument" : "ProgressEvent.OperationStatus", + "state" : "failure", + "expected" : "CANCEL_COMPLETE" + } ] + } + } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..22cc439e42f8df530e13c66de3344861c14f35ca --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2016-05-10/paginators-1.json @@ -0,0 +1,100 @@ +{ + "pagination": { + "ListObjectParentPaths": { + "result_key": "PathToObjectIdentifiersList", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListFacetNames": { + "result_key": "FacetNames", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListPublishedSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListDirectories": { + "result_key": "Directories", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListDevelopmentSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTypedLinkFacetNames": { + "result_key": "FacetNames", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListIndex": { + "result_key": "IndexAttachments", + "output_token": "NextToken", + "input_token": "NextToken", + 
"limit_key": "MaxResults" + }, + "ListFacetAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListObjectPolicies": { + "result_key": "AttachedPolicyIds", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTagsForResource": { + "result_key": "Tags", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListAttachedIndices": { + "result_key": "IndexAttachments", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "LookupPolicy": { + "result_key": "PolicyToPathList", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListPolicyAttachments": { + "result_key": "ObjectIdentifiers", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListObjectAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListAppliedSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTypedLinkFacetAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..5a06fb0bcea02b7a72a3eca64d98c549e4dcadae --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/clouddirectory/2017-01-11/paginators-1.json @@ -0,0 +1,118 @@ +{ + "pagination": { + "ListObjectParentPaths": { + "result_key": "PathToObjectIdentifiersList", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListFacetNames": { + "result_key": "FacetNames", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListPublishedSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListDirectories": { + "result_key": "Directories", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListDevelopmentSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTypedLinkFacetNames": { + "result_key": "FacetNames", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListIndex": { + "result_key": "IndexAttachments", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListFacetAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", 
+ "limit_key": "MaxResults" + }, + "ListObjectPolicies": { + "result_key": "AttachedPolicyIds", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTagsForResource": { + "result_key": "Tags", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListAttachedIndices": { + "result_key": "IndexAttachments", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "LookupPolicy": { + "result_key": "PolicyToPathList", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListPolicyAttachments": { + "result_key": "ObjectIdentifiers", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListObjectAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListAppliedSchemaArns": { + "result_key": "SchemaArns", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListTypedLinkFacetAttributes": { + "result_key": "Attributes", + "output_token": "NextToken", + "input_token": "NextToken", + "limit_key": "MaxResults" + }, + "ListIncomingTypedLinks": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "LinkSpecifiers" + }, + "ListManagedSchemaArns": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "SchemaArns" + }, + "ListOutgoingTypedLinks": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TypedLinkSpecifiers" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..7ded86325397d0fb24f79fd825ebbd7cf4f1b33d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/paginators-1.json @@ -0,0 +1,124 @@ +{ + "pagination": { + "DescribeAccountLimits": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "AccountLimits" + }, + "DescribeChangeSet": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Changes", + "non_aggregate_keys": [ + "ChangeSetName", + "ChangeSetId", + "StackId", + "StackName", + "Description", + "Parameters", + "CreationTime", + "ExecutionStatus", + "Status", + "StatusReason", + "NotificationARNs", + "RollbackConfiguration", + "Capabilities", + "Tags", + "ParentChangeSetId", + "IncludeNestedStacks", + "RootChangeSetId", + "OnStackFailure", + "ImportExistingResources" + ] + }, + "DescribeStackEvents": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "StackEvents" + }, + "DescribeStacks": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Stacks" + }, 
+ "ListChangeSets": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Summaries" + }, + "ListStackInstances": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Summaries" + }, + "ListStackResources": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "StackResourceSummaries" + }, + "ListStacks": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "StackSummaries" + }, + "ListStackSetOperationResults": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Summaries" + }, + "ListStackSetOperations": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Summaries" + }, + "ListStackSets": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Summaries" + }, + "ListExports": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Exports" + }, + "ListImports": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Imports" + }, + "ListTypes": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TypeSummaries" + }, + "ListGeneratedTemplates": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Summaries" + }, + "ListResourceScanRelatedResources": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "RelatedResources" + }, + "ListResourceScanResources": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Resources" + }, + "ListResourceScans": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ResourceScanSummaries" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..cd37c9113ee5583b16812d9c16c22555b4667f77 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudformation/2010-05-15/waiters-2.json @@ -0,0 +1,348 @@ +{ + "version": 2, + "waiters": { + "StackExists": { + "delay": 5, + "operation": "DescribeStacks", + "maxAttempts": 20, + "acceptors": [ + { + "matcher": "status", + "expected": 200, + "state": "success" + }, + { + "matcher": "error", + "expected": "ValidationError", + "state": "retry" + } + ] + }, + "StackCreateComplete": { + "delay": 30, + "operation": "DescribeStacks", + "maxAttempts": 120, + "description": "Wait until stack status is CREATE_COMPLETE.", + "acceptors": [ + { + "argument": "Stacks[].StackStatus", + "expected": "CREATE_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_IN_PROGRESS", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_FAILED", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": 
"Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_IN_PROGRESS", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_FAILED", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "CREATE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "DELETE_COMPLETE", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "DELETE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "ROLLBACK_COMPLETE", + "matcher": "pathAny", + "state": "failure" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "failure" + } + ] + }, + "StackDeleteComplete": { + "delay": 30, + "operation": "DescribeStacks", + "maxAttempts": 120, + "description": "Wait until stack status is DELETE_COMPLETE.", + "acceptors": [ + { + "argument": "Stacks[].StackStatus", + "expected": "DELETE_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "DELETE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "CREATE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_IN_PROGRESS", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_COMPLETE", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_COMPLETE", + "matcher": "pathAny", + "state": "failure" + } + ] + }, + "StackUpdateComplete": { + "delay": 30, + "maxAttempts": 120, + "operation": "DescribeStacks", + "description": "Wait until stack status is UPDATE_COMPLETE.", + "acceptors": [ + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "expected": "UPDATE_FAILED", + "matcher": "pathAny", + "state": "failure", + "argument": "Stacks[].StackStatus" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "expected": "UPDATE_ROLLBACK_COMPLETE", + "matcher": "pathAny", + "state": "failure", + "argument": "Stacks[].StackStatus" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "failure" + } + ] + }, + "StackImportComplete": { + "delay": 30, + "maxAttempts": 120, + "operation": "DescribeStacks", + "description": "Wait until stack status is IMPORT_COMPLETE.", + "acceptors": [ + { + "argument": "Stacks[].StackStatus", 
+ "expected": "IMPORT_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "expected": "ROLLBACK_COMPLETE", + "matcher": "pathAny", + "state": "failure", + "argument": "Stacks[].StackStatus" + }, + { + "expected": "ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure", + "argument": "Stacks[].StackStatus" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "IMPORT_ROLLBACK_IN_PROGRESS", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "IMPORT_ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "expected": "IMPORT_ROLLBACK_COMPLETE", + "matcher": "pathAny", + "state": "failure", + "argument": "Stacks[].StackStatus" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "failure" + } + ] + }, + "StackRollbackComplete": { + "delay": 30, + "operation": "DescribeStacks", + "maxAttempts": 120, + "description": "Wait until stack status is UPDATE_ROLLBACK_COMPLETE.", + "acceptors": [ + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_COMPLETE", + "matcher": "pathAll", + "state": "success" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "UPDATE_ROLLBACK_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "argument": "Stacks[].StackStatus", + "expected": "DELETE_FAILED", + "matcher": "pathAny", + "state": "failure" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "failure" + } + ] + }, + "ChangeSetCreateComplete": { + "delay": 30, + "operation": "DescribeChangeSet", + "maxAttempts": 120, + "description": "Wait until change set status is CREATE_COMPLETE.", + "acceptors": [ + { + "argument": "Status", + "expected": "CREATE_COMPLETE", + "matcher": "path", + "state": "success" + }, + { + "argument": "Status", + "expected": "FAILED", + "matcher": "path", + "state": "failure" + }, + { + "expected": "ValidationError", + "matcher": "error", + "state": "failure" + } + ] + }, + "TypeRegistrationComplete": { + "delay": 30, + "operation": "DescribeTypeRegistration", + "maxAttempts": 120, + "description": "Wait until type registration is COMPLETE.", + "acceptors": [ + { + "argument": "ProgressStatus", + "expected": "COMPLETE", + "matcher": "path", + "state": "success" + }, + { + "argument": "ProgressStatus", + "expected": "FAILED", + "matcher": "path", + "state": "failure" + } + ] + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..8fda57a3e0f2643a9d05b8ad189d192e3791ff19 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudfront-keyvaluestore/2022-07-26/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListKeys": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Items" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudtrail-data/2021-08-11/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail-data/2021-08-11/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail-data/2021-08-11/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..300217d2c467fc8f377914725f351351b23260f6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cloudtrail/2013-11-01/paginators-1.json @@ -0,0 +1,37 @@ +{ + "pagination": { + "LookupEvents": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Events" + }, + "ListPublicKeys": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "PublicKeyList" + }, + "ListTags": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "ResourceTagList" + }, + "ListTrails": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "Trails" + }, + "ListImportFailures": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Failures" + }, + "ListImports": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Imports" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codeconnections/2023-12-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/codeconnections/2023-12-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codeconnections/2023-12-01/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..aae3fad3b85b9565b26dea683ec8d8d8b465d39c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/paginators-1.json @@ -0,0 +1,49 @@ +{ + "pagination": { + "ListApplicationRevisions": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "revisions" + }, + "ListApplications": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "applications" + }, + "ListDeploymentConfigs": { + "input_token": "nextToken", + "output_token": "nextToken", + 
"result_key": "deploymentConfigsList" + }, + "ListDeploymentGroups": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "deploymentGroups" + }, + "ListDeploymentInstances": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "instancesList" + }, + "ListDeployments": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "deployments" + }, + "ListDeploymentTargets": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "targetIds" + }, + "ListGitHubAccountTokenNames": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "tokenNameList" + }, + "ListOnPremisesInstances": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "instanceNames" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/waiters-2.json b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/waiters-2.json new file mode 100644 index 0000000000000000000000000000000000000000..0fea4facde2fd2fc3ad7a0683376623be515d0f8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codedeploy/2014-10-06/waiters-2.json @@ -0,0 +1,30 @@ +{ + "version": 2, + "waiters": { + "DeploymentSuccessful": { + "delay": 15, + "operation": "GetDeployment", + "maxAttempts": 120, + "acceptors": [ + { + "expected": "Succeeded", + "matcher": "path", + "state": "success", + "argument": "deploymentInfo.status" + }, + { + "expected": "Failed", + "matcher": "path", + "state": "failure", + "argument": "deploymentInfo.status" + }, + { + "expected": "Stopped", + "matcher": "path", + "state": "failure", + "argument": "deploymentInfo.status" + } + ] + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codeguru-security/2018-05-10/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/codeguru-security/2018-05-10/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..03e1cbfce771c4466194cd147cef7b3d620d9edc --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codeguru-security/2018-05-10/paginators-1.json @@ -0,0 +1,22 @@ +{ + "pagination": { + "GetFindings": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "findings" + }, + "ListFindingsMetrics": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "findingsMetrics" + }, + "ListScans": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "summaries" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..c787d76c09a8322e6be35053c5fedcff67949129 --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/data/codeguruprofiler/2019-07-18/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListProfileTimes": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "profileTimes" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..51b7c94d27b12bf5340960e2a695e3856c49a6db --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cognito-idp/2016-04-18/paginators-1.json @@ -0,0 +1,58 @@ +{ + "pagination": { + "AdminListGroupsForUser": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "Groups" + }, + "AdminListUserAuthEvents": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "AuthEvents" + }, + "ListGroups": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "Groups" + }, + "ListIdentityProviders": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Providers" + }, + "ListResourceServers": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ResourceServers" + }, + "ListUserPoolClients": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "UserPoolClients" + }, + "ListUserPools": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "UserPools" + }, + "ListUsersInGroup": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "Users" + }, + "ListUsers": { + "input_token": "PaginationToken", + "limit_key": "Limit", + "output_token": "PaginationToken", + "result_key": "Users" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/cognito-sync/2014-06-30/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git 
a/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..ea142457a6a77d6e6a54942329f1199bc2f2a60c --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/comprehendmedical/2018-10-30/paginators-1.json @@ -0,0 +1,3 @@ +{ + "pagination": {} +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..1d115fc226765294ea51640c8a2de7684ca607d5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/compute-optimizer/2019-11-01/paginators-1.json @@ -0,0 +1,34 @@ +{ + "pagination": { + "DescribeRecommendationExportJobs": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "recommendationExportJobs" + }, + "GetEnrollmentStatusesForOrganization": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "accountEnrollmentStatuses" + }, + "GetLambdaFunctionRecommendations": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "lambdaFunctionRecommendations" + }, + "GetRecommendationPreferences": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "recommendationPreferencesDetails" + }, + "GetRecommendationSummaries": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "recommendationSummaries" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..1df4d34ee8e44026932085898aa9f2487f785fbf --- 
/dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/config/2014-11-12/paginators-1.json @@ -0,0 +1,192 @@ +{ + "pagination": { + "DescribeComplianceByConfigRule": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "ComplianceByConfigRules" + }, + "DescribeComplianceByResource": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "ComplianceByResources", + "limit_key": "Limit" + }, + "DescribeConfigRules": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "ConfigRules" + }, + "GetComplianceDetailsByConfigRule": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "EvaluationResults", + "limit_key": "Limit" + }, + "GetComplianceDetailsByResource": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "EvaluationResults" + }, + "GetResourceConfigHistory": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "configurationItems", + "limit_key": "limit" + }, + "ListDiscoveredResources": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "resourceIdentifiers", + "limit_key": "limit" + }, + "DescribeAggregateComplianceByConfigRules": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "AggregateComplianceByConfigRules" + }, + "DescribeAggregationAuthorizations": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "AggregationAuthorizations" + }, + "DescribeConfigRuleEvaluationStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ConfigRulesEvaluationStatus" + }, + "DescribeConfigurationAggregatorSourcesStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "AggregatedSourceStatusList" + }, + "DescribeConfigurationAggregators": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ConfigurationAggregators" + }, + "DescribePendingAggregationRequests": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "PendingAggregationRequests" + }, + "DescribeRetentionConfigurations": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "RetentionConfigurations" + }, + "GetAggregateComplianceDetailsByConfigRule": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "AggregateEvaluationResults" + }, + "ListAggregateDiscoveredResources": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ResourceIdentifiers" + }, + "DescribeRemediationExecutionStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "RemediationExecutionStatuses" + }, + "DescribeAggregateComplianceByConformancePacks": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "AggregateComplianceByConformancePacks" + }, + "DescribeConformancePackStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ConformancePackStatusDetails" + }, + "DescribeConformancePacks": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ConformancePackDetails" + }, + "DescribeOrganizationConfigRuleStatuses": { + "input_token": 
"NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConfigRuleStatuses" + }, + "DescribeOrganizationConfigRules": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConfigRules" + }, + "DescribeOrganizationConformancePackStatuses": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConformancePackStatuses" + }, + "DescribeOrganizationConformancePacks": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConformancePacks" + }, + "GetConformancePackComplianceSummary": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ConformancePackComplianceSummaryList" + }, + "GetOrganizationConfigRuleDetailedStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConfigRuleDetailedStatus" + }, + "GetOrganizationConformancePackDetailedStatus": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "OrganizationConformancePackDetailedStatuses" + }, + "ListTagsForResource": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "Tags" + }, + "SelectAggregateResourceConfig": { + "input_token": "NextToken", + "limit_key": "Limit", + "non_aggregate_keys": [ + "QueryInfo" + ], + "output_token": "NextToken", + "result_key": "Results" + }, + "SelectResourceConfig": { + "input_token": "NextToken", + "limit_key": "Limit", + "non_aggregate_keys": [ + "QueryInfo" + ], + "output_token": "NextToken", + "result_key": "Results" + }, + "ListResourceEvaluations": { + "input_token": "NextToken", + "limit_key": "Limit", + "output_token": "NextToken", + "result_key": "ResourceEvaluations" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..4358a0eeecb7d26dbd52d168312e5898593c53f0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/connect/2017-08-08/paginators-1.json @@ -0,0 +1,415 @@ +{ + "pagination": { + "GetMetricData": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "MetricResults" + }, + "ListRoutingProfiles": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "RoutingProfileSummaryList" + }, + "ListSecurityProfiles": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "SecurityProfileSummaryList" + }, + "ListUserHierarchyGroups": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "UserHierarchyGroupSummaryList" + }, + "ListUsers": { + "input_token": "NextToken", + 
"limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "UserSummaryList" + }, + "ListContactFlows": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ContactFlowSummaryList" + }, + "ListHoursOfOperations": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "HoursOfOperationSummaryList" + }, + "ListPhoneNumbers": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "PhoneNumberSummaryList" + }, + "ListQueues": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "QueueSummaryList" + }, + "ListPrompts": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "PromptSummaryList" + }, + "ListRoutingProfileQueues": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "RoutingProfileQueueConfigSummaryList", + "non_aggregate_keys": [ + "LastModifiedRegion", + "LastModifiedTime" + ] + }, + "ListApprovedOrigins": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Origins" + }, + "ListInstanceAttributes": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Attributes" + }, + "ListInstanceStorageConfigs": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "StorageConfigs" + }, + "ListInstances": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "InstanceSummaryList" + }, + "ListLambdaFunctions": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "LambdaFunctions" + }, + "ListLexBots": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "LexBots" + }, + "ListSecurityKeys": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "SecurityKeys" + }, + "ListIntegrationAssociations": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "IntegrationAssociationSummaryList" + }, + "ListUseCases": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "UseCaseSummaryList" + }, + "ListQuickConnects": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "QuickConnectSummaryList" + }, + "ListQueueQuickConnects": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "QuickConnectSummaryList", + "non_aggregate_keys": [ + "LastModifiedRegion", + "LastModifiedTime" + ] + }, + "ListBots": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "LexBots" + }, + "ListAgentStatuses": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "AgentStatusSummaryList" + }, + "ListSecurityProfilePermissions": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Permissions", + "non_aggregate_keys": [ + "LastModifiedRegion", + "LastModifiedTime" + ] + }, + "ListContactReferences": { + "input_token": "NextToken", + 
"output_token": "NextToken", + "result_key": "ReferenceSummaryList" + }, + "ListContactFlowModules": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ContactFlowModulesSummaryList" + }, + "ListDefaultVocabularies": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "DefaultVocabularyList" + }, + "SearchVocabularies": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "VocabularySummaryList" + }, + "ListPhoneNumbersV2": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ListPhoneNumbersSummaryList" + }, + "SearchAvailablePhoneNumbers": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "AvailableNumbersList" + }, + "SearchUsers": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "Users" + }, + "ListTaskTemplates": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TaskTemplates" + }, + "SearchSecurityProfiles": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "SecurityProfiles" + }, + "SearchQueues": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "Queues" + }, + "SearchRoutingProfiles": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "RoutingProfiles" + }, + "ListTrafficDistributionGroups": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TrafficDistributionGroupSummaryList" + }, + "ListRules": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "RuleSummaryList" + }, + "ListContactEvaluations": { + "input_token": "NextToken", + "output_token": "NextToken", + "result_key": "EvaluationSummaryList" + }, + "ListEvaluationFormVersions": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "EvaluationFormVersionSummaryList" + }, + "ListEvaluationForms": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "EvaluationFormSummaryList" + }, + "SearchHoursOfOperations": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "HoursOfOperations" + }, + "SearchPrompts": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "Prompts" + }, + "SearchQuickConnects": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "QuickConnects" + }, + "SearchResourceTags": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Tags" + }, + "ListTrafficDistributionGroupUsers": { + "input_token": "NextToken", + 
"limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TrafficDistributionGroupUserSummaryList" + }, + "ListViewVersions": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ViewVersionSummaryList" + }, + "ListViews": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "ViewsSummaryList" + }, + "ListSecurityProfileApplications": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Applications", + "non_aggregate_keys": [ + "LastModifiedRegion", + "LastModifiedTime" + ] + }, + "ListFlowAssociations": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "FlowAssociationSummaryList" + }, + "ListPredefinedAttributes": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "PredefinedAttributeSummaryList" + }, + "ListUserProficiencies": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "LastModifiedTime", + "LastModifiedRegion" + ], + "output_token": "NextToken", + "result_key": "UserProficiencyList" + }, + "SearchContacts": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "TotalCount" + ], + "output_token": "NextToken", + "result_key": "Contacts" + }, + "SearchPredefinedAttributes": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "PredefinedAttributes" + }, + "SearchContactFlowModules": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "ContactFlowModules" + }, + "SearchContactFlows": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "ContactFlows" + }, + "ListAuthenticationProfiles": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "AuthenticationProfileSummaryList" + }, + "SearchAgentStatuses": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "AgentStatuses" + }, + "SearchUserHierarchyGroups": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "non_aggregate_keys": [ + "ApproximateTotalCount" + ], + "output_token": "NextToken", + "result_key": "UserHierarchyGroups" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..6ab04512590255f738d1222e013252a0bdfc5a4d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/connectcampaigns/2021-01-30/paginators-1.json @@ -0,0 +1,10 @@ +{ + "pagination": { + "ListCampaigns": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "campaignSummaryList" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json new file mode 
100644 index 0000000000000000000000000000000000000000..e34843be35efa9e79de9c8baa2ce8dccd3c02304 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/controltower/2018-05-10/paginators-1.json @@ -0,0 +1,40 @@ +{ + "pagination": { + "ListEnabledControls": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "enabledControls" + }, + "ListLandingZones": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "landingZones" + }, + "ListBaselines": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "baselines" + }, + "ListEnabledBaselines": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "enabledBaselines" + }, + "ListControlOperations": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "controlOperations" + }, + "ListLandingZoneOperations": { + "input_token": "nextToken", + "output_token": "nextToken", + "limit_key": "maxResults", + "result_key": "landingZoneOperations" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..6819b45ec18edfc1da4994968e5965ed5299351d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/datasync/2018-11-09/paginators-1.json @@ -0,0 +1,52 @@ +{ + "pagination": { + "ListAgents": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Agents" + }, + "ListLocations": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Locations" + }, + "ListTagsForResource": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Tags" + }, + "ListTaskExecutions": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "TaskExecutions" + }, + "ListTasks": { + "input_token": "NextToken", + "limit_key": "MaxResults", + "output_token": "NextToken", + "result_key": "Tasks" + }, + "DescribeStorageSystemResourceMetrics": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "Metrics" + }, + "ListDiscoveryJobs": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "DiscoveryJobs" + }, + "ListStorageSystems": { + "input_token": "NextToken", + "output_token": "NextToken", + "limit_key": "MaxResults", + "result_key": "StorageSystems" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json new file mode 100644 index 
0000000000000000000000000000000000000000..9db4e46c6d845f1b1e0aba0e09428e743f437206 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/examples-1.json @@ -0,0 +1,1242 @@ +{ + "version": "1.0", + "examples": { + "CreateDevicePool": [ + { + "input": { + "name": "MyDevicePool", + "description": "My Android devices", + "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "rules": [ + + ] + }, + "output": { + "devicePool": { + } + }, + "comments": { + "input": { + "name": "A device pool contains related devices, such as devices that run only on Android or that run only on iOS.", + "projectArn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example creates a new device pool named MyDevicePool inside an existing project.", + "id": "createdevicepool-example-1470862210860", + "title": "To create a new device pool" + } + ], + "CreateProject": [ + { + "input": { + "name": "MyProject" + }, + "output": { + "project": { + "name": "MyProject", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE", + "created": "1472660939.152" + } + }, + "comments": { + "input": { + "name": "A project in Device Farm is a workspace that contains test runs. A run is a test of a single app against one or more devices." + }, + "output": { + } + }, + "description": "The following example creates a new project named MyProject.", + "id": "createproject-example-1470862210860", + "title": "To create a new project" + } + ], + "CreateRemoteAccessSession": [ + { + "input": { + "name": "MySession", + "configuration": { + "billingMethod": "METERED" + }, + "deviceArn": "arn:aws:devicefarm:us-west-2::device:123EXAMPLE", + "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + "remoteAccessSession": { + } + }, + "comments": { + "input": { + "deviceArn": "You can get the device ARN by using the list-devices CLI command.", + "projectArn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example creates a remote access session named MySession.", + "id": "to-create-a-remote-access-session-1470970668274", + "title": "To create a remote access session" + } + ], + "CreateUpload": [ + { + "input": { + "name": "MyAppiumPythonUpload", + "type": "APPIUM_PYTHON_TEST_PACKAGE", + "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + "upload": { + "name": "MyAppiumPythonUpload", + "type": "APPIUM_PYTHON_TEST_PACKAGE", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/b5340a65-3da7-4da6-a26e-12345EXAMPLE", + "created": "1472661404.186", + "status": "INITIALIZED", + "url": "https://prod-us-west-2-uploads.s3-us-west-2.amazonaws.com/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A123456789101%3Aproject%3A5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE/uploads/arn%3Aaws%3Adevicefarm%3Aus-west-2%3A123456789101%3Aupload%3A5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/b5340a65-3da7-4da6-a26e-12345EXAMPLE/MyAppiumPythonUpload?AWSAccessKeyId=1234567891011EXAMPLE&Expires=1472747804&Signature=1234567891011EXAMPLE" + } + }, + "comments": { + "input": { + "projectArn": "You can get the project ARN by using the list-projects CLI command." 
+ }, + "output": { + } + }, + "description": "The following example creates a new Appium Python test package upload inside an existing project.", + "id": "createupload-example-1470864711775", + "title": "To create a new test package upload" + } + ], + "DeleteDevicePool": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2::devicepool:123-456-EXAMPLE-GUID" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the device pool ARN by using the list-device-pools CLI command." + }, + "output": { + } + }, + "description": "The following example deletes a specific device pool.", + "id": "deletedevicepool-example-1470866975494", + "title": "To delete a device pool" + } + ], + "DeleteProject": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example deletes a specific project.", + "id": "deleteproject-example-1470867374212", + "title": "To delete a project" + } + ], + "DeleteRemoteAccessSession": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." + }, + "output": { + } + }, + "description": "The following example deletes a specific remote access session.", + "id": "to-delete-a-specific-remote-access-session-1470971431677", + "title": "To delete a specific remote access session" + } + ], + "DeleteRun": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the run ARN by using the list-runs CLI command." + }, + "output": { + } + }, + "description": "The following example deletes a specific test run.", + "id": "deleterun-example-1470867905129", + "title": "To delete a run" + } + ], + "DeleteUpload": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:EXAMPLE-GUID-123-456" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the upload ARN by using the list-uploads CLI command." 
+ }, + "output": { + } + }, + "description": "The following example deletes a specific upload.", + "id": "deleteupload-example-1470868363942", + "title": "To delete a specific upload" + } + ], + "GetAccountSettings": [ + { + "input": { + }, + "output": { + "accountSettings": { + "awsAccountNumber": "123456789101", + "unmeteredDevices": { + "ANDROID": 1, + "IOS": 2 + } + } + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "The following example returns information about your Device Farm account settings.", + "id": "to-get-information-about-account-settings-1472567568189", + "title": "To get information about account settings" + } + ], + "GetDevice": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2::device:123EXAMPLE" + }, + "output": { + "device": { + "name": "LG G2 (Sprint)", + "arn": "arn:aws:devicefarm:us-west-2::device:A0E6E6E1059E45918208DF75B2B7EF6C", + "cpu": { + "architecture": "armeabi-v7a", + "clock": 2265.6, + "frequency": "MHz" + }, + "formFactor": "PHONE", + "heapSize": 256000000, + "image": "75B2B7EF6C12345EXAMPLE", + "manufacturer": "LG", + "memory": 16000000000, + "model": "G2 (Sprint)", + "os": "4.2.2", + "platform": "ANDROID", + "resolution": { + "height": 1920, + "width": 1080 + } + } + }, + "comments": { + "input": { + }, + "output": { + } + }, + "description": "The following example returns information about a specific device.", + "id": "getdevice-example-1470870602173", + "title": "To get information about a device" + } + ], + "GetDevicePool": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + "devicePool": { + } + }, + "comments": { + "input": { + "arn": "You can obtain the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example returns information about a specific device pool, given a project ARN.", + "id": "getdevicepool-example-1470870873136", + "title": "To get information about a device pool" + } + ], + "GetDevicePoolCompatibility": [ + { + "input": { + "appArn": "arn:aws:devicefarm:us-west-2::app:123-456-EXAMPLE-GUID", + "devicePoolArn": "arn:aws:devicefarm:us-west-2::devicepool:123-456-EXAMPLE-GUID", + "testType": "APPIUM_PYTHON" + }, + "output": { + "compatibleDevices": [ + + ], + "incompatibleDevices": [ + + ] + }, + "comments": { + "input": { + "devicePoolArn": "You can get the device pool ARN by using the list-device-pools CLI command." + }, + "output": { + } + }, + "description": "The following example returns information about the compatibility of a specific device pool, given its ARN.", + "id": "getdevicepoolcompatibility-example-1470925003466", + "title": "To get information about the compatibility of a device pool" + } + ], + "GetJob": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2::job:123-456-EXAMPLE-GUID" + }, + "output": { + "job": { + } + }, + "comments": { + "input": { + "arn": "You can get the job ARN by using the list-jobs CLI command." 
+ }, + "output": { + } + }, + "description": "The following example returns information about a specific job.", + "id": "getjob-example-1470928294268", + "title": "To get information about a job" + } + ], + "GetOfferingStatus": [ + { + "input": { + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" + }, + "output": { + "current": { + "D68B3C05-1BA6-4360-BC69-12345EXAMPLE": { + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + } + }, + "nextPeriod": { + "D68B3C05-1BA6-4360-BC69-12345EXAMPLE": { + "effectiveOn": "1472688000", + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + } + } + }, + "comments": { + "input": { + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about Device Farm offerings available to your account.", + "id": "to-get-status-information-about-device-offerings-1472568124402", + "title": "To get status information about device offerings" + } + ], + "GetProject": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE" + }, + "output": { + "project": { + "name": "My Project", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:5e01a8c7-c861-4c0a-b1d5-12345EXAMPLE", + "created": "1472660939.152" + } + }, + "comments": { + "input": { + "arn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example gets information about a specific project.", + "id": "to-get-a-project-1470975038449", + "title": "To get information about a project" + } + ], + "GetRemoteAccessSession": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" + }, + "output": { + "remoteAccessSession": { + } + }, + "comments": { + "input": { + "arn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." + }, + "output": { + } + }, + "description": "The following example gets a specific remote access session.", + "id": "to-get-a-remote-access-session-1471014119414", + "title": "To get a remote access session" + } + ], + "GetRun": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE" + }, + "output": { + "run": { + "name": "My Test Run", + "type": "BUILTIN_EXPLORER", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", + "billingMethod": "METERED", + "completedJobs": 0, + "counters": { + "errored": 0, + "failed": 0, + "passed": 0, + "skipped": 0, + "stopped": 0, + "total": 0, + "warned": 0 + }, + "created": "1472667509.852", + "deviceMinutes": { + "metered": 0.0, + "total": 0.0, + "unmetered": 0.0 + }, + "platform": "ANDROID", + "result": "PENDING", + "status": "RUNNING", + "totalJobs": 3 + } + }, + "comments": { + "input": { + "arn": "You can get the run ARN by using the list-runs CLI command." 
+ }, + "output": { + } + }, + "description": "The following example gets information about a specific test run.", + "id": "to-get-a-test-run-1471015895657", + "title": "To get information about a test run" + } + ], + "GetSuite": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:suite:EXAMPLE-GUID-123-456" + }, + "output": { + "suite": { + } + }, + "comments": { + "input": { + "arn": "You can get the suite ARN by using the list-suites CLI command." + }, + "output": { + } + }, + "description": "The following example gets information about a specific test suite.", + "id": "to-get-information-about-a-test-suite-1471016525008", + "title": "To get information about a test suite" + } + ], + "GetTest": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:test:EXAMPLE-GUID-123-456" + }, + "output": { + "test": { + } + }, + "comments": { + "input": { + "arn": "You can get the test ARN by using the list-tests CLI command." + }, + "output": { + } + }, + "description": "The following example gets information about a specific test.", + "id": "to-get-information-about-a-specific-test-1471025744238", + "title": "To get information about a specific test" + } + ], + "GetUpload": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:upload:EXAMPLE-GUID-123-456" + }, + "output": { + "upload": { + } + }, + "comments": { + "input": { + "arn": "You can get the test ARN by using the list-uploads CLI command." + }, + "output": { + } + }, + "description": "The following example gets information about a specific upload.", + "id": "to-get-information-about-a-specific-upload-1471025996221", + "title": "To get information about a specific upload" + } + ], + "InstallToRemoteAccessSession": [ + { + "input": { + "appArn": "arn:aws:devicefarm:us-west-2:123456789101:app:EXAMPLE-GUID-123-456", + "remoteAccessSessionArn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456" + }, + "output": { + "appUpload": { + } + }, + "comments": { + "input": { + "remoteAccessSessionArn": "You can get the remote access session ARN by using the list-remote-access-sessions CLI command." + }, + "output": { + } + }, + "description": "The following example installs a specific app to a device in a specific remote access session.", + "id": "to-install-to-a-remote-access-session-1471634453818", + "title": "To install to a remote access session" + } + ], + "ListArtifacts": [ + { + "input": { + "type": "SCREENSHOT", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" + }, + "comments": { + "input": { + "arn": "Can also be used to list artifacts for a Job, Suite, or Test ARN." 
+ }, + "output": { + } + }, + "description": "The following example lists screenshot artifacts for a specific run.", + "id": "to-list-artifacts-for-a-resource-1471635409527", + "title": "To list artifacts for a resource" + } + ], + "ListDevicePools": [ + { + "input": { + "type": "PRIVATE", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + "devicePools": [ + { + "name": "Top Devices", + "arn": "arn:aws:devicefarm:us-west-2::devicepool:082d10e5-d7d7-48a5-ba5c-12345EXAMPLE", + "description": "Top devices", + "rules": [ + { + "value": "[\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\"]", + "attribute": "ARN", + "operator": "IN" + } + ] + }, + { + "name": "My Android Device Pool", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:devicepool:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/bf96e75a-28f6-4e61-b6a7-12345EXAMPLE", + "description": "Samsung Galaxy Android devices", + "rules": [ + { + "value": "[\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\",\"arn:aws:devicefarm:us-west-2::device:123456789EXAMPLE\"]", + "attribute": "ARN", + "operator": "IN" + } + ] + } + ] + }, + "comments": { + "input": { + "arn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example returns information about the private device pools in a specific project.", + "id": "to-get-information-about-device-pools-1471635745170", + "title": "To get information about device pools" + } + ], + "ListDevices": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "output": { + }, + "comments": { + "input": { + "arn": "You can get the project ARN by using the list-projects CLI command." + }, + "output": { + } + }, + "description": "The following example returns information about the available devices in a specific project.", + "id": "to-get-information-about-devices-1471641699344", + "title": "To get information about devices" + } + ], + "ListJobs": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456" + }, + "comments": { + "input": { + "arn": "You can get the project ARN by using the list-jobs CLI command." 
+ }, + "output": { + } + }, + "description": "The following example returns information about jobs in a specific project.", + "id": "to-get-information-about-jobs-1471642228071", + "title": "To get information about jobs" + } + ], + "ListOfferingTransactions": [ + { + "input": { + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" + }, + "output": { + "offeringTransactions": [ + { + "cost": { + "amount": 0, + "currencyCode": "USD" + }, + "createdOn": "1470021420", + "offeringStatus": { + "type": "RENEW", + "effectiveOn": "1472688000", + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 0 + }, + "transactionId": "03728003-d1ea-4851-abd6-12345EXAMPLE" + }, + { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "createdOn": "1470021420", + "offeringStatus": { + "type": "PURCHASE", + "effectiveOn": "1470021420", + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + }, + "transactionId": "56820b6e-06bd-473a-8ff8-12345EXAMPLE" + }, + { + "cost": { + "amount": 175, + "currencyCode": "USD" + }, + "createdOn": "1465538520", + "offeringStatus": { + "type": "PURCHASE", + "effectiveOn": "1465538520", + "offering": { + "type": "RECURRING", + "description": "Android Unmetered Device Slot", + "id": "8980F81C-00D7-469D-8EC6-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + }, + "transactionId": "953ae2c6-d760-4a04-9597-12345EXAMPLE" + }, + { + "cost": { + "amount": 8.07, + "currencyCode": "USD" + }, + "createdOn": "1459344300", + "offeringStatus": { + "type": "PURCHASE", + "effectiveOn": "1459344300", + "offering": { + "type": "RECURRING", + "description": "iOS Unmetered Device Slot", + "id": "A53D4D73-A6F6-4B82-A0B0-12345EXAMPLE", + "platform": "IOS" + }, + "quantity": 1 + }, + "transactionId": "2baf9021-ae3e-47f5-ab52-12345EXAMPLE" + } + ] + }, + "comments": { + "input": { + "nextToken": "A dynamically generated value, used for paginating results." 
+ }, + "output": { + } + }, + "description": "The following example returns information about Device Farm offering transactions.", + "id": "to-get-information-about-device-offering-transactions-1472561712315", + "title": "To get information about device offering transactions" + } + ], + "ListOfferings": [ + { + "input": { + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" + }, + "output": { + "offerings": [ + { + "type": "RECURRING", + "description": "iOS Unmetered Device Slot", + "id": "A53D4D73-A6F6-4B82-A0B0-12345EXAMPLE", + "platform": "IOS", + "recurringCharges": [ + { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "frequency": "MONTHLY" + } + ] + }, + { + "type": "RECURRING", + "description": "Android Unmetered Device Slot", + "id": "8980F81C-00D7-469D-8EC6-12345EXAMPLE", + "platform": "ANDROID", + "recurringCharges": [ + { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "frequency": "MONTHLY" + } + ] + }, + { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID", + "recurringCharges": [ + { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "frequency": "MONTHLY" + } + ] + }, + { + "type": "RECURRING", + "description": "iOS Remote Access Unmetered Device Slot", + "id": "552B4DAD-A6C9-45C4-94FB-12345EXAMPLE", + "platform": "IOS", + "recurringCharges": [ + { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "frequency": "MONTHLY" + } + ] + } + ] + }, + "comments": { + "input": { + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about available device offerings.", + "id": "to-get-information-about-device-offerings-1472562810999", + "title": "To get information about device offerings" + } + ], + "ListProjects": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:7ad300ed-8183-41a7-bf94-12345EXAMPLE", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "projects": [ + { + "name": "My Test Project", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:7ad300ed-8183-41a7-bf94-12345EXAMPLE", + "created": "1453163262.105" + }, + { + "name": "Hello World", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:d6b087d9-56db-4e44-b9ec-12345EXAMPLE", + "created": "1470350112.439" + } + ] + }, + "comments": { + "input": { + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about the specified project in Device Farm.", + "id": "to-get-information-about-a-device-farm-project-1472564014388", + "title": "To get information about a Device Farm project" + } + ], + "ListRemoteAccessSessions": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:session:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE=" + }, + "output": { + "remoteAccessSessions": [ + + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the session by using the list-sessions CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." 
+ }, + "output": { + } + }, + "description": "The following example returns information about a specific Device Farm remote access session.", + "id": "to-get-information-about-a-remote-access-session-1472581144803", + "title": "To get information about a remote access session" + } + ], + "ListRuns": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "runs": [ + { + "name": "My Test Run", + "type": "BUILTIN_EXPLORER", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:5e01a8c7-c861-4c0a-b1d5-5ec6e6c6dd23/0fcac17b-6122-44d7-ae5a-12345EXAMPLE", + "billingMethod": "METERED", + "completedJobs": 0, + "counters": { + "errored": 0, + "failed": 0, + "passed": 0, + "skipped": 0, + "stopped": 0, + "total": 0, + "warned": 0 + }, + "created": "1472667509.852", + "deviceMinutes": { + "metered": 0.0, + "total": 0.0, + "unmetered": 0.0 + }, + "platform": "ANDROID", + "result": "PENDING", + "status": "RUNNING", + "totalJobs": 3 + } + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the run by using the list-runs CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about a specific test run.", + "id": "to-get-information-about-test-runs-1472582711069", + "title": "To get information about a test run" + } + ], + "ListSamples": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "samples": [ + + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about samples, given a specific Device Farm project.", + "id": "to-get-information-about-samples-1472582847534", + "title": "To get information about samples" + } + ], + "ListSuites": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:job:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "suites": [ + + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the job by using the list-jobs CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about suites, given a specific Device Farm job.", + "id": "to-get-information-about-suites-1472583038218", + "title": "To get information about suites" + } + ], + "ListTests": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "tests": [ + + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." 
+ }, + "output": { + } + }, + "description": "The following example returns information about tests, given a specific Device Farm project.", + "id": "to-get-information-about-tests-1472617372212", + "title": "To get information about tests" + } + ], + "ListUniqueProblems": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "uniqueProblems": { + } + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about unique problems, given a specific Device Farm project.", + "id": "to-get-information-about-unique-problems-1472617781008", + "title": "To get information about unique problems" + } + ], + "ListUploads": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "nextToken": "RW5DdDJkMWYwZjM2MzM2VHVpOHJIUXlDUXlhc2QzRGViYnc9SEXAMPLE" + }, + "output": { + "uploads": [ + + ] + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", + "nextToken": "A dynamically generated value, used for paginating results." + }, + "output": { + } + }, + "description": "The following example returns information about uploads, given a specific Device Farm project.", + "id": "to-get-information-about-uploads-1472617943090", + "title": "To get information about uploads" + } + ], + "PurchaseOffering": [ + { + "input": { + "offeringId": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "quantity": 1 + }, + "output": { + "offeringTransaction": { + "cost": { + "amount": 8.07, + "currencyCode": "USD" + }, + "createdOn": "1472648340", + "offeringStatus": { + "type": "PURCHASE", + "effectiveOn": "1472648340", + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + }, + "transactionId": "d30614ed-1b03-404c-9893-12345EXAMPLE" + } + }, + "comments": { + "input": { + "offeringId": "You can get the offering ID by using the list-offerings CLI command." + }, + "output": { + } + }, + "description": "The following example purchases a specific device slot offering.", + "id": "to-purchase-a-device-slot-offering-1472648146343", + "title": "To purchase a device slot offering" + } + ], + "RenewOffering": [ + { + "input": { + "offeringId": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "quantity": 1 + }, + "output": { + "offeringTransaction": { + "cost": { + "amount": 250, + "currencyCode": "USD" + }, + "createdOn": "1472648880", + "offeringStatus": { + "type": "RENEW", + "effectiveOn": "1472688000", + "offering": { + "type": "RECURRING", + "description": "Android Remote Access Unmetered Device Slot", + "id": "D68B3C05-1BA6-4360-BC69-12345EXAMPLE", + "platform": "ANDROID" + }, + "quantity": 1 + }, + "transactionId": "e90f1405-8c35-4561-be43-12345EXAMPLE" + } + }, + "comments": { + "input": { + "offeringId": "You can get the offering ID by using the list-offerings CLI command." 
+ }, + "output": { + } + }, + "description": "The following example renews a specific device slot offering.", + "id": "to-renew-a-device-slot-offering-1472648899785", + "title": "To renew a device slot offering" + } + ], + "ScheduleRun": [ + { + "input": { + "name": "MyRun", + "devicePoolArn": "arn:aws:devicefarm:us-west-2:123456789101:pool:EXAMPLE-GUID-123-456", + "projectArn": "arn:aws:devicefarm:us-west-2:123456789101:project:EXAMPLE-GUID-123-456", + "test": { + "type": "APPIUM_JAVA_JUNIT", + "testPackageArn": "arn:aws:devicefarm:us-west-2:123456789101:test:EXAMPLE-GUID-123-456" + } + }, + "output": { + "run": { + } + }, + "comments": { + "input": { + "devicePoolArn": "You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command.", + "projectArn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command.", + "testPackageArn": "You can get the Amazon Resource Name (ARN) of the test package by using the list-tests CLI command." + }, + "output": { + } + }, + "description": "The following example schedules a test run named MyRun.", + "id": "to-schedule-a-test-run-1472652429636", + "title": "To schedule a test run" + } + ], + "StopRun": [ + { + "input": { + "arn": "arn:aws:devicefarm:us-west-2:123456789101:run:EXAMPLE-GUID-123-456" + }, + "output": { + "run": { + } + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the test run by using the list-runs CLI command." + }, + "output": { + } + }, + "description": "The following example stops a specific test run.", + "id": "to-stop-a-test-run-1472653770340", + "title": "To stop a test run" + } + ], + "UpdateDevicePool": [ + { + "input": { + "name": "NewName", + "arn": "arn:aws:devicefarm:us-west-2::devicepool:082d10e5-d7d7-48a5-ba5c-12345EXAMPLE", + "description": "NewDescription", + "rules": [ + { + "value": "True", + "attribute": "REMOTE_ACCESS_ENABLED", + "operator": "EQUALS" + } + ] + }, + "output": { + "devicePool": { + } + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the device pool by using the list-pools CLI command." + }, + "output": { + "devicePool": "Note: you cannot update curated device pools." + } + }, + "description": "The following example updates the specified device pool with a new name and description. It also enables remote access of devices in the device pool.", + "id": "to-update-a-device-pool-1472653887677", + "title": "To update a device pool" + } + ], + "UpdateProject": [ + { + "input": { + "name": "NewName", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:8f75187d-101e-4625-accc-12345EXAMPLE" + }, + "output": { + "project": { + "name": "NewName", + "arn": "arn:aws:devicefarm:us-west-2:123456789101:project:8f75187d-101e-4625-accc-12345EXAMPLE", + "created": "1448400709.927" + } + }, + "comments": { + "input": { + "arn": "You can get the Amazon Resource Name (ARN) of the project by using the list-projects CLI command." 
+ }, + "output": { + } + }, + "description": "The following example updates the specified project with a new name.", + "id": "to-update-a-device-pool-1472653887677", + "title": "To update a device pool" + } + ] + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json b/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json new file mode 100644 index 0000000000000000000000000000000000000000..982e07f94103a48963168bb95d01a4fed9a35cba --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/devicefarm/2015-06-23/paginators-1.json @@ -0,0 +1,110 @@ +{ + "pagination": { + "ListArtifacts": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "artifacts" + }, + "ListDevicePools": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "devicePools" + }, + "ListDevices": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "devices" + }, + "ListJobs": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "jobs" + }, + "ListProjects": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "projects" + }, + "ListRuns": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "runs" + }, + "ListSamples": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "samples" + }, + "ListSuites": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "suites" + }, + "ListTests": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "tests" + }, + "ListUniqueProblems": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "uniqueProblems" + }, + "ListUploads": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "uploads" + }, + "GetOfferingStatus": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": [ + "current", + "nextPeriod" + ] + }, + "ListOfferingTransactions": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "offeringTransactions" + }, + "ListOfferings": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "offerings" + }, + "ListDeviceInstances": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "deviceInstances" + }, + "ListInstanceProfiles": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "instanceProfiles" + }, + "ListNetworkProfiles": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "networkProfiles" + }, + "ListOfferingPromotions": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "offeringPromotions" + }, + "ListRemoteAccessSessions": { + "input_token": "nextToken", + "output_token": "nextToken", + "result_key": "remoteAccessSessions" + }, + "ListVPCEConfigurations": { + "input_token": "nextToken", + "limit_key": "maxResults", + "output_token": "nextToken", + "result_key": "vpceConfigurations" + } + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json b/venv/lib/python3.10/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json new file mode 100644 index 0000000000000000000000000000000000000000..0ea7e3b0bbe917eb027880396ac01509becd1fa0 --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/data/directconnect/2012-10-25/examples-1.json @@ -0,0 +1,5 @@ +{ + "version": "1.0", + "examples": { + } +} diff --git a/venv/lib/python3.10/site-packages/botocore/data/endpoints.json b/venv/lib/python3.10/site-packages/botocore/data/endpoints.json new file mode 100644 index 0000000000000000000000000000000000000000..2f6bff80171510d9a3617bd66bb012ab4131e2db --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/endpoints.json @@ -0,0 +1,29936 @@ +{ + "partitions" : [ { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + }, { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "api.aws", + "hostname" : "{service}.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "dnsSuffix" : "amazonaws.com", + "partition" : "aws", + "partitionName" : "AWS Standard", + "regionRegex" : "^(us|eu|ap|sa|ca|me|af|il)\\-\\w+\\-\\d+$", + "regions" : { + "af-south-1" : { + "description" : "Africa (Cape Town)" + }, + "ap-east-1" : { + "description" : "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1" : { + "description" : "Asia Pacific (Tokyo)" + }, + "ap-northeast-2" : { + "description" : "Asia Pacific (Seoul)" + }, + "ap-northeast-3" : { + "description" : "Asia Pacific (Osaka)" + }, + "ap-south-1" : { + "description" : "Asia Pacific (Mumbai)" + }, + "ap-south-2" : { + "description" : "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1" : { + "description" : "Asia Pacific (Singapore)" + }, + "ap-southeast-2" : { + "description" : "Asia Pacific (Sydney)" + }, + "ap-southeast-3" : { + "description" : "Asia Pacific (Jakarta)" + }, + "ap-southeast-4" : { + "description" : "Asia Pacific (Melbourne)" + }, + "ca-central-1" : { + "description" : "Canada (Central)" + }, + "ca-west-1" : { + "description" : "Canada West (Calgary)" + }, + "eu-central-1" : { + "description" : "Europe (Frankfurt)" + }, + "eu-central-2" : { + "description" : "Europe (Zurich)" + }, + "eu-north-1" : { + "description" : "Europe (Stockholm)" + }, + "eu-south-1" : { + "description" : "Europe (Milan)" + }, + "eu-south-2" : { + "description" : "Europe (Spain)" + }, + "eu-west-1" : { + "description" : "Europe (Ireland)" + }, + "eu-west-2" : { + "description" : "Europe (London)" + }, + "eu-west-3" : { + "description" : "Europe (Paris)" + }, + "il-central-1" : { + "description" : "Israel (Tel Aviv)" + }, + "me-central-1" : { + "description" : "Middle East (UAE)" + }, + "me-south-1" : { + "description" : "Middle East (Bahrain)" + }, + "sa-east-1" : { + "description" : "South America (Sao Paulo)" + }, + "us-east-1" : { + "description" : "US East (N. Virginia)" + }, + "us-east-2" : { + "description" : "US East (Ohio)" + }, + "us-west-1" : { + "description" : "US West (N. 
California)" + }, + "us-west-2" : { + "description" : "US West (Oregon)" + } + }, + "services" : { + "access-analyzer" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "access-analyzer-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "access-analyzer-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "account" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "account.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "acm" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "acm-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "acm-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "acm-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + 
"hostname" : "acm-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "acm-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "acm-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "acm-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "acm-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "acm-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "acm-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "acm-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "acm-fips.us-west-2.amazonaws.com" + } + } + }, + "acm-pca" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "acm-pca-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "acm-pca-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "acm-pca-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "acm-pca-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "acm-pca-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" 
: [ { + "hostname" : "acm-pca-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "acm-pca-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "agreement-marketplace" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "airflow" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "amplify" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "amplifybackend" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "amplifyuibuilder" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "aoss" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "api.detective" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "api.detective-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-south-1" : { 
}, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-west-2.amazonaws.com" + } + } + }, + "api.ecr" : { + "defaults" : { + "variants" : [ { + "hostname" : "ecr-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "api.ecr.af-south-1.amazonaws.com" + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "api.ecr.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.ecr.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "api.ecr.ap-northeast-2.amazonaws.com" + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "api.ecr.ap-northeast-3.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "api.ecr.ap-south-1.amazonaws.com" + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "api.ecr.ap-south-2.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "api.ecr.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "api.ecr.ap-southeast-2.amazonaws.com" + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "api.ecr.ap-southeast-3.amazonaws.com" + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "api.ecr.ap-southeast-4.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "api.ecr.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "hostname" : "api.ecr.ca-west-1.amazonaws.com" + }, + "dkr-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dkr-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dkr-us-west-1" : { 
+ "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dkr-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "api.ecr.eu-central-1.amazonaws.com" + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "api.ecr.eu-central-2.amazonaws.com" + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "api.ecr.eu-north-1.amazonaws.com" + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "api.ecr.eu-south-1.amazonaws.com" + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "api.ecr.eu-south-2.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "api.ecr.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "api.ecr.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "api.ecr.eu-west-3.amazonaws.com" + }, + "fips-dkr-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-east-1.amazonaws.com" + }, + "fips-dkr-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-east-2.amazonaws.com" + }, + "fips-dkr-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-west-1.amazonaws.com" + }, + "fips-dkr-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-west-2.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "api.ecr.il-central-1.amazonaws.com" + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "api.ecr.me-central-1.amazonaws.com" + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "api.ecr.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "api.ecr.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.ecr.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "ecr-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "api.ecr.us-east-2.amazonaws.com", + 
"variants" : [ { + "hostname" : "ecr-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "api.ecr.us-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "ecr-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.ecr.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "ecr-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "api.ecr-public" : { + "endpoints" : { + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.ecr-public.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.ecr-public.us-west-2.amazonaws.com" + } + } + }, + "api.elastic-inference" : { + "endpoints" : { + "ap-northeast-1" : { + "hostname" : "api.elastic-inference.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "hostname" : "api.elastic-inference.ap-northeast-2.amazonaws.com" + }, + "eu-west-1" : { + "hostname" : "api.elastic-inference.eu-west-1.amazonaws.com" + }, + "us-east-1" : { + "hostname" : "api.elastic-inference.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "hostname" : "api.elastic-inference.us-east-2.amazonaws.com" + }, + "us-west-2" : { + "hostname" : "api.elastic-inference.us-west-2.amazonaws.com" + } + } + }, + "api.fleethub.iot" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "api.fleethub.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "api.fleethub.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "api.fleethub.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "api.fleethub.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "api.fleethub.iot-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "api.fleethub.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "api.fleethub.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "api.fleethub.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "api.iotdeviceadvisor" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.iotdeviceadvisor.ap-northeast-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "api.iotdeviceadvisor.eu-west-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.iotdeviceadvisor.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : 
"api.iotdeviceadvisor.us-west-2.amazonaws.com" + } + } + }, + "api.iotwireless" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.iotwireless.ap-northeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "api.iotwireless.ap-southeast-2.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "api.iotwireless.eu-central-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "api.iotwireless.eu-west-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "api.iotwireless.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.iotwireless.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.iotwireless.us-west-2.amazonaws.com" + } + } + }, + "api.mediatailor" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "api.pricing" : { + "defaults" : { + "credentialScope" : { + "service" : "pricing" + } + }, + "endpoints" : { + "ap-south-1" : { }, + "eu-central-1" : { }, + "us-east-1" : { } + } + }, + "api.sagemaker" : { + "defaults" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "api-fips.sagemaker.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "api-fips.sagemaker.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "api-fips.sagemaker.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : 
{ + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "api-fips.sagemaker.us-west-2.amazonaws.com" + } + } + }, + "api.tunneling.iot" : { + "defaults" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "apigateway" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "apigateway-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "apigateway-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "apigateway-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "apigateway-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "apigateway-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : 
"apigateway-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "apigateway-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "apigateway-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "apigateway-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "apigateway-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "apigateway-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "apigateway-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "app-integrations" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "appconfig" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "appconfigdata" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "appflow" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "appflow-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "appflow-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "appflow-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + 
"deprecated" : true, + "hostname" : "appflow-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "appflow-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "appflow-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "appflow-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "appflow-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "application-autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "applicationinsights" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "appmesh" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "appmesh.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "appmesh.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "appmesh.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "appmesh.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "appmesh.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "appmesh.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "appmesh.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "appmesh.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "appmesh.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "appmesh-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "appmesh-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "appmesh.ca-central-1.api.aws", + "tags" : [ "dualstack" ] 
+ } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "appmesh-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "appmesh.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "appmesh.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "appmesh.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "appmesh.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "appmesh.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "appmesh.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "appmesh.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "appmesh.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "appmesh.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "appmesh-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "appmesh-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "appmesh.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "appmesh-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "appmesh-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "appmesh-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "appmesh.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "appmesh-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "appmesh-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "appmesh-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "appmesh.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "appmesh-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "appmesh-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "appmesh-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "appmesh.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "appmesh-fips.us-west-2.amazonaws.com" + } + } + }, + "apprunner" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "apprunner-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + 
"deprecated" : true, + "hostname" : "apprunner-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "apprunner-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "apprunner-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "apprunner-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "apprunner-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "appstream2" : { + "defaults" : { + "credentialScope" : { + "service" : "appstream" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "appstream2-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { }, + "us-west-2" : { + "variants" : [ { + "hostname" : "appstream2-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-west-2.amazonaws.com" + } + } + }, + "appsync" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "aps" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "arc-zonal-shift" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + 
"us-west-2" : { } + } + }, + "athena" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "athena.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "athena.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "athena.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "athena.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "athena.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "athena.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "athena.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "athena.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "athena.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "athena.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "athena.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "athena-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "athena-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.ca-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "athena.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "athena.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "athena.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "athena.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "athena.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "athena.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "athena.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "athena.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + 
"deprecated" : true, + "hostname" : "athena-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "athena-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "athena.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "athena.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "athena.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "athena.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "athena-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "athena-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "athena-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "athena-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "auditmanager" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "auditmanager-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "auditmanager-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "auditmanager-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "auditmanager-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "auditmanager-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "auditmanager-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "auditmanager-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "auditmanager-fips.us-west-2.amazonaws.com" + } + } + }, + "autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + 
"endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "autoscaling-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "autoscaling-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "autoscaling-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "autoscaling-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "autoscaling-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "autoscaling-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "autoscaling-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "autoscaling-plans" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "backup" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { 
}, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "backup-gateway" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "batch" : { + "defaults" : { + "variants" : [ { + "hostname" : "fips.batch.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fips.batch.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fips.batch.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fips.batch.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fips.batch.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "fips.batch.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "fips.batch.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "fips.batch.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "fips.batch.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "bedrock" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "bedrock-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "bedrock.ap-northeast-1.amazonaws.com" + }, + "bedrock-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "bedrock.ap-south-1.amazonaws.com" + }, + "bedrock-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "bedrock.ap-southeast-1.amazonaws.com" + }, + "bedrock-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "bedrock.ap-southeast-2.amazonaws.com" + }, + "bedrock-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "bedrock.ca-central-1.amazonaws.com" + }, + "bedrock-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "bedrock.eu-central-1.amazonaws.com" + }, + "bedrock-eu-west-1" : { + "credentialScope" : { + "region" : 
"eu-west-1" + }, + "hostname" : "bedrock.eu-west-1.amazonaws.com" + }, + "bedrock-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "bedrock.eu-west-2.amazonaws.com" + }, + "bedrock-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "bedrock.eu-west-3.amazonaws.com" + }, + "bedrock-fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "bedrock-fips.ca-central-1.amazonaws.com" + }, + "bedrock-fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "bedrock-fips.us-east-1.amazonaws.com" + }, + "bedrock-fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "bedrock-fips.us-west-2.amazonaws.com" + }, + "bedrock-runtime-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "bedrock-runtime.ap-northeast-1.amazonaws.com" + }, + "bedrock-runtime-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "bedrock-runtime.ap-south-1.amazonaws.com" + }, + "bedrock-runtime-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "bedrock-runtime.ap-southeast-1.amazonaws.com" + }, + "bedrock-runtime-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "bedrock-runtime.ap-southeast-2.amazonaws.com" + }, + "bedrock-runtime-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "bedrock-runtime.ca-central-1.amazonaws.com" + }, + "bedrock-runtime-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "bedrock-runtime.eu-central-1.amazonaws.com" + }, + "bedrock-runtime-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "bedrock-runtime.eu-west-1.amazonaws.com" + }, + "bedrock-runtime-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "bedrock-runtime.eu-west-2.amazonaws.com" + }, + "bedrock-runtime-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "bedrock-runtime.eu-west-3.amazonaws.com" + }, + "bedrock-runtime-fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "bedrock-runtime-fips.ca-central-1.amazonaws.com" + }, + "bedrock-runtime-fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "bedrock-runtime-fips.us-east-1.amazonaws.com" + }, + "bedrock-runtime-fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "bedrock-runtime-fips.us-west-2.amazonaws.com" + }, + "bedrock-runtime-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "bedrock-runtime.sa-east-1.amazonaws.com" + }, + "bedrock-runtime-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "bedrock-runtime.us-east-1.amazonaws.com" + }, + "bedrock-runtime-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "bedrock-runtime.us-west-2.amazonaws.com" + }, + "bedrock-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "bedrock.sa-east-1.amazonaws.com" + }, + "bedrock-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "bedrock.us-east-1.amazonaws.com" + }, + "bedrock-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "bedrock.us-west-2.amazonaws.com" + }, + "ca-central-1" 
: { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "billingconductor" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "billingconductor.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "braket" : { + "endpoints" : { + "eu-north-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "budgets" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "budgets.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "cases" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "deprecated" : true + }, + "fips-us-west-2" : { + "deprecated" : true + }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "cassandra" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cassandra-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cassandra-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cassandra-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cassandra-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "catalog.marketplace" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "ce" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "ce.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "chime" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "chime.us-east-1.amazonaws.com", + "protocols" : [ "https" ] + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "cleanrooms" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "cloud9" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } 
] + }, + "ap-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "hostname" : "cloud9-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloud9-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "cloud9-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cloud9-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cloud9-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "cloud9-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cloud9-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "hostname" : "cloud9-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloud9-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "hostname" : "cloud9-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloud9-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "hostname" : "cloud9-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloud9-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "hostname" : "cloud9-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloud9-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + } ] + } + } + }, + "cloudcontrolapi" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, 
+ "ap-northeast-3" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.ca-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + 
"deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "clouddirectory" : { + "endpoints" : { + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "cloudformation" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cloudformation-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cloudformation-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "cloudformation-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cloudformation-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "cloudformation-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : 
"us-west-1" + }, + "deprecated" : true, + "hostname" : "cloudformation-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cloudformation-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cloudformation-fips.us-west-2.amazonaws.com" + } + } + }, + "cloudfront" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "cloudfront.amazonaws.com", + "protocols" : [ "http", "https" ] + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "cloudhsm" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "cloudhsmv2" : { + "defaults" : { + "credentialScope" : { + "service" : "cloudhsm" + } + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "cloudsearch" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "cloudtrail" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cloudtrail-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cloudtrail-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "cloudtrail-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cloudtrail-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cloudtrail-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "cloudtrail-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "cloudtrail-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cloudtrail-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "cloudtrail-data" : { + 
"endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "codeartifact" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "codebuild" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-west-2.amazonaws.com" + } + } + }, + "codecatalyst" : { + "endpoints" : { + "aws-global" : { + "hostname" : "codecatalyst.global.api.aws" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "codecommit" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "codecommit-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + 
"fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.ca-central-1.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "codecommit-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "codecommit-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "codecommit-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "codecommit-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-west-2.amazonaws.com" + } + } + }, + "codedeploy" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-west-2.amazonaws.com" + } + } + }, + "codeguru-reviewer" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "codepipeline" : { + "endpoints" : { + "af-south-1" : { }, + 
"ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "codepipeline-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "codestar" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "codestar-connections" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "codestar-notifications" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "cognito-identity" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + 
"ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cognito-identity-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cognito-identity-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "cognito-identity-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cognito-identity-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cognito-identity-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "cognito-identity-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "cognito-identity-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cognito-identity-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "cognito-idp" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "cognito-idp-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "cognito-idp-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "cognito-idp-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "cognito-idp-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "cognito-idp-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "cognito-idp-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "cognito-idp-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "cognito-idp-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "cognito-sync" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : 
{ }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "comprehend" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "comprehend-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "comprehend-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "comprehend-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "comprehend-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "comprehend-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "comprehend-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "comprehendmedical" : { + "endpoints" : { + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "comprehendmedical-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "comprehendmedical-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "comprehendmedical-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "comprehendmedical-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "comprehendmedical-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "comprehendmedical-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "comprehendmedical-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "comprehendmedical-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "compute-optimizer" : { + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "compute-optimizer.af-south-1.amazonaws.com" + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "compute-optimizer.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "compute-optimizer.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "compute-optimizer.ap-northeast-2.amazonaws.com" + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "compute-optimizer.ap-northeast-3.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "compute-optimizer.ap-south-1.amazonaws.com" + }, + "ap-south-2" 
: { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "compute-optimizer.ap-south-2.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "compute-optimizer.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "compute-optimizer.ap-southeast-2.amazonaws.com" + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "compute-optimizer.ap-southeast-3.amazonaws.com" + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "compute-optimizer.ap-southeast-4.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "compute-optimizer.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "compute-optimizer.eu-central-1.amazonaws.com" + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "compute-optimizer.eu-central-2.amazonaws.com" + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "compute-optimizer.eu-north-1.amazonaws.com" + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "compute-optimizer.eu-south-1.amazonaws.com" + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "compute-optimizer.eu-south-2.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "compute-optimizer.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "compute-optimizer.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "compute-optimizer.eu-west-3.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "compute-optimizer.il-central-1.amazonaws.com" + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "compute-optimizer.me-central-1.amazonaws.com" + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "compute-optimizer.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "compute-optimizer.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "compute-optimizer.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "compute-optimizer.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "compute-optimizer.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "compute-optimizer.us-west-2.amazonaws.com" + } + } + }, + "config" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + 
"eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "config-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "config-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "config-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "config-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "config-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "config-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "config-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "config-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "connect" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "connect-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "connect-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "connect-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "connect-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "connect-campaigns" : { + "endpoints" : { + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "connect-campaigns-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "connect-campaigns-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "connect-campaigns-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "connect-campaigns-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "contact-lens" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "controltower" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : 
"controltower-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "controltower-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "controltower-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "controltower-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "controltower-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "controltower-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-west-2.amazonaws.com" + } + } + }, + "cost-optimization-hub" : { + "endpoints" : { + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "cost-optimization-hub.us-east-1.amazonaws.com" + } + } + }, + "cur" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "data-ats.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : 
"data.iot-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "data.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "data.jobs.iot" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + 
"hostname" : "data.jobs.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "data.mediastore" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "databrew" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "databrew-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "databrew-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "databrew-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "databrew-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "databrew-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "databrew-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "databrew-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "databrew-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "dataexchange" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "datapipeline" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "datasync" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "datasync-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : 
"datasync-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "datasync-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "datasync-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "datazone" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "ap-northeast-1" : { + "hostname" : "datazone.ap-northeast-1.api.aws" + }, + "ap-northeast-2" : { + "hostname" : "datazone.ap-northeast-2.api.aws" + }, + "ap-northeast-3" : { + "hostname" : "datazone.ap-northeast-3.api.aws" + }, + "ap-south-2" : { + "hostname" : "datazone.ap-south-2.api.aws" + }, + "ap-southeast-1" : { + "hostname" : "datazone.ap-southeast-1.api.aws" + }, + "ap-southeast-2" : { + "hostname" : "datazone.ap-southeast-2.api.aws" + }, + "ap-southeast-3" : { + "hostname" : "datazone.ap-southeast-3.api.aws" + }, + "ap-southeast-4" : { + "hostname" : "datazone.ap-southeast-4.api.aws" + }, + "ca-central-1" : { + "hostname" : "datazone.ca-central-1.api.aws", + "variants" : [ { + "hostname" : "datazone-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "hostname" : "datazone.ca-west-1.api.aws" + }, + "eu-central-1" : { + "hostname" : "datazone.eu-central-1.api.aws" + }, + "eu-north-1" : { + "hostname" : "datazone.eu-north-1.api.aws" + }, + "eu-south-1" : { + "hostname" : "datazone.eu-south-1.api.aws" + }, + "eu-west-1" : { + "hostname" : "datazone.eu-west-1.api.aws" + }, + "eu-west-2" : { + "hostname" : "datazone.eu-west-2.api.aws" + }, + "eu-west-3" : { + "hostname" : "datazone.eu-west-3.api.aws" + }, + "il-central-1" : { + "hostname" : "datazone.il-central-1.api.aws" + }, + "me-central-1" : { + "hostname" : "datazone.me-central-1.api.aws" + }, + "me-south-1" : { + "hostname" : "datazone.me-south-1.api.aws" + }, + "sa-east-1" : { + "hostname" : 
"datazone.sa-east-1.api.aws" + }, + "us-east-1" : { + "hostname" : "datazone.us-east-1.api.aws", + "variants" : [ { + "hostname" : "datazone-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "hostname" : "datazone.us-east-2.api.aws", + "variants" : [ { + "hostname" : "datazone-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "hostname" : "datazone.us-west-1.api.aws" + }, + "us-west-2" : { + "hostname" : "datazone.us-west-2.api.aws", + "variants" : [ { + "hostname" : "datazone-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "dax" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "devicefarm" : { + "endpoints" : { + "us-west-2" : { } + } + }, + "devops-guru" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "devops-guru-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "devops-guru-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "devops-guru-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "devops-guru-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "devops-guru-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "devops-guru-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "devops-guru-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "devops-guru-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "devops-guru-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "devops-guru-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "directconnect" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { 
}, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "discovery" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "dlm" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "dms" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "dms" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "dms-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dms-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "dms-fips.us-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + 
"sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "dms-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "dms-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "dms-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "dms-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "dms-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "dms-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "dms-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "dms-fips.us-west-2.amazonaws.com" + } + } + }, + "docdb" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "rds.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "rds.ap-northeast-2.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "rds.ap-south-1.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "rds.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "rds.ap-southeast-2.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "rds.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "rds.eu-central-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "rds.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "rds.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "rds.eu-west-3.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "rds.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "rds.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "rds.us-east-2.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "rds.us-west-2.amazonaws.com" + } + } + }, + "drs" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : 
true, + "hostname" : "drs-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "drs-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "drs-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "drs-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "drs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "drs-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "drs-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "drs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ds" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ds-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "ds-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ds-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ds-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ds-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ds-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "dynamodb" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { 
}, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "dynamodb-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "dynamodb-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "local" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "localhost:8000", + "protocols" : [ "http" ] + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "dynamodb-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "dynamodb-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "dynamodb-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "dynamodb-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "dynamodb-fips.us-west-2.amazonaws.com" + } + } + }, + "ebs" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ebs-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "ebs-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ebs-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "ebs-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ebs-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : 
"us-east-2" + }, + "deprecated" : true, + "hostname" : "ebs-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ebs-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ebs-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ebs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ebs-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ebs-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ebs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ec2" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "ec2.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ec2-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "ec2-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "ec2.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ec2-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "ec2-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ec2-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ec2-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ec2-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ec2-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "ec2.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ec2-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "ec2.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ec2-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "ec2.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : 
"ec2-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ec2-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "ec2.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "ecs" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ecs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ecs-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ecs-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ecs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "edge.sagemaker" : { + "endpoints" : { + "ap-northeast-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "eks" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "fips.eks.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fips.eks.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fips.eks.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fips.eks.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fips.eks.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + 
"sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "fips.eks.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "fips.eks.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "fips.eks.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "fips.eks.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "eks-auth" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "hostname" : "eks-auth.af-south-1.api.aws" + }, + "ap-east-1" : { + "hostname" : "eks-auth.ap-east-1.api.aws" + }, + "ap-northeast-1" : { + "hostname" : "eks-auth.ap-northeast-1.api.aws" + }, + "ap-northeast-2" : { + "hostname" : "eks-auth.ap-northeast-2.api.aws" + }, + "ap-northeast-3" : { + "hostname" : "eks-auth.ap-northeast-3.api.aws" + }, + "ap-south-1" : { + "hostname" : "eks-auth.ap-south-1.api.aws" + }, + "ap-south-2" : { + "hostname" : "eks-auth.ap-south-2.api.aws" + }, + "ap-southeast-1" : { + "hostname" : "eks-auth.ap-southeast-1.api.aws" + }, + "ap-southeast-2" : { + "hostname" : "eks-auth.ap-southeast-2.api.aws" + }, + "ap-southeast-3" : { + "hostname" : "eks-auth.ap-southeast-3.api.aws" + }, + "ap-southeast-4" : { + "hostname" : "eks-auth.ap-southeast-4.api.aws" + }, + "ca-central-1" : { + "hostname" : "eks-auth.ca-central-1.api.aws" + }, + "ca-west-1" : { + "hostname" : "eks-auth.ca-west-1.api.aws" + }, + "eu-central-1" : { + "hostname" : "eks-auth.eu-central-1.api.aws" + }, + "eu-central-2" : { + "hostname" : "eks-auth.eu-central-2.api.aws" + }, + "eu-north-1" : { + "hostname" : "eks-auth.eu-north-1.api.aws" + }, + "eu-south-1" : { + "hostname" : "eks-auth.eu-south-1.api.aws" + }, + "eu-south-2" : { + "hostname" : "eks-auth.eu-south-2.api.aws" + }, + "eu-west-1" : { + "hostname" : "eks-auth.eu-west-1.api.aws" + }, + "eu-west-2" : { + "hostname" : "eks-auth.eu-west-2.api.aws" + }, + "eu-west-3" : { + "hostname" : "eks-auth.eu-west-3.api.aws" + }, + "il-central-1" : { + "hostname" : "eks-auth.il-central-1.api.aws" + }, + "me-central-1" : { + "hostname" : "eks-auth.me-central-1.api.aws" + }, + "me-south-1" : { + "hostname" : "eks-auth.me-south-1.api.aws" + }, + "sa-east-1" : { + "hostname" : "eks-auth.sa-east-1.api.aws" + }, + "us-east-1" : { + "hostname" : "eks-auth.us-east-1.api.aws" + }, + "us-east-2" : { + "hostname" : "eks-auth.us-east-2.api.aws" + }, + "us-west-1" : { + "hostname" : "eks-auth.us-west-1.api.aws" + }, + "us-west-2" : { + "hostname" : "eks-auth.us-west-2.api.aws" + } + } + }, + "elasticache" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticache-fips.us-west-1.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" 
: { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "elasticache-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "elasticache-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "elasticache-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "elasticache-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "elasticache-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticache-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "elasticache-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "elasticache-fips.us-west-2.amazonaws.com" + } + } + }, + "elasticbeanstalk" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "elasticbeanstalk-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "elasticbeanstalk-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "elasticbeanstalk-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "elasticbeanstalk-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticfilesystem" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.af-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : 
"elasticfilesystem-fips.ap-northeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-southeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ap-southeast-4.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-central-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-north-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.af-south-1.amazonaws.com" + }, + "fips-ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-east-1.amazonaws.com" + }, + "fips-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-northeast-1.amazonaws.com" + }, + "fips-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-northeast-2.amazonaws.com" + }, + "fips-ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-northeast-3.amazonaws.com" + }, + "fips-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-south-1.amazonaws.com" + }, + "fips-ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-south-2.amazonaws.com" + }, + 
"fips-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-southeast-1.amazonaws.com" + }, + "fips-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-southeast-2.amazonaws.com" + }, + "fips-ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-southeast-3.amazonaws.com" + }, + "fips-ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ap-southeast-4.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.ca-west-1.amazonaws.com" + }, + "fips-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-central-1.amazonaws.com" + }, + "fips-eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-central-2.amazonaws.com" + }, + "fips-eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-north-1.amazonaws.com" + }, + "fips-eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-south-1.amazonaws.com" + }, + "fips-eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-south-2.amazonaws.com" + }, + "fips-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-west-1.amazonaws.com" + }, + "fips-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-west-2.amazonaws.com" + }, + "fips-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.eu-west-3.amazonaws.com" + }, + "fips-il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.il-central-1.amazonaws.com" + }, + "fips-me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.me-central-1.amazonaws.com" + }, + "fips-me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.me-south-1.amazonaws.com" + }, + "fips-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.sa-east-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : 
"us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.il-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.me-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.me-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticloadbalancing" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "elasticloadbalancing-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "elasticloadbalancing-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "elasticloadbalancing-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "elasticloadbalancing-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticmapreduce" : { + "defaults" : { + "protocols" : [ "https" ], + "sslCommonName" : "{region}.{service}.{dnsSuffix}" + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + 
"ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "sslCommonName" : "{service}.{region}.{dnsSuffix}" + }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "sslCommonName" : "{service}.{region}.{dnsSuffix}", + "variants" : [ { + "hostname" : "elasticmapreduce-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "elasticmapreduce-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "elasticmapreduce.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "elasticmapreduce-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elastictranscoder" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "email" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "email-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "email-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "email-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + 
"credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "email-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "email-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "email-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "email-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "email-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "email-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "email-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "emr-containers" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "emr-containers-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "emr-containers-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "emr-containers-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "emr-containers-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "emr-containers-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "emr-containers-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "emr-containers-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "emr-containers-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "emr-containers-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "emr-containers-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "emr-serverless" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "emr-serverless-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + 
"eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "emr-serverless-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "emr-serverless-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "emr-serverless-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "emr-serverless-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "emr-serverless-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "emr-serverless-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "emr-serverless-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "emr-serverless-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "emr-serverless-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "entitlement.marketplace" : { + "defaults" : { + "credentialScope" : { + "service" : "aws-marketplace" + } + }, + "endpoints" : { + "us-east-1" : { } + } + }, + "es" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "aos.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "aos.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "aos.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "aos.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "aos.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "aos.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "aos.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "aos.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "aos.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "aos.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "aos.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "aos.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "aos.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "aos.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "aos.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : 
"aos.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "aos.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "aos.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "aos.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "aos.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "aos.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-west-1.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "aos.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "aos.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "aos.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "aos.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "aos.us-east-1.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "aos.us-east-2.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "es-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "aos.us-west-1.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "aos.us-west-2.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "es-fips.us-west-2.amazonaws.com" + } + } + }, + "events" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "events-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "events-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + 
"region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "events-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "events-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "events-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "events-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "events-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "events-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "evidently" : { + "endpoints" : { + "ap-northeast-1" : { + "hostname" : "evidently.ap-northeast-1.amazonaws.com" + }, + "ap-southeast-1" : { + "hostname" : "evidently.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "hostname" : "evidently.ap-southeast-2.amazonaws.com" + }, + "eu-central-1" : { + "hostname" : "evidently.eu-central-1.amazonaws.com" + }, + "eu-north-1" : { + "hostname" : "evidently.eu-north-1.amazonaws.com" + }, + "eu-west-1" : { + "hostname" : "evidently.eu-west-1.amazonaws.com" + }, + "us-east-1" : { + "hostname" : "evidently.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "hostname" : "evidently.us-east-2.amazonaws.com" + }, + "us-west-2" : { + "hostname" : "evidently.us-west-2.amazonaws.com" + } + } + }, + "finspace" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "finspace-api" : { + "endpoints" : { + "ca-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "firehose" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "firehose-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "firehose-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + 
"us-west-1" : { + "variants" : [ { + "hostname" : "firehose-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "firehose-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "fms" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "fms-fips.af-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "fms-fips.ap-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "fms-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "fms-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3" : { }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "fms-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-2" : { }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "fms-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "fms-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "fms-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "fms-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "fms-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "fms-fips.eu-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-2" : { }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "fms-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "fms-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "fms-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.af-south-1.amazonaws.com" + }, + "fips-ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-east-1.amazonaws.com" + }, + "fips-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-northeast-1.amazonaws.com" + }, + "fips-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-northeast-2.amazonaws.com" + }, + "fips-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-south-1.amazonaws.com" + }, + "fips-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-southeast-1.amazonaws.com" + }, + "fips-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "fms-fips.ap-southeast-2.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : 
"fms-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.ca-west-1.amazonaws.com" + }, + "fips-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.eu-central-1.amazonaws.com" + }, + "fips-eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.eu-south-1.amazonaws.com" + }, + "fips-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.eu-west-1.amazonaws.com" + }, + "fips-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "fms-fips.eu-west-2.amazonaws.com" + }, + "fips-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "fms-fips.eu-west-3.amazonaws.com" + }, + "fips-me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.me-south-1.amazonaws.com" + }, + "fips-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.sa-east-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fms-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fms-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { + "variants" : [ { + "hostname" : "fms-fips.me-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "fms-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "fms-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "fms-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "fms-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "fms-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "forecast" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "forecast-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "forecast-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "forecast-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "forecast-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + 
"hostname" : "forecast-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "forecast-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "forecastquery" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "forecastquery-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "forecastquery-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "forecastquery-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "forecastquery-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "forecastquery-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "forecastquery-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "frauddetector" : { + "endpoints" : { + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "fsx" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "fsx-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "fsx-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.ca-west-1.amazonaws.com" + }, + "fips-prod-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.ca-central-1.amazonaws.com" + }, + "fips-prod-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.ca-west-1.amazonaws.com" + }, + "fips-prod-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-east-1.amazonaws.com" + }, + "fips-prod-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-east-2.amazonaws.com" + }, + "fips-prod-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-west-1.amazonaws.com" + }, + "fips-prod-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-west-2.amazonaws.com" + }, + "fips-us-east-1" : { + 
"credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "prod-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "fsx-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "fsx-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "fsx-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "fsx-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "gamelift" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "geo" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "glacier" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + 
"ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "glacier-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "glacier-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "glacier-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "glacier-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "glacier-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "glacier-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "glacier-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "glacier-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "glacier-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "glacier-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "globalaccelerator" : { + "endpoints" : { + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "globalaccelerator-fips.us-west-2.amazonaws.com" + } + } + }, + "glue" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "glue-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "glue-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "glue-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "glue-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + 
} ] + } + } + }, + "grafana" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "grafana.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "grafana.ap-northeast-2.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "grafana.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "grafana.ap-southeast-2.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "grafana.eu-central-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "grafana.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "grafana.eu-west-2.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "grafana.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "grafana.us-east-2.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "grafana.us-west-2.amazonaws.com" + } + } + }, + "greengrass" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "greengrass-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "greengrass-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "greengrass-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "greengrass-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "greengrass-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "greengrass-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "greengrass-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "greengrass-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + }, + "isRegionalized" : true + }, + "groundstation" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "groundstation-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "groundstation-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : 
"groundstation-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "groundstation-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "groundstation-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "groundstation-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "guardduty" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "guardduty-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "guardduty-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "guardduty-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "guardduty-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "guardduty-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "guardduty-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "guardduty-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "guardduty-fips.us-west-2.amazonaws.com" + } + }, + "isRegionalized" : true + }, + "health" : { + "defaults" : { + "protocols" : [ "https" ], + "sslCommonName" : "health.us-east-1.amazonaws.com" + }, + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "global.health.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "health-fips.us-east-2.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "health-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "healthlake" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-south-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "iam" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "iam.amazonaws.com", + "variants" : [ { + "hostname" : "iam-fips.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "aws-global-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, 
+ "hostname" : "iam-fips.amazonaws.com" + }, + "iam" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "iam-fips.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "iam-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "iam-fips.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "identity-chime" : { + "endpoints" : { + "eu-central-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "identity-chime-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "identity-chime-fips.us-east-1.amazonaws.com" + } + } + }, + "identitystore" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "importexport" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1", + "service" : "IngestionService" + }, + "hostname" : "importexport.amazonaws.com", + "signatureVersions" : [ "v2", "v4" ] + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "ingest.timestream" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "ingest-fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ingest.timestream-fips.us-east-1.amazonaws.com" + }, + "ingest-fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ingest.timestream-fips.us-east-2.amazonaws.com" + }, + "ingest-fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ingest.timestream-fips.us-west-2.amazonaws.com" + }, + "ingest-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ingest.timestream-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ingest-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ingest.timestream-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ingest-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ingest.timestream-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "inspector" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, 
+ "hostname" : "inspector-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "inspector-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "inspector-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "inspector-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "inspector-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "inspector-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "inspector-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "inspector-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "inspector2" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "internetmonitor" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "hostname" : "internetmonitor.af-south-1.api.aws" + }, + "ap-east-1" : { + "hostname" : "internetmonitor.ap-east-1.api.aws" + }, + "ap-northeast-1" : { + "hostname" : "internetmonitor.ap-northeast-1.api.aws" + }, + "ap-northeast-2" : { + "hostname" : "internetmonitor.ap-northeast-2.api.aws" + }, + "ap-northeast-3" : { + "hostname" : "internetmonitor.ap-northeast-3.api.aws" + }, + "ap-south-1" : { + "hostname" : "internetmonitor.ap-south-1.api.aws" + }, + "ap-south-2" : { + "hostname" : "internetmonitor.ap-south-2.api.aws" + }, + "ap-southeast-1" : { + "hostname" : 
"internetmonitor.ap-southeast-1.api.aws" + }, + "ap-southeast-2" : { + "hostname" : "internetmonitor.ap-southeast-2.api.aws" + }, + "ap-southeast-3" : { + "hostname" : "internetmonitor.ap-southeast-3.api.aws" + }, + "ap-southeast-4" : { + "hostname" : "internetmonitor.ap-southeast-4.api.aws" + }, + "ca-central-1" : { + "hostname" : "internetmonitor.ca-central-1.api.aws", + "variants" : [ { + "hostname" : "internetmonitor-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "hostname" : "internetmonitor.ca-west-1.api.aws" + }, + "eu-central-1" : { + "hostname" : "internetmonitor.eu-central-1.api.aws" + }, + "eu-central-2" : { + "hostname" : "internetmonitor.eu-central-2.api.aws" + }, + "eu-north-1" : { + "hostname" : "internetmonitor.eu-north-1.api.aws" + }, + "eu-south-1" : { + "hostname" : "internetmonitor.eu-south-1.api.aws" + }, + "eu-south-2" : { + "hostname" : "internetmonitor.eu-south-2.api.aws" + }, + "eu-west-1" : { + "hostname" : "internetmonitor.eu-west-1.api.aws" + }, + "eu-west-2" : { + "hostname" : "internetmonitor.eu-west-2.api.aws" + }, + "eu-west-3" : { + "hostname" : "internetmonitor.eu-west-3.api.aws" + }, + "il-central-1" : { + "hostname" : "internetmonitor.il-central-1.api.aws" + }, + "me-central-1" : { + "hostname" : "internetmonitor.me-central-1.api.aws" + }, + "me-south-1" : { + "hostname" : "internetmonitor.me-south-1.api.aws" + }, + "sa-east-1" : { + "hostname" : "internetmonitor.sa-east-1.api.aws" + }, + "us-east-1" : { + "hostname" : "internetmonitor.us-east-1.api.aws", + "variants" : [ { + "hostname" : "internetmonitor-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "hostname" : "internetmonitor.us-east-2.api.aws", + "variants" : [ { + "hostname" : "internetmonitor-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "hostname" : "internetmonitor.us-west-1.api.aws", + "variants" : [ { + "hostname" : "internetmonitor-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "hostname" : "internetmonitor.us-west-2.api.aws", + "variants" : [ { + "hostname" : "internetmonitor-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iot" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "deprecated" : true, + "hostname" : "iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "deprecated" : true, + "hostname" : "iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "deprecated" : true, + "hostname" : "iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "deprecated" : true, + "hostname" : "iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "deprecated" : true, + "hostname" : "iot-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "iot-fips.us-west-1.amazonaws.com", 
+ "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotanalytics" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "iotevents" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "iotevents-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "iotevents-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "iotevents-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "iotevents-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "iotevents-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "iotevents-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "iotevents-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "iotevents-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ioteventsdata" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "data.iotevents.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "data.iotevents.ap-northeast-2.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "data.iotevents.ap-south-1.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "data.iotevents.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "data.iotevents.ap-southeast-2.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "data.iotevents.ca-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "data.iotevents-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "data.iotevents.eu-central-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "data.iotevents.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "data.iotevents.eu-west-2.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "data.iotevents-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : 
"data.iotevents-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "data.iotevents-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "data.iotevents-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "data.iotevents.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "data.iotevents-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "data.iotevents.us-east-2.amazonaws.com", + "variants" : [ { + "hostname" : "data.iotevents-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "data.iotevents.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "data.iotevents-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotfleetwise" : { + "endpoints" : { + "eu-central-1" : { }, + "us-east-1" : { } + } + }, + "iotsecuredtunneling" : { + "defaults" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotsitewise" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "iotsitewise-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, 
+ "eu-west-1" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "iotsitewise-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "iotsitewise-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "iotsitewise-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "iotsitewise-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "iotsitewise-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "iotsitewise-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "iotsitewise-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotthingsgraph" : { + "defaults" : { + "credentialScope" : { + "service" : "iotthingsgraph" + } + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "iottwinmaker" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "api-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.iottwinmaker.ap-northeast-1.amazonaws.com" + }, + "api-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "api.iottwinmaker.ap-northeast-2.amazonaws.com" + }, + "api-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "api.iottwinmaker.ap-south-1.amazonaws.com" + }, + "api-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "api.iottwinmaker.ap-southeast-1.amazonaws.com" + }, + "api-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "api.iottwinmaker.ap-southeast-2.amazonaws.com" + }, + "api-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "api.iottwinmaker.eu-central-1.amazonaws.com" + }, + "api-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "api.iottwinmaker.eu-west-1.amazonaws.com" + }, + "api-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.iottwinmaker.us-east-1.amazonaws.com" + }, + "api-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.iottwinmaker.us-west-2.amazonaws.com" + }, + "data-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "data.iottwinmaker.ap-northeast-1.amazonaws.com" + }, + "data-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "data.iottwinmaker.ap-northeast-2.amazonaws.com" + }, + "data-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "data.iottwinmaker.ap-south-1.amazonaws.com" + }, + "data-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "data.iottwinmaker.ap-southeast-1.amazonaws.com" + }, + "data-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : 
"data.iottwinmaker.ap-southeast-2.amazonaws.com" + }, + "data-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "data.iottwinmaker.eu-central-1.amazonaws.com" + }, + "data-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "data.iottwinmaker.eu-west-1.amazonaws.com" + }, + "data-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "data.iottwinmaker.us-east-1.amazonaws.com" + }, + "data-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "data.iottwinmaker.us-west-2.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "fips-api-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.iottwinmaker-fips.us-east-1.amazonaws.com" + }, + "fips-api-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.iottwinmaker-fips.us-west-2.amazonaws.com" + }, + "fips-data-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "data.iottwinmaker-fips.us-east-1.amazonaws.com" + }, + "fips-data-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "data.iottwinmaker-fips.us-west-2.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "iottwinmaker-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "iottwinmaker-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "iottwinmaker-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "iottwinmaker-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotwireless" : { + "endpoints" : { + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "api.iotwireless.ap-northeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "api.iotwireless.ap-southeast-2.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "api.iotwireless.eu-west-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "api.iotwireless.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "api.iotwireless.us-west-2.amazonaws.com" + } + } + }, + "ivs" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "ivschat" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "ivsrealtime" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "kafka" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + 
"variants" : [ { + "hostname" : "kafka-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "kafka-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "kafka-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "kafka-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "kafka-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "kafka-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "kafka-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "kafka-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "kafka-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "kafka-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "kafka-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "kafka-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kafkaconnect" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "kendra" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "kendra-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "kendra-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "kendra-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "kendra-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "kendra-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "kendra-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "kendra-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + 
"us-west-2" : { + "variants" : [ { + "hostname" : "kendra-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kendra-ranking" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "hostname" : "kendra-ranking.af-south-1.api.aws" + }, + "ap-east-1" : { + "hostname" : "kendra-ranking.ap-east-1.api.aws" + }, + "ap-northeast-1" : { + "hostname" : "kendra-ranking.ap-northeast-1.api.aws" + }, + "ap-northeast-2" : { + "hostname" : "kendra-ranking.ap-northeast-2.api.aws" + }, + "ap-northeast-3" : { + "hostname" : "kendra-ranking.ap-northeast-3.api.aws" + }, + "ap-south-1" : { + "hostname" : "kendra-ranking.ap-south-1.api.aws" + }, + "ap-south-2" : { + "hostname" : "kendra-ranking.ap-south-2.api.aws" + }, + "ap-southeast-1" : { + "hostname" : "kendra-ranking.ap-southeast-1.api.aws" + }, + "ap-southeast-2" : { + "hostname" : "kendra-ranking.ap-southeast-2.api.aws" + }, + "ap-southeast-3" : { + "hostname" : "kendra-ranking.ap-southeast-3.api.aws" + }, + "ap-southeast-4" : { + "hostname" : "kendra-ranking.ap-southeast-4.api.aws" + }, + "ca-central-1" : { + "hostname" : "kendra-ranking.ca-central-1.api.aws", + "variants" : [ { + "hostname" : "kendra-ranking-fips.ca-central-1.api.aws", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "hostname" : "kendra-ranking.ca-west-1.api.aws" + }, + "eu-central-2" : { + "hostname" : "kendra-ranking.eu-central-2.api.aws" + }, + "eu-north-1" : { + "hostname" : "kendra-ranking.eu-north-1.api.aws" + }, + "eu-south-1" : { + "hostname" : "kendra-ranking.eu-south-1.api.aws" + }, + "eu-south-2" : { + "hostname" : "kendra-ranking.eu-south-2.api.aws" + }, + "eu-west-1" : { + "hostname" : "kendra-ranking.eu-west-1.api.aws" + }, + "eu-west-3" : { + "hostname" : "kendra-ranking.eu-west-3.api.aws" + }, + "il-central-1" : { + "hostname" : "kendra-ranking.il-central-1.api.aws" + }, + "me-central-1" : { + "hostname" : "kendra-ranking.me-central-1.api.aws" + }, + "me-south-1" : { + "hostname" : "kendra-ranking.me-south-1.api.aws" + }, + "sa-east-1" : { + "hostname" : "kendra-ranking.sa-east-1.api.aws" + }, + "us-east-1" : { + "hostname" : "kendra-ranking.us-east-1.api.aws", + "variants" : [ { + "hostname" : "kendra-ranking-fips.us-east-1.api.aws", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "hostname" : "kendra-ranking.us-east-2.api.aws", + "variants" : [ { + "hostname" : "kendra-ranking-fips.us-east-2.api.aws", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "hostname" : "kendra-ranking.us-west-1.api.aws" + }, + "us-west-2" : { + "hostname" : "kendra-ranking.us-west-2.api.aws", + "variants" : [ { + "hostname" : "kendra-ranking-fips.us-west-2.api.aws", + "tags" : [ "fips" ] + } ] + } + } + }, + "kinesis" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "kinesis-fips.us-east-1.amazonaws.com" + }, + 
"fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "kinesis-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "kinesis-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "kinesis-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "kinesis-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "kinesis-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "kinesis-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "kinesis-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kinesisanalytics" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kinesisvideo" : { + "endpoints" : { + "af-south-1" : { }, + 
"ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "kms" : { + "endpoints" : { + "ProdFips" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-central-2.amazonaws.com" + }, + "af-south-1" : { + "variants" : [ { + "hostname" : "kms-fips.af-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "af-south-1-fips" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.af-south-1.amazonaws.com" + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "kms-fips.ap-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-east-1-fips" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "kms-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-1-fips" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "kms-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2-fips" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-northeast-2.amazonaws.com" + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "kms-fips.ap-northeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3-fips" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-northeast-3.amazonaws.com" + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "kms-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-1-fips" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-south-1.amazonaws.com" + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "kms-fips.ap-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-2-fips" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-south-2.amazonaws.com" + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "kms-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-1-fips" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "kms-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2-fips" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-southeast-2.amazonaws.com" + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "kms-fips.ap-southeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3-fips" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-southeast-3.amazonaws.com" + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : 
"kms-fips.ap-southeast-4.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-4-fips" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "deprecated" : true, + "hostname" : "kms-fips.ap-southeast-4.amazonaws.com" + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "kms-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "kms-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "kms-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1-fips" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-central-1.amazonaws.com" + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "kms-fips.eu-central-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-2-fips" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-central-2.amazonaws.com" + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "kms-fips.eu-north-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-north-1-fips" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-north-1.amazonaws.com" + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "kms-fips.eu-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-1-fips" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-south-1.amazonaws.com" + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "kms-fips.eu-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-2-fips" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-south-2.amazonaws.com" + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "kms-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1-fips" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "kms-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2-fips" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "kms-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3-fips" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "kms-fips.eu-west-3.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "kms-fips.il-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "il-central-1-fips" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.il-central-1.amazonaws.com" + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "kms-fips.me-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-central-1-fips" : { + "credentialScope" : { + "region" : 
"me-central-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.me-central-1.amazonaws.com" + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "kms-fips.me-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-south-1-fips" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "kms-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1-fips" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "kms-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "kms-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-west-2.amazonaws.com" + } + } + }, + "lakeformation" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-west-2.amazonaws.com", + "tags" : 
[ "fips" ] + } ] + } + } + }, + "lambda" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "lambda.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "lambda.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "lambda.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "lambda.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "lambda.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "lambda.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "lambda.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "lambda.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "lambda.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "lambda.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "lambda.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "lambda.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "lambda.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "lambda.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "lambda.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "lambda.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "lambda.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "lambda.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "lambda.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "lambda.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "lambda.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "lambda.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "lambda.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : 
[ { + "hostname" : "lambda.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "lambda.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "lambda-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "lambda-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "lambda-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "lambda-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "license-manager" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "license-manager-linux-subscriptions" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : 
"license-manager-linux-subscriptions-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "license-manager-linux-subscriptions-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "license-manager-linux-subscriptions-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "license-manager-linux-subscriptions-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "license-manager-linux-subscriptions-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "license-manager-linux-subscriptions-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "license-manager-linux-subscriptions-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "license-manager-linux-subscriptions-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "license-manager-user-subscriptions" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "license-manager-user-subscriptions-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "license-manager-user-subscriptions-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "license-manager-user-subscriptions-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "license-manager-user-subscriptions-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "license-manager-user-subscriptions-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "license-manager-user-subscriptions-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "license-manager-user-subscriptions-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "license-manager-user-subscriptions-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "lightsail" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { 
}, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "logs" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "logs.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "logs.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "logs.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "logs.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "logs.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "logs.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "logs.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "logs.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "logs.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "logs.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "logs.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "logs-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "logs-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "logs.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "logs.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "logs.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "logs.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "logs.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "logs.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "logs.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "logs.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "logs-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "logs-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "logs-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "logs-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + 
"credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "logs-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "logs-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "logs.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "logs.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "logs.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "logs.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "logs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "logs-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "logs-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "logs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "logs.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "lookoutequipment" : { + "endpoints" : { + "ap-northeast-2" : { }, + "eu-west-1" : { }, + "us-east-1" : { } + } + }, + "lookoutmetrics" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "lookoutvision" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "m2" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "deprecated" : true + }, + "fips-us-east-1" : { + "deprecated" : true + }, + "fips-us-east-2" : { + "deprecated" : true + }, + "fips-us-west-1" : { + "deprecated" : true + }, + "fips-us-west-2" : { + "deprecated" : true + }, + "il-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "machinelearning" : { + "endpoints" : { + "eu-west-1" : { }, + "us-east-1" : { } + } + }, + "macie2" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + 
"eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "macie2-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "macie2-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "macie2-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "macie2-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "macie2-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "macie2-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "macie2-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "macie2-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "managedblockchain" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { } + } + }, + "managedblockchain-query" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "marketplacecommerceanalytics" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "media-pipelines-chime" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "media-pipelines-chime-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "media-pipelines-chime-fips.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "media-pipelines-chime-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "media-pipelines-chime-fips.us-west-2.amazonaws.com" + } + } + }, + "mediaconnect" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "mediaconvert" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "mediaconvert-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + 
"fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "mediaconvert-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "mediaconvert-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "mediaconvert-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "mediaconvert-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "mediaconvert-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "mediaconvert-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "mediaconvert-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "mediaconvert-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "mediaconvert-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "medialive" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "medialive-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "medialive-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "medialive-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "medialive-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "medialive-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "medialive-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "mediapackage" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "mediapackage-vod" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + 
"us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "mediapackagev2" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "mediastore" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "meetings-chime" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "meetings-chime-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "meetings-chime-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "il-central-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "meetings-chime-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "meetings-chime-fips.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "meetings-chime-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "meetings-chime-fips.us-west-2.amazonaws.com" + } + } + }, + "memory-db" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "memory-db-fips.us-west-1.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "messaging-chime" : { + "endpoints" : { + "eu-central-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "messaging-chime-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "messaging-chime-fips.us-east-1.amazonaws.com" + } + } + }, + "metering.marketplace" : { + "defaults" : { + "credentialScope" : { + "service" : "aws-marketplace" + } + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" 
: { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "metrics.sagemaker" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "mgh" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "mgn" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "mgn-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "mgn-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "mgn-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "mgn-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "migrationhub-orchestrator" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "migrationhub-strategy" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "mobileanalytics" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "models-v2-lex" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + 
"us-west-2" : { } + } + }, + "models.lex" : { + "defaults" : { + "credentialScope" : { + "service" : "lex" + }, + "variants" : [ { + "hostname" : "models-fips.lex.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "models-fips.lex.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "models-fips.lex.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "models-fips.lex.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "models-fips.lex.us-west-2.amazonaws.com" + } + } + }, + "monitoring" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "monitoring-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "monitoring-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "monitoring-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "monitoring-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "monitoring-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "monitoring-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "monitoring-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "monitoring-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "mq" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : 
"us-east-2" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "mq-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "mq-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "mq-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "mq-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "mturk-requester" : { + "endpoints" : { + "sandbox" : { + "hostname" : "mturk-requester-sandbox.us-east-1.amazonaws.com" + }, + "us-east-1" : { } + }, + "isRegionalized" : false + }, + "neptune" : { + "endpoints" : { + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "rds.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "rds.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "rds.ap-northeast-2.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "rds.ap-south-1.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "rds.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "rds.ap-southeast-2.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "rds.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "rds.eu-central-1.amazonaws.com" + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "rds.eu-north-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "rds.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "rds.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "rds.eu-west-3.amazonaws.com" + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "rds.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "rds.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "rds.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "rds.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "rds.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "rds.us-west-2.amazonaws.com" + } + } + }, + "network-firewall" : { + "endpoints" : { + "af-south-1" : { 
}, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "network-firewall-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "networkmanager" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "networkmanager.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "networkmanager-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-global" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "networkmanager-fips.us-west-2.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "nimble" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "oam" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + 
"us-west-1" : { }, + "us-west-2" : { } + } + }, + "oidc" : { + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "oidc.af-south-1.amazonaws.com" + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "oidc.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "oidc.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "oidc.ap-northeast-2.amazonaws.com" + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "oidc.ap-northeast-3.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "oidc.ap-south-1.amazonaws.com" + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "oidc.ap-south-2.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "oidc.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "oidc.ap-southeast-2.amazonaws.com" + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "oidc.ap-southeast-3.amazonaws.com" + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "oidc.ap-southeast-4.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "oidc.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "hostname" : "oidc.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "oidc.eu-central-1.amazonaws.com" + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "oidc.eu-central-2.amazonaws.com" + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "oidc.eu-north-1.amazonaws.com" + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "oidc.eu-south-1.amazonaws.com" + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "oidc.eu-south-2.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "oidc.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "oidc.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "oidc.eu-west-3.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "oidc.il-central-1.amazonaws.com" + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "oidc.me-central-1.amazonaws.com" + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "oidc.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "oidc.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "oidc.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : 
"oidc.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "oidc.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "oidc.us-west-2.amazonaws.com" + } + } + }, + "omics" : { + "endpoints" : { + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "omics.ap-southeast-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "omics.eu-central-1.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "omics.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "omics.eu-west-2.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "omics-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "omics-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "omics.il-central-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "omics.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "omics-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "omics.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "omics-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "opsworks" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "opsworks-cm" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "organizations" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "organizations.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "organizations-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "organizations-fips.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "osis" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "outposts" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + 
"ca-central-1" : { + "variants" : [ { + "hostname" : "outposts-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "outposts-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "outposts-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "outposts-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "outposts-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "outposts-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "outposts-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "outposts-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "outposts-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "outposts-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "participant.connect" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "participant.connect-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "participant.connect-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "participant.connect-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "participant.connect-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "personalize" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "pi" : { + "endpoints" : { + "af-south-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : 
"pi.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.ca-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-west-1.amazonaws.com" + }, + 
"fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "pinpoint" : { + "defaults" : { + "credentialScope" : { + "service" : "mobiletargeting" + } + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "pinpoint.ca-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "pinpoint-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "pinpoint-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "pinpoint-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "pinpoint-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "pinpoint-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "pinpoint.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "pinpoint-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "pinpoint.us-east-2.amazonaws.com", + "variants" : [ { + 
"hostname" : "pinpoint-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "pinpoint.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "pinpoint-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "pipes" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "polly" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "polly-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "polly-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "polly-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "polly-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "polly-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "polly-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "polly-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "polly-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "portal.sso" : { + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "portal.sso.af-south-1.amazonaws.com" + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "portal.sso.ap-east-1.amazonaws.com" + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "portal.sso.ap-northeast-1.amazonaws.com" + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "portal.sso.ap-northeast-2.amazonaws.com" + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "portal.sso.ap-northeast-3.amazonaws.com" + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "portal.sso.ap-south-1.amazonaws.com" + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "portal.sso.ap-south-2.amazonaws.com" + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : 
"ap-southeast-1" + }, + "hostname" : "portal.sso.ap-southeast-1.amazonaws.com" + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "portal.sso.ap-southeast-2.amazonaws.com" + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "portal.sso.ap-southeast-3.amazonaws.com" + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "portal.sso.ap-southeast-4.amazonaws.com" + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "portal.sso.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "hostname" : "portal.sso.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "portal.sso.eu-central-1.amazonaws.com" + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "portal.sso.eu-central-2.amazonaws.com" + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "portal.sso.eu-north-1.amazonaws.com" + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "portal.sso.eu-south-1.amazonaws.com" + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "portal.sso.eu-south-2.amazonaws.com" + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "portal.sso.eu-west-1.amazonaws.com" + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "portal.sso.eu-west-2.amazonaws.com" + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "portal.sso.eu-west-3.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "portal.sso.il-central-1.amazonaws.com" + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "portal.sso.me-central-1.amazonaws.com" + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "portal.sso.me-south-1.amazonaws.com" + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "portal.sso.sa-east-1.amazonaws.com" + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "portal.sso.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "portal.sso.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "portal.sso.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "portal.sso.us-west-2.amazonaws.com" + } + } + }, + "private-networks" : { + "endpoints" : { + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "profile" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "profile-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "profile-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + 
"credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "profile-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "profile-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "profile-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "profile-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "projects.iot1click" : { + "endpoints" : { + "ap-northeast-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "proton" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "qbusiness" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "hostname" : "qbusiness.af-south-1.api.aws" + }, + "ap-east-1" : { + "hostname" : "qbusiness.ap-east-1.api.aws" + }, + "ap-northeast-1" : { + "hostname" : "qbusiness.ap-northeast-1.api.aws" + }, + "ap-northeast-2" : { + "hostname" : "qbusiness.ap-northeast-2.api.aws" + }, + "ap-northeast-3" : { + "hostname" : "qbusiness.ap-northeast-3.api.aws" + }, + "ap-south-1" : { + "hostname" : "qbusiness.ap-south-1.api.aws" + }, + "ap-south-2" : { + "hostname" : "qbusiness.ap-south-2.api.aws" + }, + "ap-southeast-1" : { + "hostname" : "qbusiness.ap-southeast-1.api.aws" + }, + "ap-southeast-2" : { + "hostname" : "qbusiness.ap-southeast-2.api.aws" + }, + "ap-southeast-3" : { + "hostname" : "qbusiness.ap-southeast-3.api.aws" + }, + "ap-southeast-4" : { + "hostname" : "qbusiness.ap-southeast-4.api.aws" + }, + "ca-central-1" : { + "hostname" : "qbusiness.ca-central-1.api.aws" + }, + "ca-west-1" : { + "hostname" : "qbusiness.ca-west-1.api.aws" + }, + "eu-central-1" : { + "hostname" : "qbusiness.eu-central-1.api.aws" + }, + "eu-central-2" : { + "hostname" : "qbusiness.eu-central-2.api.aws" + }, + "eu-north-1" : { + "hostname" : "qbusiness.eu-north-1.api.aws" + }, + "eu-south-1" : { + "hostname" : "qbusiness.eu-south-1.api.aws" + }, + "eu-south-2" : { + "hostname" : "qbusiness.eu-south-2.api.aws" + }, + "eu-west-1" : { + "hostname" : "qbusiness.eu-west-1.api.aws" + }, + "eu-west-2" : { + "hostname" : "qbusiness.eu-west-2.api.aws" + }, + "eu-west-3" : { + "hostname" : "qbusiness.eu-west-3.api.aws" + }, + "il-central-1" : { + "hostname" : "qbusiness.il-central-1.api.aws" + }, + "me-central-1" : { + "hostname" : "qbusiness.me-central-1.api.aws" + }, + "me-south-1" : { + "hostname" : "qbusiness.me-south-1.api.aws" + }, + "sa-east-1" : { + "hostname" : "qbusiness.sa-east-1.api.aws" + }, + "us-east-1" : { + "hostname" : "qbusiness.us-east-1.api.aws" + }, + "us-east-2" : { + "hostname" : "qbusiness.us-east-2.api.aws" + }, + "us-west-1" : { + "hostname" : "qbusiness.us-west-1.api.aws" + }, + "us-west-2" : { + "hostname" : "qbusiness.us-west-2.api.aws" + } + } + }, + "qldb" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : 
"qldb-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "qldb-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "qldb-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "qldb-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "qldb-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "qldb-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "qldb-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "qldb-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "quicksight" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "api" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "ram" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ram-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "ram-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ram-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "ram-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ram-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ram-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ram-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ram-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ram-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { 
+ "hostname" : "ram-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ram-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ram-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "rbin" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "rbin-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "rbin-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "rbin-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "rbin-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "rds" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "rds-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "rds-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + 
"eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "rds-fips.ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.ca-central-1.amazonaws.com" + }, + "rds-fips.ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.ca-west-1.amazonaws.com" + }, + "rds-fips.us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-east-1.amazonaws.com" + }, + "rds-fips.us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-east-2.amazonaws.com" + }, + "rds-fips.us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-west-1.amazonaws.com" + }, + "rds-fips.us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-west-2.amazonaws.com" + }, + "rds.ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rds.ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rds.us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rds.us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rds.us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rds.us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rds-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { }, + "us-east-1" : { + "sslCommonName" : "{service}.{dnsSuffix}", + "variants" : [ { + "hostname" : "rds-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "rds-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "rds-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "rds-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rds-fips.us-west-2.amazonaws.com" + } + } + }, + "rds-data" : { + "endpoints" : { + 
"ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rds-data-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rds-data-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rds-data-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rds-data-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "rds-data-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "rds-data-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "rds-data-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "rds-data-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "redshift" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "redshift-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "redshift-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "redshift-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "redshift-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "redshift-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "redshift-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "redshift-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "redshift-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "redshift-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "redshift-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "redshift-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + 
} ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "redshift-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "redshift-serverless" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "redshift-serverless-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "redshift-serverless-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "redshift-serverless-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "redshift-serverless-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "redshift-serverless-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "redshift-serverless-fips.us-west-2.amazonaws.com" + }, + "me-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "redshift-serverless-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "redshift-serverless-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "redshift-serverless-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "redshift-serverless-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "rekognition" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "rekognition-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "il-central-1" : { }, + "rekognition-fips.ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.ca-central-1.amazonaws.com" + }, + "rekognition-fips.us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-east-1.amazonaws.com" + }, + "rekognition-fips.us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-east-2.amazonaws.com" + }, + "rekognition-fips.us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-west-1.amazonaws.com" + }, + "rekognition-fips.us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-west-2.amazonaws.com" + }, + "rekognition.ca-central-1" : 
{ + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rekognition-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rekognition.us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rekognition-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rekognition.us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rekognition-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rekognition.us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rekognition-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "rekognition.us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "rekognition-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "rekognition-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "rekognition-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "rekognition-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "rekognition-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-west-2.amazonaws.com" + } + } + }, + "resiliencehub" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "resource-explorer-2" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "resource-groups" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + 
"ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "resource-groups-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "resource-groups-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "resource-groups-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "resource-groups-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "resource-groups-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "resource-groups-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "resource-groups-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "resource-groups-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "robomaker" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "rolesanywhere" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "rolesanywhere-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "rolesanywhere-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : 
"rolesanywhere-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "rolesanywhere-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "route53" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "route53.amazonaws.com", + "variants" : [ { + "hostname" : "route53-fips.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "route53-fips.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "route53-recovery-control-config" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "route53-recovery-control-config.us-west-2.amazonaws.com" + } + } + }, + "route53domains" : { + "endpoints" : { + "us-east-1" : { } + } + }, + "route53resolver" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "rum" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "runtime-v2-lex" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "runtime.lex" : { + "defaults" : { + "credentialScope" : { + "service" : "lex" + }, + "variants" : [ { + "hostname" : "runtime-fips.lex.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "runtime-fips.lex.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "runtime-fips.lex.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "runtime-fips.lex.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : 
true, + "hostname" : "runtime-fips.lex.us-west-2.amazonaws.com" + } + } + }, + "runtime.sagemaker" : { + "defaults" : { + "variants" : [ { + "hostname" : "runtime-fips.sagemaker.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "runtime-fips.sagemaker.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "runtime-fips.sagemaker.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "runtime-fips.sagemaker.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "runtime-fips.sagemaker.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "runtime-fips.sagemaker.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "runtime-fips.sagemaker.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "runtime-fips.sagemaker.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "runtime-fips.sagemaker.us-west-2.amazonaws.com" + } + } + }, + "s3" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.af-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "hostname" : "s3.ap-northeast-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3.dualstack.ap-northeast-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-northeast-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-northeast-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-south-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "hostname" : 
"s3.ap-southeast-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3.dualstack.ap-southeast-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "hostname" : "s3.ap-southeast-2.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3.dualstack.ap-southeast-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-southeast-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : "s3.dualstack.ap-southeast-4.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "s3.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "s3-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-fips.dualstack.ca-central-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3.dualstack.ca-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "s3-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-fips.dualstack.ca-west-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3.dualstack.ca-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-central-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-north-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-south-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "hostname" : "s3.eu-west-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3.dualstack.eu-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-west-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "s3.dualstack.eu-west-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : 
"s3-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.il-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.me-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.me-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "s3-external-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "s3-external-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ] + }, + "sa-east-1" : { + "hostname" : "s3.sa-east-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3.dualstack.sa-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "hostname" : "s3.us-east-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-east-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-east-2.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-east-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "hostname" : "s3.us-west-1.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-west-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "hostname" : "s3.us-west-2.amazonaws.com", + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-west-2.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-west-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + } + }, + "isRegionalized" : true, + "partitionEndpoint" : "aws-global" + }, + "s3-control" : { + "defaults" : { + "protocols" : [ "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "s3-control.af-south-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.af-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "s3-control.ap-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "s3-control.ap-northeast-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], 
+ "variants" : [ { + "hostname" : "s3-control.dualstack.ap-northeast-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "s3-control.ap-northeast-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-northeast-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "s3-control.ap-northeast-3.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-northeast-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "s3-control.ap-south-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "s3-control.ap-south-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-south-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "s3-control.ap-southeast-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-southeast-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "s3-control.ap-southeast-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-southeast-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "s3-control.ap-southeast-3.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-southeast-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "s3-control.ap-southeast-4.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.ap-southeast-4.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "s3-control.ca-central-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control-fips.dualstack.ca-central-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control.dualstack.ca-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.ca-central-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "hostname" : "s3-control.ca-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control-fips.dualstack.ca-west-1.amazonaws.com", + "tags" : [ 
"dualstack", "fips" ] + }, { + "hostname" : "s3-control.dualstack.ca-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.ca-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "s3-control.eu-central-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "s3-control.eu-central-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-central-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "s3-control.eu-north-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-north-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "s3-control.eu-south-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "s3-control.eu-south-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-south-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "s3-control.eu-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "s3-control.eu-west-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-west-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "s3-control.eu-west-3.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.eu-west-3.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "s3-control.il-central-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.il-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "s3-control.me-central-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.me-central-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "s3-control.me-south-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.me-south-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, 
+ "hostname" : "s3-control.sa-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.sa-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "s3-control.us-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-east-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "s3-control.us-east-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-east-2.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-east-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-east-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "s3-control.us-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-west-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "s3-control.us-west-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-west-2.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-west-2.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-west-2.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + } + } + }, + "s3-outposts" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + 
"tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "deprecated" : true + }, + "fips-us-east-1" : { + "deprecated" : true + }, + "fips-us-east-2" : { + "deprecated" : true + }, + "fips-us-west-1" : { + "deprecated" : true + }, + "fips-us-west-2" : { + "deprecated" : true + }, + "il-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + } + } + }, + "sagemaker-geospatial" : { + "endpoints" : { + "us-west-2" : { } + } + }, + "savingsplans" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "savingsplans.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "scheduler" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "schemas" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "sdb" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "signatureVersions" : [ "v2" ] + }, + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : 
{ }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "hostname" : "sdb.amazonaws.com" + }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "secretsmanager" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "deprecated" : true + }, + "ca-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "deprecated" : true + }, + "eu-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "il-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "deprecated" : true + }, + "us-east-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "deprecated" : true + }, + "us-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "deprecated" : true + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "deprecated" : true + } + } + }, + "securityhub" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + 
"eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "securitylake" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "securitylake-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "securitylake-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "securitylake-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "securitylake-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "securitylake-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "securitylake-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "securitylake-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "securitylake-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "serverlessrepo" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-east-1" : { + "protocols" : [ "https" ] + }, + "ap-northeast-1" : { + "protocols" : [ "https" ] + }, + "ap-northeast-2" : { + "protocols" : [ "https" ] + }, + "ap-south-1" : { + "protocols" : [ "https" ] + }, + "ap-southeast-1" : { + "protocols" : [ "https" ] + }, + "ap-southeast-2" : { + "protocols" : [ "https" ] + }, + "ca-central-1" : { + "protocols" : [ "https" ] + }, + "eu-central-1" : { + "protocols" : [ "https" ] + }, + "eu-north-1" : { + "protocols" : [ "https" ] + }, + 
"eu-west-1" : { + "protocols" : [ "https" ] + }, + "eu-west-2" : { + "protocols" : [ "https" ] + }, + "eu-west-3" : { + "protocols" : [ "https" ] + }, + "me-south-1" : { + "protocols" : [ "https" ] + }, + "sa-east-1" : { + "protocols" : [ "https" ] + }, + "us-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "serverlessrepo-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "serverlessrepo-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "serverlessrepo-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "serverlessrepo-fips.us-west-2.amazonaws.com" + } + } + }, + "servicecatalog" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-west-2.amazonaws.com" + } + } + }, + "servicecatalog-appregistry" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + 
"ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-appregistry-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-appregistry-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "servicecatalog-appregistry-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-appregistry-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "servicecatalog-appregistry-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "servicediscovery" : { + "endpoints" : { + "af-south-1" : { + "variants" : [ { + "hostname" : "servicediscovery.af-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-east-1" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-northeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-northeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-2" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-3" : { + "variants" : [ { + "hostname" : "servicediscovery.ap-southeast-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-4" : { + "variants" : [ { + "hostname" : 
"servicediscovery.ap-southeast-4.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.ca-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.ca-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-2" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-central-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-north-1" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-north-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-1" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-south-2" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-south-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "servicediscovery.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "il-central-1" : { + "variants" : [ { + "hostname" : "servicediscovery.il-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-central-1" : { + "variants" : [ { + "hostname" : "servicediscovery.me-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "me-south-1" : { + "variants" : [ { + "hostname" : "servicediscovery.me-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "servicediscovery.sa-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : 
true, + "hostname" : "servicediscovery-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-west-2.amazonaws.com" + } + } + }, + "servicequotas" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "session.qldb" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "session.qldb-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "session.qldb-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "session.qldb-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "session.qldb-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "session.qldb-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "session.qldb-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "shield" : { + "defaults" : { + "protocols" : [ "https" ], + "sslCommonName" : "shield.us-east-1.amazonaws.com" + }, + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "shield.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "shield-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "shield-fips.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + 
"signer" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-west-2.amazonaws.com" + }, + "fips-verification-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "verification.signer-fips.us-east-1.amazonaws.com" + }, + "fips-verification-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "verification.signer-fips.us-east-2.amazonaws.com" + }, + "fips-verification-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "verification.signer-fips.us-west-1.amazonaws.com" + }, + "fips-verification-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "verification.signer-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "signer-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "signer-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "signer-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "signer-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "verification-af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "verification.signer.af-south-1.amazonaws.com" + }, + "verification-ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "verification.signer.ap-east-1.amazonaws.com" + }, + "verification-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "verification.signer.ap-northeast-1.amazonaws.com" + }, + "verification-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "verification.signer.ap-northeast-2.amazonaws.com" + }, + "verification-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "verification.signer.ap-south-1.amazonaws.com" + }, + "verification-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "verification.signer.ap-southeast-1.amazonaws.com" + }, + "verification-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "verification.signer.ap-southeast-2.amazonaws.com" + }, + "verification-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "verification.signer.ca-central-1.amazonaws.com" + }, + "verification-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + 
"hostname" : "verification.signer.eu-central-1.amazonaws.com" + }, + "verification-eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "verification.signer.eu-north-1.amazonaws.com" + }, + "verification-eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "verification.signer.eu-south-1.amazonaws.com" + }, + "verification-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "verification.signer.eu-west-1.amazonaws.com" + }, + "verification-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "verification.signer.eu-west-2.amazonaws.com" + }, + "verification-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "verification.signer.eu-west-3.amazonaws.com" + }, + "verification-me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "verification.signer.me-south-1.amazonaws.com" + }, + "verification-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "verification.signer.sa-east-1.amazonaws.com" + }, + "verification-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "verification.signer.us-east-1.amazonaws.com" + }, + "verification-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "verification.signer.us-east-2.amazonaws.com" + }, + "verification-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "verification.signer.us-west-1.amazonaws.com" + }, + "verification-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "verification.signer.us-west-2.amazonaws.com" + } + } + }, + "simspaceweaver" : { + "endpoints" : { + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "sms" : { + "endpoints" : { + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "sms-fips.us-west-2.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "sms-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sms-voice" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "sms-voice-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-west-1.amazonaws.com" + 
}, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "snowball" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-northeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "snowball-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "snowball-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "snowball-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "snowball-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "snowball-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "snowball-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-northeast-1.amazonaws.com" + }, + "fips-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-northeast-2.amazonaws.com" + }, + "fips-ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-northeast-3.amazonaws.com" + }, + "fips-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-south-1.amazonaws.com" + }, + "fips-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-southeast-1.amazonaws.com" + }, + "fips-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "snowball-fips.ap-southeast-2.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : 
"snowball-fips.ca-central-1.amazonaws.com" + }, + "fips-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.eu-central-1.amazonaws.com" + }, + "fips-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.eu-west-1.amazonaws.com" + }, + "fips-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "snowball-fips.eu-west-2.amazonaws.com" + }, + "fips-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "snowball-fips.eu-west-3.amazonaws.com" + }, + "fips-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.sa-east-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "sa-east-1" : { + "variants" : [ { + "hostname" : "snowball-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "snowball-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "snowball-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "snowball-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "snowball-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sns" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "sns-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "sns-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "sns-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "sns-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "sns-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" 
+ }, + "deprecated" : true, + "hostname" : "sns-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "sns-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "sns-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "sns-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "sns-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sqs" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "sslCommonName" : "{region}.queue.{dnsSuffix}" + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "sqs-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "sqs-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "sqs-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "sqs-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "sslCommonName" : "queue.{dnsSuffix}", + "variants" : [ { + "hostname" : "sqs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "sqs-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "sqs-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "sqs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ssm" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ssm-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "ssm-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ssm-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : 
"ssm-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ssm-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ssm-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ssm-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ssm-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ssm-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ssm-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ssm-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ssm-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ssm-contacts" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ssm-contacts-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ssm-contacts-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ssm-contacts-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ssm-contacts-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ssm-contacts-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ssm-contacts-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ssm-contacts-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ssm-contacts-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ssm-incidents" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ssm-incidents-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ssm-incidents-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ssm-incidents-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" 
: "us-east-2" + }, + "deprecated" : true, + "hostname" : "ssm-incidents-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ssm-incidents-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ssm-incidents-fips.us-west-2.amazonaws.com" + }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ssm-incidents-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ssm-incidents-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ssm-incidents-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ssm-incidents-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ssm-sap" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "ssm-sap-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "ssm-sap-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "ssm-sap-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "ssm-sap-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "ssm-sap-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "ssm-sap-fips.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "ssm-sap-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "ssm-sap-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "ssm-sap-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "ssm-sap-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sso" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { 
} + } + }, + "states" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "states-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "states-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "states-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "states-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "states-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "states-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "states-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "states-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "storagegateway" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.ca-central-1.amazonaws.com" + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1-fips" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.ca-west-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-east-2.amazonaws.com" + }, + 
"us-west-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-west-2.amazonaws.com" + } + } + }, + "streams.dynamodb" : { + "defaults" : { + "credentialScope" : { + "service" : "dynamodb" + }, + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "local" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "localhost:8000", + "protocols" : [ "http" ] + }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "sts" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "sts.amazonaws.com" + }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "sts-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "sts-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "sts-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "sts-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "sts-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "sts-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "sts-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "sts-fips.us-west-2.amazonaws.com" + } + }, + "partitionEndpoint" : "aws-global" + }, + "support" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + 
"hostname" : "support.us-east-1.amazonaws.com" + } + }, + "partitionEndpoint" : "aws-global" + }, + "supportapp" : { + "endpoints" : { + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "swf" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "swf-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "swf-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "swf-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "swf-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "swf-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "swf-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "swf-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "swf-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "synthetics" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-west-1.amazonaws.com", + 
"tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "tagging" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "tax" : { + "endpoints" : { + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "tax.us-east-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "textract" : { + "endpoints" : { + "ap-northeast-2" : { + "variants" : [ { + "hostname" : "textract.ap-northeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-south-1" : { + "variants" : [ { + "hostname" : "textract.ap-south-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-1" : { + "variants" : [ { + "hostname" : "textract.ap-southeast-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ap-southeast-2" : { + "variants" : [ { + "hostname" : "textract.ap-southeast-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "textract-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.ca-central-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.ca-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-central-1" : { + "variants" : [ { + "hostname" : "textract.eu-central-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-1" : { + "variants" : [ { + "hostname" : "textract.eu-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-2" : { + "variants" : [ { + "hostname" : "textract.eu-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "eu-west-3" : { + "variants" : [ { + "hostname" : "textract.eu-west-3.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "textract-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "textract-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + 
"us-east-2" : { + "variants" : [ { + "hostname" : "textract-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-east-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-east-2.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "textract-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "textract-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-west-2.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-west-2.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "thinclient" : { + "endpoints" : { + "ap-south-1" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "tnb" : { + "endpoints" : { + "ap-northeast-2" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-south-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "transcribe" : { + "defaults" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "fips.transcribe.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "fips.transcribe.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-west-2.amazonaws.com" + }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "transcribestreaming" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + 
"ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "sa-east-1" : { }, + "transcribestreaming-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "transcribestreaming-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "transcribestreaming-fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "transcribestreaming-fips.ca-central-1.amazonaws.com" + }, + "transcribestreaming-fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "transcribestreaming-fips.us-east-1.amazonaws.com" + }, + "transcribestreaming-fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "transcribestreaming-fips.us-east-2.amazonaws.com" + }, + "transcribestreaming-fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "transcribestreaming-fips.us-west-2.amazonaws.com" + }, + "transcribestreaming-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "transcribestreaming-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "transcribestreaming-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "transcribestreaming-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "transcribestreaming-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "transcribestreaming-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-2" : { } + } + }, + "transfer" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "transfer-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "transfer-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "transfer-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + 
"hostname" : "transfer-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "transfer-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "transfer-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "transfer-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "transfer-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "translate" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "translate-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "translate-fips.us-east-1.amazonaws.com" + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "translate-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2-fips" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "translate-fips.us-east-2.amazonaws.com" + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "translate-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1-fips" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "translate-fips.us-west-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "translate-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "translate-fips.us-west-2.amazonaws.com" + } + } + }, + "verifiedpermissions" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.ca-west-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + 
}, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "voice-chime" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "voice-chime-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1-fips" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "voice-chime-fips.ca-central-1.amazonaws.com" + }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "voice-chime-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "voice-chime-fips.us-east-1.amazonaws.com" + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "voice-chime-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2-fips" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "voice-chime-fips.us-west-2.amazonaws.com" + } + } + }, + "voiceid" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { + "variants" : [ { + "hostname" : "voiceid-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { }, + "eu-west-2" : { }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "voiceid-fips.ca-central-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "voiceid-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "voiceid-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "voiceid-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "voiceid-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "vpc-lattice" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "sa-east-1" : { }, + 
"us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "waf" : { + "endpoints" : { + "aws" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "waf-fips.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "aws-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "waf-fips.amazonaws.com" + }, + "aws-global" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "waf.amazonaws.com", + "variants" : [ { + "hostname" : "waf-fips.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "aws-global-fips" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "waf-fips.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-global" + }, + "waf-regional" : { + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "waf-regional.af-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.af-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "waf-regional.ap-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "waf-regional.ap-northeast-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "waf-regional.ap-northeast-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "waf-regional.ap-northeast-3.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-northeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "waf-regional.ap-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : "waf-regional.ap-south-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "waf-regional.ap-southeast-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "waf-regional.ap-southeast-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "waf-regional.ap-southeast-3.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-southeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : 
"waf-regional.ap-southeast-4.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ap-southeast-4.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "waf-regional.ca-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "waf-regional.eu-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "waf-regional.eu-central-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-central-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "waf-regional.eu-north-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-north-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "waf-regional.eu-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "waf-regional.eu-south-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "waf-regional.eu-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "waf-regional.eu-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "waf-regional.eu-west-3.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.af-south-1.amazonaws.com" + }, + "fips-ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-east-1.amazonaws.com" + }, + "fips-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-northeast-1.amazonaws.com" + }, + "fips-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-northeast-2.amazonaws.com" + }, + "fips-ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-northeast-3.amazonaws.com" + }, + "fips-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-south-1.amazonaws.com" + }, + "fips-ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "deprecated" : true, + "hostname" : 
"waf-regional-fips.ap-south-2.amazonaws.com" + }, + "fips-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-southeast-1.amazonaws.com" + }, + "fips-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-southeast-2.amazonaws.com" + }, + "fips-ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-southeast-3.amazonaws.com" + }, + "fips-ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ap-southeast-4.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.ca-central-1.amazonaws.com" + }, + "fips-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-central-1.amazonaws.com" + }, + "fips-eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-central-2.amazonaws.com" + }, + "fips-eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-north-1.amazonaws.com" + }, + "fips-eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-south-1.amazonaws.com" + }, + "fips-eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-south-2.amazonaws.com" + }, + "fips-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-west-1.amazonaws.com" + }, + "fips-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-west-2.amazonaws.com" + }, + "fips-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.eu-west-3.amazonaws.com" + }, + "fips-il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.il-central-1.amazonaws.com" + }, + "fips-me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.me-central-1.amazonaws.com" + }, + "fips-me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.me-south-1.amazonaws.com" + }, + "fips-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.sa-east-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + 
"hostname" : "waf-regional-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "waf-regional.il-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.il-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "waf-regional.me-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.me-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "waf-regional.me-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.me-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "waf-regional.sa-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "waf-regional.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "waf-regional.us-east-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "waf-regional.us-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "waf-regional.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "wafv2" : { + "endpoints" : { + "af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "hostname" : "wafv2.af-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.af-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "hostname" : "wafv2.ap-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "hostname" : "wafv2.ap-northeast-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-northeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "hostname" : "wafv2.ap-northeast-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-northeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "hostname" : "wafv2.ap-northeast-3.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-northeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "hostname" : "wafv2.ap-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "hostname" : 
"wafv2.ap-south-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "hostname" : "wafv2.ap-southeast-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-southeast-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "hostname" : "wafv2.ap-southeast-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-southeast-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "hostname" : "wafv2.ap-southeast-3.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-southeast-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "hostname" : "wafv2.ap-southeast-4.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ap-southeast-4.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "hostname" : "wafv2.ca-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ca-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "hostname" : "wafv2.ca-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.ca-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "hostname" : "wafv2.eu-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "hostname" : "wafv2.eu-central-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-central-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "hostname" : "wafv2.eu-north-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-north-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "hostname" : "wafv2.eu-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "hostname" : "wafv2.eu-south-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-south-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "hostname" : "wafv2.eu-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "hostname" : "wafv2.eu-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "hostname" : "wafv2.eu-west-3.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.eu-west-3.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-af-south-1" : { + "credentialScope" : { + "region" : "af-south-1" + }, + "deprecated" : true, + "hostname" : 
"wafv2-fips.af-south-1.amazonaws.com" + }, + "fips-ap-east-1" : { + "credentialScope" : { + "region" : "ap-east-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-east-1.amazonaws.com" + }, + "fips-ap-northeast-1" : { + "credentialScope" : { + "region" : "ap-northeast-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-northeast-1.amazonaws.com" + }, + "fips-ap-northeast-2" : { + "credentialScope" : { + "region" : "ap-northeast-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-northeast-2.amazonaws.com" + }, + "fips-ap-northeast-3" : { + "credentialScope" : { + "region" : "ap-northeast-3" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-northeast-3.amazonaws.com" + }, + "fips-ap-south-1" : { + "credentialScope" : { + "region" : "ap-south-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-south-1.amazonaws.com" + }, + "fips-ap-south-2" : { + "credentialScope" : { + "region" : "ap-south-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-south-2.amazonaws.com" + }, + "fips-ap-southeast-1" : { + "credentialScope" : { + "region" : "ap-southeast-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-southeast-1.amazonaws.com" + }, + "fips-ap-southeast-2" : { + "credentialScope" : { + "region" : "ap-southeast-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-southeast-2.amazonaws.com" + }, + "fips-ap-southeast-3" : { + "credentialScope" : { + "region" : "ap-southeast-3" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-southeast-3.amazonaws.com" + }, + "fips-ap-southeast-4" : { + "credentialScope" : { + "region" : "ap-southeast-4" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ap-southeast-4.amazonaws.com" + }, + "fips-ca-central-1" : { + "credentialScope" : { + "region" : "ca-central-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ca-central-1.amazonaws.com" + }, + "fips-ca-west-1" : { + "credentialScope" : { + "region" : "ca-west-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.ca-west-1.amazonaws.com" + }, + "fips-eu-central-1" : { + "credentialScope" : { + "region" : "eu-central-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-central-1.amazonaws.com" + }, + "fips-eu-central-2" : { + "credentialScope" : { + "region" : "eu-central-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-central-2.amazonaws.com" + }, + "fips-eu-north-1" : { + "credentialScope" : { + "region" : "eu-north-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-north-1.amazonaws.com" + }, + "fips-eu-south-1" : { + "credentialScope" : { + "region" : "eu-south-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-south-1.amazonaws.com" + }, + "fips-eu-south-2" : { + "credentialScope" : { + "region" : "eu-south-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-south-2.amazonaws.com" + }, + "fips-eu-west-1" : { + "credentialScope" : { + "region" : "eu-west-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-west-1.amazonaws.com" + }, + "fips-eu-west-2" : { + "credentialScope" : { + "region" : "eu-west-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-west-2.amazonaws.com" + }, + "fips-eu-west-3" : { + "credentialScope" : { + "region" : "eu-west-3" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.eu-west-3.amazonaws.com" + }, + "fips-il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.il-central-1.amazonaws.com" + }, + "fips-me-central-1" : { + "credentialScope" : { + 
"region" : "me-central-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.me-central-1.amazonaws.com" + }, + "fips-me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.me-south-1.amazonaws.com" + }, + "fips-sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.sa-east-1.amazonaws.com" + }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-west-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { + "credentialScope" : { + "region" : "il-central-1" + }, + "hostname" : "wafv2.il-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.il-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-central-1" : { + "credentialScope" : { + "region" : "me-central-1" + }, + "hostname" : "wafv2.me-central-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.me-central-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "me-south-1" : { + "credentialScope" : { + "region" : "me-south-1" + }, + "hostname" : "wafv2.me-south-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.me-south-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "sa-east-1" : { + "credentialScope" : { + "region" : "sa-east-1" + }, + "hostname" : "wafv2.sa-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.sa-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "hostname" : "wafv2.us-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "hostname" : "wafv2.us-east-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "hostname" : "wafv2.us-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "hostname" : "wafv2.us-west-2.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "wellarchitected" : { + "endpoints" : { + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-north-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { }, + "us-east-2" : { }, + "us-west-1" : { }, + "us-west-2" : { } + } + }, + "wisdom" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-2" : { 
}, + "fips-us-east-1" : { + "deprecated" : true + }, + "fips-us-west-2" : { + "deprecated" : true + }, + "ui-ap-northeast-1" : { }, + "ui-ap-northeast-2" : { }, + "ui-ap-southeast-1" : { }, + "ui-ap-southeast-2" : { }, + "ui-ca-central-1" : { }, + "ui-eu-central-1" : { }, + "ui-eu-west-2" : { }, + "ui-us-east-1" : { }, + "ui-us-west-2" : { }, + "us-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "workdocs" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "eu-west-1" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "workdocs-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "workdocs-fips.us-west-2.amazonaws.com" + }, + "us-east-1" : { + "variants" : [ { + "hostname" : "workdocs-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "workdocs-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "workmail" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "workspaces" : { + "endpoints" : { + "af-south-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "workspaces-fips.us-east-1.amazonaws.com" + }, + "fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "workspaces-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "workspaces-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "workspaces-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "workspaces-web" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-south-1" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ca-central-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, + "xray" : { + "endpoints" : { + "af-south-1" : { }, + "ap-east-1" : { }, + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-northeast-3" : { }, + "ap-south-1" : { }, + "ap-south-2" : { }, + "ap-southeast-1" : { }, + "ap-southeast-2" : { }, + "ap-southeast-3" : { }, + "ap-southeast-4" : { }, + "ca-central-1" : { }, + "ca-west-1" : { }, + "eu-central-1" : { }, + "eu-central-2" : { }, + "eu-north-1" : { }, + "eu-south-1" : { }, + "eu-south-2" : { }, + "eu-west-1" : { }, + "eu-west-2" : { }, + "eu-west-3" : { }, + "fips-us-east-1" : { + "credentialScope" : { + "region" : "us-east-1" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-east-1.amazonaws.com" + }, + "fips-us-east-2" : { + "credentialScope" : { + "region" : "us-east-2" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-east-2.amazonaws.com" + }, + "fips-us-west-1" : { + "credentialScope" : { + "region" : "us-west-1" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-west-1.amazonaws.com" + }, + 
"fips-us-west-2" : { + "credentialScope" : { + "region" : "us-west-2" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-west-2.amazonaws.com" + }, + "il-central-1" : { }, + "me-central-1" : { }, + "me-south-1" : { }, + "sa-east-1" : { }, + "us-east-1" : { + "variants" : [ { + "hostname" : "xray-fips.us-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-east-2" : { + "variants" : [ { + "hostname" : "xray-fips.us-east-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-1" : { + "variants" : [ { + "hostname" : "xray-fips.us-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-west-2" : { + "variants" : [ { + "hostname" : "xray-fips.us-west-2.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + } + } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + }, { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "dnsSuffix" : "amazonaws.com.cn", + "partition" : "aws-cn", + "partitionName" : "AWS China", + "regionRegex" : "^cn\\-\\w+\\-\\d+$", + "regions" : { + "cn-north-1" : { + "description" : "China (Beijing)" + }, + "cn-northwest-1" : { + "description" : "China (Ningxia)" + } + }, + "services" : { + "access-analyzer" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "account" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "account.cn-northwest-1.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "acm" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "acm-pca" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "airflow" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "api.ecr" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "api.ecr.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "api.ecr.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "api.pricing" : { + "defaults" : { + "credentialScope" : { + "service" : "pricing" + } + }, + "endpoints" : { + "cn-northwest-1" : { } + } + }, + "api.sagemaker" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "api.tunneling.iot" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "apigateway" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "appconfig" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "appconfigdata" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "application-autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "applicationinsights" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "appmesh" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : 
"appmesh.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "appmesh.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "appsync" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "arc-zonal-shift" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "athena" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "athena.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "athena.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "autoscaling-plans" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "backup" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "batch" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "budgets" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "budgets.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "cassandra" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "ce" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "ce.cn-northwest-1.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "cloudcontrolapi" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "cloudformation" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "cloudfront" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "cloudfront.cn-northwest-1.amazonaws.com.cn", + "protocols" : [ "http", "https" ] + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "cloudtrail" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "codebuild" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "codecommit" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "codedeploy" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "codepipeline" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "cognito-identity" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "compute-optimizer" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "compute-optimizer.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "compute-optimizer.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "config" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + 
}, + "cur" : { + "endpoints" : { + "cn-northwest-1" : { } + } + }, + "data-ats.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "data.ats.iot.cn-north-1.amazonaws.com.cn", + "protocols" : [ "https" ] + }, + "cn-northwest-1" : { } + } + }, + "data.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "data.jobs.iot" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "databrew" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "datasync" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "datazone" : { + "defaults" : { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "variants" : [ { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "datazone.cn-north-1.api.amazonwebservices.com.cn" + }, + "cn-northwest-1" : { + "hostname" : "datazone.cn-northwest-1.api.amazonwebservices.com.cn" + } + } + }, + "dax" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "directconnect" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "dlm" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "dms" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "docdb" : { + "endpoints" : { + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "rds.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "ds" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "dynamodb" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "ebs" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "ec2" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "ecs" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "eks" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "eks-auth" : { + "defaults" : { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "variants" : [ { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "eks-auth.cn-north-1.api.amazonwebservices.com.cn" + }, + "cn-northwest-1" : { + "hostname" : "eks-auth.cn-northwest-1.api.amazonwebservices.com.cn" + } + } + }, + "elasticache" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "elasticbeanstalk" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "elasticfilesystem" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.cn-north-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "fips-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + 
}, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.cn-north-1.amazonaws.com.cn" + }, + "fips-cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "elasticloadbalancing" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "elasticmapreduce" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "emr-containers" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "emr-serverless" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "entitlement.marketplace" : { + "endpoints" : { + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "entitlement-marketplace.cn-northwest-1.amazonaws.com.cn", + "protocols" : [ "https" ] + } + } + }, + "es" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "aos.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "aos.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "events" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "firehose" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "firehose.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "firehose.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "fms" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "fsx" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "gamelift" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "glacier" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "glue" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "greengrass" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { } + }, + "isRegionalized" : true + }, + "guardduty" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + }, + "isRegionalized" : true + }, + "health" : { + "defaults" : { + "protocols" : [ "https" ], + "sslCommonName" : "health.cn-northwest-1.amazonaws.com.cn" + }, + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "global.health.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "iam" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "iam.cn-north-1.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "identitystore" : { + "endpoints" : { + 
"cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "inspector2" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "internetmonitor" : { + "defaults" : { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "variants" : [ { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "internetmonitor.cn-north-1.api.amazonwebservices.com.cn" + }, + "cn-northwest-1" : { + "hostname" : "internetmonitor.cn-northwest-1.api.amazonwebservices.com.cn" + } + } + }, + "iot" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "iotanalytics" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "iotevents" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "ioteventsdata" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "data.iotevents.cn-north-1.amazonaws.com.cn" + } + } + }, + "iotsecuredtunneling" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "iotsitewise" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "iottwinmaker" : { + "endpoints" : { + "api-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "api.iottwinmaker.cn-north-1.amazonaws.com.cn" + }, + "cn-north-1" : { }, + "data-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "data.iottwinmaker.cn-north-1.amazonaws.com.cn" + } + } + }, + "kafka" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "kendra-ranking" : { + "defaults" : { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "variants" : [ { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "kendra-ranking.cn-north-1.api.amazonwebservices.com.cn" + }, + "cn-northwest-1" : { + "hostname" : "kendra-ranking.cn-northwest-1.api.amazonwebservices.com.cn" + } + } + }, + "kinesis" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "kinesisanalytics" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "kinesisvideo" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "kms" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "lakeformation" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "lambda" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "lambda.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "lambda.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "license-manager" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "license-manager-linux-subscriptions" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "logs" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "mediaconvert" : { + "endpoints" : { + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "mediaconvert.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "memory-db" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "metrics.sagemaker" : { + "endpoints" : { + "cn-north-1" : { }, + 
"cn-northwest-1" : { } + } + }, + "monitoring" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "mq" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "neptune" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "rds.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "rds.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "network-firewall" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "oam" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "oidc" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "oidc.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "oidc.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "organizations" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "organizations.cn-northwest-1.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "personalize" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "pi" : { + "endpoints" : { + "cn-north-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "pipes" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "polly" : { + "endpoints" : { + "cn-northwest-1" : { } + } + }, + "portal.sso" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "portal.sso.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "portal.sso.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "qbusiness" : { + "defaults" : { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "variants" : [ { + "dnsSuffix" : "api.amazonwebservices.com.cn", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "hostname" : "qbusiness.cn-north-1.api.amazonwebservices.com.cn" + }, + "cn-northwest-1" : { + "hostname" : "qbusiness.cn-northwest-1.api.amazonwebservices.com.cn" + } + } + }, + "quicksight" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "ram" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "rbin" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "rds" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "redshift" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "redshift-serverless" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "resource-groups" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "rolesanywhere" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "route53" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + 
"hostname" : "route53.amazonaws.com.cn" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-cn-global" + }, + "route53resolver" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "runtime.sagemaker" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "s3" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com.cn", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.cn-north-1.amazonaws.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "s3.dualstack.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "s3-control" : { + "defaults" : { + "protocols" : [ "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com.cn", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "s3-control.cn-north-1.amazonaws.com.cn", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.cn-north-1.amazonaws.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "s3-control.cn-northwest-1.amazonaws.com.cn", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control.dualstack.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "savingsplans" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "savingsplans.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "savingsplans.cn-northwest-1.amazonaws.com.cn" + } + }, + "isRegionalized" : true + }, + "schemas" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "secretsmanager" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + } ] + } + } + }, + "securityhub" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "serverlessrepo" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { + "protocols" : [ "https" ] + }, + "cn-northwest-1" : { + "protocols" : [ "https" ] + } + } + }, + "servicecatalog" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "servicediscovery" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "servicediscovery.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "servicediscovery.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "servicequotas" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "signer" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { }, + "verification-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : 
"verification.signer.cn-north-1.amazonaws.com.cn" + }, + "verification-cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "verification.signer.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "sms" : { + "endpoints" : { + "cn-north-1" : { } + } + }, + "snowball" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "snowball-fips.cn-north-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "snowball-fips.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "fips-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.cn-north-1.amazonaws.com.cn" + }, + "fips-cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "sns" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "sqs" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "sslCommonName" : "{region}.queue.{dnsSuffix}" + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "ssm" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "sso" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "states" : { + "endpoints" : { + "cn-north-1" : { + "variants" : [ { + "hostname" : "states.cn-north-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + }, + "cn-northwest-1" : { + "variants" : [ { + "hostname" : "states.cn-northwest-1.api.amazonwebservices.com.cn", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "storagegateway" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "streams.dynamodb" : { + "defaults" : { + "credentialScope" : { + "service" : "dynamodb" + }, + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "sts" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "support" : { + "endpoints" : { + "aws-cn-global" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "support.cn-north-1.amazonaws.com.cn" + } + }, + "partitionEndpoint" : "aws-cn-global" + }, + "swf" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "synthetics" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "tagging" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "transcribe" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "cn.transcribe.cn-north-1.amazonaws.com.cn" + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "cn.transcribe.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "transcribestreaming" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "transfer" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + }, + "waf-regional" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "waf-regional.cn-north-1.amazonaws.com.cn", + "variants" : [ { + "hostname" : "waf-regional-fips.cn-north-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "cn-northwest-1" 
: { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "waf-regional.cn-northwest-1.amazonaws.com.cn", + "variants" : [ { + "hostname" : "waf-regional-fips.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "fips-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.cn-north-1.amazonaws.com.cn" + }, + "fips-cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "wafv2" : { + "endpoints" : { + "cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "hostname" : "wafv2.cn-north-1.amazonaws.com.cn", + "variants" : [ { + "hostname" : "wafv2-fips.cn-north-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "hostname" : "wafv2.cn-northwest-1.amazonaws.com.cn", + "variants" : [ { + "hostname" : "wafv2-fips.cn-northwest-1.amazonaws.com.cn", + "tags" : [ "fips" ] + } ] + }, + "fips-cn-north-1" : { + "credentialScope" : { + "region" : "cn-north-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.cn-north-1.amazonaws.com.cn" + }, + "fips-cn-northwest-1" : { + "credentialScope" : { + "region" : "cn-northwest-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.cn-northwest-1.amazonaws.com.cn" + } + } + }, + "workspaces" : { + "endpoints" : { + "cn-northwest-1" : { } + } + }, + "xray" : { + "endpoints" : { + "cn-north-1" : { }, + "cn-northwest-1" : { } + } + } + } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + }, { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "api.aws", + "hostname" : "{service}.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "dnsSuffix" : "amazonaws.com", + "partition" : "aws-us-gov", + "partitionName" : "AWS GovCloud (US)", + "regionRegex" : "^us\\-gov\\-\\w+\\-\\d+$", + "regions" : { + "us-gov-east-1" : { + "description" : "AWS GovCloud (US-East)" + }, + "us-gov-west-1" : { + "description" : "AWS GovCloud (US-West)" + } + }, + "services" : { + "access-analyzer" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "access-analyzer.us-gov-west-1.amazonaws.com" + } + } + }, + "acm" : { + "defaults" : { + "variants" : [ { + "hostname" : "acm.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + 
"credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "acm.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "acm.us-gov-west-1.amazonaws.com" + } + } + }, + "acm-pca" : { + "defaults" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "acm-pca.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "acm-pca.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "acm-pca.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "acm-pca.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "acm-pca.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "api.detective" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "api.detective-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "api.detective-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "api.ecr" : { + "defaults" : { + "variants" : [ { + "hostname" : "ecr-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "dkr-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dkr-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-dkr-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com" + }, + "fips-dkr-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com" + }, + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "api.ecr.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "ecr-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "api.ecr.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "ecr-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "api.sagemaker" : { + "defaults" : { + "variants" 
: [ { + "hostname" : "api-fips.sagemaker.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "api-fips.sagemaker.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "api-fips.sagemaker.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1-fips-secondary" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "api.sagemaker.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1-secondary" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "api.sagemaker.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "api.tunneling.iot" : { + "defaults" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "apigateway" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "appconfig" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "appconfig.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "appconfig.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "appconfig.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "appconfig.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "appconfigdata" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "appconfigdata.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "appconfigdata.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "appconfigdata.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "appconfigdata.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "application-autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "application-autoscaling.us-gov-east-1.amazonaws.com", + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "application-autoscaling.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "deprecated" : true, + "hostname" : 
"application-autoscaling.us-gov-east-1.amazonaws.com", + "protocols" : [ "http", "https" ] + }, + "us-gov-west-1" : { + "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "deprecated" : true, + "hostname" : "application-autoscaling.us-gov-west-1.amazonaws.com", + "protocols" : [ "http", "https" ] + } + } + }, + "applicationinsights" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "applicationinsights.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "applicationinsights.us-gov-west-1.amazonaws.com" + } + } + }, + "appstream2" : { + "defaults" : { + "credentialScope" : { + "service" : "appstream" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "appstream2-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "appstream2-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "arc-zonal-shift" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "athena" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "athena-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "athena-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "athena-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "athena-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "athena.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "autoscaling" : { + "defaults" : { + "variants" : [ { + "hostname" : "autoscaling.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-gov-west-1" : { + "protocols" : [ "http", "https" ] + } + } + }, + "autoscaling-plans" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "autoscaling-plans.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "deprecated" : true, + "hostname" : "autoscaling-plans.us-gov-east-1.amazonaws.com", + "protocols" 
: [ "http", "https" ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "autoscaling-plans.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "deprecated" : true, + "hostname" : "autoscaling-plans.us-gov-west-1.amazonaws.com", + "protocols" : [ "http", "https" ] + } + } + }, + "backup" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "backup-gateway" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "batch" : { + "defaults" : { + "variants" : [ { + "hostname" : "batch.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "batch.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "batch.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "batch.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "batch.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "bedrock" : { + "endpoints" : { + "bedrock-fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "bedrock-fips.us-gov-west-1.amazonaws.com" + }, + "bedrock-runtime-fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "bedrock-runtime-fips.us-gov-west-1.amazonaws.com" + }, + "bedrock-runtime-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "bedrock-runtime.us-gov-west-1.amazonaws.com" + }, + "bedrock-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "bedrock.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { } + } + }, + "cassandra" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "cassandra.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "cassandra.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "cassandra.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "cassandra.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "cassandra.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cassandra.us-gov-west-1.amazonaws.com" + } + } + }, + "cloudcontrolapi" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cloudcontrolapi-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + 
}, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "cloudcontrolapi-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "cloudcontrolapi-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "cloudcontrolapi.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "clouddirectory" : { + "endpoints" : { + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "clouddirectory.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "clouddirectory.us-gov-west-1.amazonaws.com" + } + } + }, + "cloudformation" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "cloudformation.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "cloudformation.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "cloudformation.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "cloudformation.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "cloudformation.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cloudformation.us-gov-west-1.amazonaws.com" + } + } + }, + "cloudhsm" : { + "endpoints" : { + "us-gov-west-1" : { } + } + }, + "cloudhsmv2" : { + "defaults" : { + "credentialScope" : { + "service" : "cloudhsm" + } + }, + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "cloudtrail" : { + "defaults" : { + "variants" : [ { + "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "cloudtrail.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "cloudtrail.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "cloudtrail.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "codebuild" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "codebuild-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "codebuild-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "codecommit" : { + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : 
"codecommit-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "codecommit-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "codedeploy" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "codedeploy-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "codedeploy-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "codepipeline" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "codepipeline-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "codepipeline-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "codestar-connections" : { + "endpoints" : { + "us-gov-east-1" : { } + } + }, + "cognito-identity" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cognito-identity-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "cognito-identity-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "cognito-idp" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "cognito-idp-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "cognito-idp-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "comprehend" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "comprehend-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "comprehend-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "comprehendmedical" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "comprehendmedical-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "comprehendmedical-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "compute-optimizer" : { + "endpoints" : { + 
"us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "compute-optimizer-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "compute-optimizer-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "config" : { + "defaults" : { + "variants" : [ { + "hostname" : "config.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "config.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "config.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "config.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "config.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "connect" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "connect.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "connect.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "controltower" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "controltower-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "controltower-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "controltower-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "data-ats.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "data.iot" : { + "defaults" : { + "credentialScope" : { + "service" : "iotdata" + }, + "protocols" : [ "https" ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "service" : "iotdata" + }, + "deprecated" : true, + "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "data.iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } 
+ } + }, + "data.jobs.iot" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "data.jobs.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "data.jobs.iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "databrew" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "databrew.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "databrew.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "datasync" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "datazone" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "datazone.us-gov-east-1.api.aws" + }, + "us-gov-west-1" : { + "hostname" : "datazone.us-gov-west-1.api.aws" + } + } + }, + "directconnect" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "directconnect-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "directconnect-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "dlm" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "dlm.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "dlm.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "dlm.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "dlm.us-gov-west-1.amazonaws.com" + } + } + }, + "dms" : { + "defaults" : { + "variants" : [ { + "hostname" : "dms.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "dms" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, 
+ "deprecated" : true, + "variants" : [ { + "hostname" : "dms.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "dms-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "dms.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "dms.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "dms.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "dms.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "dms.us-gov-west-1.amazonaws.com" + } + } + }, + "docdb" : { + "endpoints" : { + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "rds.us-gov-west-1.amazonaws.com" + } + } + }, + "drs" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "drs-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "drs-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "drs-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "drs-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ds" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ds-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "ds-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "ds-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "dynamodb" : { + "defaults" : { + "variants" : [ { + "hostname" : "dynamodb.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "dynamodb.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "dynamodb.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "dynamodb.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "dynamodb.us-gov-west-1.amazonaws.com" + } + } + }, + "ebs" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "ec2" : { + "defaults" : { + "variants" : [ { + "hostname" : "ec2.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "ec2.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "ec2.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + 
"region" : "us-gov-west-1" + }, + "hostname" : "ec2.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "ec2.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "ecs" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ecs-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "ecs-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "ecs-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "eks" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "eks.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "eks.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "eks.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "eks.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "eks.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "eks-auth" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "eks-auth.us-gov-east-1.api.aws" + }, + "us-gov-west-1" : { + "hostname" : "eks-auth.us-gov-west-1.api.aws" + } + } + }, + "elasticache" : { + "defaults" : { + "variants" : [ { + "hostname" : "elasticache.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticache.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "elasticache.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticache.us-gov-west-1.amazonaws.com" + } + } + }, + "elasticbeanstalk" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "elasticbeanstalk.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "elasticbeanstalk.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticbeanstalk.us-gov-west-1.amazonaws.com" + } + } + }, + "elasticfilesystem" : { + 
"endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticloadbalancing" : { + "defaults" : { + "variants" : [ { + "hostname" : "elasticloadbalancing.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticloadbalancing.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "elasticloadbalancing.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "elasticloadbalancing.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticmapreduce" : { + "defaults" : { + "variants" : [ { + "hostname" : "elasticmapreduce.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "elasticmapreduce.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "elasticmapreduce.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "elasticmapreduce.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "email" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "email-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "email-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "email-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "email-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "emr-containers" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "emr-containers.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : 
"emr-containers.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "emr-containers.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "emr-containers.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "emr-serverless" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "emr-serverless.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "emr-serverless.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "emr-serverless.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "emr-serverless.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "es" : { + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "aos.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "aos.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + }, { + "hostname" : "es-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "es-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "events" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "events.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "events.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "events.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "events.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "firehose" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "firehose-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "firehose-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "firehose-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "fms" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "fms-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : 
true, + "hostname" : "fms-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "fms-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "fms-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "fsx" : { + "endpoints" : { + "fips-prod-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com" + }, + "fips-prod-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com" + }, + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com" + }, + "prod-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "prod-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "fsx-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "fsx-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "geo" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "geo-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "geo-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "glacier" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "glacier.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "glacier.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "glacier.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "glacier.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "glue" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "glue-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "glue-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "glue-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "glue.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "glue-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : 
"glue-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "glue.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "greengrass" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "dataplane-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "greengrass-ats.iot.us-gov-east-1.amazonaws.com" + }, + "dataplane-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "greengrass-ats.iot.us-gov-west-1.amazonaws.com" + }, + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "greengrass.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "greengrass.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "greengrass.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "greengrass.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + }, + "isRegionalized" : true + }, + "guardduty" : { + "defaults" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "guardduty.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "guardduty.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "guardduty.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "guardduty.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "guardduty.us-gov-west-1.amazonaws.com" + } + }, + "isRegionalized" : true + }, + "health" : { + "defaults" : { + "protocols" : [ "https" ], + "sslCommonName" : "health.us-gov-west-1.amazonaws.com" + }, + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "global.health.us-gov.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "health-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "health-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iam" : { + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "iam.us-gov.amazonaws.com", + "variants" : [ { + "hostname" : "iam.us-gov.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "aws-us-gov-global-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "iam.us-gov.amazonaws.com" + }, + "iam-govcloud" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "iam.us-gov.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "iam-govcloud-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "iam.us-gov.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-us-gov-global" + }, + 
"identitystore" : { + "defaults" : { + "variants" : [ { + "hostname" : "identitystore.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "identitystore.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "identitystore.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "identitystore.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "identitystore.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ingest.timestream" : { + "endpoints" : { + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "ingest.timestream.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ingest.timestream.us-gov-west-1.amazonaws.com" + } + } + }, + "inspector" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "inspector-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "inspector-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "inspector-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "inspector-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "inspector2" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "inspector2-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "inspector2-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "internetmonitor" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "internetmonitor.us-gov-east-1.api.aws" + }, + "us-gov-west-1" : { + "hostname" : "internetmonitor.us-gov-west-1.api.aws" + } + } + }, + "iot" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "deprecated" : true, + "hostname" : "iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "deprecated" : true, + "hostname" : "iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotevents" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : 
"iotevents-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "iotevents-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "ioteventsdata" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "data.iotevents-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "data.iotevents.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "data.iotevents-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotsecuredtunneling" : { + "defaults" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "api.tunneling.iot-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iotsitewise" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "iotsitewise-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "iotsitewise-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "iottwinmaker" : { + "endpoints" : { + "api-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "api.iottwinmaker.us-gov-west-1.amazonaws.com" + }, + "data-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "data.iottwinmaker.us-gov-west-1.amazonaws.com" + }, + "fips-api-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "api.iottwinmaker-fips.us-gov-west-1.amazonaws.com" + }, + "fips-data-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "data.iottwinmaker-fips.us-gov-west-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "iottwinmaker-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "iottwinmaker-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kafka" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "kafka.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "kafka.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "kafka.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "kafka.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "kafka.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + 
"credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kafka.us-gov-west-1.amazonaws.com" + } + } + }, + "kendra" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kendra-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "kendra-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kendra-ranking" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "kendra-ranking.us-gov-east-1.api.aws" + }, + "us-gov-west-1" : { + "hostname" : "kendra-ranking.us-gov-west-1.api.aws" + } + } + }, + "kinesis" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "kinesis.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kinesis.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "kinesis.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "kinesis.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "kinesis.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "kinesis.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kinesisanalytics" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kinesisanalytics-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "kinesisanalytics-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kinesisvideo" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "kinesisvideo-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "kinesisvideo-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "kms" : { + "endpoints" : { + "ProdFips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + 
"variants" : [ { + "hostname" : "kms-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "lakeformation" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "lakeformation-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lakeformation-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "lakeformation.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "lakeformation-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lakeformation-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "lakeformation.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "lambda" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "lambda-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "lambda-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "lambda-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "lambda.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "license-manager" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "license-manager-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "license-manager-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "license-manager-linux-subscriptions" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "license-manager-user-subscriptions" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "logs" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : 
"logs.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "logs.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "logs.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "logs.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "m2" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "deprecated" : true + }, + "fips-us-gov-west-1" : { + "deprecated" : true + }, + "us-gov-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "managedblockchain" : { + "endpoints" : { + "us-gov-west-1" : { } + } + }, + "mediaconvert" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "mediaconvert.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "mediaconvert.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "meetings-chime" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "meetings-chime-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "meetings-chime-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "meetings-chime-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "meetings-chime-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "metering.marketplace" : { + "defaults" : { + "credentialScope" : { + "service" : "aws-marketplace" + } + }, + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "metrics.sagemaker" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "mgn" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "mgn-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "mgn-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "mgn-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "models-v2-lex" : { + "endpoints" : { + "us-gov-west-1" : { } + } + }, + "models.lex" : { + "defaults" : { + "credentialScope" : { + "service" : "lex" + }, + "variants" : [ { + "hostname" : "models-fips.lex.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "models-fips.lex.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "models-fips.lex.us-gov-west-1.amazonaws.com" + } + } + }, + "monitoring" : { + "defaults" : { + "variants" : [ { + "hostname" : "monitoring.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + 
"fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "monitoring.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "monitoring.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "monitoring.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "monitoring.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "mq" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "mq-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "mq-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "mq-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "neptune" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "rds.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "rds.us-gov-west-1.amazonaws.com" + } + } + }, + "network-firewall" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "network-firewall-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "network-firewall-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "networkmanager" : { + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "networkmanager.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "networkmanager.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "networkmanager.us-gov-west-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-us-gov-global" + }, + "oam" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "oidc" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "oidc.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "oidc.us-gov-west-1.amazonaws.com" + } + } + }, + "organizations" : { + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "organizations.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "organizations.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-us-gov-global" : { + "credentialScope" : { + "region" : 
"us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "organizations.us-gov-west-1.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-us-gov-global" + }, + "outposts" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "outposts.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "outposts.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "outposts.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "outposts.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "participant.connect" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "participant.connect.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "participant.connect.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "pi" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "pi-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "pi-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "pi-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "pi.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "pinpoint" : { + "defaults" : { + "credentialScope" : { + "service" : "mobiletargeting" + } + }, + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "pinpoint-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "pinpoint.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "pinpoint-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "polly" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "polly-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "polly-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "portal.sso" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "portal.sso.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "portal.sso.us-gov-west-1.amazonaws.com" + } + } + }, + "qbusiness" : { + "defaults" : { + "dnsSuffix" : "api.aws", + "variants" : [ { + "dnsSuffix" : "api.aws", + "hostname" : 
"{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "hostname" : "qbusiness.us-gov-east-1.api.aws" + }, + "us-gov-west-1" : { + "hostname" : "qbusiness.us-gov-west-1.api.aws" + } + } + }, + "quicksight" : { + "endpoints" : { + "api" : { }, + "us-gov-west-1" : { } + } + }, + "ram" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "ram.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "ram.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ram.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "ram.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "ram.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ram.us-gov-west-1.amazonaws.com" + } + } + }, + "rbin" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "rds" : { + "defaults" : { + "variants" : [ { + "hostname" : "rds.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "rds.us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-gov-east-1.amazonaws.com" + }, + "rds.us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rds.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "rds.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "rds.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rds.us-gov-west-1.amazonaws.com" + } + } + }, + "redshift" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "redshift.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "redshift.us-gov-west-1.amazonaws.com" + } + } + }, + "rekognition" : { + "endpoints" : { + "rekognition-fips.us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com" + }, + "rekognition.us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : 
"rekognition-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rekognition-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "resiliencehub" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "resiliencehub-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "resiliencehub-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "resiliencehub-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "resiliencehub-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "resource-groups" : { + "defaults" : { + "variants" : [ { + "hostname" : "resource-groups.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "resource-groups.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "resource-groups.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "resource-groups.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "resource-groups.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "robomaker" : { + "endpoints" : { + "us-gov-west-1" : { } + } + }, + "rolesanywhere" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "rolesanywhere-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "rolesanywhere-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "rolesanywhere-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "route53" : { + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "route53.us-gov.amazonaws.com", + "variants" : [ { + "hostname" : "route53.us-gov.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "fips-aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "route53.us-gov.amazonaws.com" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-us-gov-global" + }, + "route53resolver" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "route53resolver.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "deprecated" : true, + "hostname" : "route53resolver.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "route53resolver.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } 
] + }, + "us-gov-west-1-fips" : { + "deprecated" : true, + "hostname" : "route53resolver.us-gov-west-1.amazonaws.com" + } + } + }, + "runtime-v2-lex" : { + "endpoints" : { + "us-gov-west-1" : { } + } + }, + "runtime.lex" : { + "defaults" : { + "credentialScope" : { + "service" : "lex" + }, + "variants" : [ { + "hostname" : "runtime-fips.lex.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "runtime-fips.lex.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "runtime-fips.lex.us-gov-west-1.amazonaws.com" + } + } + }, + "runtime.sagemaker" : { + "defaults" : { + "variants" : [ { + "hostname" : "runtime.sagemaker.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "runtime.sagemaker.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "runtime.sagemaker.us-gov-west-1.amazonaws.com" + } + } + }, + "s3" : { + "defaults" : { + "signatureVersions" : [ "s3", "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "hostname" : "s3.us-gov-east-1.amazonaws.com", + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "s3-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-gov-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "hostname" : "s3.us-gov-west-1.amazonaws.com", + "protocols" : [ "http", "https" ], + "variants" : [ { + "hostname" : "s3-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3.dualstack.us-gov-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "s3-control" : { + "defaults" : { + "protocols" : [ "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}-fips.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack", "fips" ] + }, { + "dnsSuffix" : "amazonaws.com", + "hostname" : "{service}.dualstack.{region}.{dnsSuffix}", + "tags" : [ "dualstack" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "s3-control.us-gov-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-gov-east-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-gov-east-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" 
: "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-gov-east-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "s3-control.us-gov-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-gov-west-1.amazonaws.com", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-gov-west-1.amazonaws.com", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-gov-west-1.amazonaws.com", + "signatureVersions" : [ "s3v4" ] + } + } + }, + "s3-outposts" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "deprecated" : true + }, + "fips-us-gov-west-1" : { + "deprecated" : true + }, + "us-gov-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + } + } + }, + "secretsmanager" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "deprecated" : true + }, + "us-gov-west-1" : { + "variants" : [ { + "tags" : [ "dualstack" ] + }, { + "tags" : [ "dualstack", "fips" ] + }, { + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "deprecated" : true + } + } + }, + "securityhub" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "securityhub-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "securityhub-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "securitylake" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "securitylake.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "securitylake.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "securitylake.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "securitylake.us-gov-west-1.amazonaws.com" + } + } + }, + "serverlessrepo" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-gov-east-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "serverlessrepo.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { 
+ "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "serverlessrepo.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "serverlessrepo.us-gov-west-1.amazonaws.com" + } + } + }, + "servicecatalog" : { + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "servicecatalog-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "servicecatalog-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "servicecatalog-appregistry" : { + "defaults" : { + "variants" : [ { + "hostname" : "servicecatalog-appregistry.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "servicediscovery" : { + "endpoints" : { + "servicediscovery" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "servicediscovery-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "servicediscovery-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "servicediscovery.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "servicediscovery-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "servicequotas" : { + "defaults" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "servicequotas.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "servicequotas.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "servicequotas.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "servicequotas.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "servicequotas.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } 
+ }, + "signer" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "signer-fips.us-gov-west-1.amazonaws.com" + }, + "fips-verification-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "verification.signer-fips.us-gov-east-1.amazonaws.com" + }, + "fips-verification-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "verification.signer-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "signer-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "signer-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "verification-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "verification.signer.us-gov-east-1.amazonaws.com" + }, + "verification-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "verification.signer.us-gov-west-1.amazonaws.com" + } + } + }, + "simspaceweaver" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "simspaceweaver.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "simspaceweaver.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "simspaceweaver.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "simspaceweaver.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sms" : { + "endpoints" : { + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "sms-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "sms-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sms-voice" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "sms-voice-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "sms-voice-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "snowball" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "snowball-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "snowball-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : 
"snowball-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sns" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "sns.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "sns.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "sns.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "sns.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sqs" : { + "defaults" : { + "variants" : [ { + "hostname" : "sqs.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "sqs.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "sqs.us-gov-west-1.amazonaws.com", + "protocols" : [ "http", "https" ], + "sslCommonName" : "{region}.queue.{dnsSuffix}" + } + } + }, + "ssm" : { + "defaults" : { + "variants" : [ { + "hostname" : "ssm.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "ssm.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "ssm.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "ssm.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "ssm.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "sso" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "sso.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "sso.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "sso.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "sso.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "sso.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "sso.us-gov-west-1.amazonaws.com" + } + } + }, + "states" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "states-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "states.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "states-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "states.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "storagegateway" : { + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + 
"hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "streams.dynamodb" : { + "defaults" : { + "credentialScope" : { + "service" : "dynamodb" + }, + "variants" : [ { + "hostname" : "streams.dynamodb.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "streams.dynamodb.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "streams.dynamodb.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "streams.dynamodb.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "streams.dynamodb.us-gov-west-1.amazonaws.com" + } + } + }, + "sts" : { + "defaults" : { + "variants" : [ { + "hostname" : "sts.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "sts.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "sts.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "sts.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "sts.us-gov-west-1.amazonaws.com" + } + } + }, + "support" : { + "endpoints" : { + "aws-us-gov-global" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "support.us-gov-west-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "support.us-gov-west-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "support.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + }, + "partitionEndpoint" : "aws-us-gov-global" + }, + "swf" : { + "endpoints" : { + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "swf.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "swf.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-east-1-fips" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "swf.us-gov-east-1.amazonaws.com" + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "swf.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "swf.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + 
"us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "swf.us-gov-west-1.amazonaws.com" + } + } + }, + "synthetics" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "synthetics-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "synthetics-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "tagging" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "textract" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "textract-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "textract-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-gov-east-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-gov-east-1.api.aws", + "tags" : [ "dualstack" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "textract-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + }, { + "hostname" : "textract-fips.us-gov-west-1.api.aws", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "textract.us-gov-west-1.api.aws", + "tags" : [ "dualstack" ] + } ] + } + } + }, + "transcribe" : { + "defaults" : { + "protocols" : [ "https" ], + "variants" : [ { + "hostname" : "fips.transcribe.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "fips.transcribe.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "fips.transcribe.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "transcribestreaming" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "transfer" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "transfer-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "transfer-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "transfer-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "translate" : { + "defaults" : { + 
"protocols" : [ "https" ] + }, + "endpoints" : { + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "translate-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1-fips" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "translate-fips.us-gov-west-1.amazonaws.com" + } + } + }, + "verifiedpermissions" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "verifiedpermissions-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "verifiedpermissions-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "waf-regional" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "waf-regional-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "waf-regional.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "waf-regional.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "waf-regional-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "wafv2" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "wafv2-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "hostname" : "wafv2.us-gov-east-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "hostname" : "wafv2.us-gov-west-1.amazonaws.com", + "variants" : [ { + "hostname" : "wafv2-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "wellarchitected" : { + "endpoints" : { + "us-gov-east-1" : { }, + "us-gov-west-1" : { } + } + }, + "workspaces" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "workspaces-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "workspaces-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "workspaces-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : 
"workspaces-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + }, + "xray" : { + "endpoints" : { + "fips-us-gov-east-1" : { + "credentialScope" : { + "region" : "us-gov-east-1" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-gov-east-1.amazonaws.com" + }, + "fips-us-gov-west-1" : { + "credentialScope" : { + "region" : "us-gov-west-1" + }, + "deprecated" : true, + "hostname" : "xray-fips.us-gov-west-1.amazonaws.com" + }, + "us-gov-east-1" : { + "variants" : [ { + "hostname" : "xray-fips.us-gov-east-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + }, + "us-gov-west-1" : { + "variants" : [ { + "hostname" : "xray-fips.us-gov-west-1.amazonaws.com", + "tags" : [ "fips" ] + } ] + } + } + } + } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "c2s.ic.gov", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "dnsSuffix" : "c2s.ic.gov", + "partition" : "aws-iso", + "partitionName" : "AWS ISO (US)", + "regionRegex" : "^us\\-iso\\-\\w+\\-\\d+$", + "regions" : { + "us-iso-east-1" : { + "description" : "US ISO East" + }, + "us-iso-west-1" : { + "description" : "US ISO WEST" + } + }, + "services" : { + "api.ecr" : { + "endpoints" : { + "us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "api.ecr.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "hostname" : "api.ecr.us-iso-west-1.c2s.ic.gov" + } + } + }, + "api.pricing" : { + "defaults" : { + "credentialScope" : { + "service" : "pricing" + } + }, + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "api.sagemaker" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "apigateway" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "appconfig" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "appconfigdata" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "application-autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "arc-zonal-shift" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "athena" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "autoscaling" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "cloudcontrolapi" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "cloudformation" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "cloudtrail" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "codedeploy" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "comprehend" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "config" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "datapipeline" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "datasync" : { + "endpoints" : { + "fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "datasync-fips.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : 
true, + "hostname" : "datasync-fips.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "datasync-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "directconnect" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "dlm" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "dms" : { + "defaults" : { + "variants" : [ { + "hostname" : "dms.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "dms" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "dms.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "dms-fips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "dms.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "dms.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-east-1-fips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "dms.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "dms.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1-fips" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "dms.us-iso-west-1.c2s.ic.gov" + } + } + }, + "ds" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "dynamodb" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "ebs" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "ec2" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "ecs" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "eks" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "elasticache" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "elasticfilesystem" : { + "endpoints" : { + "fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "elasticloadbalancing" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "elasticmapreduce" : { + "endpoints" : { + "fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "protocols" : 
[ "https" ], + "variants" : [ { + "hostname" : "elasticmapreduce.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "es" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "events" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "firehose" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "fsx" : { + "endpoints" : { + "fips-prod-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov" + }, + "prod-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "fsx-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "glacier" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "glue" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "guardduty" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { } + }, + "isRegionalized" : true + }, + "health" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "iam" : { + "endpoints" : { + "aws-iso-global" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "iam.us-iso-east-1.c2s.ic.gov" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-iso-global" + }, + "kinesis" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "kms" : { + "endpoints" : { + "ProdFips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-east-1-fips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1-fips" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-iso-west-1.c2s.ic.gov" + } + } + }, + "lambda" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "license-manager" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "logs" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "medialive" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "mediapackage" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "metrics.sagemaker" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "monitoring" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "outposts" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "ram" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "rbin" : { + "endpoints" : { + 
"fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "rds" : { + "endpoints" : { + "rds.us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-iso-east-1.c2s.ic.gov" + }, + "rds.us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "rds.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "variants" : [ { + "hostname" : "rds.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-east-1-fips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "rds.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1-fips" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "rds.us-iso-west-1.c2s.ic.gov" + } + } + }, + "redshift" : { + "endpoints" : { + "us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "redshift.us-iso-east-1.c2s.ic.gov" + }, + "us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "hostname" : "redshift.us-iso-west-1.c2s.ic.gov" + } + } + }, + "resource-groups" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "route53" : { + "endpoints" : { + "aws-iso-global" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "route53.c2s.ic.gov" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-iso-global" + }, + "route53resolver" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "runtime.sagemaker" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "s3" : { + "defaults" : { + "signatureVersions" : [ "s3v4" ] + }, + "endpoints" : { + "fips-us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-iso-east-1.c2s.ic.gov" + }, + "fips-us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-iso-west-1.c2s.ic.gov" + }, + "us-iso-east-1" : { + "protocols" : [ "http", "https" ], + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-iso-east-1.c2s.ic.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + }, + "us-iso-west-1" : { + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-iso-west-1.c2s.ic.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "s3-control" : { + "defaults" : { + "protocols" : [ "https" ], + "signatureVersions" : [ "s3v4" ] + }, + "endpoints" : { + "us-iso-east-1" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "s3-control.us-iso-east-1.c2s.ic.gov", + 
"signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-iso-east-1.c2s.ic.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-iso-east-1.c2s.ic.gov", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-iso-east-1.c2s.ic.gov", + "tags" : [ "dualstack" ] + } ] + }, + "us-iso-east-1-fips" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-iso-east-1.c2s.ic.gov", + "signatureVersions" : [ "s3v4" ] + }, + "us-iso-west-1" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "hostname" : "s3-control.us-iso-west-1.c2s.ic.gov", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-iso-west-1.c2s.ic.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-iso-west-1.c2s.ic.gov", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-iso-west-1.c2s.ic.gov", + "tags" : [ "dualstack" ] + } ] + }, + "us-iso-west-1-fips" : { + "credentialScope" : { + "region" : "us-iso-west-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-iso-west-1.c2s.ic.gov", + "signatureVersions" : [ "s3v4" ] + } + } + }, + "s3-outposts" : { + "endpoints" : { + "fips-us-iso-east-1" : { + "deprecated" : true + }, + "us-iso-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "secretsmanager" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "snowball" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "sns" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "sqs" : { + "endpoints" : { + "us-iso-east-1" : { + "protocols" : [ "http", "https" ] + }, + "us-iso-west-1" : { } + } + }, + "ssm" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "states" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "streams.dynamodb" : { + "defaults" : { + "credentialScope" : { + "service" : "dynamodb" + } + }, + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "sts" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "support" : { + "endpoints" : { + "aws-iso-global" : { + "credentialScope" : { + "region" : "us-iso-east-1" + }, + "hostname" : "support.us-iso-east-1.c2s.ic.gov" + } + }, + "partitionEndpoint" : "aws-iso-global" + }, + "swf" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "synthetics" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "tagging" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + }, + "textract" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "transcribe" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "transcribestreaming" : { + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "translate" : { + "defaults" : { + "protocols" : [ "https" ] + }, + "endpoints" : { + "us-iso-east-1" : { } + } + }, + "workspaces" : { + "endpoints" : { + "us-iso-east-1" : { }, + "us-iso-west-1" : { } + } + } + } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "sc2s.sgov.gov", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + 
"tags" : [ "fips" ] + } ] + }, + "dnsSuffix" : "sc2s.sgov.gov", + "partition" : "aws-iso-b", + "partitionName" : "AWS ISOB (US)", + "regionRegex" : "^us\\-isob\\-\\w+\\-\\d+$", + "regions" : { + "us-isob-east-1" : { + "description" : "US ISOB East (Ohio)" + } + }, + "services" : { + "api.ecr" : { + "endpoints" : { + "us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "api.ecr.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "api.pricing" : { + "defaults" : { + "credentialScope" : { + "service" : "pricing" + } + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "api.sagemaker" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "apigateway" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "appconfig" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "appconfigdata" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "application-autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "arc-zonal-shift" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "autoscaling" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "cloudcontrolapi" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "cloudformation" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "cloudtrail" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "codedeploy" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "config" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "directconnect" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "dlm" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "dms" : { + "defaults" : { + "variants" : [ { + "hostname" : "dms.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "endpoints" : { + "dms" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "variants" : [ { + "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + }, + "dms-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + }, + "us-isob-east-1-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "dms.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "ds" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "dynamodb" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "ebs" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "ec2" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "ecs" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "eks" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "elasticache" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "elasticfilesystem" : { + "endpoints" : { + "fips-us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "elasticfilesystem-fips.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "elasticfilesystem-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + 
"elasticloadbalancing" : { + "endpoints" : { + "us-isob-east-1" : { + "protocols" : [ "https" ] + } + } + }, + "elasticmapreduce" : { + "endpoints" : { + "fips-us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "elasticmapreduce.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "elasticmapreduce.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "es" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "events" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "firehose" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "glacier" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "health" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "iam" : { + "endpoints" : { + "aws-iso-b-global" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "iam.us-isob-east-1.sc2s.sgov.gov" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-iso-b-global" + }, + "kinesis" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "kms" : { + "endpoints" : { + "ProdFips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + }, + "us-isob-east-1-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "kms-fips.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "lambda" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "license-manager" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "logs" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "medialive" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "mediapackage" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "metering.marketplace" : { + "defaults" : { + "credentialScope" : { + "service" : "aws-marketplace" + } + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "metrics.sagemaker" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "monitoring" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "outposts" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "ram" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "rbin" : { + "endpoints" : { + "fips-us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "rbin-fips.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "rbin-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "rds" : { + "endpoints" : { + "rds.us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + }, + "us-isob-east-1-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "rds.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "redshift" : { + "endpoints" : { + "us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "redshift.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "resource-groups" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "route53" : { + "endpoints" : { + 
"aws-iso-b-global" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "route53.sc2s.sgov.gov" + } + }, + "isRegionalized" : false, + "partitionEndpoint" : "aws-iso-b-global" + }, + "route53resolver" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "runtime.sagemaker" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "s3" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "signatureVersions" : [ "s3v4" ] + }, + "endpoints" : { + "fips-us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "s3-fips.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "s3-fips.dualstack.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + } + } + }, + "s3-control" : { + "defaults" : { + "protocols" : [ "https" ], + "signatureVersions" : [ "s3v4" ] + }, + "endpoints" : { + "us-isob-east-1" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "s3-control.us-isob-east-1.sc2s.sgov.gov", + "signatureVersions" : [ "s3v4" ], + "variants" : [ { + "hostname" : "s3-control-fips.dualstack.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "dualstack", "fips" ] + }, { + "hostname" : "s3-control-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + }, { + "hostname" : "s3-control.dualstack.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "dualstack" ] + } ] + }, + "us-isob-east-1-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "s3-control-fips.us-isob-east-1.sc2s.sgov.gov", + "signatureVersions" : [ "s3v4" ] + } + } + }, + "s3-outposts" : { + "endpoints" : { + "fips-us-isob-east-1" : { + "deprecated" : true + }, + "us-isob-east-1" : { + "variants" : [ { + "tags" : [ "fips" ] + } ] + } + } + }, + "secretsmanager" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "snowball" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "sns" : { + "defaults" : { + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "sqs" : { + "defaults" : { + "protocols" : [ "http", "https" ], + "sslCommonName" : "{region}.queue.{dnsSuffix}" + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "ssm" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "states" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "storagegateway" : { + "endpoints" : { + "fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov" + }, + "us-isob-east-1" : { + "variants" : [ { + "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov", + "tags" : [ "fips" ] + } ] + }, + "us-isob-east-1-fips" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "deprecated" : true, + "hostname" : "storagegateway-fips.us-isob-east-1.sc2s.sgov.gov" + } + } + }, + "streams.dynamodb" : { + "defaults" : { + "credentialScope" : { + "service" : "dynamodb" + }, + "protocols" : [ "http", "https" ] + }, + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "sts" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "support" : { + "endpoints" : { + "aws-iso-b-global" : { + "credentialScope" : { + "region" : "us-isob-east-1" + }, + "hostname" : "support.us-isob-east-1.sc2s.sgov.gov" + } + }, + "partitionEndpoint" : "aws-iso-b-global" + }, + "swf" : { + "endpoints" : { + 
"us-isob-east-1" : { } + } + }, + "synthetics" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "tagging" : { + "endpoints" : { + "us-isob-east-1" : { } + } + }, + "workspaces" : { + "endpoints" : { + "us-isob-east-1" : { } + } + } + } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "cloud.adc-e.uk", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "dnsSuffix" : "cloud.adc-e.uk", + "partition" : "aws-iso-e", + "partitionName" : "AWS ISOE (Europe)", + "regionRegex" : "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions" : { + "eu-isoe-west-1" : { + "description" : "EU ISOE West" + } + }, + "services" : { } + }, { + "defaults" : { + "hostname" : "{service}.{region}.{dnsSuffix}", + "protocols" : [ "https" ], + "signatureVersions" : [ "v4" ], + "variants" : [ { + "dnsSuffix" : "csp.hci.ic.gov", + "hostname" : "{service}-fips.{region}.{dnsSuffix}", + "tags" : [ "fips" ] + } ] + }, + "dnsSuffix" : "csp.hci.ic.gov", + "partition" : "aws-iso-f", + "partitionName" : "AWS ISOF", + "regionRegex" : "^us\\-isof\\-\\w+\\-\\d+$", + "regions" : { }, + "services" : { } + } ], + "version" : 3 +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/partitions.json b/venv/lib/python3.10/site-packages/botocore/data/partitions.json new file mode 100644 index 0000000000000000000000000000000000000000..7a28569c3decf373f980b5eb8e1a4afa092185d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/partitions.json @@ -0,0 +1,220 @@ +{ + "partitions" : [ { + "id" : "aws", + "outputs" : { + "dnsSuffix" : "amazonaws.com", + "dualStackDnsSuffix" : "api.aws", + "implicitGlobalRegion" : "us-east-1", + "name" : "aws", + "supportsDualStack" : true, + "supportsFIPS" : true + }, + "regionRegex" : "^(us|eu|ap|sa|ca|me|af|il)\\-\\w+\\-\\d+$", + "regions" : { + "af-south-1" : { + "description" : "Africa (Cape Town)" + }, + "ap-east-1" : { + "description" : "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1" : { + "description" : "Asia Pacific (Tokyo)" + }, + "ap-northeast-2" : { + "description" : "Asia Pacific (Seoul)" + }, + "ap-northeast-3" : { + "description" : "Asia Pacific (Osaka)" + }, + "ap-south-1" : { + "description" : "Asia Pacific (Mumbai)" + }, + "ap-south-2" : { + "description" : "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1" : { + "description" : "Asia Pacific (Singapore)" + }, + "ap-southeast-2" : { + "description" : "Asia Pacific (Sydney)" + }, + "ap-southeast-3" : { + "description" : "Asia Pacific (Jakarta)" + }, + "ap-southeast-4" : { + "description" : "Asia Pacific (Melbourne)" + }, + "aws-global" : { + "description" : "AWS Standard global region" + }, + "ca-central-1" : { + "description" : "Canada (Central)" + }, + "ca-west-1" : { + "description" : "Canada West (Calgary)" + }, + "eu-central-1" : { + "description" : "Europe (Frankfurt)" + }, + "eu-central-2" : { + "description" : "Europe (Zurich)" + }, + "eu-north-1" : { + "description" : "Europe (Stockholm)" + }, + "eu-south-1" : { + "description" : "Europe (Milan)" + }, + "eu-south-2" : { + "description" : "Europe (Spain)" + }, + "eu-west-1" : { + "description" : "Europe (Ireland)" + }, + "eu-west-2" : { + "description" : "Europe (London)" + }, + "eu-west-3" : { + "description" : "Europe (Paris)" + }, + "il-central-1" : { + "description" : "Israel (Tel Aviv)" + }, + "me-central-1" : { + "description" : "Middle East (UAE)" + }, + "me-south-1" : { + 
"description" : "Middle East (Bahrain)" + }, + "sa-east-1" : { + "description" : "South America (Sao Paulo)" + }, + "us-east-1" : { + "description" : "US East (N. Virginia)" + }, + "us-east-2" : { + "description" : "US East (Ohio)" + }, + "us-west-1" : { + "description" : "US West (N. California)" + }, + "us-west-2" : { + "description" : "US West (Oregon)" + } + } + }, { + "id" : "aws-cn", + "outputs" : { + "dnsSuffix" : "amazonaws.com.cn", + "dualStackDnsSuffix" : "api.amazonwebservices.com.cn", + "implicitGlobalRegion" : "cn-northwest-1", + "name" : "aws-cn", + "supportsDualStack" : true, + "supportsFIPS" : true + }, + "regionRegex" : "^cn\\-\\w+\\-\\d+$", + "regions" : { + "aws-cn-global" : { + "description" : "AWS China global region" + }, + "cn-north-1" : { + "description" : "China (Beijing)" + }, + "cn-northwest-1" : { + "description" : "China (Ningxia)" + } + } + }, { + "id" : "aws-us-gov", + "outputs" : { + "dnsSuffix" : "amazonaws.com", + "dualStackDnsSuffix" : "api.aws", + "implicitGlobalRegion" : "us-gov-west-1", + "name" : "aws-us-gov", + "supportsDualStack" : true, + "supportsFIPS" : true + }, + "regionRegex" : "^us\\-gov\\-\\w+\\-\\d+$", + "regions" : { + "aws-us-gov-global" : { + "description" : "AWS GovCloud (US) global region" + }, + "us-gov-east-1" : { + "description" : "AWS GovCloud (US-East)" + }, + "us-gov-west-1" : { + "description" : "AWS GovCloud (US-West)" + } + } + }, { + "id" : "aws-iso", + "outputs" : { + "dnsSuffix" : "c2s.ic.gov", + "dualStackDnsSuffix" : "c2s.ic.gov", + "implicitGlobalRegion" : "us-iso-east-1", + "name" : "aws-iso", + "supportsDualStack" : false, + "supportsFIPS" : true + }, + "regionRegex" : "^us\\-iso\\-\\w+\\-\\d+$", + "regions" : { + "aws-iso-global" : { + "description" : "AWS ISO (US) global region" + }, + "us-iso-east-1" : { + "description" : "US ISO East" + }, + "us-iso-west-1" : { + "description" : "US ISO WEST" + } + } + }, { + "id" : "aws-iso-b", + "outputs" : { + "dnsSuffix" : "sc2s.sgov.gov", + "dualStackDnsSuffix" : "sc2s.sgov.gov", + "implicitGlobalRegion" : "us-isob-east-1", + "name" : "aws-iso-b", + "supportsDualStack" : false, + "supportsFIPS" : true + }, + "regionRegex" : "^us\\-isob\\-\\w+\\-\\d+$", + "regions" : { + "aws-iso-b-global" : { + "description" : "AWS ISOB (US) global region" + }, + "us-isob-east-1" : { + "description" : "US ISOB East (Ohio)" + } + } + }, { + "id" : "aws-iso-e", + "outputs" : { + "dnsSuffix" : "cloud.adc-e.uk", + "dualStackDnsSuffix" : "cloud.adc-e.uk", + "implicitGlobalRegion" : "eu-isoe-west-1", + "name" : "aws-iso-e", + "supportsDualStack" : false, + "supportsFIPS" : true + }, + "regionRegex" : "^eu\\-isoe\\-\\w+\\-\\d+$", + "regions" : { + "eu-isoe-west-1" : { + "description" : "EU ISOE West" + } + } + }, { + "id" : "aws-iso-f", + "outputs" : { + "dnsSuffix" : "csp.hci.ic.gov", + "dualStackDnsSuffix" : "csp.hci.ic.gov", + "implicitGlobalRegion" : "us-isof-south-1", + "name" : "aws-iso-f", + "supportsDualStack" : false, + "supportsFIPS" : true + }, + "regionRegex" : "^us\\-isof\\-\\w+\\-\\d+$", + "regions" : { } + } ], + "version" : "1.1" +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/data/sdk-default-configuration.json b/venv/lib/python3.10/site-packages/botocore/data/sdk-default-configuration.json new file mode 100644 index 0000000000000000000000000000000000000000..3db13b26cc5e9d56882479c603a2a7cbd8721cb1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/data/sdk-default-configuration.json @@ -0,0 +1,55 @@ +{ + "version": 1, + "base": { + 
"retryMode": "standard", + "stsRegionalEndpoints": "regional", + "s3UsEast1RegionalEndpoints": "regional", + "connectTimeoutInMillis": 1100, + "tlsNegotiationTimeoutInMillis": 1100 + }, + "modes": { + "standard": { + "connectTimeoutInMillis": { + "override": 3100 + }, + "tlsNegotiationTimeoutInMillis": { + "override": 3100 + } + }, + "in-region": { + }, + "cross-region": { + "connectTimeoutInMillis": { + "override": 3100 + }, + "tlsNegotiationTimeoutInMillis": { + "override": 3100 + } + }, + "mobile": { + "connectTimeoutInMillis": { + "override": 30000 + }, + "tlsNegotiationTimeoutInMillis": { + "override": 30000 + } + } + }, + "documentation": { + "modes": { + "standard": "
The STANDARD mode provides the latest recommended default values that should be safe to run in most scenarios. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", + "in-region": "The IN_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services from within the same AWS region. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", + "cross-region": "The CROSS_REGION mode builds on the standard mode and includes optimization tailored for applications which call AWS services in a different region. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", + "mobile": "The MOBILE mode builds on the standard mode and includes optimization tailored for mobile applications. Note that the default values vended from this mode might change as best practices may evolve. As a result, it is encouraged to perform tests when upgrading the SDK", + "auto": "The AUTO mode is an experimental mode that builds on the standard mode. The SDK will attempt to discover the execution environment to determine the appropriate settings automatically. Note that the auto detection is heuristics-based and does not guarantee 100% accuracy. STANDARD mode will be used if the execution environment cannot be determined. The auto detection might query the EC2 Instance Metadata service, which might introduce latency. Therefore we recommend choosing an explicit defaults_mode instead if startup latency is critical to your application", + "legacy": "The LEGACY mode provides default settings that vary per SDK and were used prior to the establishment of defaults_mode" }, + "configuration": { + "retryMode": "A retry mode specifies how the SDK attempts retries. See Retry Mode", + "stsRegionalEndpoints": "Specifies how the SDK determines the AWS service endpoint that it uses to talk to the AWS Security Token Service (AWS STS). See Setting STS Regional endpoints", + "s3UsEast1RegionalEndpoints": "Specifies how the SDK determines the AWS service endpoint that it uses to talk to Amazon S3 for the us-east-1 region", + "connectTimeoutInMillis": "The amount of time after making an initial connection attempt on a socket, where if the client does not receive a completion of the connect handshake, the client gives up and fails the operation", + "tlsNegotiationTimeoutInMillis": "The maximum amount of time that a TLS handshake is allowed to take from the time the CLIENT HELLO message is sent to the time the client and server have fully negotiated ciphers and exchanged keys
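The base values and per-mode overrides above compose in a straightforward way: a mode's "override" replaces the corresponding base value, and keys without an override keep the base value. A small illustrative sketch of that composition; resolve_defaults is hypothetical, not botocore's resolver:

base = {
    "retryMode": "standard",
    "connectTimeoutInMillis": 1100,
    "tlsNegotiationTimeoutInMillis": 1100,
}
modes = {
    "standard": {"connectTimeoutInMillis": {"override": 3100},
                 "tlsNegotiationTimeoutInMillis": {"override": 3100}},
    "in-region": {},
}

def resolve_defaults(mode):
    # Start from the base values, then apply any per-mode overrides.
    resolved = dict(base)
    for key, modifier in modes.get(mode, {}).items():
        if "override" in modifier:
            resolved[key] = modifier["override"]
    return resolved

assert resolve_defaults("standard")["connectTimeoutInMillis"] == 3100
assert resolve_defaults("in-region")["connectTimeoutInMillis"] == 1100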
" + } + } +} \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/botocore/discovery.py b/venv/lib/python3.10/site-packages/botocore/discovery.py new file mode 100644 index 0000000000000000000000000000000000000000..95b51b81baff31ad0132cf8aa57e451f68a576a7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/discovery.py @@ -0,0 +1,281 @@ +# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import logging +import time +import weakref + +from botocore import xform_name +from botocore.exceptions import BotoCoreError, ConnectionError, HTTPClientError +from botocore.model import OperationNotFoundError +from botocore.utils import CachedProperty + +logger = logging.getLogger(__name__) + + +class EndpointDiscoveryException(BotoCoreError): + pass + + +class EndpointDiscoveryRequired(EndpointDiscoveryException): + """Endpoint Discovery is disabled but is required for this operation.""" + + fmt = 'Endpoint Discovery is not enabled but this operation requires it.' + + +class EndpointDiscoveryRefreshFailed(EndpointDiscoveryException): + """Endpoint Discovery failed to the refresh the known endpoints.""" + + fmt = 'Endpoint Discovery failed to refresh the required endpoints.' + + +def block_endpoint_discovery_required_operations(model, **kwargs): + endpoint_discovery = model.endpoint_discovery + if endpoint_discovery and endpoint_discovery.get('required'): + raise EndpointDiscoveryRequired() + + +class EndpointDiscoveryModel: + def __init__(self, service_model): + self._service_model = service_model + + @CachedProperty + def discovery_operation_name(self): + discovery_operation = self._service_model.endpoint_discovery_operation + return xform_name(discovery_operation.name) + + @CachedProperty + def discovery_operation_keys(self): + discovery_operation = self._service_model.endpoint_discovery_operation + keys = [] + if discovery_operation.input_shape: + keys = list(discovery_operation.input_shape.members.keys()) + return keys + + def discovery_required_for(self, operation_name): + try: + operation_model = self._service_model.operation_model( + operation_name + ) + return operation_model.endpoint_discovery.get('required', False) + except OperationNotFoundError: + return False + + def discovery_operation_kwargs(self, **kwargs): + input_keys = self.discovery_operation_keys + # Operation and Identifiers are only sent if there are Identifiers + if not kwargs.get('Identifiers'): + kwargs.pop('Operation', None) + kwargs.pop('Identifiers', None) + return {k: v for k, v in kwargs.items() if k in input_keys} + + def gather_identifiers(self, operation, params): + return self._gather_ids(operation.input_shape, params) + + def _gather_ids(self, shape, params, ids=None): + # Traverse the input shape and corresponding parameters, gathering + # any input fields labeled as an endpoint discovery id + if ids is None: + ids = {} + for member_name, member_shape in shape.members.items(): + if member_shape.metadata.get('endpointdiscoveryid'): + ids[member_name] = params[member_name] + 
elif ( + member_shape.type_name == 'structure' and member_name in params + ): + self._gather_ids(member_shape, params[member_name], ids) + return ids + + +class EndpointDiscoveryManager: + def __init__( + self, client, cache=None, current_time=None, always_discover=True + ): + if cache is None: + cache = {} + self._cache = cache + self._failed_attempts = {} + if current_time is None: + current_time = time.time + self._time = current_time + self._always_discover = always_discover + + # This needs to be a weak ref in order to prevent memory leaks on + # python 2.6 + self._client = weakref.proxy(client) + self._model = EndpointDiscoveryModel(client.meta.service_model) + + def _parse_endpoints(self, response): + endpoints = response['Endpoints'] + current_time = self._time() + for endpoint in endpoints: + cache_time = endpoint.get('CachePeriodInMinutes') + endpoint['Expiration'] = current_time + cache_time * 60 + return endpoints + + def _cache_item(self, value): + if isinstance(value, dict): + return tuple(sorted(value.items())) + else: + return value + + def _create_cache_key(self, **kwargs): + kwargs = self._model.discovery_operation_kwargs(**kwargs) + return tuple(self._cache_item(v) for k, v in sorted(kwargs.items())) + + def gather_identifiers(self, operation, params): + return self._model.gather_identifiers(operation, params) + + def delete_endpoints(self, **kwargs): + cache_key = self._create_cache_key(**kwargs) + if cache_key in self._cache: + del self._cache[cache_key] + + def _describe_endpoints(self, **kwargs): + # This is effectively a proxy to whatever name/kwargs the service + # supports for endpoint discovery. + kwargs = self._model.discovery_operation_kwargs(**kwargs) + operation_name = self._model.discovery_operation_name + discovery_operation = getattr(self._client, operation_name) + logger.debug('Discovering endpoints with kwargs: %s', kwargs) + return discovery_operation(**kwargs) + + def _get_current_endpoints(self, key): + if key not in self._cache: + return None + now = self._time() + return [e for e in self._cache[key] if now < e['Expiration']] + + def _refresh_current_endpoints(self, **kwargs): + cache_key = self._create_cache_key(**kwargs) + try: + response = self._describe_endpoints(**kwargs) + endpoints = self._parse_endpoints(response) + self._cache[cache_key] = endpoints + self._failed_attempts.pop(cache_key, None) + return endpoints + except (ConnectionError, HTTPClientError): + self._failed_attempts[cache_key] = self._time() + 60 + return None + + def _recently_failed(self, cache_key): + if cache_key in self._failed_attempts: + now = self._time() + if now < self._failed_attempts[cache_key]: + return True + del self._failed_attempts[cache_key] + return False + + def _select_endpoint(self, endpoints): + return endpoints[0]['Address'] + + def describe_endpoint(self, **kwargs): + operation = kwargs['Operation'] + discovery_required = self._model.discovery_required_for(operation) + + if not self._always_discover and not discovery_required: + # Discovery set to only run on required operations + logger.debug( + f'Optional discovery disabled. 
Skipping discovery for Operation: {operation}' + ) + return None + + # Get the endpoint for the provided operation and identifiers + cache_key = self._create_cache_key(**kwargs) + endpoints = self._get_current_endpoints(cache_key) + if endpoints: + return self._select_endpoint(endpoints) + # All known endpoints are stale + recently_failed = self._recently_failed(cache_key) + if not recently_failed: + # We haven't failed to discover recently, go ahead and refresh + endpoints = self._refresh_current_endpoints(**kwargs) + if endpoints: + return self._select_endpoint(endpoints) + # Discovery has failed recently, do our best to get an endpoint + logger.debug('Endpoint Discovery has failed for: %s', kwargs) + stale_entries = self._cache.get(cache_key, None) + if stale_entries: + # We have stale entries, use those while discovery is failing + return self._select_endpoint(stale_entries) + if discovery_required: + # It looks strange to be checking recently_failed again but, + # this informs us as to whether or not we tried to refresh earlier + if recently_failed: + # Discovery is required and we haven't already refreshed + endpoints = self._refresh_current_endpoints(**kwargs) + if endpoints: + return self._select_endpoint(endpoints) + # No endpoints even refresh, raise hard error + raise EndpointDiscoveryRefreshFailed() + # Discovery is optional, just use the default endpoint for now + return None + + +class EndpointDiscoveryHandler: + def __init__(self, manager): + self._manager = manager + + def register(self, events, service_id): + events.register( + f'before-parameter-build.{service_id}', self.gather_identifiers + ) + events.register_first( + f'request-created.{service_id}', self.discover_endpoint + ) + events.register(f'needs-retry.{service_id}', self.handle_retries) + + def gather_identifiers(self, params, model, context, **kwargs): + endpoint_discovery = model.endpoint_discovery + # Only continue if the operation supports endpoint discovery + if endpoint_discovery is None: + return + ids = self._manager.gather_identifiers(model, params) + context['discovery'] = {'identifiers': ids} + + def discover_endpoint(self, request, operation_name, **kwargs): + ids = request.context.get('discovery', {}).get('identifiers') + if ids is None: + return + endpoint = self._manager.describe_endpoint( + Operation=operation_name, Identifiers=ids + ) + if endpoint is None: + logger.debug('Failed to discover and inject endpoint') + return + if not endpoint.startswith('http'): + endpoint = 'https://' + endpoint + logger.debug('Injecting discovered endpoint: %s', endpoint) + request.url = endpoint + + def handle_retries(self, request_dict, response, operation, **kwargs): + if response is None: + return None + + _, response = response + status = response.get('ResponseMetadata', {}).get('HTTPStatusCode') + error_code = response.get('Error', {}).get('Code') + if status != 421 and error_code != 'InvalidEndpointException': + return None + + context = request_dict.get('context', {}) + ids = context.get('discovery', {}).get('identifiers') + if ids is None: + return None + + # Delete the cached endpoints, forcing a refresh on retry + # TODO: Improve eviction behavior to only evict the bad endpoint if + # there are multiple. This will almost certainly require a lock. 
+ self._manager.delete_endpoints( + Operation=operation.name, Identifiers=ids + ) + return 0 diff --git a/venv/lib/python3.10/site-packages/botocore/endpoint.py b/venv/lib/python3.10/site-packages/botocore/endpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..59f3d86c8ee8f5b59dbbfafabfc7a04a8ad13ff0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/endpoint.py @@ -0,0 +1,441 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import datetime +import logging +import os +import threading +import time +import uuid + +from botocore import parsers +from botocore.awsrequest import create_request_object +from botocore.exceptions import HTTPClientError +from botocore.history import get_global_history_recorder +from botocore.hooks import first_non_none_response +from botocore.httpchecksum import handle_checksum_body +from botocore.httpsession import URLLib3Session +from botocore.response import StreamingBody +from botocore.utils import ( + get_environ_proxies, + is_valid_endpoint_url, + is_valid_ipv6_endpoint_url, +) + +logger = logging.getLogger(__name__) +history_recorder = get_global_history_recorder() +DEFAULT_TIMEOUT = 60 +MAX_POOL_CONNECTIONS = 10 + + +def convert_to_response_dict(http_response, operation_model): + """Convert an HTTP response object to a request dict. + + This converts the requests library's HTTP response object to + a dictionary. + + :type http_response: botocore.vendored.requests.model.Response + :param http_response: The HTTP response from an AWS service request. + + :rtype: dict + :return: A response dictionary which will contain the following keys: + * headers (dict) + * status_code (int) + * body (string or file-like object) + + """ + response_dict = { + 'headers': http_response.headers, + 'status_code': http_response.status_code, + 'context': { + 'operation_name': operation_model.name, + }, + } + if response_dict['status_code'] >= 300: + response_dict['body'] = http_response.content + elif operation_model.has_event_stream_output: + response_dict['body'] = http_response.raw + elif operation_model.has_streaming_output: + length = response_dict['headers'].get('content-length') + response_dict['body'] = StreamingBody(http_response.raw, length) + else: + response_dict['body'] = http_response.content + return response_dict + + +class Endpoint: + """ + Represents an endpoint for a particular service in a specific + region. Only an endpoint can make requests. + + :ivar service: The Service object that describes this endpoints + service. + :ivar host: The fully qualified endpoint hostname. + :ivar session: The session object. 
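With discovery.py in place, endpoint discovery is driven through the client event system shown above (before-parameter-build, request-created, needs-retry). A hedged usage sketch, assuming configured credentials and using DynamoDB, a service that models endpoint discovery:

import botocore.session
from botocore.config import Config

session = botocore.session.get_session()
client = session.create_client(
    "dynamodb",
    region_name="us-east-1",
    config=Config(endpoint_discovery_enabled=True),
)
# Operations on this client may now trigger the service's endpoint
# discovery operation; results are cached by EndpointDiscoveryManager
# and injected into outgoing requests by EndpointDiscoveryHandler.
tables = client.list_tables()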
+ """ + + def __init__( + self, + host, + endpoint_prefix, + event_emitter, + response_parser_factory=None, + http_session=None, + ): + self._endpoint_prefix = endpoint_prefix + self._event_emitter = event_emitter + self.host = host + self._lock = threading.Lock() + if response_parser_factory is None: + response_parser_factory = parsers.ResponseParserFactory() + self._response_parser_factory = response_parser_factory + self.http_session = http_session + if self.http_session is None: + self.http_session = URLLib3Session() + + def __repr__(self): + return f'{self._endpoint_prefix}({self.host})' + + def close(self): + self.http_session.close() + + def make_request(self, operation_model, request_dict): + logger.debug( + "Making request for %s with params: %s", + operation_model, + request_dict, + ) + return self._send_request(request_dict, operation_model) + + def create_request(self, params, operation_model=None): + request = create_request_object(params) + if operation_model: + request.stream_output = any( + [ + operation_model.has_streaming_output, + operation_model.has_event_stream_output, + ] + ) + service_id = operation_model.service_model.service_id.hyphenize() + event_name = f'request-created.{service_id}.{operation_model.name}' + self._event_emitter.emit( + event_name, + request=request, + operation_name=operation_model.name, + ) + prepared_request = self.prepare_request(request) + return prepared_request + + def _encode_headers(self, headers): + # In place encoding of headers to utf-8 if they are unicode. + for key, value in headers.items(): + if isinstance(value, str): + headers[key] = value.encode('utf-8') + + def prepare_request(self, request): + self._encode_headers(request.headers) + return request.prepare() + + def _calculate_ttl( + self, response_received_timestamp, date_header, read_timeout + ): + local_timestamp = datetime.datetime.utcnow() + date_conversion = datetime.datetime.strptime( + date_header, "%a, %d %b %Y %H:%M:%S %Z" + ) + estimated_skew = date_conversion - response_received_timestamp + ttl = ( + local_timestamp + + datetime.timedelta(seconds=read_timeout) + + estimated_skew + ) + return ttl.strftime('%Y%m%dT%H%M%SZ') + + def _set_ttl(self, retries_context, read_timeout, success_response): + response_date_header = success_response[0].headers.get('Date') + has_streaming_input = retries_context.get('has_streaming_input') + if response_date_header and not has_streaming_input: + try: + response_received_timestamp = datetime.datetime.utcnow() + retries_context['ttl'] = self._calculate_ttl( + response_received_timestamp, + response_date_header, + read_timeout, + ) + except Exception: + logger.debug( + "Exception received when updating retries context with TTL", + exc_info=True, + ) + + def _update_retries_context(self, context, attempt, success_response=None): + retries_context = context.setdefault('retries', {}) + retries_context['attempt'] = attempt + if 'invocation-id' not in retries_context: + retries_context['invocation-id'] = str(uuid.uuid4()) + + if success_response: + read_timeout = context['client_config'].read_timeout + self._set_ttl(retries_context, read_timeout, success_response) + + def _send_request(self, request_dict, operation_model): + attempts = 1 + context = request_dict['context'] + self._update_retries_context(context, attempts) + request = self.create_request(request_dict, operation_model) + success_response, exception = self._get_response( + request, operation_model, context + ) + while self._needs_retry( + attempts, + operation_model, + 
request_dict, + success_response, + exception, + ): + attempts += 1 + self._update_retries_context(context, attempts, success_response) + # If there is a stream associated with the request, we need + # to reset it before attempting to send the request again. + # This will ensure that we resend the entire contents of the + # body. + request.reset_stream() + # Create a new request when retried (including a new signature). + request = self.create_request(request_dict, operation_model) + success_response, exception = self._get_response( + request, operation_model, context + ) + if ( + success_response is not None + and 'ResponseMetadata' in success_response[1] + ): + # We want to share num retries, not num attempts. + total_retries = attempts - 1 + success_response[1]['ResponseMetadata']['RetryAttempts'] = ( + total_retries + ) + if exception is not None: + raise exception + else: + return success_response + + def _get_response(self, request, operation_model, context): + # This will return a tuple of (success_response, exception) + # and success_response is itself a tuple of + # (http_response, parsed_dict). + # If an exception occurs then the success_response is None. + # If no exception occurs then exception is None. + success_response, exception = self._do_get_response( + request, operation_model, context + ) + kwargs_to_emit = { + 'response_dict': None, + 'parsed_response': None, + 'context': context, + 'exception': exception, + } + if success_response is not None: + http_response, parsed_response = success_response + kwargs_to_emit['parsed_response'] = parsed_response + kwargs_to_emit['response_dict'] = convert_to_response_dict( + http_response, operation_model + ) + service_id = operation_model.service_model.service_id.hyphenize() + self._event_emitter.emit( + f"response-received.{service_id}.{operation_model.name}", + **kwargs_to_emit, + ) + return success_response, exception + + def _do_get_response(self, request, operation_model, context): + try: + logger.debug("Sending http request: %s", request) + history_recorder.record( + 'HTTP_REQUEST', + { + 'method': request.method, + 'headers': request.headers, + 'streaming': operation_model.has_streaming_input, + 'url': request.url, + 'body': request.body, + }, + ) + service_id = operation_model.service_model.service_id.hyphenize() + event_name = f"before-send.{service_id}.{operation_model.name}" + responses = self._event_emitter.emit(event_name, request=request) + http_response = first_non_none_response(responses) + if http_response is None: + http_response = self._send(request) + except HTTPClientError as e: + return (None, e) + except Exception as e: + logger.debug( + "Exception received when sending HTTP request.", exc_info=True + ) + return (None, e) + # This returns the http_response and the parsed_data. 
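_calculate_ttl above derives a retry deadline that compensates for client/server clock skew: the server's Date header minus the local receive time estimates the skew, which is then added to now plus the read timeout. A small worked sketch of the same arithmetic:

import datetime

def calculate_ttl(response_received, date_header, read_timeout):
    # Same arithmetic as Endpoint._calculate_ttl above.
    server_time = datetime.datetime.strptime(
        date_header, "%a, %d %b %Y %H:%M:%S %Z"
    )
    estimated_skew = server_time - response_received
    ttl = (
        datetime.datetime.utcnow()
        + datetime.timedelta(seconds=read_timeout)
        + estimated_skew
    )
    return ttl.strftime("%Y%m%dT%H%M%SZ")

# The response arrived 5s after the server stamped it, so the computed
# deadline lands roughly 55 seconds after the local clock's "now".
received = datetime.datetime(2024, 1, 1, 12, 0, 5)
print(calculate_ttl(received, "Mon, 01 Jan 2024 12:00:00 GMT", 60))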
+ response_dict = convert_to_response_dict( + http_response, operation_model + ) + handle_checksum_body( + http_response, + response_dict, + context, + operation_model, + ) + + http_response_record_dict = response_dict.copy() + http_response_record_dict['streaming'] = ( + operation_model.has_streaming_output + ) + history_recorder.record('HTTP_RESPONSE', http_response_record_dict) + + protocol = operation_model.metadata['protocol'] + parser = self._response_parser_factory.create_parser(protocol) + parsed_response = parser.parse( + response_dict, operation_model.output_shape + ) + # Do a second parsing pass to pick up on any modeled error fields + # NOTE: Ideally, we would push this down into the parser classes but + # they currently have no reference to the operation or service model + # The parsers should probably take the operation model instead of + # output shape but we can't change that now + if http_response.status_code >= 300: + self._add_modeled_error_fields( + response_dict, + parsed_response, + operation_model, + parser, + ) + history_recorder.record('PARSED_RESPONSE', parsed_response) + return (http_response, parsed_response), None + + def _add_modeled_error_fields( + self, + response_dict, + parsed_response, + operation_model, + parser, + ): + error_code = parsed_response.get("Error", {}).get("Code") + if error_code is None: + return + service_model = operation_model.service_model + error_shape = service_model.shape_for_error_code(error_code) + if error_shape is None: + return + modeled_parse = parser.parse(response_dict, error_shape) + # TODO: avoid naming conflicts with ResponseMetadata and Error + parsed_response.update(modeled_parse) + + def _needs_retry( + self, + attempts, + operation_model, + request_dict, + response=None, + caught_exception=None, + ): + service_id = operation_model.service_model.service_id.hyphenize() + event_name = f"needs-retry.{service_id}.{operation_model.name}" + responses = self._event_emitter.emit( + event_name, + response=response, + endpoint=self, + operation=operation_model, + attempts=attempts, + caught_exception=caught_exception, + request_dict=request_dict, + ) + handler_response = first_non_none_response(responses) + if handler_response is None: + return False + else: + # Request needs to be retried, and we need to sleep + # for the specified number of times. 
+ logger.debug( + "Response received to retry, sleeping for %s seconds", + handler_response, + ) + time.sleep(handler_response) + return True + + def _send(self, request): + return self.http_session.send(request) + + +class EndpointCreator: + def __init__(self, event_emitter): + self._event_emitter = event_emitter + + def create_endpoint( + self, + service_model, + region_name, + endpoint_url, + verify=None, + response_parser_factory=None, + timeout=DEFAULT_TIMEOUT, + max_pool_connections=MAX_POOL_CONNECTIONS, + http_session_cls=URLLib3Session, + proxies=None, + socket_options=None, + client_cert=None, + proxies_config=None, + ): + if not is_valid_endpoint_url( + endpoint_url + ) and not is_valid_ipv6_endpoint_url(endpoint_url): + raise ValueError(f"Invalid endpoint: {endpoint_url}") + + if proxies is None: + proxies = self._get_proxies(endpoint_url) + endpoint_prefix = service_model.endpoint_prefix + + logger.debug('Setting %s timeout as %s', endpoint_prefix, timeout) + http_session = http_session_cls( + timeout=timeout, + proxies=proxies, + verify=self._get_verify_value(verify), + max_pool_connections=max_pool_connections, + socket_options=socket_options, + client_cert=client_cert, + proxies_config=proxies_config, + ) + + return Endpoint( + endpoint_url, + endpoint_prefix=endpoint_prefix, + event_emitter=self._event_emitter, + response_parser_factory=response_parser_factory, + http_session=http_session, + ) + + def _get_proxies(self, url): + # We could also support getting proxies from a config file, + # but for now proxy support is taken from the environment. + return get_environ_proxies(url) + + def _get_verify_value(self, verify): + # This is to account for: + # https://github.com/kennethreitz/requests/issues/1436 + # where we need to honor REQUESTS_CA_BUNDLE because we're creating our + # own request objects. + # First, if verify is not None, then the user explicitly specified + # a value so this automatically wins. + if verify is not None: + return verify + # Otherwise use the value from REQUESTS_CA_BUNDLE, or default to + # True if the env var does not exist. + return os.environ.get('REQUESTS_CA_BUNDLE', True) diff --git a/venv/lib/python3.10/site-packages/botocore/endpoint_provider.py b/venv/lib/python3.10/site-packages/botocore/endpoint_provider.py new file mode 100644 index 0000000000000000000000000000000000000000..9439086c5345678e4253a5996b597de8f0870493 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/endpoint_provider.py @@ -0,0 +1,721 @@ +# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +""" +NOTE: All classes and functions in this module are considered private and are +subject to abrupt breaking changes. Please do not use them directly. 
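_get_verify_value at the end of endpoint.py encodes a three-step precedence for TLS verification. A minimal standalone sketch of that precedence:

import os

def get_verify_value(verify=None):
    # An explicit verify argument wins; otherwise REQUESTS_CA_BUNDLE is
    # honored; otherwise certificate verification defaults to True.
    if verify is not None:
        return verify
    return os.environ.get("REQUESTS_CA_BUNDLE", True)

os.environ.pop("REQUESTS_CA_BUNDLE", None)
assert get_verify_value() is True
os.environ["REQUESTS_CA_BUNDLE"] = "/etc/ssl/certs/ca-bundle.crt"
assert get_verify_value() == "/etc/ssl/certs/ca-bundle.crt"
assert get_verify_value(verify=False) is False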
+ +To view the raw JSON that the objects in this module represent, please +go to any `endpoint-rule-set.json` file in /botocore/data/// +or you can look at the test files in /tests/unit/data/endpoints/valid-rules/ +""" + +import logging +import re +from enum import Enum +from string import Formatter +from typing import NamedTuple + +from botocore import xform_name +from botocore.compat import IPV4_RE, quote, urlparse +from botocore.exceptions import EndpointResolutionError +from botocore.utils import ( + ArnParser, + InvalidArnException, + is_valid_ipv4_endpoint_url, + is_valid_ipv6_endpoint_url, + lru_cache_weakref, + normalize_url_path, + percent_encode, +) + +logger = logging.getLogger(__name__) + +TEMPLATE_STRING_RE = re.compile(r"\{[a-zA-Z#]+\}") +GET_ATTR_RE = re.compile(r"(\w+)\[(\d+)\]") +VALID_HOST_LABEL_RE = re.compile( + r"^(?!-)[a-zA-Z\d-]{1,63}(?= len(value): + return None + return value[index] + else: + value = value[part] + return value + + def format_partition_output(self, partition): + output = partition["outputs"] + output["name"] = partition["id"] + return output + + def is_partition_match(self, region, partition): + matches_regex = re.match(partition["regionRegex"], region) is not None + return region in partition["regions"] or matches_regex + + def aws_partition(self, value): + """Match a region string to an AWS partition. + + :type value: str + :rtype: dict + """ + partitions = self.partitions_data['partitions'] + + if value is not None: + for partition in partitions: + if self.is_partition_match(value, partition): + return self.format_partition_output(partition) + + # return the default partition if no matches were found + aws_partition = partitions[0] + return self.format_partition_output(aws_partition) + + def aws_parse_arn(self, value): + """Parse and validate string for ARN components. + + :type value: str + :rtype: dict + """ + if value is None or not value.startswith("arn:"): + return None + + try: + arn_dict = ARN_PARSER.parse_arn(value) + except InvalidArnException: + return None + + # partition, resource, and service are required + if not all( + (arn_dict["partition"], arn_dict["service"], arn_dict["resource"]) + ): + return None + + arn_dict["accountId"] = arn_dict.pop("account") + + resource = arn_dict.pop("resource") + arn_dict["resourceId"] = resource.replace(":", "/").split("/") + + return arn_dict + + def is_valid_host_label(self, value, allow_subdomains): + """Evaluates whether a value is a valid host label per + RFC 1123. If allow_subdomains is True, split on `.` and validate + each component separately. + + :type value: str + :type allow_subdomains: bool + :rtype: bool + """ + if value is None or allow_subdomains is False and value.count(".") > 0: + return False + + if allow_subdomains is True: + return all( + self.is_valid_host_label(label, False) + for label in value.split(".") + ) + + return VALID_HOST_LABEL_RE.match(value) is not None + + def string_equals(self, value1, value2): + """Evaluates two string values for equality. + + :type value1: str + :type value2: str + :rtype: bool + """ + if not all(isinstance(val, str) for val in (value1, value2)): + msg = f"Both values must be strings, not {type(value1)} and {type(value2)}." + raise EndpointResolutionError(msg=msg) + return value1 == value2 + + def uri_encode(self, value): + """Perform percent-encoding on an input string. 
+ + :type value: str + :rytpe: str + """ + if value is None: + return None + + return percent_encode(value) + + def parse_url(self, value): + """Parse a URL string into components. + + :type value: str + :rtype: dict + """ + if value is None: + return None + + url_components = urlparse(value) + try: + # url_parse may assign non-integer values to + # `port` and will fail when accessed. + url_components.port + except ValueError: + return None + + scheme = url_components.scheme + query = url_components.query + # URLs with queries are not supported + if scheme not in ("https", "http") or len(query) > 0: + return None + + path = url_components.path + normalized_path = quote(normalize_url_path(path)) + if not normalized_path.endswith("/"): + normalized_path = f"{normalized_path}/" + + return { + "scheme": scheme, + "authority": url_components.netloc, + "path": path, + "normalizedPath": normalized_path, + "isIp": is_valid_ipv4_endpoint_url(value) + or is_valid_ipv6_endpoint_url(value), + } + + def boolean_equals(self, value1, value2): + """Evaluates two boolean values for equality. + + :type value1: bool + :type value2: bool + :rtype: bool + """ + if not all(isinstance(val, bool) for val in (value1, value2)): + msg = f"Both arguments must be bools, not {type(value1)} and {type(value2)}." + raise EndpointResolutionError(msg=msg) + return value1 is value2 + + def is_ascii(self, value): + """Evaluates if a string only contains ASCII characters. + + :type value: str + :rtype: bool + """ + try: + value.encode("ascii") + return True + except UnicodeEncodeError: + return False + + def substring(self, value, start, stop, reverse): + """Computes a substring given the start index and end index. If `reverse` is + True, slice the string from the end instead. + + :type value: str + :type start: int + :type end: int + :type reverse: bool + :rtype: str + """ + if not isinstance(value, str): + msg = f"Input must be a string, not {type(value)}." + raise EndpointResolutionError(msg=msg) + if start >= stop or len(value) < stop or not self.is_ascii(value): + return None + + if reverse is True: + r_start = len(value) - stop + r_stop = len(value) - start + return value[r_start:r_stop] + + return value[start:stop] + + def _not(self, value): + """A function implementation of the logical operator `not`. + + :type value: Any + :rtype: bool + """ + return not value + + def aws_is_virtual_hostable_s3_bucket(self, value, allow_subdomains): + """Evaluates whether a value is a valid bucket name for virtual host + style bucket URLs. To pass, the value must meet the following criteria: + 1. is_valid_host_label(value) is True + 2. length between 3 and 63 characters (inclusive) + 3. does not contain uppercase characters + 4. is not formatted as an IP address + + If allow_subdomains is True, split on `.` and validate + each component separately. 
+ + :type value: str + :type allow_subdomains: bool + :rtype: bool + """ + if ( + value is None + or len(value) < 3 + or value.lower() != value + or IPV4_RE.match(value) is not None + ): + return False + + return self.is_valid_host_label( + value, allow_subdomains=allow_subdomains + ) + + +# maintains backwards compatibility as `Library` was misspelled +# in earlier versions +RuleSetStandardLibary = RuleSetStandardLibrary + + +class BaseRule: + """Base interface for individual endpoint rules.""" + + def __init__(self, conditions, documentation=None): + self.conditions = conditions + self.documentation = documentation + + def evaluate(self, scope_vars, rule_lib): + raise NotImplementedError() + + def evaluate_conditions(self, scope_vars, rule_lib): + """Determine if all conditions in a rule are met. + + :type scope_vars: dict + :type rule_lib: RuleSetStandardLibrary + :rtype: bool + """ + for func_signature in self.conditions: + result = rule_lib.call_function(func_signature, scope_vars) + if result is False or result is None: + return False + return True + + +class RuleSetEndpoint(NamedTuple): + """A resolved endpoint object returned by a rule.""" + + url: str + properties: dict + headers: dict + + +class EndpointRule(BaseRule): + def __init__(self, endpoint, **kwargs): + super().__init__(**kwargs) + self.endpoint = endpoint + + def evaluate(self, scope_vars, rule_lib): + """Determine if conditions are met to provide a valid endpoint. + + :type scope_vars: dict + :rtype: RuleSetEndpoint + """ + if self.evaluate_conditions(scope_vars, rule_lib): + url = rule_lib.resolve_value(self.endpoint["url"], scope_vars) + properties = self.resolve_properties( + self.endpoint.get("properties", {}), + scope_vars, + rule_lib, + ) + headers = self.resolve_headers(scope_vars, rule_lib) + return RuleSetEndpoint( + url=url, properties=properties, headers=headers + ) + + return None + + def resolve_properties(self, properties, scope_vars, rule_lib): + """Traverse `properties` attribute, resolving any template strings. + + :type properties: dict/list/str + :type scope_vars: dict + :type rule_lib: RuleSetStandardLibrary + :rtype: dict + """ + if isinstance(properties, list): + return [ + self.resolve_properties(prop, scope_vars, rule_lib) + for prop in properties + ] + elif isinstance(properties, dict): + return { + key: self.resolve_properties(value, scope_vars, rule_lib) + for key, value in properties.items() + } + elif rule_lib.is_template(properties): + return rule_lib.resolve_template_string(properties, scope_vars) + + return properties + + def resolve_headers(self, scope_vars, rule_lib): + """Iterate through headers attribute resolving all values. + + :type scope_vars: dict + :type rule_lib: RuleSetStandardLibrary + :rtype: dict + """ + resolved_headers = {} + headers = self.endpoint.get("headers", {}) + + for header, values in headers.items(): + resolved_headers[header] = [ + rule_lib.resolve_value(item, scope_vars) for item in values + ] + return resolved_headers + + +class ErrorRule(BaseRule): + def __init__(self, error, **kwargs): + super().__init__(**kwargs) + self.error = error + + def evaluate(self, scope_vars, rule_lib): + """If an error rule's conditions are met, raise an error rule. 
+ + :type scope_vars: dict + :type rule_lib: RuleSetStandardLibrary + :rtype: EndpointResolutionError + """ + if self.evaluate_conditions(scope_vars, rule_lib): + error = rule_lib.resolve_value(self.error, scope_vars) + raise EndpointResolutionError(msg=error) + return None + + +class TreeRule(BaseRule): + """A tree rule is non-terminal meaning it will never be returned to a provider. + Additionally this means it has no attributes that need to be resolved. + """ + + def __init__(self, rules, **kwargs): + super().__init__(**kwargs) + self.rules = [RuleCreator.create(**rule) for rule in rules] + + def evaluate(self, scope_vars, rule_lib): + """If a tree rule's conditions are met, iterate its sub-rules + and return first result found. + + :type scope_vars: dict + :type rule_lib: RuleSetStandardLibrary + :rtype: RuleSetEndpoint/EndpointResolutionError + """ + if self.evaluate_conditions(scope_vars, rule_lib): + for rule in self.rules: + # don't share scope_vars between rules + rule_result = rule.evaluate(scope_vars.copy(), rule_lib) + if rule_result: + return rule_result + return None + + +class RuleCreator: + endpoint = EndpointRule + error = ErrorRule + tree = TreeRule + + @classmethod + def create(cls, **kwargs): + """Create a rule instance from metadata. + + :rtype: TreeRule/EndpointRule/ErrorRule + """ + rule_type = kwargs.pop("type") + try: + rule_class = getattr(cls, rule_type) + except AttributeError: + raise EndpointResolutionError( + msg=f"Unknown rule type: {rule_type}. A rule must " + "be of type tree, endpoint or error." + ) + else: + return rule_class(**kwargs) + + +class ParameterType(Enum): + """Translation from `type` attribute to native Python type.""" + + string = str + boolean = bool + + +class ParameterDefinition: + """The spec of an individual parameter defined in a RuleSet.""" + + def __init__( + self, + name, + parameter_type, + documentation=None, + builtIn=None, + default=None, + required=None, + deprecated=None, + ): + self.name = name + try: + self.parameter_type = getattr( + ParameterType, parameter_type.lower() + ).value + except AttributeError: + raise EndpointResolutionError( + msg=f"Unknown parameter type: {parameter_type}. " + "A parameter must be of type string or boolean." + ) + self.documentation = documentation + self.builtin = builtIn + self.default = default + self.required = required + self.deprecated = deprecated + + def validate_input(self, value): + """Perform base validation on parameter input. + + :type value: Any + :raises: EndpointParametersError + """ + + if not isinstance(value, self.parameter_type): + raise EndpointResolutionError( + msg=f"Value ({self.name}) is the wrong " + f"type. Must be {self.parameter_type}." + ) + if self.deprecated is not None: + depr_str = f"{self.name} has been deprecated." + msg = self.deprecated.get("message") + since = self.deprecated.get("since") + if msg: + depr_str += f"\n{msg}" + if since: + depr_str += f"\nDeprecated since {since}." 
+ logger.info(depr_str) + + return None + + def process_input(self, value): + """Process input against spec, applying default if value is None.""" + if value is None: + if self.default is not None: + return self.default + if self.required: + raise EndpointResolutionError( + f"Cannot find value for required parameter {self.name}" + ) + # in all other cases, the parameter will keep the value None + else: + self.validate_input(value) + return value + + +class RuleSet: + """Collection of rules to derive a routable service endpoint.""" + + def __init__( + self, version, parameters, rules, partitions, documentation=None + ): + self.version = version + self.parameters = self._ingest_parameter_spec(parameters) + self.rules = [RuleCreator.create(**rule) for rule in rules] + self.rule_lib = RuleSetStandardLibrary(partitions) + self.documentation = documentation + + def _ingest_parameter_spec(self, parameters): + return { + name: ParameterDefinition( + name, + spec["type"], + spec.get("documentation"), + spec.get("builtIn"), + spec.get("default"), + spec.get("required"), + spec.get("deprecated"), + ) + for name, spec in parameters.items() + } + + def process_input_parameters(self, input_params): + """Process each input parameter against its spec. + + :type input_params: dict + """ + for name, spec in self.parameters.items(): + value = spec.process_input(input_params.get(name)) + if value is not None: + input_params[name] = value + return None + + def evaluate(self, input_parameters): + """Evaluate input parameters against rules returning first match. + + :type input_parameters: dict + """ + self.process_input_parameters(input_parameters) + for rule in self.rules: + evaluation = rule.evaluate(input_parameters.copy(), self.rule_lib) + if evaluation is not None: + return evaluation + return None + + +class EndpointProvider: + """Derives endpoints from a RuleSet for given input parameters.""" + + def __init__(self, ruleset_data, partition_data): + self.ruleset = RuleSet(**ruleset_data, partitions=partition_data) + + @lru_cache_weakref(maxsize=CACHE_SIZE) + def resolve_endpoint(self, **input_parameters): + """Match input parameters to a rule. + + :type input_parameters: dict + :rtype: RuleSetEndpoint + """ + params_for_error = input_parameters.copy() + endpoint = self.ruleset.evaluate(input_parameters) + if endpoint is None: + param_string = "\n".join( + [f"{key}: {value}" for key, value in params_for_error.items()] + ) + raise EndpointResolutionError( + msg=f"No endpoint found for parameters:\n{param_string}" + ) + return endpoint diff --git a/venv/lib/python3.10/site-packages/botocore/errorfactory.py b/venv/lib/python3.10/site-packages/botocore/errorfactory.py new file mode 100644 index 0000000000000000000000000000000000000000..6084e51da467c0cbf0e7fdeb52f858512bebcc6f --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/errorfactory.py @@ -0,0 +1,90 @@ +# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
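A minimal sketch of driving the RuleSet/EndpointProvider machinery above directly. The one-rule ruleset and the Region value are hypothetical, and the empty partitions document assumes no condition in the rules calls aws.partition::

    from botocore.endpoint_provider import EndpointProvider

    # Hypothetical ruleset: a single endpoint rule with no conditions,
    # so it matches as soon as the required parameter validates.
    ruleset = {
        "version": "1.0",
        "parameters": {
            "Region": {"type": "string", "required": True},
        },
        "rules": [
            {
                "type": "endpoint",
                "conditions": [],
                "endpoint": {"url": "https://example.{Region}.amazonaws.com"},
            }
        ],
    }

    provider = EndpointProvider(ruleset, {"partitions": []})
    resolved = provider.resolve_endpoint(Region="us-west-2")
    print(resolved.url)  # https://example.us-west-2.amazonaws.com

Because resolve_endpoint is wrapped in lru_cache_weakref, repeated calls with the same keyword arguments return the cached RuleSetEndpoint rather than re-walking the rules.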
+from botocore.exceptions import ClientError +from botocore.utils import get_service_module_name + + +class BaseClientExceptions: + ClientError = ClientError + + def __init__(self, code_to_exception): + """Base class for exceptions object on a client + + :type code_to_exception: dict + :param code_to_exception: Mapping of error codes (strings) to exception + class that should be raised when encountering a particular + error code. + """ + self._code_to_exception = code_to_exception + + def from_code(self, error_code): + """Retrieves the error class based on the error code + + This is helpful for identifying the exception class needing to be + caught based on the ClientError.parsed_response['Error']['Code'] value + + :type error_code: string + :param error_code: The error code associated with a ClientError exception + + :rtype: ClientError or a subclass of ClientError + :returns: The appropriate modeled exception class for that error + code. If the error code does not match any of the known + modeled exceptions then return a generic ClientError. + """ + return self._code_to_exception.get(error_code, self.ClientError) + + def __getattr__(self, name): + exception_cls_names = [ + exception_cls.__name__ + for exception_cls in self._code_to_exception.values() + ] + raise AttributeError( + rf"{self} object has no attribute {name}. " + rf"Valid exceptions are: {', '.join(exception_cls_names)}" + ) + + +class ClientExceptionsFactory: + def __init__(self): + self._client_exceptions_cache = {} + + def create_client_exceptions(self, service_model): + """Creates a ClientExceptions object for the particular service client + + :type service_model: botocore.model.ServiceModel + :param service_model: The service model for the client + + :rtype: object that subclasses from BaseClientExceptions + :returns: The exceptions object of a client that can be used + to grab the various different modeled exceptions. + """ + service_name = service_model.service_name + if service_name not in self._client_exceptions_cache: + client_exceptions = self._create_client_exceptions(service_model) + self._client_exceptions_cache[service_name] = client_exceptions + return self._client_exceptions_cache[service_name] + + def _create_client_exceptions(self, service_model): + cls_props = {} + code_to_exception = {} + for error_shape in service_model.error_shapes: + exception_name = str(error_shape.name) + exception_cls = type(exception_name, (ClientError,), {}) + cls_props[exception_name] = exception_cls + code = str(error_shape.error_code) + code_to_exception[code] = exception_cls + cls_name = str(get_service_module_name(service_model) + 'Exceptions') + client_exceptions_cls = type( + cls_name, (BaseClientExceptions,), cls_props + ) + return client_exceptions_cls(code_to_exception) diff --git a/venv/lib/python3.10/site-packages/botocore/eventstream.py b/venv/lib/python3.10/site-packages/botocore/eventstream.py new file mode 100644 index 0000000000000000000000000000000000000000..b7999a6e50499525693a09481804056f1648d67e --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/eventstream.py @@ -0,0 +1,622 @@ +# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file.
This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Binary Event Stream Decoding""" + +from binascii import crc32 +from struct import unpack + +from botocore.exceptions import EventStreamError + +# byte length of the prelude (total_length + header_length + prelude_crc) +_PRELUDE_LENGTH = 12 +_MAX_HEADERS_LENGTH = 128 * 1024 # 128 Kb +_MAX_PAYLOAD_LENGTH = 16 * 1024**2 # 16 Mb + + +class ParserError(Exception): + """Base binary flow encoding parsing exception.""" + + pass + + +class DuplicateHeader(ParserError): + """Duplicate header found in the event.""" + + def __init__(self, header): + message = f'Duplicate header present: "{header}"' + super().__init__(message) + + +class InvalidHeadersLength(ParserError): + """Headers length is longer than the maximum.""" + + def __init__(self, length): + message = f'Header length of {length} exceeded the maximum of {_MAX_HEADERS_LENGTH}' + super().__init__(message) + + +class InvalidPayloadLength(ParserError): + """Payload length is longer than the maximum.""" + + def __init__(self, length): + message = f'Payload length of {length} exceeded the maximum of {_MAX_PAYLOAD_LENGTH}' + super().__init__(message) + + +class ChecksumMismatch(ParserError): + """Calculated checksum did not match the expected checksum.""" + + def __init__(self, expected, calculated): + message = f'Checksum mismatch: expected 0x{expected:08x}, calculated 0x{calculated:08x}' + super().__init__(message) + + +class NoInitialResponseError(ParserError): + """An event of type initial-response was not received. + + This exception is raised when the event stream produced no events or + the first event in the stream was not of the initial-response type. + """ + + def __init__(self): + message = 'First event was not of the initial-response type' + super().__init__(message) + + +class DecodeUtils: + """Unpacking utility functions used in the decoder. + + All methods on this class take raw bytes and return a tuple containing + the value parsed from the bytes and the number of bytes consumed to parse + that value. + """ + + UINT8_BYTE_FORMAT = '!B' + UINT16_BYTE_FORMAT = '!H' + UINT32_BYTE_FORMAT = '!I' + INT8_BYTE_FORMAT = '!b' + INT16_BYTE_FORMAT = '!h' + INT32_BYTE_FORMAT = '!i' + INT64_BYTE_FORMAT = '!q' + PRELUDE_BYTE_FORMAT = '!III' + + # uint byte size to unpack format + UINT_BYTE_FORMAT = { + 1: UINT8_BYTE_FORMAT, + 2: UINT16_BYTE_FORMAT, + 4: UINT32_BYTE_FORMAT, + } + + @staticmethod + def unpack_true(data): + """This method consumes none of the provided bytes and returns True. + + :type data: bytes + :param data: The bytes to parse from. This is ignored in this method. + + :rtype: (bool, int) + :returns: The tuple (True, 0) + """ + return True, 0 + + @staticmethod + def unpack_false(data): + """This method consumes none of the provided bytes and returns False. + + :type data: bytes + :param data: The bytes to parse from. This is ignored in this method. + + :rtype: (bool, int) + :returns: The tuple (False, 0) + """ + return False, 0 + + @staticmethod + def unpack_uint8(data): + """Parse an unsigned 8-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from.
+ + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.UINT8_BYTE_FORMAT, data[:1])[0] + return value, 1 + + @staticmethod + def unpack_uint32(data): + """Parse an unsigned 32-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.UINT32_BYTE_FORMAT, data[:4])[0] + return value, 4 + + @staticmethod + def unpack_int8(data): + """Parse a signed 8-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.INT8_BYTE_FORMAT, data[:1])[0] + return value, 1 + + @staticmethod + def unpack_int16(data): + """Parse a signed 16-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.INT16_BYTE_FORMAT, data[:2])[0] + return value, 2 + + @staticmethod + def unpack_int32(data): + """Parse a signed 32-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.INT32_BYTE_FORMAT, data[:4])[0] + return value, 4 + + @staticmethod + def unpack_int64(data): + """Parse a signed 64-bit integer from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (int, int) + :returns: A tuple containing the (parsed integer value, bytes consumed) + """ + value = unpack(DecodeUtils.INT64_BYTE_FORMAT, data[:8])[0] + return value, 8 + + @staticmethod + def unpack_byte_array(data, length_byte_size=2): + """Parse a variable length byte array from the bytes. + + The bytes are expected to be in the following format: + [ length ][0 ... length bytes] + where length is an unsigned integer represented in the smallest number + of bytes to hold the maximum length of the array. + + :type data: bytes + :param data: The bytes to parse from. + + :type length_byte_size: int + :param length_byte_size: The byte size of the preceding integer that + represents the length of the array. Supported values are 1, 2, and 4. + + :rtype: (bytes, int) + :returns: A tuple containing the (parsed byte array, bytes consumed). + """ + uint_byte_format = DecodeUtils.UINT_BYTE_FORMAT[length_byte_size] + length = unpack(uint_byte_format, data[:length_byte_size])[0] + bytes_end = length + length_byte_size + array_bytes = data[length_byte_size:bytes_end] + return array_bytes, bytes_end + + @staticmethod + def unpack_utf8_string(data, length_byte_size=2): + """Parse a variable length utf-8 string from the bytes. + + The bytes are expected to be in the following format: + [ length ][0 ... length bytes] + where length is an unsigned integer represented in the smallest number + of bytes to hold the maximum length of the array and the following + bytes are a valid utf-8 string. + + :type data: bytes + :param data: The bytes to parse from. + + :type length_byte_size: int + :param length_byte_size: The byte size of the preceding integer that + represents the length of the array. Supported values are 1, 2, and 4.
+ + :rtype: (str, int) + :returns: A tuple containing the (utf-8 string, bytes consumed). + """ + array_bytes, consumed = DecodeUtils.unpack_byte_array( + data, length_byte_size + ) + return array_bytes.decode('utf-8'), consumed + + @staticmethod + def unpack_uuid(data): + """Parse a 16-byte uuid from the bytes. + + :type data: bytes + :param data: The bytes to parse from. + + :rtype: (bytes, int) + :returns: A tuple containing the (uuid bytes, bytes consumed). + """ + return data[:16], 16 + + @staticmethod + def unpack_prelude(data): + """Parse the prelude for an event stream message from the bytes. + + The prelude for an event stream message has the following format: + [total_length][header_length][prelude_crc] + where each field is an unsigned 32-bit integer. + + :rtype: ((int, int, int), int) + :returns: A tuple of ((total_length, headers_length, prelude_crc), + consumed) + """ + return (unpack(DecodeUtils.PRELUDE_BYTE_FORMAT, data), _PRELUDE_LENGTH) + + +def _validate_checksum(data, checksum, crc=0): + # To generate the same numeric value across all Python versions and + # platforms use crc32(data) & 0xffffffff. + computed_checksum = crc32(data, crc) & 0xFFFFFFFF + if checksum != computed_checksum: + raise ChecksumMismatch(checksum, computed_checksum) + + +class MessagePrelude: + """Represents the prelude of an event stream message.""" + + def __init__(self, total_length, headers_length, crc): + self.total_length = total_length + self.headers_length = headers_length + self.crc = crc + + @property + def payload_length(self): + """Calculates the total payload length. + + The extra minus 4 bytes is for the message CRC. + + :rtype: int + :returns: The total payload length. + """ + return self.total_length - self.headers_length - _PRELUDE_LENGTH - 4 + + @property + def payload_end(self): + """Calculates the byte offset for the end of the message payload. + + The extra minus 4 bytes is for the message CRC. + + :rtype: int + :returns: The byte offset from the beginning of the event stream + message to the end of the payload. + """ + return self.total_length - 4 + + @property + def headers_end(self): + """Calculates the byte offset for the end of the message headers. + + :rtype: int + :returns: The byte offset from the beginning of the event stream + message to the end of the headers. + """ + return _PRELUDE_LENGTH + self.headers_length + + +class EventStreamMessage: + """Represents an event stream message.""" + + def __init__(self, prelude, headers, payload, crc): + self.prelude = prelude + self.headers = headers + self.payload = payload + self.crc = crc + + def to_response_dict(self, status_code=200): + message_type = self.headers.get(':message-type') + if message_type == 'error' or message_type == 'exception': + status_code = 400 + return { + 'status_code': status_code, + 'headers': self.headers, + 'body': self.payload, + } + + +class EventStreamHeaderParser: + """Parses the event headers from an event stream message. + + Expects all of the header data upfront and creates a dictionary of headers + to return. This object can be reused multiple times to parse the headers + from multiple event stream messages. 
+ """ + + # Maps header type to appropriate unpacking function + # These unpacking functions return the value and the amount unpacked + _HEADER_TYPE_MAP = { + # boolean_true + 0: DecodeUtils.unpack_true, + # boolean_false + 1: DecodeUtils.unpack_false, + # byte + 2: DecodeUtils.unpack_int8, + # short + 3: DecodeUtils.unpack_int16, + # integer + 4: DecodeUtils.unpack_int32, + # long + 5: DecodeUtils.unpack_int64, + # byte_array + 6: DecodeUtils.unpack_byte_array, + # string + 7: DecodeUtils.unpack_utf8_string, + # timestamp + 8: DecodeUtils.unpack_int64, + # uuid + 9: DecodeUtils.unpack_uuid, + } + + def __init__(self): + self._data = None + + def parse(self, data): + """Parses the event stream headers from an event stream message. + + :type data: bytes + :param data: The bytes that correspond to the headers section of an + event stream message. + + :rtype: dict + :returns: A dictionary of header key, value pairs. + """ + self._data = data + return self._parse_headers() + + def _parse_headers(self): + headers = {} + while self._data: + name, value = self._parse_header() + if name in headers: + raise DuplicateHeader(name) + headers[name] = value + return headers + + def _parse_header(self): + name = self._parse_name() + value = self._parse_value() + return name, value + + def _parse_name(self): + name, consumed = DecodeUtils.unpack_utf8_string(self._data, 1) + self._advance_data(consumed) + return name + + def _parse_type(self): + type, consumed = DecodeUtils.unpack_uint8(self._data) + self._advance_data(consumed) + return type + + def _parse_value(self): + header_type = self._parse_type() + value_unpacker = self._HEADER_TYPE_MAP[header_type] + value, consumed = value_unpacker(self._data) + self._advance_data(consumed) + return value + + def _advance_data(self, consumed): + self._data = self._data[consumed:] + + +class EventStreamBuffer: + """Streaming based event stream buffer + + A buffer class that wraps bytes from an event stream providing parsed + messages as they become available via an iterable interface. + """ + + def __init__(self): + self._data = b'' + self._prelude = None + self._header_parser = EventStreamHeaderParser() + + def add_data(self, data): + """Add data to the buffer. 
+ + :type data: bytes + :param data: The bytes to add to the buffer to be used when parsing + """ + self._data += data + + def _validate_prelude(self, prelude): + if prelude.headers_length > _MAX_HEADERS_LENGTH: + raise InvalidHeadersLength(prelude.headers_length) + + if prelude.payload_length > _MAX_PAYLOAD_LENGTH: + raise InvalidPayloadLength(prelude.payload_length) + + def _parse_prelude(self): + prelude_bytes = self._data[:_PRELUDE_LENGTH] + raw_prelude, _ = DecodeUtils.unpack_prelude(prelude_bytes) + prelude = MessagePrelude(*raw_prelude) + self._validate_prelude(prelude) + # The minus 4 removes the prelude crc from the bytes to be checked + _validate_checksum(prelude_bytes[: _PRELUDE_LENGTH - 4], prelude.crc) + return prelude + + def _parse_headers(self): + header_bytes = self._data[_PRELUDE_LENGTH : self._prelude.headers_end] + return self._header_parser.parse(header_bytes) + + def _parse_payload(self): + prelude = self._prelude + payload_bytes = self._data[prelude.headers_end : prelude.payload_end] + return payload_bytes + + def _parse_message_crc(self): + prelude = self._prelude + crc_bytes = self._data[prelude.payload_end : prelude.total_length] + message_crc, _ = DecodeUtils.unpack_uint32(crc_bytes) + return message_crc + + def _parse_message_bytes(self): + # The minus 4 includes the prelude crc to the bytes to be checked + message_bytes = self._data[ + _PRELUDE_LENGTH - 4 : self._prelude.payload_end + ] + return message_bytes + + def _validate_message_crc(self): + message_crc = self._parse_message_crc() + message_bytes = self._parse_message_bytes() + _validate_checksum(message_bytes, message_crc, crc=self._prelude.crc) + return message_crc + + def _parse_message(self): + crc = self._validate_message_crc() + headers = self._parse_headers() + payload = self._parse_payload() + message = EventStreamMessage(self._prelude, headers, payload, crc) + self._prepare_for_next_message() + return message + + def _prepare_for_next_message(self): + # Advance the data and reset the current prelude + self._data = self._data[self._prelude.total_length :] + self._prelude = None + + def next(self): + """Provides the next available message parsed from the stream + + :rtype: EventStreamMessage + :returns: The next event stream message + """ + if len(self._data) < _PRELUDE_LENGTH: + raise StopIteration() + + if self._prelude is None: + self._prelude = self._parse_prelude() + + if len(self._data) < self._prelude.total_length: + raise StopIteration() + + return self._parse_message() + + def __next__(self): + return self.next() + + def __iter__(self): + return self + + +class EventStream: + """Wrapper class for an event stream body. + + This wraps the underlying streaming body, parsing it for individual events + and yielding them as they come available through the iterator interface. + + The following example uses the S3 select API to get structured data out of + an object stored in S3 using an event stream. 
+ + **Example:** + :: + from botocore.session import Session + + s3 = Session().create_client('s3') + response = s3.select_object_content( + Bucket='bucketname', + Key='keyname', + ExpressionType='SQL', + RequestProgress={'Enabled': True}, + Expression="SELECT * FROM S3Object s", + InputSerialization={'CSV': {}}, + OutputSerialization={'CSV': {}}, + ) + # This is the event stream in the response + event_stream = response['Payload'] + end_event_received = False + with open('output', 'wb') as f: + # Iterate over events in the event stream as they come + for event in event_stream: + # If we received a records event, write the data to a file + if 'Records' in event: + data = event['Records']['Payload'] + f.write(data) + # If we received a progress event, print the details + elif 'Progress' in event: + print(event['Progress']['Details']) + # End event indicates that the request finished successfully + elif 'End' in event: + print('Result is complete') + end_event_received = True + if not end_event_received: + raise Exception("End event not received, request incomplete.") + """ + + def __init__(self, raw_stream, output_shape, parser, operation_name): + self._raw_stream = raw_stream + self._output_shape = output_shape + self._operation_name = operation_name + self._parser = parser + self._event_generator = self._create_raw_event_generator() + + def __iter__(self): + for event in self._event_generator: + parsed_event = self._parse_event(event) + if parsed_event: + yield parsed_event + + def _create_raw_event_generator(self): + event_stream_buffer = EventStreamBuffer() + for chunk in self._raw_stream.stream(): + event_stream_buffer.add_data(chunk) + yield from event_stream_buffer + + def _parse_event(self, event): + response_dict = event.to_response_dict() + parsed_response = self._parser.parse(response_dict, self._output_shape) + if response_dict['status_code'] == 200: + return parsed_response + else: + raise EventStreamError(parsed_response, self._operation_name) + + def get_initial_response(self): + try: + initial_event = next(self._event_generator) + event_type = initial_event.headers.get(':event-type') + if event_type == 'initial-response': + return initial_event + except StopIteration: + pass + raise NoInitialResponseError() + + def close(self): + """Closes the underlying streaming body.""" + self._raw_stream.close() diff --git a/venv/lib/python3.10/site-packages/botocore/exceptions.py b/venv/lib/python3.10/site-packages/botocore/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..1c480abbf87c1c8c9132e139658790b856c3e8b8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/exceptions.py @@ -0,0 +1,816 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
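Back in eventstream.py above, EventStreamBuffer turns raw chunks into parsed messages. A small self-contained sketch that hand-builds one frame following the documented wire format (empty headers; the payload bytes are made up) and feeds it in two partial chunks::

    from binascii import crc32
    from struct import pack

    from botocore.eventstream import EventStreamBuffer

    payload = b'{"hello": "world"}'  # made-up payload
    headers = b''                    # no headers, for simplicity
    # prelude: total_length and headers_length (uint32 each), then the
    # prelude CRC over those 8 bytes
    total_length = 12 + len(headers) + len(payload) + 4
    prelude = pack('!II', total_length, len(headers))
    prelude_crc = crc32(prelude) & 0xFFFFFFFF
    body = prelude + pack('!I', prelude_crc) + headers + payload
    # the trailing message CRC covers everything before it, which is
    # exactly how _validate_message_crc() recomputes it
    message_crc = crc32(body) & 0xFFFFFFFF
    frame = body + pack('!I', message_crc)

    buf = EventStreamBuffer()
    buf.add_data(frame[:10])
    print(list(buf))          # [] -- fewer bytes than a prelude, no message yet
    buf.add_data(frame[10:])
    message = next(iter(buf))
    print(message.headers)    # {}
    print(message.payload)    # b'{"hello": "world"}'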
+ +from botocore.vendored import requests +from botocore.vendored.requests.packages import urllib3 + + +def _exception_from_packed_args(exception_cls, args=None, kwargs=None): + # This is helpful for reducing Exceptions that only accept kwargs as + # only positional arguments can be provided for __reduce__ + # Ideally, this would also be a class method on the BotoCoreError + # but instance methods cannot be pickled. + if args is None: + args = () + if kwargs is None: + kwargs = {} + return exception_cls(*args, **kwargs) + + +class BotoCoreError(Exception): + """ + The base exception class for BotoCore exceptions. + + :ivar msg: The descriptive message associated with the error. + """ + + fmt = 'An unspecified error occurred' + + def __init__(self, **kwargs): + msg = self.fmt.format(**kwargs) + Exception.__init__(self, msg) + self.kwargs = kwargs + + def __reduce__(self): + return _exception_from_packed_args, (self.__class__, None, self.kwargs) + + +class DataNotFoundError(BotoCoreError): + """ + The data associated with a particular path could not be loaded. + + :ivar data_path: The data path that the user attempted to load. + """ + + fmt = 'Unable to load data for: {data_path}' + + +class UnknownServiceError(DataNotFoundError): + """Raised when trying to load data for an unknown service. + + :ivar service_name: The name of the unknown service. + + """ + + fmt = ( + "Unknown service: '{service_name}'. Valid service names are: " + "{known_service_names}" + ) + + +class UnknownRegionError(BotoCoreError): + """Raised when trying to load data for an unknown region. + + :ivar region_name: The name of the unknown region. + + """ + + fmt = "Unknown region: '{region_name}'. {error_msg}" + + +class ApiVersionNotFoundError(BotoCoreError): + """ + The data associated with either the API version or a compatible one + could not be loaded. + + :ivar data_path: The data path that the user attempted to load. + :ivar api_version: The API version that the user attempted to load. + """ + + fmt = 'Unable to load data {data_path} for: {api_version}' + + +class HTTPClientError(BotoCoreError): + fmt = 'An HTTP Client raised an unhandled exception: {error}' + + def __init__(self, request=None, response=None, **kwargs): + self.request = request + self.response = response + super().__init__(**kwargs) + + def __reduce__(self): + return _exception_from_packed_args, ( + self.__class__, + (self.request, self.response), + self.kwargs, + ) + + +class ConnectionError(BotoCoreError): + fmt = 'An HTTP Client failed to establish a connection: {error}' + + +class InvalidIMDSEndpointError(BotoCoreError): + fmt = 'Invalid endpoint EC2 Instance Metadata endpoint: {endpoint}' + + +class InvalidIMDSEndpointModeError(BotoCoreError): + fmt = ( + 'Invalid EC2 Instance Metadata endpoint mode: {mode}' + ' Valid endpoint modes (case-insensitive): {valid_modes}.' + ) + + +class EndpointConnectionError(ConnectionError): + fmt = 'Could not connect to the endpoint URL: "{endpoint_url}"' + + +class SSLError(ConnectionError, requests.exceptions.SSLError): + fmt = 'SSL validation failed for {endpoint_url} {error}' + + +class ConnectionClosedError(HTTPClientError): + fmt = ( + 'Connection was closed before we received a valid response ' + 'from endpoint URL: "{endpoint_url}".' 
+ ) + + +class ReadTimeoutError( + HTTPClientError, + requests.exceptions.ReadTimeout, + urllib3.exceptions.ReadTimeoutError, +): + fmt = 'Read timeout on endpoint URL: "{endpoint_url}"' + + +class ConnectTimeoutError(ConnectionError, requests.exceptions.ConnectTimeout): + fmt = 'Connect timeout on endpoint URL: "{endpoint_url}"' + + +class ProxyConnectionError(ConnectionError, requests.exceptions.ProxyError): + fmt = 'Failed to connect to proxy URL: "{proxy_url}"' + + +class ResponseStreamingError(HTTPClientError): + fmt = 'An error occurred while reading from response stream: {error}' + + +class NoCredentialsError(BotoCoreError): + """ + No credentials could be found. + """ + + fmt = 'Unable to locate credentials' + + +class NoAuthTokenError(BotoCoreError): + """ + No authorization token could be found. + """ + + fmt = 'Unable to locate authorization token' + + +class TokenRetrievalError(BotoCoreError): + """ + Error attempting to retrieve a token from a remote source. + + :ivar provider: The name of the token provider. + :ivar error_msg: The msg explaining why the token could not be retrieved. + + """ + + fmt = 'Error when retrieving token from {provider}: {error_msg}' + + +class PartialCredentialsError(BotoCoreError): + """ + Only partial credentials were found. + + :ivar cred_var: The missing credential variable name. + + """ + + fmt = 'Partial credentials found in {provider}, missing: {cred_var}' + + +class CredentialRetrievalError(BotoCoreError): + """ + Error attempting to retrieve credentials from a remote source. + + :ivar provider: The name of the credential provider. + :ivar error_msg: The msg explaining why credentials could not be + retrieved. + + """ + + fmt = 'Error when retrieving credentials from {provider}: {error_msg}' + + +class UnknownSignatureVersionError(BotoCoreError): + """ + Requested Signature Version is not known. + + :ivar signature_version: The name of the requested signature version. + """ + + fmt = 'Unknown Signature Version: {signature_version}.' + + +class ServiceNotInRegionError(BotoCoreError): + """ + The service is not available in requested region. + + :ivar service_name: The name of the service. + :ivar region_name: The name of the region. + """ + + fmt = 'Service {service_name} not available in region {region_name}' + + +class BaseEndpointResolverError(BotoCoreError): + """Base error for endpoint resolving errors. + + Should never be raised directly, but clients can catch + this exception if they want to generically handle any errors + during the endpoint resolution process. + + """ + + +class NoRegionError(BaseEndpointResolverError): + """No region was specified.""" + + fmt = 'You must specify a region.' + + +class EndpointVariantError(BaseEndpointResolverError): + """ + Could not construct modeled endpoint variant. + + :ivar error_msg: The message explaining why the modeled endpoint variant + is unable to be constructed. + + """ + + fmt = ( + 'Unable to construct a modeled endpoint with the following ' + 'variant(s) {tags}: ' + ) + + +class UnknownEndpointError(BaseEndpointResolverError, ValueError): + """ + Could not construct an endpoint. + + :ivar service_name: The name of the service. + :ivar region_name: The name of the region. + """ + + fmt = ( + 'Unable to construct an endpoint for ' + '{service_name} in region {region_name}' + ) + + +class UnknownFIPSEndpointError(BaseEndpointResolverError): + """ + Could not construct a FIPS endpoint. + + :ivar service_name: The name of the service. + :ivar region_name: The name of the region. 
+ """ + + fmt = ( + 'The provided FIPS pseudo-region "{region_name}" is not known for ' + 'the service "{service_name}". A FIPS compliant endpoint cannot be ' + 'constructed.' + ) + + +class ProfileNotFound(BotoCoreError): + """ + The specified configuration profile was not found in the + configuration file. + + :ivar profile: The name of the profile the user attempted to load. + """ + + fmt = 'The config profile ({profile}) could not be found' + + +class ConfigParseError(BotoCoreError): + """ + The configuration file could not be parsed. + + :ivar path: The path to the configuration file. + """ + + fmt = 'Unable to parse config file: {path}' + + +class ConfigNotFound(BotoCoreError): + """ + The specified configuration file could not be found. + + :ivar path: The path to the configuration file. + """ + + fmt = 'The specified config file ({path}) could not be found.' + + +class MissingParametersError(BotoCoreError): + """ + One or more required parameters were not supplied. + + :ivar object: The object that has missing parameters. + This can be an operation or a parameter (in the + case of inner params). The str() of this object + will be used so it doesn't need to implement anything + other than str(). + :ivar missing: The names of the missing parameters. + """ + + fmt = ( + 'The following required parameters are missing for ' + '{object_name}: {missing}' + ) + + +class ValidationError(BotoCoreError): + """ + An exception occurred validating parameters. + + Subclasses must accept a ``value`` and ``param`` + argument in their ``__init__``. + + :ivar value: The value that was being validated. + :ivar param: The parameter that failed validation. + :ivar type_name: The name of the underlying type. + """ + + fmt = "Invalid value ('{value}') for param {param} " "of type {type_name} " + + +class ParamValidationError(BotoCoreError): + fmt = 'Parameter validation failed:\n{report}' + + +# These exceptions subclass from ValidationError so that code +# can just 'except ValidationError' to catch any possibly validation +# error. +class UnknownKeyError(ValidationError): + """ + Unknown key in a struct parameter. + + :ivar value: The value that was being checked. + :ivar param: The name of the parameter. + :ivar choices: The valid choices the value can be. + """ + + fmt = ( + "Unknown key '{value}' for param '{param}'. Must be one " + "of: {choices}" + ) + + +class RangeError(ValidationError): + """ + A parameter value was out of the valid range. + + :ivar value: The value that was being checked. + :ivar param: The parameter that failed validation. + :ivar min_value: The specified minimum value. + :ivar max_value: The specified maximum value. + """ + + fmt = ( + 'Value out of range for param {param}: ' + '{min_value} <= {value} <= {max_value}' + ) + + +class UnknownParameterError(ValidationError): + """ + Unknown top level parameter. + + :ivar name: The name of the unknown parameter. + :ivar operation: The name of the operation. + :ivar choices: The valid choices the parameter name can be. + """ + + fmt = ( + "Unknown parameter '{name}' for operation {operation}. Must be one " + "of: {choices}" + ) + + +class InvalidRegionError(ValidationError, ValueError): + """ + Invalid region_name provided to client or resource. + + :ivar region_name: region_name that was being validated. + """ + + fmt = "Provided region_name '{region_name}' doesn't match a supported format." + + +class AliasConflictParameterError(ValidationError): + """ + Error when an alias is provided for a parameter as well as the original. 
+ + :ivar original: The name of the original parameter. + :ivar alias: The name of the alias + :ivar operation: The name of the operation. + """ + + fmt = ( + "Parameter '{original}' and its alias '{alias}' were provided " + "for operation {operation}. Only one of them may be used." + ) + + +class UnknownServiceStyle(BotoCoreError): + """ + Unknown style of service invocation. + + :ivar service_style: The style requested. + """ + + fmt = 'The service style ({service_style}) is not understood.' + + +class PaginationError(BotoCoreError): + fmt = 'Error during pagination: {message}' + + +class OperationNotPageableError(BotoCoreError): + fmt = 'Operation cannot be paginated: {operation_name}' + + +class ChecksumError(BotoCoreError): + """The expected checksum did not match the calculated checksum.""" + + fmt = ( + 'Checksum {checksum_type} failed, expected checksum ' + '{expected_checksum} did not match calculated checksum ' + '{actual_checksum}.' + ) + + +class UnseekableStreamError(BotoCoreError): + """Need to seek a stream, but stream does not support seeking.""" + + fmt = ( + 'Need to rewind the stream {stream_object}, but stream ' + 'is not seekable.' + ) + + +class WaiterError(BotoCoreError): + """Waiter failed to reach desired state.""" + + fmt = 'Waiter {name} failed: {reason}' + + def __init__(self, name, reason, last_response): + super().__init__(name=name, reason=reason) + self.last_response = last_response + + +class IncompleteReadError(BotoCoreError): + """HTTP response did not return expected number of bytes.""" + + fmt = ( + '{actual_bytes} read, but total bytes ' 'expected is {expected_bytes}.' + ) + + +class InvalidExpressionError(BotoCoreError): + """Expression is either invalid or too complex.""" + + fmt = 'Invalid expression {expression}: Only dotted lookups are supported.' + + +class UnknownCredentialError(BotoCoreError): + """Tried to insert before/after an unregistered credential type.""" + + fmt = 'Credential named {name} not found.' 
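All of the classes above follow the same BotoCoreError pattern: keyword arguments fill the class-level fmt string, and the kwargs are stashed so __reduce__ can rebuild the exception across pickling. A short sketch with a hypothetical subclass::

    import pickle

    from botocore.exceptions import BotoCoreError


    class WidgetError(BotoCoreError):
        # Hypothetical subclass, for illustration only.
        fmt = 'Widget {name} failed after {attempts} attempts'


    err = WidgetError(name='frobnicator', attempts=3)
    print(str(err))    # Widget frobnicator failed after 3 attempts
    print(err.kwargs)  # {'name': 'frobnicator', 'attempts': 3}

    # __reduce__ packs the kwargs, so the keyword-only constructor
    # survives a pickle round trip (the subclass must live at module
    # level to be picklable).
    clone = pickle.loads(pickle.dumps(err))
    print(str(clone) == str(err))  # True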
+ + +class WaiterConfigError(BotoCoreError): + """Error when processing waiter configuration.""" + + fmt = 'Error processing waiter config: {error_msg}' + + +class UnknownClientMethodError(BotoCoreError): + """Error when trying to access a method on a client that does not exist.""" + + fmt = 'Client does not have method: {method_name}' + + +class UnsupportedSignatureVersionError(BotoCoreError): + """Error when trying to use an unsupported Signature Version.""" + + fmt = 'Signature version is not supported: {signature_version}' + + +class ClientError(Exception): + MSG_TEMPLATE = ( + 'An error occurred ({error_code}) when calling the {operation_name} ' + 'operation{retry_info}: {error_message}' + ) + + def __init__(self, error_response, operation_name): + retry_info = self._get_retry_info(error_response) + error = error_response.get('Error', {}) + msg = self.MSG_TEMPLATE.format( + error_code=error.get('Code', 'Unknown'), + error_message=error.get('Message', 'Unknown'), + operation_name=operation_name, + retry_info=retry_info, + ) + super().__init__(msg) + self.response = error_response + self.operation_name = operation_name + + def _get_retry_info(self, response): + retry_info = '' + if 'ResponseMetadata' in response: + metadata = response['ResponseMetadata'] + if metadata.get('MaxAttemptsReached', False): + if 'RetryAttempts' in metadata: + retry_info = ( + f" (reached max retries: {metadata['RetryAttempts']})" + ) + return retry_info + + def __reduce__(self): + # Subclasses of ClientError's are dynamically generated and + # cannot be pickled unless they are attributes of a + # module. So at the very least return a ClientError back. + return ClientError, (self.response, self.operation_name) + + +class EventStreamError(ClientError): + pass + + +class UnsupportedTLSVersionWarning(Warning): + """Warn when an openssl version that uses TLS 1.2 is required""" + + pass + + +class ImminentRemovalWarning(Warning): + pass + + +class InvalidDNSNameError(BotoCoreError): + """Error when virtual host path is forced on a non-DNS compatible bucket""" + + fmt = ( + 'Bucket named {bucket_name} is not DNS compatible. Virtual ' + 'hosted-style addressing cannot be used. The addressing style ' + 'can be configured by removing the addressing_style value ' + 'or setting that value to \'path\' or \'auto\' in the AWS Config ' + 'file or in the botocore.client.Config object.' + ) + + +class InvalidS3AddressingStyleError(BotoCoreError): + """Error when an invalid path style is specified""" + + fmt = ( + 'S3 addressing style {s3_addressing_style} is invalid. Valid options ' + 'are: \'auto\', \'virtual\', and \'path\'' + ) + + +class UnsupportedS3ArnError(BotoCoreError): + """Error when S3 ARN provided to Bucket parameter is not supported""" + + fmt = ( + 'S3 ARN {arn} provided to "Bucket" parameter is invalid. Only ' + 'ARNs for S3 access-points are supported.' + ) + + +class UnsupportedS3ControlArnError(BotoCoreError): + """Error when S3 ARN provided to S3 control parameter is not supported""" + + fmt = 'S3 ARN "{arn}" provided is invalid for this operation. {msg}' + + +class InvalidHostLabelError(BotoCoreError): + """Error when an invalid host label would be bound to an endpoint""" + + fmt = ( + 'Invalid host label to be bound to the hostname of the endpoint: ' + '"{label}".' + ) + + +class UnsupportedOutpostResourceError(BotoCoreError): + """Error when S3 Outpost ARN provided to Bucket parameter is incomplete""" + + fmt = ( + 'S3 Outpost ARN resource "{resource_name}" provided to "Bucket" ' + 'parameter is invalid. 
Only ARNs for S3 Outpost arns with an ' + 'access-point sub-resource are supported.' + ) + + +class UnsupportedS3ConfigurationError(BotoCoreError): + """Error when an unsupported configuration is used with access-points""" + + fmt = 'Unsupported configuration when using S3: {msg}' + + +class UnsupportedS3AccesspointConfigurationError(BotoCoreError): + """Error when an unsupported configuration is used with access-points""" + + fmt = 'Unsupported configuration when using S3 access-points: {msg}' + + +class InvalidEndpointDiscoveryConfigurationError(BotoCoreError): + """Error when invalid value supplied for endpoint_discovery_enabled""" + + fmt = ( + 'Unsupported configuration value for endpoint_discovery_enabled. ' + 'Expected one of ("true", "false", "auto") but got {config_value}.' + ) + + +class UnsupportedS3ControlConfigurationError(BotoCoreError): + """Error when an unsupported configuration is used with S3 Control""" + + fmt = 'Unsupported configuration when using S3 Control: {msg}' + + +class InvalidRetryConfigurationError(BotoCoreError): + """Error when invalid retry configuration is specified""" + + fmt = ( + 'Cannot provide retry configuration for "{retry_config_option}". ' + 'Valid retry configuration options are: {valid_options}' + ) + + +class InvalidMaxRetryAttemptsError(InvalidRetryConfigurationError): + """Error when invalid retry configuration is specified""" + + fmt = ( + 'Value provided to "max_attempts": {provided_max_attempts} must ' + 'be an integer greater than or equal to {min_value}.' + ) + + +class InvalidRetryModeError(InvalidRetryConfigurationError): + """Error when invalid retry mode configuration is specified""" + + fmt = ( + 'Invalid value provided to "mode": "{provided_retry_mode}" must ' + 'be one of: {valid_modes}' + ) + + +class InvalidS3UsEast1RegionalEndpointConfigError(BotoCoreError): + """Error for invalid s3 us-east-1 regional endpoints configuration""" + + fmt = ( + 'S3 us-east-1 regional endpoint option ' + '{s3_us_east_1_regional_endpoint_config} is ' + 'invalid. Valid options are: "legacy", "regional"' + ) + + +class InvalidSTSRegionalEndpointsConfigError(BotoCoreError): + """Error when invalid sts regional endpoints configuration is specified""" + + fmt = ( + 'STS regional endpoints option {sts_regional_endpoints_config} is ' + 'invalid. Valid options are: "legacy", "regional"' + ) + + +class StubResponseError(BotoCoreError): + fmt = ( + 'Error getting response stub for operation {operation_name}: {reason}' + ) + + +class StubAssertionError(StubResponseError, AssertionError): + pass + + +class UnStubbedResponseError(StubResponseError): + pass + + +class InvalidConfigError(BotoCoreError): + fmt = '{error_msg}' + + +class InfiniteLoopConfigError(InvalidConfigError): + fmt = ( + 'Infinite loop in credential configuration detected. Attempting to ' + 'load from profile {source_profile} which has already been visited. ' + 'Visited profiles: {visited_profiles}' + ) + + +class RefreshWithMFAUnsupportedError(BotoCoreError): + fmt = 'Cannot refresh credentials: MFA token required.' + + +class MD5UnavailableError(BotoCoreError): + fmt = "This system does not support MD5 generation." 
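ClientError above is also the base that errorfactory.py (earlier in this diff) subclasses dynamically with type(). A sketch that builds a service's exceptions object offline from the bundled service model; it assumes the local s3 model ships a NoSuchBucket error shape, as current botocore data files do::

    from botocore.errorfactory import ClientExceptionsFactory
    from botocore.session import get_session

    service_model = get_session().get_service_model('s3')
    exceptions = ClientExceptionsFactory().create_client_exceptions(service_model)

    # Each error shape becomes a dynamically created ClientError subclass
    # exposed as an attribute...
    print(exceptions.NoSuchBucket)
    # ...and from_code() maps a wire error code to that class, falling
    # back to the generic ClientError for unknown codes.
    print(exceptions.from_code('NoSuchBucket') is exceptions.NoSuchBucket)  # True
    print(exceptions.from_code('NotARealCode'))  # generic ClientError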
+ + +class MissingDependencyException(BotoCoreError): + fmt = "Missing Dependency: {msg}" + + +class MetadataRetrievalError(BotoCoreError): + fmt = "Error retrieving metadata: {error_msg}" + + +class UndefinedModelAttributeError(Exception): + pass + + +class MissingServiceIdError(UndefinedModelAttributeError): + fmt = ( + "The model being used for the service {service_name} is missing the " + "serviceId metadata property, which is required." + ) + + def __init__(self, **kwargs): + msg = self.fmt.format(**kwargs) + Exception.__init__(self, msg) + self.kwargs = kwargs + + +class SSOError(BotoCoreError): + fmt = ( + "An unspecified error happened when resolving AWS credentials or an " + "access token from SSO." + ) + + +class SSOTokenLoadError(SSOError): + fmt = "Error loading SSO Token: {error_msg}" + + +class UnauthorizedSSOTokenError(SSOError): + fmt = ( + "The SSO session associated with this profile has expired or is " + "otherwise invalid. To refresh this SSO session run aws sso login " + "with the corresponding profile." + ) + + +class CapacityNotAvailableError(BotoCoreError): + fmt = 'Insufficient request capacity available.' + + +class InvalidProxiesConfigError(BotoCoreError): + fmt = 'Invalid configuration value(s) provided for proxies_config.' + + +class InvalidDefaultsMode(BotoCoreError): + fmt = ( + 'Client configured with invalid defaults mode: {mode}. ' + 'Valid defaults modes include: {valid_modes}.' + ) + + +class AwsChunkedWrapperError(BotoCoreError): + fmt = '{error_msg}' + + +class FlexibleChecksumError(BotoCoreError): + fmt = '{error_msg}' + + +class InvalidEndpointConfigurationError(BotoCoreError): + fmt = 'Invalid endpoint configuration: {msg}' + + +class EndpointProviderError(BotoCoreError): + """Base error for the EndpointProvider class""" + + fmt = '{msg}' + + +class EndpointResolutionError(EndpointProviderError): + """Error when input parameters resolve to an error rule""" + + fmt = '{msg}' + + +class UnknownEndpointResolutionBuiltInName(EndpointProviderError): + fmt = 'Unknown builtin variable name: {name}' diff --git a/venv/lib/python3.10/site-packages/botocore/handlers.py b/venv/lib/python3.10/site-packages/botocore/handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..211ed0477cc673a01a8a818164365cc776329fda --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/handlers.py @@ -0,0 +1,1431 @@ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +"""Builtin event handlers. + +This module contains builtin handlers for events emitted by botocore. 
+""" + +import base64 +import copy +import logging +import os +import re +import uuid +import warnings +from io import BytesIO + +import botocore +import botocore.auth +from botocore import utils +from botocore.compat import ( + ETree, + OrderedDict, + XMLParseError, + ensure_bytes, + get_md5, + json, + quote, + unquote, + unquote_str, + urlsplit, + urlunsplit, +) +from botocore.docs.utils import ( + AppendParamDocumentation, + AutoPopulatedParam, + HideParamFromOperations, +) +from botocore.endpoint_provider import VALID_HOST_LABEL_RE +from botocore.exceptions import ( + AliasConflictParameterError, + ParamValidationError, + UnsupportedTLSVersionWarning, +) +from botocore.regions import EndpointResolverBuiltins +from botocore.signers import ( + add_generate_db_auth_token, + add_generate_presigned_post, + add_generate_presigned_url, +) +from botocore.utils import ( + SAFE_CHARS, + ArnParser, + conditionally_calculate_checksum, + conditionally_calculate_md5, + percent_encode, + switch_host_with_param, +) + +# Keep these imported. There's pre-existing code that uses them. +from botocore import retryhandler # noqa +from botocore import translate # noqa +from botocore.compat import MD5_AVAILABLE # noqa +from botocore.exceptions import MissingServiceIdError # noqa +from botocore.utils import hyphenize_service_id # noqa +from botocore.utils import is_global_accesspoint # noqa +from botocore.utils import SERVICE_NAME_ALIASES # noqa + + +logger = logging.getLogger(__name__) + +REGISTER_FIRST = object() +REGISTER_LAST = object() +# From the S3 docs: +# The rules for bucket names in the US Standard region allow bucket names +# to be as long as 255 characters, and bucket names can contain any +# combination of uppercase letters, lowercase letters, numbers, periods +# (.), hyphens (-), and underscores (_). +VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$') +_ACCESSPOINT_ARN = ( + r'^arn:(aws).*:(s3|s3-object-lambda):[a-z\-0-9]*:[0-9]{12}:accesspoint[/:]' + r'[a-zA-Z0-9\-.]{1,63}$' +) +_OUTPOST_ARN = ( + r'^arn:(aws).*:s3-outposts:[a-z\-0-9]+:[0-9]{12}:outpost[/:]' + r'[a-zA-Z0-9\-]{1,63}[/:]accesspoint[/:][a-zA-Z0-9\-]{1,63}$' +) +VALID_S3_ARN = re.compile('|'.join([_ACCESSPOINT_ARN, _OUTPOST_ARN])) +# signing names used for the services s3 and s3-control, for example in +# botocore/data/s3/2006-03-01/endpoints-rule-set-1.json +S3_SIGNING_NAMES = ('s3', 's3-outposts', 's3-object-lambda', 's3express') +VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$') + + +def handle_service_name_alias(service_name, **kwargs): + return SERVICE_NAME_ALIASES.get(service_name, service_name) + + +def add_recursion_detection_header(params, **kwargs): + has_lambda_name = 'AWS_LAMBDA_FUNCTION_NAME' in os.environ + trace_id = os.environ.get('_X_AMZN_TRACE_ID') + if has_lambda_name and trace_id: + headers = params['headers'] + if 'X-Amzn-Trace-Id' not in headers: + headers['X-Amzn-Trace-Id'] = quote(trace_id, safe='-=;:+&[]{}"\',') + + +def escape_xml_payload(params, **kwargs): + # Replace \r and \n with the escaped sequence over the whole XML document + # to avoid linebreak normalization modifying customer input when the + # document is parsed. Ideally, we would do this in ElementTree.tostring, + # but it doesn't allow us to override entity escaping for text fields. For + # this operation \r and \n can only appear in the XML document if they were + # passed as part of the customer input. 
+ body = params['body'] + if b'\r' in body: + body = body.replace(b'\r', b' ') + if b'\n' in body: + body = body.replace(b'\n', b' ') + + params['body'] = body + + +def check_for_200_error(response, **kwargs): + # From: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html + # There are two opportunities for a copy request to return an error. One + # can occur when Amazon S3 receives the copy request and the other can + # occur while Amazon S3 is copying the files. If the error occurs before + # the copy operation starts, you receive a standard Amazon S3 error. If the + # error occurs during the copy operation, the error response is embedded in + # the 200 OK response. This means that a 200 OK response can contain either + # a success or an error. Make sure to design your application to parse the + # contents of the response and handle it appropriately. + # + # So this handler checks for this case. Even though the server sends a + # 200 response, conceptually this should be handled exactly like a + # 500 response (with respect to raising exceptions, retries, etc.) + # We're connected *before* all the other retry logic handlers, so as long + # as we switch the error code to 500, we'll retry the error as expected. + if response is None: + # A None response can happen if an exception is raised while + # trying to retrieve the response. See Endpoint._get_response(). + return + http_response, parsed = response + if _looks_like_special_case_error(http_response): + logger.debug( + "Error found for response with 200 status code, " + "errors: %s, changing status code to " + "500.", + parsed, + ) + http_response.status_code = 500 + + +def _looks_like_special_case_error(http_response): + if http_response.status_code == 200: + try: + parser = ETree.XMLParser( + target=ETree.TreeBuilder(), encoding='utf-8' + ) + parser.feed(http_response.content) + root = parser.close() + except XMLParseError: + # In cases of network disruptions, we may end up with a partial + # streamed response from S3. We need to treat these cases as + # 500 Service Errors and try again. + return True + if root.tag == 'Error': + return True + return False + + +def set_operation_specific_signer(context, signing_name, **kwargs): + """Choose the operation-specific signer. + + Individual operations may have a different auth type than the service as a + whole. This will most often manifest as operations that should not be + authenticated at all, but can include other auth modes such as sigv4 + without body signing. + """ + auth_type = context.get('auth_type') + + # Auth type will be None if the operation doesn't have a configured auth + # type. + if not auth_type: + return + + # Auth type will be the string value 'none' if the operation should not + # be signed at all. + if auth_type == 'none': + return botocore.UNSIGNED + + if auth_type == 'bearer': + return 'bearer' + + if auth_type.startswith('v4'): + if auth_type == 'v4-s3express': + return auth_type + + if auth_type == 'v4a': + # If sigv4a is chosen, we must add additional signing config for + # global signature. + signing = {'region': '*', 'signing_name': signing_name} + if 'signing' in context: + context['signing'].update(signing) + else: + context['signing'] = signing + signature_version = 'v4a' + else: + signature_version = 'v4' + + # If the operation needs an unsigned body, we set additional context + # allowing the signer to be aware of this. 
+ if auth_type == 'v4-unsigned-body': + context['payload_signing_enabled'] = False + + # Signing names used by s3 and s3-control use customized signers "s3v4" + # and "s3v4a". + if signing_name in S3_SIGNING_NAMES: + signature_version = f's3{signature_version}' + + return signature_version + + +def decode_console_output(parsed, **kwargs): + if 'Output' in parsed: + try: + # We're using 'replace' for errors because it is + # possible that console output contains non string + # chars we can't utf-8 decode. + value = base64.b64decode( + bytes(parsed['Output'], 'latin-1') + ).decode('utf-8', 'replace') + parsed['Output'] = value + except (ValueError, TypeError, AttributeError): + logger.debug('Error decoding base64', exc_info=True) + + +def generate_idempotent_uuid(params, model, **kwargs): + for name in model.idempotent_members: + if name not in params: + params[name] = str(uuid.uuid4()) + logger.debug( + f"injecting idempotency token ({params[name]}) into param '{name}'." + ) + + +def decode_quoted_jsondoc(value): + try: + value = json.loads(unquote(value)) + except (ValueError, TypeError): + logger.debug('Error loading quoted JSON', exc_info=True) + return value + + +def json_decode_template_body(parsed, **kwargs): + if 'TemplateBody' in parsed: + try: + value = json.loads( + parsed['TemplateBody'], object_pairs_hook=OrderedDict + ) + parsed['TemplateBody'] = value + except (ValueError, TypeError): + logger.debug('error loading JSON', exc_info=True) + + +def validate_bucket_name(params, **kwargs): + if 'Bucket' not in params: + return + bucket = params['Bucket'] + if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket): + error_msg = ( + f'Invalid bucket name "{bucket}": Bucket name must match ' + f'the regex "{VALID_BUCKET.pattern}" or be an ARN matching ' + f'the regex "{VALID_S3_ARN.pattern}"' + ) + raise ParamValidationError(report=error_msg) + + +def sse_md5(params, **kwargs): + """ + S3 server-side encryption requires the encryption key to be sent to the + server base64 encoded, as well as a base64-encoded MD5 hash of the + encryption key. This handler does both if the MD5 has not been set by + the caller. + """ + _sse_md5(params, 'SSECustomer') + + +def copy_source_sse_md5(params, **kwargs): + """ + S3 server-side encryption requires the encryption key to be sent to the + server base64 encoded, as well as a base64-encoded MD5 hash of the + encryption key. This handler does both if the MD5 has not been set by + the caller specifically if the parameter is for the copy-source sse-c key. + """ + _sse_md5(params, 'CopySourceSSECustomer') + + +def _sse_md5(params, sse_member_prefix='SSECustomer'): + if not _needs_s3_sse_customization(params, sse_member_prefix): + return + + sse_key_member = sse_member_prefix + 'Key' + sse_md5_member = sse_member_prefix + 'KeyMD5' + key_as_bytes = params[sse_key_member] + if isinstance(key_as_bytes, str): + key_as_bytes = key_as_bytes.encode('utf-8') + key_md5_str = base64.b64encode(get_md5(key_as_bytes).digest()).decode( + 'utf-8' + ) + key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8') + params[sse_key_member] = key_b64_encoded + params[sse_md5_member] = key_md5_str + + +def _needs_s3_sse_customization(params, sse_member_prefix): + return ( + params.get(sse_member_prefix + 'Key') is not None + and sse_member_prefix + 'KeyMD5' not in params + ) + + +def disable_signing(**kwargs): + """ + This handler disables request signing by setting the signer + name to a special sentinel value. 
+ """ + return botocore.UNSIGNED + + +def add_expect_header(model, params, **kwargs): + if model.http.get('method', '') not in ['PUT', 'POST']: + return + if 'body' in params: + body = params['body'] + if hasattr(body, 'read'): + check_body = utils.ensure_boolean( + os.environ.get( + 'BOTO_EXPERIMENTAL__NO_EMPTY_CONTINUE', + False, + ) + ) + if check_body and utils.determine_content_length(body) == 0: + return + # Any file like object will use an expect 100-continue + # header regardless of size. + logger.debug("Adding expect 100 continue header to request.") + params['headers']['Expect'] = '100-continue' + + +class DeprecatedServiceDocumenter: + def __init__(self, replacement_service_name): + self._replacement_service_name = replacement_service_name + + def inject_deprecation_notice(self, section, event_name, **kwargs): + section.style.start_important() + section.write('This service client is deprecated. Please use ') + section.style.ref( + self._replacement_service_name, + self._replacement_service_name, + ) + section.write(' instead.') + section.style.end_important() + + +def document_copy_source_form(section, event_name, **kwargs): + if 'request-example' in event_name: + parent = section.get_section('structure-value') + param_line = parent.get_section('CopySource') + value_portion = param_line.get_section('member-value') + value_portion.clear_text() + value_portion.write( + "'string' or {'Bucket': 'string', " + "'Key': 'string', 'VersionId': 'string'}" + ) + elif 'request-params' in event_name: + param_section = section.get_section('CopySource') + type_section = param_section.get_section('param-type') + type_section.clear_text() + type_section.write(':type CopySource: str or dict') + doc_section = param_section.get_section('param-documentation') + doc_section.clear_text() + doc_section.write( + "The name of the source bucket, key name of the source object, " + "and optional version ID of the source object. You can either " + "provide this value as a string or a dictionary. The " + "string form is {bucket}/{key} or " + "{bucket}/{key}?versionId={versionId} if you want to copy a " + "specific version. You can also provide this value as a " + "dictionary. The dictionary format is recommended over " + "the string format because it is more explicit. The dictionary " + "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}." + " Note that the VersionId key is optional and may be omitted." + " To specify an S3 access point, provide the access point" + " ARN for the ``Bucket`` key in the copy source dictionary. If you" + " want to provide the copy source for an S3 access point as a" + " string instead of a dictionary, the ARN provided must be the" + " full S3 access point object ARN" + " (i.e. {accesspoint_arn}/object/{key})" + ) + + +def handle_copy_source_param(params, **kwargs): + """Convert CopySource param for CopyObject/UploadPartCopy. + + This handler will deal with two cases: + + * CopySource provided as a string. We'll make a best effort + to URL encode the key name as required. This will require + parsing the bucket and version id from the CopySource value + and only encoding the key. + * CopySource provided as a dict. In this case we're + explicitly given the Bucket, Key, and VersionId so we're + able to encode the key and ensure this value is serialized + and correctly sent to S3. + + """ + source = params.get('CopySource') + if source is None: + # The call will eventually fail but we'll let the + # param validator take care of this. 
It will + # give a better error message. + return + if isinstance(source, str): + params['CopySource'] = _quote_source_header(source) + elif isinstance(source, dict): + params['CopySource'] = _quote_source_header_from_dict(source) + + +def _quote_source_header_from_dict(source_dict): + try: + bucket = source_dict['Bucket'] + key = source_dict['Key'] + version_id = source_dict.get('VersionId') + if VALID_S3_ARN.search(bucket): + final = f'{bucket}/object/{key}' + else: + final = f'{bucket}/{key}' + except KeyError as e: + raise ParamValidationError( + report=f'Missing required parameter: {str(e)}' + ) + final = percent_encode(final, safe=SAFE_CHARS + '/') + if version_id is not None: + final += f'?versionId={version_id}' + return final + + +def _quote_source_header(value): + result = VERSION_ID_SUFFIX.search(value) + if result is None: + return percent_encode(value, safe=SAFE_CHARS + '/') + else: + first, version_id = value[: result.start()], value[result.start() :] + return percent_encode(first, safe=SAFE_CHARS + '/') + version_id + + +def _get_cross_region_presigned_url( + request_signer, request_dict, model, source_region, destination_region +): + # The better way to do this is to actually get the + # endpoint_resolver and get the endpoint_url given the + # source region. In this specific case, we know that + # we can safely replace the dest region with the source + # region because of the supported EC2 regions, but in + # general this is not a safe assumption to make. + # I think eventually we should try to plumb through something + # that allows us to resolve endpoints from regions. + request_dict_copy = copy.deepcopy(request_dict) + request_dict_copy['body']['DestinationRegion'] = destination_region + request_dict_copy['url'] = request_dict['url'].replace( + destination_region, source_region + ) + request_dict_copy['method'] = 'GET' + request_dict_copy['headers'] = {} + return request_signer.generate_presigned_url( + request_dict_copy, region_name=source_region, operation_name=model.name + ) + + +def _get_presigned_url_source_and_destination_regions(request_signer, params): + # Gets the source and destination regions to be used + destination_region = request_signer._region_name + source_region = params.get('SourceRegion') + return source_region, destination_region + + +def inject_presigned_url_ec2(params, request_signer, model, **kwargs): + # The customer can still provide this, so we should pass if they do. + if 'PresignedUrl' in params['body']: + return + src, dest = _get_presigned_url_source_and_destination_regions( + request_signer, params['body'] + ) + url = _get_cross_region_presigned_url( + request_signer, params, model, src, dest + ) + params['body']['PresignedUrl'] = url + # EC2 Requires that the destination region be sent over the wire in + # addition to the source region. + params['body']['DestinationRegion'] = dest + + +def inject_presigned_url_rds(params, request_signer, model, **kwargs): + # SourceRegion is not required for RDS operations, so it's possible that + # it isn't set. In that case it's probably a local copy so we don't need + # to do anything else. + if 'SourceRegion' not in params['body']: + return + + src, dest = _get_presigned_url_source_and_destination_regions( + request_signer, params['body'] + ) + + # Since SourceRegion isn't actually modeled for RDS, it needs to be + # removed from the request params before we send the actual request. 
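+    # (E.g. a hypothetical CopyDBSnapshot call with SourceRegion='us-west-2'
+    # gets a PreSignedUrl generated against us-west-2, while SourceRegion
+    # itself is dropped below and never sent to the service.)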
+ del params['body']['SourceRegion'] + + if 'PreSignedUrl' in params['body']: + return + + url = _get_cross_region_presigned_url( + request_signer, params, model, src, dest + ) + params['body']['PreSignedUrl'] = url + + +def json_decode_policies(parsed, model, **kwargs): + # Any time an IAM operation returns a policy document + # it is a string that is json that has been urlencoded, + # i.e urlencode(json.dumps(policy_document)). + # To give users something more useful, we will urldecode + # this value and json.loads() the result so that they have + # the policy document as a dictionary. + output_shape = model.output_shape + if output_shape is not None: + _decode_policy_types(parsed, model.output_shape) + + +def _decode_policy_types(parsed, shape): + # IAM consistently uses the policyDocumentType shape to indicate + # strings that have policy documents. + shape_name = 'policyDocumentType' + if shape.type_name == 'structure': + for member_name, member_shape in shape.members.items(): + if ( + member_shape.type_name == 'string' + and member_shape.name == shape_name + and member_name in parsed + ): + parsed[member_name] = decode_quoted_jsondoc( + parsed[member_name] + ) + elif member_name in parsed: + _decode_policy_types(parsed[member_name], member_shape) + if shape.type_name == 'list': + shape_member = shape.member + for item in parsed: + _decode_policy_types(item, shape_member) + + +def parse_get_bucket_location(parsed, http_response, **kwargs): + # s3.GetBucketLocation cannot be modeled properly. To + # account for this we just manually parse the XML document. + # The "parsed" passed in only has the ResponseMetadata + # filled out. This handler will fill in the LocationConstraint + # value. + if http_response.raw is None: + return + response_body = http_response.content + parser = ETree.XMLParser(target=ETree.TreeBuilder(), encoding='utf-8') + parser.feed(response_body) + root = parser.close() + region = root.text + parsed['LocationConstraint'] = region + + +def base64_encode_user_data(params, **kwargs): + if 'UserData' in params: + if isinstance(params['UserData'], str): + # Encode it to bytes if it is text. + params['UserData'] = params['UserData'].encode('utf-8') + params['UserData'] = base64.b64encode(params['UserData']).decode( + 'utf-8' + ) + + +def document_base64_encoding(param): + description = ( + '**This value will be base64 encoded automatically. Do ' + 'not base64 encode this value prior to performing the ' + 'operation.**' + ) + append = AppendParamDocumentation(param, description) + return append.append_documentation + + +def validate_ascii_metadata(params, **kwargs): + """Verify S3 Metadata only contains ascii characters. + + From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html + + "Amazon S3 stores user-defined metadata in lowercase. Each name, value pair + must conform to US-ASCII when using REST and UTF-8 when using SOAP or + browser-based uploads via POST." + + """ + metadata = params.get('Metadata') + if not metadata or not isinstance(metadata, dict): + # We have to at least type check the metadata as a dict type + # because this handler is called before param validation. + # We'll go ahead and return because the param validator will + # give a descriptive error message for us. + # We might need a post-param validation event. 
+        return
+    for key, value in metadata.items():
+        try:
+            key.encode('ascii')
+            value.encode('ascii')
+        except UnicodeEncodeError:
+            error_msg = (
+                'Non ascii characters found in S3 metadata '
+                f'for key "{key}", value: "{value}". \nS3 metadata can only '
+                'contain ASCII characters. '
+            )
+            raise ParamValidationError(report=error_msg)
+
+
+def fix_route53_ids(params, model, **kwargs):
+    """
+    Check for and split apart Route53 resource IDs, setting
+    only the last piece. This allows the output of one operation
+    (e.g. ``'foo/1234'``) to be used as input in another
+    operation (e.g. it expects just ``'1234'``).
+    """
+    input_shape = model.input_shape
+    if not input_shape or not hasattr(input_shape, 'members'):
+        return
+
+    members = [
+        name
+        for (name, shape) in input_shape.members.items()
+        if shape.name in ['ResourceId', 'DelegationSetId', 'ChangeId']
+    ]
+
+    for name in members:
+        if name in params:
+            orig_value = params[name]
+            params[name] = orig_value.split('/')[-1]
+            logger.debug('%s %s -> %s', name, orig_value, params[name])
+
+
+def inject_account_id(params, **kwargs):
+    if params.get('accountId') is None:
+        # Glacier requires accountId, but allows you
+        # to specify '-' for the current owner's account.
+        # We add this default value if the user does not
+        # provide the accountId as a convenience.
+        params['accountId'] = '-'
+
+
+def add_glacier_version(model, params, **kwargs):
+    request_dict = params
+    request_dict['headers']['x-amz-glacier-version'] = model.metadata[
+        'apiVersion'
+    ]
+
+
+def add_accept_header(model, params, **kwargs):
+    if params['headers'].get('Accept', None) is None:
+        request_dict = params
+        request_dict['headers']['Accept'] = 'application/json'
+
+
+def add_glacier_checksums(params, **kwargs):
+    """Add glacier checksums to the http request.
+
+    This will add two headers to the http request:
+
+    * x-amz-content-sha256
+    * x-amz-sha256-tree-hash
+
+    These values will only be added if they are not present
+    in the HTTP request.
+
+    """
+    request_dict = params
+    headers = request_dict['headers']
+    body = request_dict['body']
+    if isinstance(body, bytes):
+        # If the user provided a bytes type instead of a file
+        # like object, we temporarily create a BytesIO object
+        # so we can use the util functions to calculate the
+        # checksums which assume file like objects. Note that
+        # we're not actually changing the body in the request_dict.
+        body = BytesIO(body)
+    starting_position = body.tell()
+    if 'x-amz-content-sha256' not in headers:
+        headers['x-amz-content-sha256'] = utils.calculate_sha256(
+            body, as_hex=True
+        )
+    body.seek(starting_position)
+    if 'x-amz-sha256-tree-hash' not in headers:
+        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
+        body.seek(starting_position)
+
+
+def document_glacier_tree_hash_checksum():
+    doc = '''
+        This is a required field.
+
+        Ideally you will want to compute this value with checksums from
+        previous uploaded parts, using the algorithm described in
+        `Glacier documentation <http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html>`_.
+
+        But if you prefer, you can also use botocore.utils.calculate_tree_hash()
+        to compute it from a raw file by::
+
+            checksum = calculate_tree_hash(open('your_file.txt', 'rb'))
+
+        '''
+    return AppendParamDocumentation('checksum', doc).append_documentation
+
+
+def document_cloudformation_get_template_return_type(
+    section, event_name, **kwargs
+):
+    if 'response-params' in event_name:
+        template_body_section = section.get_section('TemplateBody')
+        type_section = template_body_section.get_section('param-type')
+        type_section.clear_text()
+        type_section.write('(*dict*) --')
+    elif 'response-example' in event_name:
+        parent = section.get_section('structure-value')
+        param_line = parent.get_section('TemplateBody')
+        value_portion = param_line.get_section('member-value')
+        value_portion.clear_text()
+        value_portion.write('{}')
+
+
+def switch_host_machinelearning(request, **kwargs):
+    switch_host_with_param(request, 'PredictEndpoint')
+
+
+def check_openssl_supports_tls_version_1_2(**kwargs):
+    import ssl
+
+    try:
+        openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
+        if openssl_version_tuple < (1, 0, 1):
+            warnings.warn(
+                f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not '
+                'support TLS 1.2, which is required for use of iot-data. '
+                'Please use python installed with openssl version 1.0.1 or '
+                'higher.',
+                UnsupportedTLSVersionWarning,
+            )
+    # We cannot check the openssl version on python2.6, so we should just
+    # pass on this convenience check.
+    except AttributeError:
+        pass
+
+
+def change_get_to_post(request, **kwargs):
+    # This is useful when we need to change a potentially large GET request
+    # into a POST with x-www-form-urlencoded encoding.
+    if request.method == 'GET' and '?' in request.url:
+        request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
+        request.method = 'POST'
+        request.url, request.data = request.url.split('?', 1)
+
+
+def set_list_objects_encoding_type_url(params, context, **kwargs):
+    if 'EncodingType' not in params:
+        # We set this context so that we know it wasn't the customer that
+        # requested the encoding.
+        context['encoding_type_auto_set'] = True
+        params['EncodingType'] = 'url'
+
+
+def decode_list_object(parsed, context, **kwargs):
+    # This is needed because we are passing url as the encoding type. Since the
+    # paginator is based on the key, we need to handle it before it can be
+    # round tripped.
+    #
+    # From the documentation: If you specify encoding-type request parameter,
+    # Amazon S3 includes this element in the response, and returns encoded key
+    # name values in the following response elements:
+    # Delimiter, Marker, Prefix, NextMarker, Key.
+    _decode_list_object(
+        top_level_keys=['Delimiter', 'Marker', 'NextMarker'],
+        nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')],
+        parsed=parsed,
+        context=context,
+    )
+
+
+def decode_list_object_v2(parsed, context, **kwargs):
+    # From the documentation: If you specify encoding-type request parameter,
+    # Amazon S3 includes this element in the response, and returns encoded key
+    # name values in the following response elements:
+    # Delimiter, Prefix, ContinuationToken, Key, and StartAfter.
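+    # (For example, a hypothetical key 'a&b.txt' would come back as
+    # 'a%26b.txt' and is restored to 'a&b.txt' here via unquote_str.)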
+ _decode_list_object( + top_level_keys=['Delimiter', 'Prefix', 'StartAfter'], + nested_keys=[('Contents', 'Key'), ('CommonPrefixes', 'Prefix')], + parsed=parsed, + context=context, + ) + + +def decode_list_object_versions(parsed, context, **kwargs): + # From the documentation: If you specify encoding-type request parameter, + # Amazon S3 includes this element in the response, and returns encoded key + # name values in the following response elements: + # KeyMarker, NextKeyMarker, Prefix, Key, and Delimiter. + _decode_list_object( + top_level_keys=[ + 'KeyMarker', + 'NextKeyMarker', + 'Prefix', + 'Delimiter', + ], + nested_keys=[ + ('Versions', 'Key'), + ('DeleteMarkers', 'Key'), + ('CommonPrefixes', 'Prefix'), + ], + parsed=parsed, + context=context, + ) + + +def _decode_list_object(top_level_keys, nested_keys, parsed, context): + if parsed.get('EncodingType') == 'url' and context.get( + 'encoding_type_auto_set' + ): + # URL decode top-level keys in the response if present. + for key in top_level_keys: + if key in parsed: + parsed[key] = unquote_str(parsed[key]) + # URL decode nested keys from the response if present. + for top_key, child_key in nested_keys: + if top_key in parsed: + for member in parsed[top_key]: + member[child_key] = unquote_str(member[child_key]) + + +def convert_body_to_file_like_object(params, **kwargs): + if 'Body' in params: + if isinstance(params['Body'], str): + params['Body'] = BytesIO(ensure_bytes(params['Body'])) + elif isinstance(params['Body'], bytes): + params['Body'] = BytesIO(params['Body']) + + +def _add_parameter_aliases(handler_list): + # Mapping of original parameter to parameter alias. + # The key is ..parameter + # The first part of the key is used for event registration. + # The last part is the original parameter name and the value is the + # alias to expose in documentation. + aliases = { + 'ec2.*.Filter': 'Filters', + 'logs.CreateExportTask.from': 'fromTime', + 'cloudsearchdomain.Search.return': 'returnFields', + } + + for original, new_name in aliases.items(): + event_portion, original_name = original.rsplit('.', 1) + parameter_alias = ParameterAlias(original_name, new_name) + + # Add the handlers to the list of handlers. + # One handler is to handle when users provide the alias. + # The other handler is to update the documentation to show only + # the alias. + parameter_build_event_handler_tuple = ( + 'before-parameter-build.' + event_portion, + parameter_alias.alias_parameter_in_call, + REGISTER_FIRST, + ) + docs_event_handler_tuple = ( + 'docs.*.' + event_portion + '.complete-section', + parameter_alias.alias_parameter_in_documentation, + ) + handler_list.append(parameter_build_event_handler_tuple) + handler_list.append(docs_event_handler_tuple) + + +class ParameterAlias: + def __init__(self, original_name, alias_name): + self._original_name = original_name + self._alias_name = alias_name + + def alias_parameter_in_call(self, params, model, **kwargs): + if model.input_shape: + # Only consider accepting the alias if it is modeled in the + # input shape. + if self._original_name in model.input_shape.members: + if self._alias_name in params: + if self._original_name in params: + raise AliasConflictParameterError( + original=self._original_name, + alias=self._alias_name, + operation=model.name, + ) + # Remove the alias parameter value and use the old name + # instead. 
+                    params[self._original_name] = params.pop(self._alias_name)
+
+    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
+        if event_name.startswith('docs.request-params'):
+            if self._original_name not in section.available_sections:
+                return
+            # Replace the name for parameter type
+            param_section = section.get_section(self._original_name)
+            param_type_section = param_section.get_section('param-type')
+            self._replace_content(param_type_section)
+
+            # Replace the name for the parameter description
+            param_name_section = param_section.get_section('param-name')
+            self._replace_content(param_name_section)
+        elif event_name.startswith('docs.request-example'):
+            section = section.get_section('structure-value')
+            if self._original_name not in section.available_sections:
+                return
+            # Replace the name for the example
+            param_section = section.get_section(self._original_name)
+            self._replace_content(param_section)
+
+    def _replace_content(self, section):
+        content = section.getvalue().decode('utf-8')
+        updated_content = content.replace(
+            self._original_name, self._alias_name
+        )
+        section.clear_text()
+        section.write(updated_content)
+
+
+class ClientMethodAlias:
+    def __init__(self, actual_name):
+        """Aliases a non-extant method to an existing method.
+
+        :param actual_name: The name of the method that actually exists on
+            the client.
+        """
+        self._actual = actual_name
+
+    def __call__(self, client, **kwargs):
+        return getattr(client, self._actual)
+
+
+# TODO: Remove this class as it is no longer used
+class HeaderToHostHoister:
+    """Takes a header and moves it to the front of the hoststring."""
+
+    _VALID_HOSTNAME = re.compile(r'(?!-)[a-z\d-]{1,63}(?<!-)$', re.IGNORECASE)
+
+    def __init__(self, header_name):
+        self._header_name = header_name
+
+    def hoist(self, params, **kwargs):
+        """Hoist a header to the hostname.
+
+        Hoist a header to the beginning of the hostname with a suffix "." after
+        it. The original header should be removed from the header map. This
+        method is intended to be used as a target for the before-call event.
+        """
+        if self._header_name not in params['headers']:
+            return
+        header_value = params['headers'][self._header_name]
+        self._ensure_header_is_valid_host(header_value)
+        original_url = params['url']
+        new_url = self._prepend_to_host(original_url, header_value)
+        params['url'] = new_url
+
+    def _ensure_header_is_valid_host(self, header):
+        match = self._VALID_HOSTNAME.match(header)
+        if not match:
+            raise ParamValidationError(
+                report=(
+                    'Hostnames must contain only - and alphanumeric characters, '
+                    'and between 1 and 63 characters long.'
+                )
+            )
+
+    def _prepend_to_host(self, url, prefix):
+        url_components = urlsplit(url)
+        parts = url_components.netloc.split('.')
+        parts = [prefix] + parts
+        new_netloc = '.'.join(parts)
+        new_components = (
+            url_components.scheme,
+            new_netloc,
+            url_components.path,
+            url_components.query,
+            '',
+        )
+        new_url = urlunsplit(new_components)
+        return new_url
+
+
+def inject_api_version_header_if_needed(model, params, **kwargs):
+    if not model.is_endpoint_discovery_operation:
+        return
+    params['headers']['x-amz-api-version'] = model.service_model.api_version
+
+
+def remove_lex_v2_start_conversation(class_attributes, **kwargs):
+    """Operation requires h2 which is currently unsupported in Python"""
+    if 'start_conversation' in class_attributes:
+        del class_attributes['start_conversation']
+
+
+def remove_qbusiness_chat(class_attributes, **kwargs):
+    """Operation requires h2 which is currently unsupported in Python"""
+    if 'chat' in class_attributes:
+        del class_attributes['chat']
+
+
+def add_retry_headers(request, **kwargs):
+    retries_context = request.context.get('retries')
+    headers = request.headers
+    headers['amz-sdk-invocation-id'] = retries_context['invocation-id']
+
+    sdk_retry_keys = ('ttl', 'attempt', 'max')
+    sdk_request_headers = [
+        f'{key}={retries_context[key]}'
+        for key in sdk_retry_keys
+        if key in retries_context
+    ]
+    headers['amz-sdk-request'] = '; '.join(sdk_request_headers)
+
+
+def remove_bucket_from_url_paths_from_model(params, model, context, **kwargs):
+    """Strips the leading `{Bucket}/` from any operation that uses the
+    S3 service's endpoint ruleset.
+
+    This change is applied to the operation model during the first time the
+    operation is invoked and then stays in effect for the lifetime of the
+    client object.
+
+    When the ruleset based endpoint resolver is in effect, both the endpoint
+    ruleset AND the service model place the bucket name in the final URL.
+    The result is an invalid URL. This handler modifies the operation model to
+    no longer place the bucket name. Previous versions of botocore fixed the
+    URL after the fact when necessary. Since the introduction of ruleset based
+    endpoint resolution, the problem exists in ALL URLs that contain a bucket
+    name and can therefore be addressed before the URL gets assembled.
+    """
+    req_uri = model.http['requestUri']
+    bucket_path = '/{Bucket}'
+    if req_uri.startswith(bucket_path):
+        model.http['requestUri'] = req_uri[len(bucket_path) :]
+
+        # Strip query off the requestUri before using as authPath. The
+        # authPath will be built separately.
+        req_uri = req_uri.split('?')[0]
+
+        # If the request URI is ONLY the bucket, the auth_path must be
+        # terminated with a '/' character to generate a signature that the
+        # server will accept.
+        needs_slash = req_uri == bucket_path
+        model.http['authPath'] = f'{req_uri}/' if needs_slash else req_uri
+
+
+def remove_accid_host_prefix_from_model(params, model, context, **kwargs):
+    """Removes the `{AccountId}.` prefix from the operation model.
+
+    This change is applied to the operation model during the first time the
+    operation is invoked and then stays in effect for the lifetime of the
+    client object.
+
+    When the ruleset based endpoint resolver is in effect, both the endpoint
+    ruleset AND the service model place the {AccountId}. prefix in the URL.
+    The result is an invalid endpoint. This handler modifies the operation
+    model to remove the `endpoint.hostPrefix` field while leaving the
+    `RequiresAccountId` static context parameter in place.
+    """
+    has_ctx_param = any(
+        ctx_param.name == 'RequiresAccountId' and ctx_param.value is True
+        for ctx_param in model.static_context_parameters
+    )
+    if (
+        model.endpoint is not None
+        and model.endpoint.get('hostPrefix') == '{AccountId}.'
+        and has_ctx_param
+    ):
+        del model.endpoint['hostPrefix']
+
+
+def remove_arn_from_signing_path(request, **kwargs):
+    auth_path = request.auth_path
+    if isinstance(auth_path, str) and auth_path.startswith('/arn%3A'):
+        auth_path_parts = auth_path.split('/')
+        if len(auth_path_parts) > 1 and ArnParser.is_arn(
+            unquote(auth_path_parts[1])
+        ):
+            request.auth_path = '/'.join(['', *auth_path_parts[2:]])
+
+
+def customize_endpoint_resolver_builtins(
+    builtins, model, params, context, **kwargs
+):
+    """Modify builtin parameter values for endpoint resolver
+
+    Modifies the builtins dict in place. Changes are in effect for one call.
+    The corresponding event is emitted only if at least one builtin parameter
+    value is required for endpoint resolution for the operation.
+    """
+    bucket_name = params.get('Bucket')
+    bucket_is_arn = bucket_name is not None and ArnParser.is_arn(bucket_name)
+    # In some situations the host will return AuthorizationHeaderMalformed
+    # when the signing region of a sigv4 request is not the bucket's
+    # region (which is likely unknown by the user of GetBucketLocation).
+    # Avoid this by always using path-style addressing.
+    if model.name == 'GetBucketLocation':
+        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = True
+    # All situations where the bucket name is an ARN are not compatible
+    # with path style addressing.
+    elif bucket_is_arn:
+        builtins[EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE] = False
+
+    # Bucket names that are invalid host labels require path-style addressing.
+    # If path-style addressing was specifically requested, the default builtin
+    # value is already set.
+    path_style_required = (
+        bucket_name is not None and not VALID_HOST_LABEL_RE.match(bucket_name)
+    )
+    path_style_requested = builtins[
+        EndpointResolverBuiltins.AWS_S3_FORCE_PATH_STYLE
+    ]
+
+    # Path-style addressing is incompatible with the global endpoint for
+    # presigned URLs. If the bucket name is an ARN, the ARN's region should be
+    # used in the endpoint.
+ if ( + context.get('use_global_endpoint') + and not path_style_required + and not path_style_requested + and not bucket_is_arn + and not utils.is_s3express_bucket(bucket_name) + ): + builtins[EndpointResolverBuiltins.AWS_REGION] = 'aws-global' + builtins[EndpointResolverBuiltins.AWS_S3_USE_GLOBAL_ENDPOINT] = True + + +def remove_content_type_header_for_presigning(request, **kwargs): + if ( + request.context.get('is_presign_request') is True + and 'Content-Type' in request.headers + ): + del request.headers['Content-Type'] + + +# This is a list of (event_name, handler). +# When a Session is created, everything in this list will be +# automatically registered with that Session. + +BUILTIN_HANDLERS = [ + ('choose-service-name', handle_service_name_alias), + ( + 'getattr.mturk.list_hi_ts_for_qualification_type', + ClientMethodAlias('list_hits_for_qualification_type'), + ), + ( + 'before-parameter-build.s3.UploadPart', + convert_body_to_file_like_object, + REGISTER_LAST, + ), + ( + 'before-parameter-build.s3.PutObject', + convert_body_to_file_like_object, + REGISTER_LAST, + ), + ('creating-client-class', add_generate_presigned_url), + ('creating-client-class.s3', add_generate_presigned_post), + ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2), + ('creating-client-class.lex-runtime-v2', remove_lex_v2_start_conversation), + ('creating-client-class.qbusiness', remove_qbusiness_chat), + ('after-call.iam', json_decode_policies), + ('after-call.ec2.GetConsoleOutput', decode_console_output), + ('after-call.cloudformation.GetTemplate', json_decode_template_body), + ('after-call.s3.GetBucketLocation', parse_get_bucket_location), + ('before-parameter-build', generate_idempotent_uuid), + ('before-parameter-build.s3', validate_bucket_name), + ('before-parameter-build.s3', remove_bucket_from_url_paths_from_model), + ( + 'before-parameter-build.s3.ListObjects', + set_list_objects_encoding_type_url, + ), + ( + 'before-parameter-build.s3.ListObjectsV2', + set_list_objects_encoding_type_url, + ), + ( + 'before-parameter-build.s3.ListObjectVersions', + set_list_objects_encoding_type_url, + ), + ('before-parameter-build.s3.CopyObject', handle_copy_source_param), + ('before-parameter-build.s3.UploadPartCopy', handle_copy_source_param), + ('before-parameter-build.s3.CopyObject', validate_ascii_metadata), + ('before-parameter-build.s3.PutObject', validate_ascii_metadata), + ( + 'before-parameter-build.s3.CreateMultipartUpload', + validate_ascii_metadata, + ), + ('before-parameter-build.s3-control', remove_accid_host_prefix_from_model), + ('docs.*.s3.CopyObject.complete-section', document_copy_source_form), + ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form), + ('before-endpoint-resolution.s3', customize_endpoint_resolver_builtins), + ('before-call', add_recursion_detection_header), + ('before-call.s3', add_expect_header), + ('before-call.glacier', add_glacier_version), + ('before-call.apigateway', add_accept_header), + ('before-call.s3.PutObject', conditionally_calculate_checksum), + ('before-call.s3.UploadPart', conditionally_calculate_md5), + ('before-call.s3.DeleteObjects', escape_xml_payload), + ('before-call.s3.DeleteObjects', conditionally_calculate_checksum), + ('before-call.s3.PutBucketLifecycleConfiguration', escape_xml_payload), + ('before-call.glacier.UploadArchive', add_glacier_checksums), + ('before-call.glacier.UploadMultipartPart', add_glacier_checksums), + ('before-call.ec2.CopySnapshot', inject_presigned_url_ec2), + ('request-created', 
add_retry_headers),
+    ('request-created.machinelearning.Predict', switch_host_machinelearning),
+    ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
+    ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
+    (
+        'needs-retry.s3.CompleteMultipartUpload',
+        check_for_200_error,
+        REGISTER_FIRST,
+    ),
+    ('choose-signer.cognito-identity.GetId', disable_signing),
+    ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
+    ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
+    (
+        'choose-signer.cognito-identity.GetCredentialsForIdentity',
+        disable_signing,
+    ),
+    ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
+    ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
+    ('choose-signer', set_operation_specific_signer),
+    ('before-parameter-build.s3.HeadObject', sse_md5),
+    ('before-parameter-build.s3.GetObject', sse_md5),
+    ('before-parameter-build.s3.PutObject', sse_md5),
+    ('before-parameter-build.s3.CopyObject', sse_md5),
+    ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
+    ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
+    ('before-parameter-build.s3.UploadPart', sse_md5),
+    ('before-parameter-build.s3.UploadPartCopy', sse_md5),
+    ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
+    ('before-parameter-build.s3.CompleteMultipartUpload', sse_md5),
+    ('before-parameter-build.s3.SelectObjectContent', sse_md5),
+    ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
+    (
+        'before-parameter-build.autoscaling.CreateLaunchConfiguration',
+        base64_encode_user_data,
+    ),
+    ('before-parameter-build.route53', fix_route53_ids),
+    ('before-parameter-build.glacier', inject_account_id),
+    ('before-sign.s3', remove_arn_from_signing_path),
+    (
+        'before-sign.polly.SynthesizeSpeech',
+        remove_content_type_header_for_presigning,
+    ),
+    ('after-call.s3.ListObjects', decode_list_object),
+    ('after-call.s3.ListObjectsV2', decode_list_object_v2),
+    ('after-call.s3.ListObjectVersions', decode_list_object_versions),
+    # Cloudsearchdomain search operation will be sent by HTTP POST
+    ('request-created.cloudsearchdomain.Search', change_get_to_post),
+    # Glacier documentation customizations
+    (
+        'docs.*.glacier.*.complete-section',
+        AutoPopulatedParam(
+            'accountId',
+            'Note: this parameter is set to "-" by '
+            'default if no value is specified.',
+        ).document_auto_populated_param,
+    ),
+    (
+        'docs.*.glacier.UploadArchive.complete-section',
+        AutoPopulatedParam('checksum').document_auto_populated_param,
+    ),
+    (
+        'docs.*.glacier.UploadMultipartPart.complete-section',
+        AutoPopulatedParam('checksum').document_auto_populated_param,
+    ),
+    (
+        'docs.request-params.glacier.CompleteMultipartUpload.complete-section',
+        document_glacier_tree_hash_checksum(),
+    ),
+    # Cloudformation documentation customizations
+    (
+        'docs.*.cloudformation.GetTemplate.complete-section',
+        document_cloudformation_get_template_return_type,
+    ),
+    # UserData base64 encoding documentation customizations
+    (
+        'docs.*.ec2.RunInstances.complete-section',
+        document_base64_encoding('UserData'),
+    ),
+    (
+        'docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
+        document_base64_encoding('UserData'),
+    ),
+    # EC2 CopySnapshot documentation customizations
+    (
+        'docs.*.ec2.CopySnapshot.complete-section',
+        AutoPopulatedParam('PresignedUrl').document_auto_populated_param,
+    ),
+    (
+        'docs.*.ec2.CopySnapshot.complete-section',
+        AutoPopulatedParam('DestinationRegion').document_auto_populated_param,
), + # S3 SSE documentation modifications + ( + 'docs.*.s3.*.complete-section', + AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param, + ), + # S3 SSE Copy Source documentation modifications + ( + 'docs.*.s3.*.complete-section', + AutoPopulatedParam( + 'CopySourceSSECustomerKeyMD5' + ).document_auto_populated_param, + ), + # Add base64 information to Lambda + ( + 'docs.*.lambda.UpdateFunctionCode.complete-section', + document_base64_encoding('ZipFile'), + ), + # The following S3 operations cannot actually accept a ContentMD5 + ( + 'docs.*.s3.*.complete-section', + HideParamFromOperations( + 's3', + 'ContentMD5', + [ + 'DeleteObjects', + 'PutBucketAcl', + 'PutBucketCors', + 'PutBucketLifecycle', + 'PutBucketLogging', + 'PutBucketNotification', + 'PutBucketPolicy', + 'PutBucketReplication', + 'PutBucketRequestPayment', + 'PutBucketTagging', + 'PutBucketVersioning', + 'PutBucketWebsite', + 'PutObjectAcl', + ], + ).hide_param, + ), + ############# + # RDS + ############# + ('creating-client-class.rds', add_generate_db_auth_token), + ('before-call.rds.CopyDBClusterSnapshot', inject_presigned_url_rds), + ('before-call.rds.CreateDBCluster', inject_presigned_url_rds), + ('before-call.rds.CopyDBSnapshot', inject_presigned_url_rds), + ('before-call.rds.CreateDBInstanceReadReplica', inject_presigned_url_rds), + ( + 'before-call.rds.StartDBInstanceAutomatedBackupsReplication', + inject_presigned_url_rds, + ), + # RDS PresignedUrl documentation customizations + ( + 'docs.*.rds.CopyDBClusterSnapshot.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.rds.CreateDBCluster.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.rds.CopyDBSnapshot.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.rds.CreateDBInstanceReadReplica.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.rds.StartDBInstanceAutomatedBackupsReplication.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ############# + # Neptune + ############# + ('before-call.neptune.CopyDBClusterSnapshot', inject_presigned_url_rds), + ('before-call.neptune.CreateDBCluster', inject_presigned_url_rds), + # Neptune PresignedUrl documentation customizations + ( + 'docs.*.neptune.CopyDBClusterSnapshot.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.neptune.CreateDBCluster.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ############# + # DocDB + ############# + ('before-call.docdb.CopyDBClusterSnapshot', inject_presigned_url_rds), + ('before-call.docdb.CreateDBCluster', inject_presigned_url_rds), + # DocDB PresignedUrl documentation customizations + ( + 'docs.*.docdb.CopyDBClusterSnapshot.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ( + 'docs.*.docdb.CreateDBCluster.complete-section', + AutoPopulatedParam('PreSignedUrl').document_auto_populated_param, + ), + ('before-call', inject_api_version_header_if_needed), +] +_add_parameter_aliases(BUILTIN_HANDLERS) diff --git a/venv/lib/python3.10/site-packages/botocore/history.py b/venv/lib/python3.10/site-packages/botocore/history.py new file mode 100644 index 0000000000000000000000000000000000000000..59d9481d7fb5ed48c737c6c835bd916a78aa3ff8 --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/botocore/history.py @@ -0,0 +1,55 @@ +# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import logging + +HISTORY_RECORDER = None +logger = logging.getLogger(__name__) + + +class BaseHistoryHandler: + def emit(self, event_type, payload, source): + raise NotImplementedError('emit()') + + +class HistoryRecorder: + def __init__(self): + self._enabled = False + self._handlers = [] + + def enable(self): + self._enabled = True + + def disable(self): + self._enabled = False + + def add_handler(self, handler): + self._handlers.append(handler) + + def record(self, event_type, payload, source='BOTOCORE'): + if self._enabled and self._handlers: + for handler in self._handlers: + try: + handler.emit(event_type, payload, source) + except Exception: + # Never let the process die because we had a failure in + # a record collection handler. + logger.debug( + "Exception raised in %s.", handler, exc_info=True + ) + + +def get_global_history_recorder(): + global HISTORY_RECORDER + if HISTORY_RECORDER is None: + HISTORY_RECORDER = HistoryRecorder() + return HISTORY_RECORDER diff --git a/venv/lib/python3.10/site-packages/botocore/hooks.py b/venv/lib/python3.10/site-packages/botocore/hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..583cb39c3bbb5bb648cfb805d46d3f84f91ebd3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/hooks.py @@ -0,0 +1,660 @@ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import copy +import logging +from collections import deque, namedtuple + +from botocore.compat import accepts_kwargs +from botocore.utils import EVENT_ALIASES + +logger = logging.getLogger(__name__) + + +_NodeList = namedtuple('NodeList', ['first', 'middle', 'last']) +_FIRST = 0 +_MIDDLE = 1 +_LAST = 2 + + +class NodeList(_NodeList): + def __copy__(self): + first_copy = copy.copy(self.first) + middle_copy = copy.copy(self.middle) + last_copy = copy.copy(self.last) + copied = NodeList(first_copy, middle_copy, last_copy) + return copied + + +def first_non_none_response(responses, default=None): + """Find first non None response in a list of tuples. + + This function can be used to find the first non None response from + handlers connected to an event. This is useful if you are interested + in the returned responses from event handlers. 
Example usage:: + + print(first_non_none_response([(func1, None), (func2, 'foo'), + (func3, 'bar')])) + # This will print 'foo' + + :type responses: list of tuples + :param responses: The responses from the ``EventHooks.emit`` method. + This is a list of tuples, and each tuple is + (handler, handler_response). + + :param default: If no non-None responses are found, then this default + value will be returned. + + :return: The first non-None response in the list of tuples. + + """ + for response in responses: + if response[1] is not None: + return response[1] + return default + + +class BaseEventHooks: + def emit(self, event_name, **kwargs): + """Call all handlers subscribed to an event. + + :type event_name: str + :param event_name: The name of the event to emit. + + :type **kwargs: dict + :param **kwargs: Arbitrary kwargs to pass through to the + subscribed handlers. The ``event_name`` will be injected + into the kwargs so it's not necessary to add this to **kwargs. + + :rtype: list of tuples + :return: A list of ``(handler_func, handler_func_return_value)`` + + """ + return [] + + def register( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + """Register an event handler for a given event. + + If a ``unique_id`` is given, the handler will not be registered + if a handler with the ``unique_id`` has already been registered. + + Handlers are called in the order they have been registered. + Note handlers can also be registered with ``register_first()`` + and ``register_last()``. All handlers registered with + ``register_first()`` are called before handlers registered + with ``register()`` which are called before handlers registered + with ``register_last()``. + + """ + self._verify_and_register( + event_name, + handler, + unique_id, + register_method=self._register, + unique_id_uses_count=unique_id_uses_count, + ) + + def register_first( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + """Register an event handler to be called first for an event. + + All event handlers registered with ``register_first()`` will + be called before handlers registered with ``register()`` and + ``register_last()``. + + """ + self._verify_and_register( + event_name, + handler, + unique_id, + register_method=self._register_first, + unique_id_uses_count=unique_id_uses_count, + ) + + def register_last( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + """Register an event handler to be called last for an event. + + All event handlers registered with ``register_last()`` will be called + after handlers registered with ``register_first()`` and ``register()``. + + """ + self._verify_and_register( + event_name, + handler, + unique_id, + register_method=self._register_last, + unique_id_uses_count=unique_id_uses_count, + ) + + def _verify_and_register( + self, + event_name, + handler, + unique_id, + register_method, + unique_id_uses_count, + ): + self._verify_is_callable(handler) + self._verify_accept_kwargs(handler) + register_method(event_name, handler, unique_id, unique_id_uses_count) + + def unregister( + self, + event_name, + handler=None, + unique_id=None, + unique_id_uses_count=False, + ): + """Unregister an event handler for a given event. + + If no ``unique_id`` was given during registration, then the + first instance of the event handler is removed (if the event + handler has been registered multiple times). 
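+
+        Example usage (a sketch; ``my_handler`` is a hypothetical
+        handler)::
+
+            emitter.register('before-call.s3.PutObject', my_handler)
+            # ... later ...
+            emitter.unregister('before-call.s3.PutObject', my_handler)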
+ + """ + pass + + def _verify_is_callable(self, func): + if not callable(func): + raise ValueError(f"Event handler {func} must be callable.") + + def _verify_accept_kwargs(self, func): + """Verifies a callable accepts kwargs + + :type func: callable + :param func: A callable object. + + :returns: True, if ``func`` accepts kwargs, otherwise False. + + """ + try: + if not accepts_kwargs(func): + raise ValueError( + f"Event handler {func} must accept keyword " + f"arguments (**kwargs)" + ) + except TypeError: + return False + + +class HierarchicalEmitter(BaseEventHooks): + def __init__(self): + # We keep a reference to the handlers for quick + # read only access (we never modify self._handlers). + # A cache of event name to handler list. + self._lookup_cache = {} + self._handlers = _PrefixTrie() + # This is used to ensure that unique_id's are only + # registered once. + self._unique_id_handlers = {} + + def _emit(self, event_name, kwargs, stop_on_response=False): + """ + Emit an event with optional keyword arguments. + + :type event_name: string + :param event_name: Name of the event + :type kwargs: dict + :param kwargs: Arguments to be passed to the handler functions. + :type stop_on_response: boolean + :param stop_on_response: Whether to stop on the first non-None + response. If False, then all handlers + will be called. This is especially useful + to handlers which mutate data and then + want to stop propagation of the event. + :rtype: list + :return: List of (handler, response) tuples from all processed + handlers. + """ + responses = [] + # Invoke the event handlers from most specific + # to least specific, each time stripping off a dot. + handlers_to_call = self._lookup_cache.get(event_name) + if handlers_to_call is None: + handlers_to_call = self._handlers.prefix_search(event_name) + self._lookup_cache[event_name] = handlers_to_call + elif not handlers_to_call: + # Short circuit and return an empty response is we have + # no handlers to call. This is the common case where + # for the majority of signals, nothing is listening. + return [] + kwargs['event_name'] = event_name + responses = [] + for handler in handlers_to_call: + logger.debug('Event %s: calling handler %s', event_name, handler) + response = handler(**kwargs) + responses.append((handler, response)) + if stop_on_response and response is not None: + return responses + return responses + + def emit(self, event_name, **kwargs): + """ + Emit an event by name with arguments passed as keyword args. + + >>> responses = emitter.emit( + ... 'my-event.service.operation', arg1='one', arg2='two') + + :rtype: list + :return: List of (handler, response) tuples from all processed + handlers. + """ + return self._emit(event_name, kwargs) + + def emit_until_response(self, event_name, **kwargs): + """ + Emit an event by name with arguments passed as keyword args, + until the first non-``None`` response is received. This + method prevents subsequent handlers from being invoked. + + >>> handler, response = emitter.emit_until_response( + 'my-event.service.operation', arg1='one', arg2='two') + + :rtype: tuple + :return: The first (handler, response) tuple where the response + is not ``None``, otherwise (``None``, ``None``). 
+ """ + responses = self._emit(event_name, kwargs, stop_on_response=True) + if responses: + return responses[-1] + else: + return (None, None) + + def _register( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + self._register_section( + event_name, + handler, + unique_id, + unique_id_uses_count, + section=_MIDDLE, + ) + + def _register_first( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + self._register_section( + event_name, + handler, + unique_id, + unique_id_uses_count, + section=_FIRST, + ) + + def _register_last( + self, event_name, handler, unique_id, unique_id_uses_count=False + ): + self._register_section( + event_name, handler, unique_id, unique_id_uses_count, section=_LAST + ) + + def _register_section( + self, event_name, handler, unique_id, unique_id_uses_count, section + ): + if unique_id is not None: + if unique_id in self._unique_id_handlers: + # We've already registered a handler using this unique_id + # so we don't need to register it again. + count = self._unique_id_handlers[unique_id].get('count', None) + if unique_id_uses_count: + if not count: + raise ValueError( + f"Initial registration of unique id {unique_id} was " + "specified to use a counter. Subsequent register " + "calls to unique id must specify use of a counter " + "as well." + ) + else: + self._unique_id_handlers[unique_id]['count'] += 1 + else: + if count: + raise ValueError( + f"Initial registration of unique id {unique_id} was " + "specified to not use a counter. Subsequent " + "register calls to unique id must specify not to " + "use a counter as well." + ) + return + else: + # Note that the trie knows nothing about the unique + # id. We track uniqueness in this class via the + # _unique_id_handlers. + self._handlers.append_item( + event_name, handler, section=section + ) + unique_id_handler_item = {'handler': handler} + if unique_id_uses_count: + unique_id_handler_item['count'] = 1 + self._unique_id_handlers[unique_id] = unique_id_handler_item + else: + self._handlers.append_item(event_name, handler, section=section) + # Super simple caching strategy for now, if we change the registrations + # clear the cache. This has the opportunity for smarter invalidations. + self._lookup_cache = {} + + def unregister( + self, + event_name, + handler=None, + unique_id=None, + unique_id_uses_count=False, + ): + if unique_id is not None: + try: + count = self._unique_id_handlers[unique_id].get('count', None) + except KeyError: + # There's no handler matching that unique_id so we have + # nothing to unregister. + return + if unique_id_uses_count: + if count is None: + raise ValueError( + f"Initial registration of unique id {unique_id} was specified to " + "use a counter. Subsequent unregister calls to unique " + "id must specify use of a counter as well." + ) + elif count == 1: + handler = self._unique_id_handlers.pop(unique_id)[ + 'handler' + ] + else: + self._unique_id_handlers[unique_id]['count'] -= 1 + return + else: + if count: + raise ValueError( + f"Initial registration of unique id {unique_id} was specified " + "to not use a counter. Subsequent unregister calls " + "to unique id must specify not to use a counter as " + "well." 
+ ) + handler = self._unique_id_handlers.pop(unique_id)['handler'] + try: + self._handlers.remove_item(event_name, handler) + self._lookup_cache = {} + except ValueError: + pass + + def __copy__(self): + new_instance = self.__class__() + new_state = self.__dict__.copy() + new_state['_handlers'] = copy.copy(self._handlers) + new_state['_unique_id_handlers'] = copy.copy(self._unique_id_handlers) + new_instance.__dict__ = new_state + return new_instance + + +class EventAliaser(BaseEventHooks): + def __init__(self, event_emitter, event_aliases=None): + self._event_aliases = event_aliases + if event_aliases is None: + self._event_aliases = EVENT_ALIASES + self._alias_name_cache = {} + self._emitter = event_emitter + + def emit(self, event_name, **kwargs): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.emit(aliased_event_name, **kwargs) + + def emit_until_response(self, event_name, **kwargs): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.emit_until_response(aliased_event_name, **kwargs) + + def register( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.register( + aliased_event_name, handler, unique_id, unique_id_uses_count + ) + + def register_first( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.register_first( + aliased_event_name, handler, unique_id, unique_id_uses_count + ) + + def register_last( + self, event_name, handler, unique_id=None, unique_id_uses_count=False + ): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.register_last( + aliased_event_name, handler, unique_id, unique_id_uses_count + ) + + def unregister( + self, + event_name, + handler=None, + unique_id=None, + unique_id_uses_count=False, + ): + aliased_event_name = self._alias_event_name(event_name) + return self._emitter.unregister( + aliased_event_name, handler, unique_id, unique_id_uses_count + ) + + def _alias_event_name(self, event_name): + if event_name in self._alias_name_cache: + return self._alias_name_cache[event_name] + + for old_part, new_part in self._event_aliases.items(): + # We can't simply do a string replace for everything, otherwise we + # might end up translating substrings that we never intended to + # translate. When there aren't any dots in the old event name + # part, then we can quickly replace the item in the list if it's + # there. + event_parts = event_name.split('.') + if '.' not in old_part: + try: + # Theoretically a given event name could have the same part + # repeated, but in practice this doesn't happen + event_parts[event_parts.index(old_part)] = new_part + except ValueError: + continue + + # If there's dots in the name, it gets more complicated. Now we + # have to replace multiple sections of the original event. 
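+            # (For example, with an alias such as 'runtime.sagemaker' ->
+            # 'sagemaker-runtime', an event like
+            # 'before-call.runtime.sagemaker.InvokeEndpoint' must have the
+            # two-part prefix replaced as a single unit.)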
+            elif old_part in event_name:
+                old_parts = old_part.split('.')
+                self._replace_subsection(event_parts, old_parts, new_part)
+            else:
+                continue
+
+            new_name = '.'.join(event_parts)
+            logger.debug(
+                f"Changing event name from {event_name} to {new_name}"
+            )
+            self._alias_name_cache[event_name] = new_name
+            return new_name
+
+        self._alias_name_cache[event_name] = event_name
+        return event_name
+
+    def _replace_subsection(self, sections, old_parts, new_part):
+        for i in range(len(sections)):
+            if (
+                sections[i] == old_parts[0]
+                and sections[i : i + len(old_parts)] == old_parts
+            ):
+                sections[i : i + len(old_parts)] = [new_part]
+                return
+
+    def __copy__(self):
+        return self.__class__(
+            copy.copy(self._emitter), copy.copy(self._event_aliases)
+        )
+
+
+class _PrefixTrie:
+    """Specialized prefix trie that handles wildcards.
+
+    The prefixes in this case are based on dot separated
+    names so 'foo.bar.baz' is::
+
+        foo -> bar -> baz
+
+    Wildcard support just means that having a key such as 'foo.bar.*.baz' will
+    be matched with a call to ``get_items(key='foo.bar.ANYTHING.baz')``.
+
+    You can think of this prefix trie as the equivalent of defaultdict(list),
+    except that it can do prefix searches:
+
+    foo.bar.baz -> A
+    foo.bar -> B
+    foo -> C
+
+    Calling ``get_items('foo.bar.baz')`` will return [A + B + C], from
+    most specific to least specific.
+
+    """
+
+    def __init__(self):
+        # Each dictionary can be thought of as a node, where a node
+        # has values associated with the node, and children is a link
+        # to more nodes. So 'foo.bar' would have a 'foo' node with
+        # a 'bar' node as a child of foo.
+        # {'foo': {'children': {'bar': {...}}}}.
+        self._root = {'chunk': None, 'children': {}, 'values': None}
+
+    def append_item(self, key, value, section=_MIDDLE):
+        """Add an item to a key.
+
+        If a value is already associated with that key, the new
+        value is appended to the list for the key.
+        """
+        key_parts = key.split('.')
+        current = self._root
+        for part in key_parts:
+            if part not in current['children']:
+                new_child = {'chunk': part, 'values': None, 'children': {}}
+                current['children'][part] = new_child
+                current = new_child
+            else:
+                current = current['children'][part]
+        if current['values'] is None:
+            current['values'] = NodeList([], [], [])
+        current['values'][section].append(value)
+
+    def prefix_search(self, key):
+        """Collect all items that are prefixes of key.
+
+        Prefixes in this case are delineated by '.' characters so
+        'foo.bar.baz' is a 3 chunk sequence of 3 "prefixes" (
+        "foo", "bar", and "baz").
+
+        """
+        collected = deque()
+        key_parts = key.split('.')
+        current = self._root
+        self._get_items(current, key_parts, collected, 0)
+        return collected
+
+    def _get_items(self, starting_node, key_parts, collected, starting_index):
+        stack = [(starting_node, starting_index)]
+        key_parts_len = len(key_parts)
+        # Traverse down the nodes, where at each level we add the
+        # next part from key_parts as well as the wildcard element '*'.
+        # This means for each node we see we potentially add two more
+        # elements to our stack.
+        while stack:
+            current_node, index = stack.pop()
+            if current_node['values']:
+                # We're using extendleft because we want
+                # the values associated with the node furthest
+                # from the root to come before nodes closer
+                # to the root. extendleft() also adds its items
+                # in right-left order so .extendleft([1, 2, 3])
+                # will result in final_list = [3, 2, 1], which is
+                # why we reverse the lists.
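+                # (Concretely: for the key 'foo.bar.baz', handlers registered
+                # on 'foo.bar.baz' end up at the front of `collected`,
+                # followed by those on 'foo.bar', then 'foo'.)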
+ node_list = current_node['values'] + complete_order = ( + node_list.first + node_list.middle + node_list.last + ) + collected.extendleft(reversed(complete_order)) + if not index == key_parts_len: + children = current_node['children'] + directs = children.get(key_parts[index]) + wildcard = children.get('*') + next_index = index + 1 + if wildcard is not None: + stack.append((wildcard, next_index)) + if directs is not None: + stack.append((directs, next_index)) + + def remove_item(self, key, value): + """Remove an item associated with a key. + + If the value is not associated with the key a ``ValueError`` + will be raised. If the key does not exist in the trie, a + ``ValueError`` will be raised. + + """ + key_parts = key.split('.') + current = self._root + self._remove_item(current, key_parts, value, index=0) + + def _remove_item(self, current_node, key_parts, value, index): + if current_node is None: + return + elif index < len(key_parts): + next_node = current_node['children'].get(key_parts[index]) + if next_node is not None: + self._remove_item(next_node, key_parts, value, index + 1) + if index == len(key_parts) - 1: + node_list = next_node['values'] + if value in node_list.first: + node_list.first.remove(value) + elif value in node_list.middle: + node_list.middle.remove(value) + elif value in node_list.last: + node_list.last.remove(value) + if not next_node['children'] and not next_node['values']: + # Then this is a leaf node with no values so + # we can just delete this link from the parent node. + # This makes subsequent search faster in the case + # where a key does not exist. + del current_node['children'][key_parts[index]] + else: + raise ValueError(f"key is not in trie: {'.'.join(key_parts)}") + + def __copy__(self): + # The fact that we're using a nested dict under the covers + # is an implementation detail, and the user shouldn't have + # to know that they'd normally need a deepcopy so we expose + # __copy__ instead of __deepcopy__. + new_copy = self.__class__() + copied_attrs = self._recursive_copy(self.__dict__) + new_copy.__dict__ = copied_attrs + return new_copy + + def _recursive_copy(self, node): + # We can't use copy.deepcopy because we actually only want to copy + # the structure of the trie, not the handlers themselves. + # Each node has a chunk, children, and values. + copied_node = {} + for key, value in node.items(): + if isinstance(value, NodeList): + copied_node[key] = copy.copy(value) + elif isinstance(value, dict): + copied_node[key] = self._recursive_copy(value) + else: + copied_node[key] = value + return copied_node diff --git a/venv/lib/python3.10/site-packages/botocore/httpchecksum.py b/venv/lib/python3.10/site-packages/botocore/httpchecksum.py new file mode 100644 index 0000000000000000000000000000000000000000..a97eb430d4816e935491e6f1f917df8382792e0b --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/httpchecksum.py @@ -0,0 +1,481 @@ +# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +"""The interfaces in this module are not intended for public use. + +This module defines interfaces for applying checksums to HTTP requests within +the context of botocore. This involves both resolving the checksum to be used +based on client configuration and environment, as well as application of the +checksum to the request. +""" + +import base64 +import io +import logging +from binascii import crc32 +from hashlib import sha1, sha256 + +from botocore.compat import HAS_CRT +from botocore.exceptions import ( + AwsChunkedWrapperError, + FlexibleChecksumError, + MissingDependencyException, +) +from botocore.response import StreamingBody +from botocore.utils import ( + conditionally_calculate_md5, + determine_content_length, +) + +if HAS_CRT: + from awscrt import checksums as crt_checksums +else: + crt_checksums = None + +logger = logging.getLogger(__name__) + + +class BaseChecksum: + _CHUNK_SIZE = 1024 * 1024 + + def update(self, chunk): + pass + + def digest(self): + pass + + def b64digest(self): + bs = self.digest() + return base64.b64encode(bs).decode("ascii") + + def _handle_fileobj(self, fileobj): + start_position = fileobj.tell() + for chunk in iter(lambda: fileobj.read(self._CHUNK_SIZE), b""): + self.update(chunk) + fileobj.seek(start_position) + + def handle(self, body): + if isinstance(body, (bytes, bytearray)): + self.update(body) + else: + self._handle_fileobj(body) + return self.b64digest() + + +class Crc32Checksum(BaseChecksum): + def __init__(self): + self._int_crc32 = 0 + + def update(self, chunk): + self._int_crc32 = crc32(chunk, self._int_crc32) & 0xFFFFFFFF + + def digest(self): + return self._int_crc32.to_bytes(4, byteorder="big") + + +class CrtCrc32Checksum(BaseChecksum): + # Note: This class is only used if the CRT is available + def __init__(self): + self._int_crc32 = 0 + + def update(self, chunk): + new_checksum = crt_checksums.crc32(chunk, self._int_crc32) + self._int_crc32 = new_checksum & 0xFFFFFFFF + + def digest(self): + return self._int_crc32.to_bytes(4, byteorder="big") + + +class CrtCrc32cChecksum(BaseChecksum): + # Note: This class is only used if the CRT is available + def __init__(self): + self._int_crc32c = 0 + + def update(self, chunk): + new_checksum = crt_checksums.crc32c(chunk, self._int_crc32c) + self._int_crc32c = new_checksum & 0xFFFFFFFF + + def digest(self): + return self._int_crc32c.to_bytes(4, byteorder="big") + + +class Sha1Checksum(BaseChecksum): + def __init__(self): + self._checksum = sha1() + + def update(self, chunk): + self._checksum.update(chunk) + + def digest(self): + return self._checksum.digest() + + +class Sha256Checksum(BaseChecksum): + def __init__(self): + self._checksum = sha256() + + def update(self, chunk): + self._checksum.update(chunk) + + def digest(self): + return self._checksum.digest() + + +class AwsChunkedWrapper: + _DEFAULT_CHUNK_SIZE = 1024 * 1024 + + def __init__( + self, + raw, + checksum_cls=None, + checksum_name="x-amz-checksum", + chunk_size=None, + ): + self._raw = raw + self._checksum_name = checksum_name + self._checksum_cls = checksum_cls + self._reset() + + if chunk_size is None: + chunk_size = self._DEFAULT_CHUNK_SIZE + self._chunk_size = chunk_size + + def _reset(self): + self._remaining = b"" + self._complete = False + self._checksum = None + if self._checksum_cls: + self._checksum = self._checksum_cls() + + def seek(self, offset, whence=0): + if offset != 0 or whence != 0: + raise AwsChunkedWrapperError( + error_msg="Can only seek to start of stream" + ) + self._reset() + self._raw.seek(0) + + def 
read(self, size=None): + # Normalize "read all" size values to None + if size is not None and size <= 0: + size = None + + # If the underlying body is done and we have nothing left then + # end the stream + if self._complete and not self._remaining: + return b"" + + # While we're not done and want more bytes + want_more_bytes = size is None or size > len(self._remaining) + while not self._complete and want_more_bytes: + self._remaining += self._make_chunk() + want_more_bytes = size is None or size > len(self._remaining) + + # If size was None, we want to return everything + if size is None: + size = len(self._remaining) + + # Return a chunk up to the size asked for + to_return = self._remaining[:size] + self._remaining = self._remaining[size:] + return to_return + + def _make_chunk(self): + # NOTE: Chunk size is not deterministic as read could return less. This + # means we cannot know the content length of the encoded aws-chunked + # stream ahead of time without ensuring a consistent chunk size + raw_chunk = self._raw.read(self._chunk_size) + hex_len = hex(len(raw_chunk))[2:].encode("ascii") + self._complete = not raw_chunk + + if self._checksum: + self._checksum.update(raw_chunk) + + if self._checksum and self._complete: + name = self._checksum_name.encode("ascii") + checksum = self._checksum.b64digest().encode("ascii") + return b"0\r\n%s:%s\r\n\r\n" % (name, checksum) + + return b"%s\r\n%s\r\n" % (hex_len, raw_chunk) + + def __iter__(self): + while not self._complete: + yield self._make_chunk() + + +class StreamingChecksumBody(StreamingBody): + def __init__(self, raw_stream, content_length, checksum, expected): + super().__init__(raw_stream, content_length) + self._checksum = checksum + self._expected = expected + + def read(self, amt=None): + chunk = super().read(amt=amt) + self._checksum.update(chunk) + if amt is None or (not chunk and amt > 0): + self._validate_checksum() + return chunk + + def _validate_checksum(self): + if self._checksum.digest() != base64.b64decode(self._expected): + error_msg = ( + f"Expected checksum {self._expected} did not match calculated " + f"checksum: {self._checksum.b64digest()}" + ) + raise FlexibleChecksumError(error_msg=error_msg) + + +def resolve_checksum_context(request, operation_model, params): + resolve_request_checksum_algorithm(request, operation_model, params) + resolve_response_checksum_algorithms(request, operation_model, params) + + +def resolve_request_checksum_algorithm( + request, + operation_model, + params, + supported_algorithms=None, +): + http_checksum = operation_model.http_checksum + algorithm_member = http_checksum.get("requestAlgorithmMember") + if algorithm_member and algorithm_member in params: + # If the client has opted into using flexible checksums and the + # request supports it, use that instead of checksum required + if supported_algorithms is None: + supported_algorithms = _SUPPORTED_CHECKSUM_ALGORITHMS + + algorithm_name = params[algorithm_member].lower() + if algorithm_name not in supported_algorithms: + if not HAS_CRT and algorithm_name in _CRT_CHECKSUM_ALGORITHMS: + raise MissingDependencyException( + msg=( + f"Using {algorithm_name.upper()} requires an " + "additional dependency. You will need to pip install " + "botocore[crt] before proceeding." + ) + ) + raise FlexibleChecksumError( + error_msg=f"Unsupported checksum algorithm: {algorithm_name}" + ) + + location_type = "header" + if operation_model.has_streaming_input: + # Operations with streaming input must support trailers. 
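+            # A trailer checksum is computed incrementally while the body
+            # streams and is appended after the final chunk of the
+            # aws-chunked encoding (see AwsChunkedWrapper above), so the
+            # payload never has to be buffered in full just to hash it.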
+ if request["url"].startswith("https:"): + # We only support unsigned trailer checksums currently. As this + # disables payload signing we'll only use trailers over TLS. + location_type = "trailer" + + algorithm = { + "algorithm": algorithm_name, + "in": location_type, + "name": f"x-amz-checksum-{algorithm_name}", + } + + if algorithm["name"] in request["headers"]: + # If the header is already set by the customer, skip calculation + return + + checksum_context = request["context"].get("checksum", {}) + checksum_context["request_algorithm"] = algorithm + request["context"]["checksum"] = checksum_context + elif operation_model.http_checksum_required or http_checksum.get( + "requestChecksumRequired" + ): + # Otherwise apply the old http checksum behavior via Content-MD5 + checksum_context = request["context"].get("checksum", {}) + checksum_context["request_algorithm"] = "conditional-md5" + request["context"]["checksum"] = checksum_context + + +def apply_request_checksum(request): + checksum_context = request.get("context", {}).get("checksum", {}) + algorithm = checksum_context.get("request_algorithm") + + if not algorithm: + return + + if algorithm == "conditional-md5": + # Special case to handle the http checksum required trait + conditionally_calculate_md5(request) + elif algorithm["in"] == "header": + _apply_request_header_checksum(request) + elif algorithm["in"] == "trailer": + _apply_request_trailer_checksum(request) + else: + raise FlexibleChecksumError( + error_msg="Unknown checksum variant: {}".format(algorithm["in"]) + ) + + +def _apply_request_header_checksum(request): + checksum_context = request.get("context", {}).get("checksum", {}) + algorithm = checksum_context.get("request_algorithm") + location_name = algorithm["name"] + if location_name in request["headers"]: + # If the header is already set by the customer, skip calculation + return + checksum_cls = _CHECKSUM_CLS.get(algorithm["algorithm"]) + digest = checksum_cls().handle(request["body"]) + request["headers"][location_name] = digest + + +def _apply_request_trailer_checksum(request): + checksum_context = request.get("context", {}).get("checksum", {}) + algorithm = checksum_context.get("request_algorithm") + location_name = algorithm["name"] + checksum_cls = _CHECKSUM_CLS.get(algorithm["algorithm"]) + + headers = request["headers"] + body = request["body"] + + if location_name in headers: + # If the header is already set by the customer, skip calculation + return + + headers["Transfer-Encoding"] = "chunked" + if "Content-Encoding" in headers: + # We need to preserve the existing content encoding and add + # aws-chunked as a new content encoding. + headers["Content-Encoding"] += ",aws-chunked" + else: + headers["Content-Encoding"] = "aws-chunked" + headers["X-Amz-Trailer"] = location_name + + content_length = determine_content_length(body) + if content_length is not None: + # Send the decoded content length if we can determine it. 
Some + # services such as S3 may require the decoded content length + headers["X-Amz-Decoded-Content-Length"] = str(content_length) + + if isinstance(body, (bytes, bytearray)): + body = io.BytesIO(body) + + request["body"] = AwsChunkedWrapper( + body, + checksum_cls=checksum_cls, + checksum_name=location_name, + ) + + +def resolve_response_checksum_algorithms( + request, operation_model, params, supported_algorithms=None +): + http_checksum = operation_model.http_checksum + mode_member = http_checksum.get("requestValidationModeMember") + if mode_member and mode_member in params: + if supported_algorithms is None: + supported_algorithms = _SUPPORTED_CHECKSUM_ALGORITHMS + response_algorithms = { + a.lower() for a in http_checksum.get("responseAlgorithms", []) + } + + usable_algorithms = [] + for algorithm in _ALGORITHMS_PRIORITY_LIST: + if algorithm not in response_algorithms: + continue + if algorithm in supported_algorithms: + usable_algorithms.append(algorithm) + + checksum_context = request["context"].get("checksum", {}) + checksum_context["response_algorithms"] = usable_algorithms + request["context"]["checksum"] = checksum_context + + +def handle_checksum_body(http_response, response, context, operation_model): + headers = response["headers"] + checksum_context = context.get("checksum", {}) + algorithms = checksum_context.get("response_algorithms") + + if not algorithms: + return + + for algorithm in algorithms: + header_name = f"x-amz-checksum-{algorithm}" + # If the header is not found, check the next algorithm + if header_name not in headers: + continue + + # If a - is in the checksum this is not valid Base64. S3 returns + # checksums that include a -# suffix to indicate a checksum derived + # from the hash of all part checksums. We cannot wrap this response + if "-" in headers[header_name]: + continue + + if operation_model.has_streaming_output: + response["body"] = _handle_streaming_response( + http_response, response, algorithm + ) + else: + response["body"] = _handle_bytes_response( + http_response, response, algorithm + ) + + # Expose metadata that the checksum check actually occurred + checksum_context = response["context"].get("checksum", {}) + checksum_context["response_algorithm"] = algorithm + response["context"]["checksum"] = checksum_context + return + + logger.info( + f'Skipping checksum validation. Response did not contain one of the ' + f'following algorithms: {algorithms}.' 
+ ) + + +def _handle_streaming_response(http_response, response, algorithm): + checksum_cls = _CHECKSUM_CLS.get(algorithm) + header_name = f"x-amz-checksum-{algorithm}" + return StreamingChecksumBody( + http_response.raw, + response["headers"].get("content-length"), + checksum_cls(), + response["headers"][header_name], + ) + + +def _handle_bytes_response(http_response, response, algorithm): + body = http_response.content + header_name = f"x-amz-checksum-{algorithm}" + checksum_cls = _CHECKSUM_CLS.get(algorithm) + checksum = checksum_cls() + checksum.update(body) + expected = response["headers"][header_name] + if checksum.digest() != base64.b64decode(expected): + error_msg = ( + f"Expected checksum {expected} did not match calculated " + f"checksum: {checksum.b64digest()}" + ) + raise FlexibleChecksumError(error_msg=error_msg) + return body + + +_CHECKSUM_CLS = { + "crc32": Crc32Checksum, + "sha1": Sha1Checksum, + "sha256": Sha256Checksum, +} +_CRT_CHECKSUM_ALGORITHMS = ["crc32", "crc32c"] +if HAS_CRT: + # Use CRT checksum implementations if available + _CRT_CHECKSUM_CLS = { + "crc32": CrtCrc32Checksum, + "crc32c": CrtCrc32cChecksum, + } + _CHECKSUM_CLS.update(_CRT_CHECKSUM_CLS) + # Validate this list isn't out of sync with _CRT_CHECKSUM_CLS keys + assert all( + name in _CRT_CHECKSUM_ALGORITHMS for name in _CRT_CHECKSUM_CLS.keys() + ) +_SUPPORTED_CHECKSUM_ALGORITHMS = list(_CHECKSUM_CLS.keys()) +_ALGORITHMS_PRIORITY_LIST = ['crc32c', 'crc32', 'sha1', 'sha256'] diff --git a/venv/lib/python3.10/site-packages/botocore/httpsession.py b/venv/lib/python3.10/site-packages/botocore/httpsession.py new file mode 100644 index 0000000000000000000000000000000000000000..bd8b82fafadb3914b8e9114efadf586da04d00a1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/httpsession.py @@ -0,0 +1,509 @@ +import logging +import os +import os.path +import socket +import sys +import warnings +from base64 import b64encode + +from urllib3 import PoolManager, Timeout, proxy_from_url +from urllib3.exceptions import ( + ConnectTimeoutError as URLLib3ConnectTimeoutError, +) +from urllib3.exceptions import ( + LocationParseError, + NewConnectionError, + ProtocolError, + ProxyError, +) +from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError +from urllib3.exceptions import SSLError as URLLib3SSLError +from urllib3.util.retry import Retry +from urllib3.util.ssl_ import ( + OP_NO_COMPRESSION, + PROTOCOL_TLS, + OP_NO_SSLv2, + OP_NO_SSLv3, + is_ipaddress, + ssl, +) +from urllib3.util.url import parse_url + +try: + from urllib3.util.ssl_ import OP_NO_TICKET, PROTOCOL_TLS_CLIENT +except ImportError: + # Fallback directly to ssl for version of urllib3 before 1.26. + # They are available in the standard library starting in Python 3.6. + from ssl import OP_NO_TICKET, PROTOCOL_TLS_CLIENT + +try: + # pyopenssl will be removed in urllib3 2.0, we'll fall back to ssl_ at that point. + # This can be removed once our urllib3 floor is raised to >= 2.0. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + # Always import the original SSLContext, even if it has been patched + from urllib3.contrib.pyopenssl import ( + orig_util_SSLContext as SSLContext, + ) +except ImportError: + from urllib3.util.ssl_ import SSLContext + +try: + from urllib3.util.ssl_ import DEFAULT_CIPHERS +except ImportError: + # Defer to system configuration starting with + # urllib3 2.0. This will choose the ciphers provided by + # Openssl 1.1.1+ or secure system defaults. 
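+    # None acts as a sentinel here: create_urllib3_context() below only
+    # calls set_ciphers() when DEFAULT_CIPHERS is truthy, so cipher
+    # selection is deferred to the ssl module / system defaults.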
+ DEFAULT_CIPHERS = None + +import botocore.awsrequest +from botocore.compat import ( + IPV6_ADDRZ_RE, + ensure_bytes, + filter_ssl_warnings, + unquote, + urlparse, +) +from botocore.exceptions import ( + ConnectionClosedError, + ConnectTimeoutError, + EndpointConnectionError, + HTTPClientError, + InvalidProxiesConfigError, + ProxyConnectionError, + ReadTimeoutError, + SSLError, +) + +filter_ssl_warnings() +logger = logging.getLogger(__name__) +DEFAULT_TIMEOUT = 60 +MAX_POOL_CONNECTIONS = 10 +DEFAULT_CA_BUNDLE = os.path.join(os.path.dirname(__file__), 'cacert.pem') + +try: + from certifi import where +except ImportError: + + def where(): + return DEFAULT_CA_BUNDLE + + +def get_cert_path(verify): + if verify is not True: + return verify + + cert_path = where() + logger.debug(f"Certificate path: {cert_path}") + + return cert_path + + +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): + """This function is a vendored version of the same function in urllib3 + + We vendor this function to ensure that the SSL contexts we construct + always use the std lib SSLContext instead of pyopenssl. + """ + # PROTOCOL_TLS is deprecated in Python 3.10 + if not ssl_version or ssl_version == PROTOCOL_TLS: + ssl_version = PROTOCOL_TLS_CLIENT + + context = SSLContext(ssl_version) + + if ciphers: + context.set_ciphers(ciphers) + elif DEFAULT_CIPHERS: + context.set_ciphers(DEFAULT_CIPHERS) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue urllib3#309) + options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on wire, and although the ticket is encrypted, + # there is a risk associated with it being on wire, + # if the server is not rotating its ticketing keys properly. + options |= OP_NO_TICKET + + context.options |= options + + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if ( + cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4) + ) and getattr(context, "post_handshake_auth", None) is not None: + context.post_handshake_auth = True + + def disable_check_hostname(): + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. So disable it here + context.check_hostname = False + + # The order of the below lines setting verify_mode and check_hostname + # matter due to safe-guards SSLContext has to prevent an SSLContext with + # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more + # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used + # or not so we don't know the initial state of the freshly created SSLContext. 
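+    # Concretely, SSLContext refuses verify_mode=CERT_NONE while
+    # check_hostname is True, so when turning verification off we clear
+    # check_hostname before assigning verify_mode; when requiring
+    # verification we can safely assign verify_mode first.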
+ if cert_reqs == ssl.CERT_REQUIRED: + context.verify_mode = cert_reqs + disable_check_hostname() + else: + disable_check_hostname() + context.verify_mode = cert_reqs + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. + if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile and not sys.flags.ignore_environment: + context.keylog_filename = sslkeylogfile + + return context + + +def ensure_boolean(val): + """Ensures a boolean value if a string or boolean is provided + + For strings, the value for True/False is case insensitive + """ + if isinstance(val, bool): + return val + else: + return val.lower() == 'true' + + +def mask_proxy_url(proxy_url): + """ + Mask proxy url credentials. + + :type proxy_url: str + :param proxy_url: The proxy url, i.e. https://username:password@proxy.com + + :return: Masked proxy url, i.e. https://***:***@proxy.com + """ + mask = '*' * 3 + parsed_url = urlparse(proxy_url) + if parsed_url.username: + proxy_url = proxy_url.replace(parsed_url.username, mask, 1) + if parsed_url.password: + proxy_url = proxy_url.replace(parsed_url.password, mask, 1) + return proxy_url + + +def _is_ipaddress(host): + """Wrap urllib3's is_ipaddress to support bracketed IPv6 addresses.""" + return is_ipaddress(host) or bool(IPV6_ADDRZ_RE.match(host)) + + +class ProxyConfiguration: + """Represents a proxy configuration dictionary and additional settings. + + This class represents a proxy configuration dictionary and provides utility + functions to retrieve well structured proxy urls and proxy headers from the + proxy configuration dictionary. + """ + + def __init__(self, proxies=None, proxies_settings=None): + if proxies is None: + proxies = {} + if proxies_settings is None: + proxies_settings = {} + + self._proxies = proxies + self._proxies_settings = proxies_settings + + def proxy_url_for(self, url): + """Retrieves the corresponding proxy url for a given url.""" + parsed_url = urlparse(url) + proxy = self._proxies.get(parsed_url.scheme) + if proxy: + proxy = self._fix_proxy_url(proxy) + return proxy + + def proxy_headers_for(self, proxy_url): + """Retrieves the corresponding proxy headers for a given proxy url.""" + headers = {} + username, password = self._get_auth_from_url(proxy_url) + if username and password: + basic_auth = self._construct_basic_auth(username, password) + headers['Proxy-Authorization'] = basic_auth + return headers + + @property + def settings(self): + return self._proxies_settings + + def _fix_proxy_url(self, proxy_url): + if proxy_url.startswith('http:') or proxy_url.startswith('https:'): + return proxy_url + elif proxy_url.startswith('//'): + return 'http:' + proxy_url + else: + return 'http://' + proxy_url + + def _construct_basic_auth(self, username, password): + auth_str = f'{username}:{password}' + encoded_str = b64encode(auth_str.encode('ascii')).strip().decode() + return f'Basic {encoded_str}' + + def _get_auth_from_url(self, url): + parsed_url = urlparse(url) + try: + return unquote(parsed_url.username), unquote(parsed_url.password) + except (AttributeError, TypeError): + return None, None + + +class URLLib3Session: + """A basic HTTP client that supports connection pooling and proxies. + + This class is inspired by requests.adapters.HTTPAdapter, but has been + boiled down to meet the use cases needed by botocore. 
For the most part
+    this class matches the functionality of HTTPAdapter in requests v2.7.0
+    (the same as our vendored version).  One difference of note is that
+    chunked request handling, which requests v2.7.0 implemented itself, is
+    delegated here to urllib3 via the ``chunked`` flag to ``urlopen`` (see
+    ``send`` below).
+    """
+
+    def __init__(
+        self,
+        verify=True,
+        proxies=None,
+        timeout=None,
+        max_pool_connections=MAX_POOL_CONNECTIONS,
+        socket_options=None,
+        client_cert=None,
+        proxies_config=None,
+    ):
+        self._verify = verify
+        self._proxy_config = ProxyConfiguration(
+            proxies=proxies, proxies_settings=proxies_config
+        )
+        self._pool_classes_by_scheme = {
+            'http': botocore.awsrequest.AWSHTTPConnectionPool,
+            'https': botocore.awsrequest.AWSHTTPSConnectionPool,
+        }
+        if timeout is None:
+            timeout = DEFAULT_TIMEOUT
+        if not isinstance(timeout, (int, float)):
+            timeout = Timeout(connect=timeout[0], read=timeout[1])
+
+        self._cert_file = None
+        self._key_file = None
+        if isinstance(client_cert, str):
+            self._cert_file = client_cert
+        elif isinstance(client_cert, tuple):
+            self._cert_file, self._key_file = client_cert
+
+        self._timeout = timeout
+        self._max_pool_connections = max_pool_connections
+        self._socket_options = socket_options
+        if socket_options is None:
+            self._socket_options = []
+        self._proxy_managers = {}
+        self._manager = PoolManager(**self._get_pool_manager_kwargs())
+        self._manager.pool_classes_by_scheme = self._pool_classes_by_scheme
+
+    def _proxies_kwargs(self, **kwargs):
+        proxies_settings = self._proxy_config.settings
+        proxies_kwargs = {
+            'use_forwarding_for_https': proxies_settings.get(
+                'proxy_use_forwarding_for_https'
+            ),
+            **kwargs,
+        }
+        return {k: v for k, v in proxies_kwargs.items() if v is not None}
+
+    def _get_pool_manager_kwargs(self, **extra_kwargs):
+        pool_manager_kwargs = {
+            'timeout': self._timeout,
+            'maxsize': self._max_pool_connections,
+            'ssl_context': self._get_ssl_context(),
+            'socket_options': self._socket_options,
+            'cert_file': self._cert_file,
+            'key_file': self._key_file,
+        }
+        pool_manager_kwargs.update(**extra_kwargs)
+        return pool_manager_kwargs
+
+    def _get_ssl_context(self):
+        return create_urllib3_context()
+
+    def _get_proxy_manager(self, proxy_url):
+        if proxy_url not in self._proxy_managers:
+            proxy_headers = self._proxy_config.proxy_headers_for(proxy_url)
+            proxy_ssl_context = self._setup_proxy_ssl_context(proxy_url)
+            proxy_manager_kwargs = self._get_pool_manager_kwargs(
+                proxy_headers=proxy_headers
+            )
+            proxy_manager_kwargs.update(
+                self._proxies_kwargs(proxy_ssl_context=proxy_ssl_context)
+            )
+            proxy_manager = proxy_from_url(proxy_url, **proxy_manager_kwargs)
+            proxy_manager.pool_classes_by_scheme = self._pool_classes_by_scheme
+            self._proxy_managers[proxy_url] = proxy_manager
+
+        return self._proxy_managers[proxy_url]
+
+    def _path_url(self, url):
+        parsed_url = urlparse(url)
+        path = parsed_url.path
+        if not path:
+            path = '/'
+        if parsed_url.query:
+            path = path + '?'
+ parsed_url.query + return path + + def _setup_ssl_cert(self, conn, url, verify): + if url.lower().startswith('https') and verify: + conn.cert_reqs = 'CERT_REQUIRED' + conn.ca_certs = get_cert_path(verify) + else: + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + + def _setup_proxy_ssl_context(self, proxy_url): + proxies_settings = self._proxy_config.settings + proxy_ca_bundle = proxies_settings.get('proxy_ca_bundle') + proxy_cert = proxies_settings.get('proxy_client_cert') + if proxy_ca_bundle is None and proxy_cert is None: + return None + + context = self._get_ssl_context() + try: + url = parse_url(proxy_url) + # urllib3 disables this by default but we need it for proper + # proxy tls negotiation when proxy_url is not an IP Address + if not _is_ipaddress(url.host): + context.check_hostname = True + if proxy_ca_bundle is not None: + context.load_verify_locations(cafile=proxy_ca_bundle) + + if isinstance(proxy_cert, tuple): + context.load_cert_chain(proxy_cert[0], keyfile=proxy_cert[1]) + elif isinstance(proxy_cert, str): + context.load_cert_chain(proxy_cert) + + return context + except (OSError, URLLib3SSLError, LocationParseError) as e: + raise InvalidProxiesConfigError(error=e) + + def _get_connection_manager(self, url, proxy_url=None): + if proxy_url: + manager = self._get_proxy_manager(proxy_url) + else: + manager = self._manager + return manager + + def _get_request_target(self, url, proxy_url): + has_proxy = proxy_url is not None + + if not has_proxy: + return self._path_url(url) + + # HTTP proxies expect the request_target to be the absolute url to know + # which host to establish a connection to. urllib3 also supports + # forwarding for HTTPS through the 'use_forwarding_for_https' parameter. + proxy_scheme = urlparse(proxy_url).scheme + using_https_forwarding_proxy = ( + proxy_scheme == 'https' + and self._proxies_kwargs().get('use_forwarding_for_https', False) + ) + + if using_https_forwarding_proxy or url.startswith('http:'): + return url + else: + return self._path_url(url) + + def _chunked(self, headers): + transfer_encoding = headers.get('Transfer-Encoding', b'') + transfer_encoding = ensure_bytes(transfer_encoding) + return transfer_encoding.lower() == b'chunked' + + def close(self): + self._manager.clear() + for manager in self._proxy_managers.values(): + manager.clear() + + def send(self, request): + try: + proxy_url = self._proxy_config.proxy_url_for(request.url) + manager = self._get_connection_manager(request.url, proxy_url) + conn = manager.connection_from_url(request.url) + self._setup_ssl_cert(conn, request.url, self._verify) + if ensure_boolean( + os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '') + ): + # This is currently an "experimental" feature which provides + # no guarantees of backwards compatibility. It may be subject + # to change or removal in any patch version. Anyone opting in + # to this feature should strictly pin botocore. 
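+                # Concretely, opting in sets a 'host' header on the proxy
+                # connection pointing at the destination's hostname, which
+                # some proxies expect in order to route the request.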
+ host = urlparse(request.url).hostname + conn.proxy_headers['host'] = host + + request_target = self._get_request_target(request.url, proxy_url) + urllib_response = conn.urlopen( + method=request.method, + url=request_target, + body=request.body, + headers=request.headers, + retries=Retry(False), + assert_same_host=False, + preload_content=False, + decode_content=False, + chunked=self._chunked(request.headers), + ) + + http_response = botocore.awsrequest.AWSResponse( + request.url, + urllib_response.status, + urllib_response.headers, + urllib_response, + ) + + if not request.stream_output: + # Cause the raw stream to be exhausted immediately. We do it + # this way instead of using preload_content because + # preload_content will never buffer chunked responses + http_response.content + + return http_response + except URLLib3SSLError as e: + raise SSLError(endpoint_url=request.url, error=e) + except (NewConnectionError, socket.gaierror) as e: + raise EndpointConnectionError(endpoint_url=request.url, error=e) + except ProxyError as e: + raise ProxyConnectionError( + proxy_url=mask_proxy_url(proxy_url), error=e + ) + except URLLib3ConnectTimeoutError as e: + raise ConnectTimeoutError(endpoint_url=request.url, error=e) + except URLLib3ReadTimeoutError as e: + raise ReadTimeoutError(endpoint_url=request.url, error=e) + except ProtocolError as e: + raise ConnectionClosedError( + error=e, request=request, endpoint_url=request.url + ) + except Exception as e: + message = 'Exception received when sending urllib3 HTTP request' + logger.debug(message, exc_info=True) + raise HTTPClientError(error=e) diff --git a/venv/lib/python3.10/site-packages/botocore/loaders.py b/venv/lib/python3.10/site-packages/botocore/loaders.py new file mode 100644 index 0000000000000000000000000000000000000000..f5072a3e5f508d321cb4ba18976494cbd9657e96 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/loaders.py @@ -0,0 +1,525 @@ +# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Module for loading various model files. + +This module provides the classes that are used to load models used +by botocore. This can include: + + * Service models (e.g. the model for EC2, S3, DynamoDB, etc.) + * Service model extras which customize the service models + * Other models associated with a service (pagination, waiters) + * Non service-specific config (Endpoint data, retry config) + +Loading a module is broken down into several steps: + + * Determining the path to load + * Search the data_path for files to load + * The mechanics of loading the file + * Searching for extras and applying them to the loaded file + +The last item is used so that other faster loading mechanism +besides the default JSON loader can be used. + +The Search Path +=============== + +Similar to how the PATH environment variable is to finding executables +and the PYTHONPATH environment variable is to finding python modules +to import, the botocore loaders have the concept of a data path exposed +through AWS_DATA_PATH. 
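+
+As a purely illustrative sketch (the directories below are made up), a
+search path can be handed to ``create_loader``, which accepts the same
+value AWS_DATA_PATH would hold::
+
+    import os
+
+    from botocore.loaders import create_loader
+
+    data_path = os.pathsep.join(['/opt/aws/models', '/srv/models'])
+    loader = create_loader(data_path)
+    service_model = loader.load_service_model('ec2', 'service-2')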
+
+This enables end users to provide additional search paths where we
+will attempt to load models outside of the models we ship with
+botocore.  When you create a ``Loader``, there are two paths
+automatically added to the model search path:
+
+    * <botocore root>/data/
+    * ~/.aws/models
+
+The first value is the path where all the model files shipped with
+botocore are located.
+
+The second path is so that users can just drop new model files in
+``~/.aws/models`` without having to mess around with the AWS_DATA_PATH.
+
+The AWS_DATA_PATH value uses the platform-specific path separator to
+separate entries (typically ``:`` on Linux and ``;`` on Windows).
+
+
+Directory Layout
+================
+
+The Loader expects a particular directory layout.  In order for any
+directory specified in AWS_DATA_PATH to be considered, it must have
+this structure for service models::
+
+    <root>
+      |
+      |-- servicename1
+      |   |-- 2012-10-25
+      |       |-- service-2.json
+      |-- ec2
+      |   |-- 2014-01-01
+      |   |   |-- paginators-1.json
+      |   |   |-- service-2.json
+      |   |   |-- waiters-2.json
+      |   |-- 2015-03-01
+      |       |-- paginators-1.json
+      |       |-- service-2.json
+      |       |-- waiters-2.json
+      |       |-- service-2.sdk-extras.json
+
+
+That is:
+
+    * The root directory contains sub directories that are the names
+      of the services.
+    * Within each service directory, there's a sub directory for each
+      available API version.
+    * Within each API version, there are model specific files, including
+      (but not limited to): service-2.json, waiters-2.json, paginators-1.json
+
+The ``-1`` and ``-2`` suffixes at the end of the model files denote which
+version schema is used within the model.  Even though this information is
+available in the ``version`` key within the model, this version is also
+part of the filename so that code does not need to load the JSON model in
+order to determine which version to use.
+
+The ``sdk-extras`` and similar files represent extra data that needs to be
+applied to the model after it is loaded.  Data in these files might
+represent information that doesn't quite fit in the original models, but
+is still needed for the sdk.  For instance, additional operation parameters
+might be added here which don't represent the actual service api.
+"""
+
+import logging
+import os
+
+from botocore import BOTOCORE_ROOT
+from botocore.compat import HAS_GZIP, OrderedDict, json
+from botocore.exceptions import DataNotFoundError, UnknownServiceError
+from botocore.utils import deep_merge
+
+_JSON_OPEN_METHODS = {
+    '.json': open,
+}
+
+
+if HAS_GZIP:
+    from gzip import open as gzip_open
+
+    _JSON_OPEN_METHODS['.json.gz'] = gzip_open
+
+
+logger = logging.getLogger(__name__)
+
+
+def instance_cache(func):
+    """Cache the result of a method on a per instance basis.
+
+    This is not a general purpose caching decorator.  In order
+    for this to be used, it must be used on methods on an
+    instance, and that instance *must* provide a
+    ``self._cache`` dictionary.
+
+    """
+
+    def _wrapper(self, *args, **kwargs):
+        key = (func.__name__,) + args
+        for pair in sorted(kwargs.items()):
+            key += pair
+        if key in self._cache:
+            return self._cache[key]
+        data = func(self, *args, **kwargs)
+        self._cache[key] = data
+        return data
+
+    return _wrapper
+
+
+class JSONFileLoader:
+    """Loads JSON files.
+
+    This class can load the default format of models, which is a JSON file.
+
+    """
+
+    def exists(self, file_path):
+        """Checks if the file exists.
+
+        :type file_path: str
+        :param file_path: The full path to the file to load without
+            the '.json' extension.
+
+        :return: True if file path exists, False otherwise.
+
+        """
+        for ext in _JSON_OPEN_METHODS:
+            if os.path.isfile(file_path + ext):
+                return True
+        return False
+
+    def _load_file(self, full_path, open_method):
+        if not os.path.isfile(full_path):
+            return
+
+        # By default the file will be opened with locale encoding on Python 3.
+        # We specify "utf8" here to ensure the correct behavior.
+        with open_method(full_path, 'rb') as fp:
+            payload = fp.read().decode('utf-8')
+
+        logger.debug("Loading JSON file: %s", full_path)
+        return json.loads(payload, object_pairs_hook=OrderedDict)
+
+    def load_file(self, file_path):
+        """Attempt to load the file path.
+
+        :type file_path: str
+        :param file_path: The full path to the file to load without
+            the '.json' extension.
+
+        :return: The loaded data if it exists, otherwise None.
+
+        """
+        for ext, open_method in _JSON_OPEN_METHODS.items():
+            data = self._load_file(file_path + ext, open_method)
+            if data is not None:
+                return data
+        return None
+
+
+def create_loader(search_path_string=None):
+    """Create a Loader class.
+
+    This factory function creates a loader given a search string path.
+
+    :type search_path_string: str
+    :param search_path_string: The AWS_DATA_PATH value.  A string
+        of data path values separated by the ``os.path.pathsep`` value,
+        which is typically ``:`` on POSIX platforms and ``;`` on
+        windows.
+
+    :return: A ``Loader`` instance.
+
+    """
+    if search_path_string is None:
+        return Loader()
+    paths = []
+    extra_paths = search_path_string.split(os.pathsep)
+    for path in extra_paths:
+        path = os.path.expanduser(os.path.expandvars(path))
+        paths.append(path)
+    return Loader(extra_search_paths=paths)
+
+
+class Loader:
+    """Find and load data models.
+
+    This class will handle searching for and loading data models.
+
+    The main method used here is ``load_service_model``, which is a
+    convenience method over ``load_data`` and ``determine_latest_version``.
+
+    """
+
+    FILE_LOADER_CLASS = JSONFileLoader
+    # The included models in botocore/data/ that we ship with botocore.
+    BUILTIN_DATA_PATH = os.path.join(BOTOCORE_ROOT, 'data')
+    # For convenience we automatically add ~/.aws/models to the data path.
+    CUSTOMER_DATA_PATH = os.path.join(
+        os.path.expanduser('~'), '.aws', 'models'
+    )
+    BUILTIN_EXTRAS_TYPES = ['sdk']
+
+    def __init__(
+        self,
+        extra_search_paths=None,
+        file_loader=None,
+        cache=None,
+        include_default_search_paths=True,
+        include_default_extras=True,
+    ):
+        self._cache = {}
+        if file_loader is None:
+            file_loader = self.FILE_LOADER_CLASS()
+        self.file_loader = file_loader
+        if extra_search_paths is not None:
+            self._search_paths = extra_search_paths
+        else:
+            self._search_paths = []
+        if include_default_search_paths:
+            self._search_paths.extend(
+                [self.CUSTOMER_DATA_PATH, self.BUILTIN_DATA_PATH]
+            )
+
+        self._extras_types = []
+        if include_default_extras:
+            self._extras_types.extend(self.BUILTIN_EXTRAS_TYPES)
+
+        self._extras_processor = ExtrasProcessor()
+
+    @property
+    def search_paths(self):
+        return self._search_paths
+
+    @property
+    def extras_types(self):
+        return self._extras_types
+
+    @instance_cache
+    def list_available_services(self, type_name):
+        """List all known services.
+
+        This will traverse the search path and look for all known
+        services.
+
+        :type type_name: str
+        :param type_name: The type of the service (service-2,
+            paginators-1, waiters-2, etc).  This is needed because
+            the list of available services depends on the service
+            type.
For example, the latest API version available for + a resource-1.json file may not be the latest API version + available for a services-2.json file. + + :return: A list of all services. The list of services will + be sorted. + + """ + services = set() + for possible_path in self._potential_locations(): + # Any directory in the search path is potentially a service. + # We'll collect any initial list of potential services, + # but we'll then need to further process these directories + # by searching for the corresponding type_name in each + # potential directory. + possible_services = [ + d + for d in os.listdir(possible_path) + if os.path.isdir(os.path.join(possible_path, d)) + ] + for service_name in possible_services: + full_dirname = os.path.join(possible_path, service_name) + api_versions = os.listdir(full_dirname) + for api_version in api_versions: + full_load_path = os.path.join( + full_dirname, api_version, type_name + ) + if self.file_loader.exists(full_load_path): + services.add(service_name) + break + return sorted(services) + + @instance_cache + def determine_latest_version(self, service_name, type_name): + """Find the latest API version available for a service. + + :type service_name: str + :param service_name: The name of the service. + + :type type_name: str + :param type_name: The type of the service (service-2, + paginators-1, waiters-2, etc). This is needed because + the latest API version available can depend on the service + type. For example, the latest API version available for + a resource-1.json file may not be the latest API version + available for a services-2.json file. + + :rtype: str + :return: The latest API version. If the service does not exist + or does not have any available API data, then a + ``DataNotFoundError`` exception will be raised. + + """ + return max(self.list_api_versions(service_name, type_name)) + + @instance_cache + def list_api_versions(self, service_name, type_name): + """List all API versions available for a particular service type + + :type service_name: str + :param service_name: The name of the service + + :type type_name: str + :param type_name: The type name for the service (i.e service-2, + paginators-1, etc.) + + :rtype: list + :return: A list of API version strings in sorted order. + + """ + known_api_versions = set() + for possible_path in self._potential_locations( + service_name, must_exist=True, is_dir=True + ): + for dirname in os.listdir(possible_path): + full_path = os.path.join(possible_path, dirname, type_name) + # Only add to the known_api_versions if the directory + # contains a service-2, paginators-1, etc. file corresponding + # to the type_name passed in. + if self.file_loader.exists(full_path): + known_api_versions.add(dirname) + if not known_api_versions: + raise DataNotFoundError(data_path=service_name) + return sorted(known_api_versions) + + @instance_cache + def load_service_model(self, service_name, type_name, api_version=None): + """Load a botocore service model + + This is the main method for loading botocore models (e.g. a service + model, pagination configs, waiter configs, etc.). + + :type service_name: str + :param service_name: The name of the service (e.g ``ec2``, ``s3``). + + :type type_name: str + :param type_name: The model type. Valid types include, but are not + limited to: ``service-2``, ``paginators-1``, ``waiters-2``. + + :type api_version: str + :param api_version: The API version to load. If this is not + provided, then the latest API version will be used. 
+ + :type load_extras: bool + :param load_extras: Whether or not to load the tool extras which + contain additional data to be added to the model. + + :raises: UnknownServiceError if there is no known service with + the provided service_name. + + :raises: DataNotFoundError if no data could be found for the + service_name/type_name/api_version. + + :return: The loaded data, as a python type (e.g. dict, list, etc). + """ + # Wrapper around the load_data. This will calculate the path + # to call load_data with. + known_services = self.list_available_services(type_name) + if service_name not in known_services: + raise UnknownServiceError( + service_name=service_name, + known_service_names=', '.join(sorted(known_services)), + ) + if api_version is None: + api_version = self.determine_latest_version( + service_name, type_name + ) + full_path = os.path.join(service_name, api_version, type_name) + model = self.load_data(full_path) + + # Load in all the extras + extras_data = self._find_extras(service_name, type_name, api_version) + self._extras_processor.process(model, extras_data) + + return model + + def _find_extras(self, service_name, type_name, api_version): + """Creates an iterator over all the extras data.""" + for extras_type in self.extras_types: + extras_name = f'{type_name}.{extras_type}-extras' + full_path = os.path.join(service_name, api_version, extras_name) + + try: + yield self.load_data(full_path) + except DataNotFoundError: + pass + + @instance_cache + def load_data_with_path(self, name): + """Same as ``load_data`` but returns file path as second return value. + + :type name: str + :param name: The data path, i.e ``ec2/2015-03-01/service-2``. + + :return: Tuple of the loaded data and the path to the data file + where the data was loaded from. If no data could be found then a + DataNotFoundError is raised. + """ + for possible_path in self._potential_locations(name): + found = self.file_loader.load_file(possible_path) + if found is not None: + return found, possible_path + + # We didn't find anything that matched on any path. + raise DataNotFoundError(data_path=name) + + def load_data(self, name): + """Load data given a data path. + + This is a low level method that will search through the various + search paths until it's able to load a value. This is typically + only needed to load *non* model files (such as _endpoints and + _retry). If you need to load model files, you should prefer + ``load_service_model``. Use ``load_data_with_path`` to get the + data path of the data file as second return value. + + :type name: str + :param name: The data path, i.e ``ec2/2015-03-01/service-2``. + + :return: The loaded data. If no data could be found then + a DataNotFoundError is raised. + """ + data, _ = self.load_data_with_path(name) + return data + + def _potential_locations(self, name=None, must_exist=False, is_dir=False): + # Will give an iterator over the full path of potential locations + # according to the search path. + for path in self.search_paths: + if os.path.isdir(path): + full_path = path + if name is not None: + full_path = os.path.join(path, name) + if not must_exist: + yield full_path + else: + if is_dir and os.path.isdir(full_path): + yield full_path + elif os.path.exists(full_path): + yield full_path + + def is_builtin_path(self, path): + """Whether a given path is within the package's data directory. 
+ + This method can be used together with load_data_with_path(name) + to determine if data has been loaded from a file bundled with the + package, as opposed to a file in a separate location. + + :type path: str + :param path: The file path to check. + + :return: Whether the given path is within the package's data directory. + """ + path = os.path.expanduser(os.path.expandvars(path)) + return path.startswith(self.BUILTIN_DATA_PATH) + + +class ExtrasProcessor: + """Processes data from extras files into service models.""" + + def process(self, original_model, extra_models): + """Processes data from a list of loaded extras files into a model + + :type original_model: dict + :param original_model: The service model to load all the extras into. + + :type extra_models: iterable of dict + :param extra_models: A list of loaded extras models. + """ + for extras in extra_models: + self._process(original_model, extras) + + def _process(self, model, extra_model): + """Process a single extras model into a service model.""" + if 'merge' in extra_model: + deep_merge(model, extra_model['merge']) diff --git a/venv/lib/python3.10/site-packages/botocore/model.py b/venv/lib/python3.10/site-packages/botocore/model.py new file mode 100644 index 0000000000000000000000000000000000000000..df9159e36e3a33eea51ff4c85b26dde881f670ff --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/model.py @@ -0,0 +1,955 @@ +# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Abstractions to interact with service models.""" + +from collections import defaultdict +from typing import NamedTuple, Union + +from botocore.compat import OrderedDict +from botocore.exceptions import ( + MissingServiceIdError, + UndefinedModelAttributeError, +) +from botocore.utils import CachedProperty, hyphenize_service_id, instance_cache + +NOT_SET = object() + + +class NoShapeFoundError(Exception): + pass + + +class InvalidShapeError(Exception): + pass + + +class OperationNotFoundError(Exception): + pass + + +class InvalidShapeReferenceError(Exception): + pass + + +class ServiceId(str): + def hyphenize(self): + return hyphenize_service_id(self) + + +class Shape: + """Object representing a shape from the service model.""" + + # To simplify serialization logic, all shape params that are + # related to serialization are moved from the top level hash into + # a 'serialization' hash. This list below contains the names of all + # the attributes that should be moved. 
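+    # For example, a member modeled as
+    #     {"shape": "ETagString", "location": "header", "locationName": "ETag"}
+    # (a made-up shape reference) surfaces 'location' and 'locationName'
+    # through shape.serialization, with locationName renamed to 'name'.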
+ SERIALIZED_ATTRS = [ + 'locationName', + 'queryName', + 'flattened', + 'location', + 'payload', + 'streaming', + 'timestampFormat', + 'xmlNamespace', + 'resultWrapper', + 'xmlAttribute', + 'eventstream', + 'event', + 'eventheader', + 'eventpayload', + 'jsonvalue', + 'timestampFormat', + 'hostLabel', + ] + METADATA_ATTRS = [ + 'required', + 'min', + 'max', + 'pattern', + 'sensitive', + 'enum', + 'idempotencyToken', + 'error', + 'exception', + 'endpointdiscoveryid', + 'retryable', + 'document', + 'union', + 'contextParam', + 'clientContextParams', + 'requiresLength', + ] + MAP_TYPE = OrderedDict + + def __init__(self, shape_name, shape_model, shape_resolver=None): + """ + + :type shape_name: string + :param shape_name: The name of the shape. + + :type shape_model: dict + :param shape_model: The shape model. This would be the value + associated with the key in the "shapes" dict of the + service model (i.e ``model['shapes'][shape_name]``) + + :type shape_resolver: botocore.model.ShapeResolver + :param shape_resolver: A shape resolver object. This is used to + resolve references to other shapes. For scalar shape types + (string, integer, boolean, etc.), this argument is not + required. If a shape_resolver is not provided for a complex + type, then a ``ValueError`` will be raised when an attempt + to resolve a shape is made. + + """ + self.name = shape_name + self.type_name = shape_model['type'] + self.documentation = shape_model.get('documentation', '') + self._shape_model = shape_model + if shape_resolver is None: + # If a shape_resolver is not provided, we create an object + # that will throw errors if you attempt to resolve + # a shape. This is actually ok for scalar shapes + # because they don't need to resolve shapes and shouldn't + # be required to provide an object they won't use. + shape_resolver = UnresolvableShapeMap() + self._shape_resolver = shape_resolver + self._cache = {} + + @CachedProperty + def serialization(self): + """Serialization information about the shape. + + This contains information that may be needed for input serialization + or response parsing. This can include: + + * name + * queryName + * flattened + * location + * payload + * streaming + * xmlNamespace + * resultWrapper + * xmlAttribute + * jsonvalue + * timestampFormat + + :rtype: dict + :return: Serialization information about the shape. + + """ + model = self._shape_model + serialization = {} + for attr in self.SERIALIZED_ATTRS: + if attr in self._shape_model: + serialization[attr] = model[attr] + # For consistency, locationName is renamed to just 'name'. + if 'locationName' in serialization: + serialization['name'] = serialization.pop('locationName') + return serialization + + @CachedProperty + def metadata(self): + """Metadata about the shape. + + This requires optional information about the shape, including: + + * min + * max + * pattern + * enum + * sensitive + * required + * idempotencyToken + * document + * union + * contextParam + * clientContextParams + * requiresLength + + :rtype: dict + :return: Metadata about the shape. + + """ + model = self._shape_model + metadata = {} + for attr in self.METADATA_ATTRS: + if attr in self._shape_model: + metadata[attr] = model[attr] + return metadata + + @CachedProperty + def required_members(self): + """A list of members that are required. + + A structure shape can define members that are required. + This value will return a list of required members. If there + are no required members an empty list is returned. 
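+        For example, a hypothetical structure whose model declares
+        ``"required": ["QueueName"]`` would return ``['QueueName']`` here.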
+ + """ + return self.metadata.get('required', []) + + def _resolve_shape_ref(self, shape_ref): + return self._shape_resolver.resolve_shape_ref(shape_ref) + + def __repr__(self): + return f"<{self.__class__.__name__}({self.name})>" + + @property + def event_stream_name(self): + return None + + +class StructureShape(Shape): + @CachedProperty + def members(self): + members = self._shape_model.get('members', self.MAP_TYPE()) + # The members dict looks like: + # 'members': { + # 'MemberName': {'shape': 'shapeName'}, + # 'MemberName2': {'shape': 'shapeName'}, + # } + # We return a dict of member name to Shape object. + shape_members = self.MAP_TYPE() + for name, shape_ref in members.items(): + shape_members[name] = self._resolve_shape_ref(shape_ref) + return shape_members + + @CachedProperty + def event_stream_name(self): + for member_name, member in self.members.items(): + if member.serialization.get('eventstream'): + return member_name + return None + + @CachedProperty + def error_code(self): + if not self.metadata.get('exception', False): + return None + error_metadata = self.metadata.get("error", {}) + code = error_metadata.get("code") + if code: + return code + # Use the exception name if there is no explicit code modeled + return self.name + + @CachedProperty + def is_document_type(self): + return self.metadata.get('document', False) + + @CachedProperty + def is_tagged_union(self): + return self.metadata.get('union', False) + + +class ListShape(Shape): + @CachedProperty + def member(self): + return self._resolve_shape_ref(self._shape_model['member']) + + +class MapShape(Shape): + @CachedProperty + def key(self): + return self._resolve_shape_ref(self._shape_model['key']) + + @CachedProperty + def value(self): + return self._resolve_shape_ref(self._shape_model['value']) + + +class StringShape(Shape): + @CachedProperty + def enum(self): + return self.metadata.get('enum', []) + + +class StaticContextParameter(NamedTuple): + name: str + value: Union[bool, str] + + +class ContextParameter(NamedTuple): + name: str + member_name: str + + +class ClientContextParameter(NamedTuple): + name: str + type: str + documentation: str + + +class ServiceModel: + """ + + :ivar service_description: The parsed service description dictionary. + + """ + + def __init__(self, service_description, service_name=None): + """ + + :type service_description: dict + :param service_description: The service description model. This value + is obtained from a botocore.loader.Loader, or from directly loading + the file yourself:: + + service_description = json.load( + open('/path/to/service-description-model.json')) + model = ServiceModel(service_description) + + :type service_name: str + :param service_name: The name of the service. Normally this is + the endpoint prefix defined in the service_description. However, + you can override this value to provide a more convenient name. + This is done in a few places in botocore (ses instead of email, + emr instead of elasticmapreduce). If this value is not provided, + it will default to the endpointPrefix defined in the model. + + """ + self._service_description = service_description + # We want clients to be able to access metadata directly. 
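+        # In practice this metadata carries keys such as 'apiVersion',
+        # 'endpointPrefix', 'protocol', and 'signatureVersion', which back
+        # the properties defined further down.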
+ self.metadata = service_description.get('metadata', {}) + self._shape_resolver = ShapeResolver( + service_description.get('shapes', {}) + ) + self._signature_version = NOT_SET + self._service_name = service_name + self._instance_cache = {} + + def shape_for(self, shape_name, member_traits=None): + return self._shape_resolver.get_shape_by_name( + shape_name, member_traits + ) + + def shape_for_error_code(self, error_code): + return self._error_code_cache.get(error_code, None) + + @CachedProperty + def _error_code_cache(self): + error_code_cache = {} + for error_shape in self.error_shapes: + code = error_shape.error_code + error_code_cache[code] = error_shape + return error_code_cache + + def resolve_shape_ref(self, shape_ref): + return self._shape_resolver.resolve_shape_ref(shape_ref) + + @CachedProperty + def shape_names(self): + return list(self._service_description.get('shapes', {})) + + @CachedProperty + def error_shapes(self): + error_shapes = [] + for shape_name in self.shape_names: + error_shape = self.shape_for(shape_name) + if error_shape.metadata.get('exception', False): + error_shapes.append(error_shape) + return error_shapes + + @instance_cache + def operation_model(self, operation_name): + try: + model = self._service_description['operations'][operation_name] + except KeyError: + raise OperationNotFoundError(operation_name) + return OperationModel(model, self, operation_name) + + @CachedProperty + def documentation(self): + return self._service_description.get('documentation', '') + + @CachedProperty + def operation_names(self): + return list(self._service_description.get('operations', [])) + + @CachedProperty + def service_name(self): + """The name of the service. + + This defaults to the endpointPrefix defined in the service model. + However, this value can be overriden when a ``ServiceModel`` is + created. If a service_name was not provided when the ``ServiceModel`` + was created and if there is no endpointPrefix defined in the + service model, then an ``UndefinedModelAttributeError`` exception + will be raised. + + """ + if self._service_name is not None: + return self._service_name + else: + return self.endpoint_prefix + + @CachedProperty + def service_id(self): + try: + return ServiceId(self._get_metadata_property('serviceId')) + except UndefinedModelAttributeError: + raise MissingServiceIdError(service_name=self._service_name) + + @CachedProperty + def signing_name(self): + """The name to use when computing signatures. + + If the model does not define a signing name, this + value will be the endpoint prefix defined in the model. 
+ """ + signing_name = self.metadata.get('signingName') + if signing_name is None: + signing_name = self.endpoint_prefix + return signing_name + + @CachedProperty + def api_version(self): + return self._get_metadata_property('apiVersion') + + @CachedProperty + def protocol(self): + return self._get_metadata_property('protocol') + + @CachedProperty + def endpoint_prefix(self): + return self._get_metadata_property('endpointPrefix') + + @CachedProperty + def endpoint_discovery_operation(self): + for operation in self.operation_names: + model = self.operation_model(operation) + if model.is_endpoint_discovery_operation: + return model + + @CachedProperty + def endpoint_discovery_required(self): + for operation in self.operation_names: + model = self.operation_model(operation) + if ( + model.endpoint_discovery is not None + and model.endpoint_discovery.get('required') + ): + return True + return False + + @CachedProperty + def client_context_parameters(self): + params = self._service_description.get('clientContextParams', {}) + return [ + ClientContextParameter( + name=param_name, + type=param_val['type'], + documentation=param_val['documentation'], + ) + for param_name, param_val in params.items() + ] + + def _get_metadata_property(self, name): + try: + return self.metadata[name] + except KeyError: + raise UndefinedModelAttributeError( + f'"{name}" not defined in the metadata of the model: {self}' + ) + + # Signature version is one of the rare properties + # that can be modified so a CachedProperty is not used here. + + @property + def signature_version(self): + if self._signature_version is NOT_SET: + signature_version = self.metadata.get('signatureVersion') + self._signature_version = signature_version + return self._signature_version + + @signature_version.setter + def signature_version(self, value): + self._signature_version = value + + def __repr__(self): + return f'{self.__class__.__name__}({self.service_name})' + + +class OperationModel: + def __init__(self, operation_model, service_model, name=None): + """ + + :type operation_model: dict + :param operation_model: The operation model. This comes from the + service model, and is the value associated with the operation + name in the service model (i.e ``model['operations'][op_name]``). + + :type service_model: botocore.model.ServiceModel + :param service_model: The service model associated with the operation. + + :type name: string + :param name: The operation name. This is the operation name exposed to + the users of this model. This can potentially be different from + the "wire_name", which is the operation name that *must* by + provided over the wire. For example, given:: + + "CreateCloudFrontOriginAccessIdentity":{ + "name":"CreateCloudFrontOriginAccessIdentity2014_11_06", + ... + } + + The ``name`` would be ``CreateCloudFrontOriginAccessIdentity``, + but the ``self.wire_name`` would be + ``CreateCloudFrontOriginAccessIdentity2014_11_06``, which is the + value we must send in the corresponding HTTP request. + + """ + self._operation_model = operation_model + self._service_model = service_model + self._api_name = name + # Clients can access '.name' to get the operation name + # and '.metadata' to get the top level metdata of the service. 
+        self._wire_name = operation_model.get('name')
+        self.metadata = service_model.metadata
+        self.http = operation_model.get('http', {})
+
+    @CachedProperty
+    def name(self):
+        if self._api_name is not None:
+            return self._api_name
+        else:
+            return self.wire_name
+
+    @property
+    def wire_name(self):
+        """The wire name of the operation.
+
+        In many situations this is the same value as ``name``, but in
+        some services the operation name exposed to the user is different
+        from the operation name we send across the wire (e.g. cloudfront).
+
+        Any serialization code should use ``wire_name``.
+
+        """
+        return self._operation_model.get('name')
+
+    @property
+    def service_model(self):
+        return self._service_model
+
+    @CachedProperty
+    def documentation(self):
+        return self._operation_model.get('documentation', '')
+
+    @CachedProperty
+    def deprecated(self):
+        return self._operation_model.get('deprecated', False)
+
+    @CachedProperty
+    def endpoint_discovery(self):
+        # Explicit None default. An empty dictionary for this trait means it is
+        # enabled but not required to be used.
+        return self._operation_model.get('endpointdiscovery', None)
+
+    @CachedProperty
+    def is_endpoint_discovery_operation(self):
+        return self._operation_model.get('endpointoperation', False)
+
+    @CachedProperty
+    def input_shape(self):
+        if 'input' not in self._operation_model:
+            # Some operations do not accept any input and do not define an
+            # input shape.
+            return None
+        return self._service_model.resolve_shape_ref(
+            self._operation_model['input']
+        )
+
+    @CachedProperty
+    def output_shape(self):
+        if 'output' not in self._operation_model:
+            # Some operations do not define an output shape,
+            # in which case we return None to indicate the
+            # operation has no expected output.
+ return None + return self._service_model.resolve_shape_ref( + self._operation_model['output'] + ) + + @CachedProperty + def idempotent_members(self): + input_shape = self.input_shape + if not input_shape: + return [] + + return [ + name + for (name, shape) in input_shape.members.items() + if 'idempotencyToken' in shape.metadata + and shape.metadata['idempotencyToken'] + ] + + @CachedProperty + def static_context_parameters(self): + params = self._operation_model.get('staticContextParams', {}) + return [ + StaticContextParameter(name=name, value=props.get('value')) + for name, props in params.items() + ] + + @CachedProperty + def context_parameters(self): + if not self.input_shape: + return [] + + return [ + ContextParameter( + name=shape.metadata['contextParam']['name'], + member_name=name, + ) + for name, shape in self.input_shape.members.items() + if 'contextParam' in shape.metadata + and 'name' in shape.metadata['contextParam'] + ] + + @CachedProperty + def request_compression(self): + return self._operation_model.get('requestcompression') + + @CachedProperty + def auth_type(self): + return self._operation_model.get('authtype') + + @CachedProperty + def error_shapes(self): + shapes = self._operation_model.get("errors", []) + return list(self._service_model.resolve_shape_ref(s) for s in shapes) + + @CachedProperty + def endpoint(self): + return self._operation_model.get('endpoint') + + @CachedProperty + def http_checksum_required(self): + return self._operation_model.get('httpChecksumRequired', False) + + @CachedProperty + def http_checksum(self): + return self._operation_model.get('httpChecksum', {}) + + @CachedProperty + def has_event_stream_input(self): + return self.get_event_stream_input() is not None + + @CachedProperty + def has_event_stream_output(self): + return self.get_event_stream_output() is not None + + def get_event_stream_input(self): + return self._get_event_stream(self.input_shape) + + def get_event_stream_output(self): + return self._get_event_stream(self.output_shape) + + def _get_event_stream(self, shape): + """Returns the event stream member's shape if any or None otherwise.""" + if shape is None: + return None + event_name = shape.event_stream_name + if event_name: + return shape.members[event_name] + return None + + @CachedProperty + def has_streaming_input(self): + return self.get_streaming_input() is not None + + @CachedProperty + def has_streaming_output(self): + return self.get_streaming_output() is not None + + def get_streaming_input(self): + return self._get_streaming_body(self.input_shape) + + def get_streaming_output(self): + return self._get_streaming_body(self.output_shape) + + def _get_streaming_body(self, shape): + """Returns the streaming member's shape if any; or None otherwise.""" + if shape is None: + return None + payload = shape.serialization.get('payload') + if payload is not None: + payload_shape = shape.members[payload] + if payload_shape.type_name == 'blob': + return payload_shape + return None + + def __repr__(self): + return f'{self.__class__.__name__}(name={self.name})' + + +class ShapeResolver: + """Resolves shape references.""" + + # Any type not in this mapping will default to the Shape class. 
+    SHAPE_CLASSES = {
+        'structure': StructureShape,
+        'list': ListShape,
+        'map': MapShape,
+        'string': StringShape,
+    }
+
+    def __init__(self, shape_map):
+        self._shape_map = shape_map
+        self._shape_cache = {}
+
+    def get_shape_by_name(self, shape_name, member_traits=None):
+        try:
+            shape_model = self._shape_map[shape_name]
+        except KeyError:
+            raise NoShapeFoundError(shape_name)
+        try:
+            shape_cls = self.SHAPE_CLASSES.get(shape_model['type'], Shape)
+        except KeyError:
+            raise InvalidShapeError(
+                f"Shape is missing required key 'type': {shape_model}"
+            )
+        if member_traits:
+            shape_model = shape_model.copy()
+            shape_model.update(member_traits)
+        result = shape_cls(shape_name, shape_model, self)
+        return result
+
+    def resolve_shape_ref(self, shape_ref):
+        # A shape_ref is a dict that has a 'shape' key that
+        # refers to a shape name as well as any additional
+        # member traits that are then merged over the shape
+        # definition. For example:
+        # {"shape": "StringType", "locationName": "Foobar"}
+        if len(shape_ref) == 1 and 'shape' in shape_ref:
+            # It's just a shape ref with no member traits, so we can avoid
+            # a .copy(). This is the common case, so it's specifically
+            # called out here.
+            return self.get_shape_by_name(shape_ref['shape'])
+        else:
+            member_traits = shape_ref.copy()
+            try:
+                shape_name = member_traits.pop('shape')
+            except KeyError:
+                raise InvalidShapeReferenceError(
+                    f"Invalid model, missing shape reference: {shape_ref}"
+                )
+            return self.get_shape_by_name(shape_name, member_traits)
+
+
+class UnresolvableShapeMap:
+    """A ShapeResolver that raises a ValueError when shapes are resolved."""
+
+    def get_shape_by_name(self, shape_name, member_traits=None):
+        raise ValueError(
+            f"Attempted to lookup shape '{shape_name}', but no shape map was provided."
+        )
+
+    def resolve_shape_ref(self, shape_ref):
+        raise ValueError(
+            f"Attempted to resolve shape '{shape_ref}', but no shape "
+            f"map was provided."
+        )
+
+
+class DenormalizedStructureBuilder:
+    """Build a StructureShape from a denormalized model.
+
+    This is a convenience builder class that makes it easy to construct
+    ``StructureShape``s based on a denormalized model.
+
+    It will handle the details of creating unique shape names and creating
+    the appropriate shape map needed by the ``StructureShape`` class.
+
+    Example usage::
+
+        builder = DenormalizedStructureBuilder()
+        shape = builder.with_members({
+            'A': {
+                'type': 'structure',
+                'members': {
+                    'B': {
+                        'type': 'structure',
+                        'members': {
+                            'C': {
+                                'type': 'string',
+                            }
+                        }
+                    }
+                }
+            }
+        }).build_model()
+        # ``shape`` is now an instance of botocore.model.StructureShape
+
+    """
+
+    SCALAR_TYPES = (
+        'string',
+        'integer',
+        'boolean',
+        'blob',
+        'float',
+        'timestamp',
+        'long',
+        'double',
+        'char',
+    )
+
+    def __init__(self, name=None):
+        self.members = OrderedDict()
+        self._name_generator = ShapeNameGenerator()
+        if name is None:
+            name = self._name_generator.new_shape_name('structure')
+        self.name = name
+
+    def with_members(self, members):
+        """
+
+        :type members: dict
+        :param members: The denormalized members.
+
+        :return: self
+
+        """
+        self._members = members
+        return self
+
+    def build_model(self):
+        """Build the model based on the provided members.
+
+        :rtype: botocore.model.StructureShape
+        :return: The built StructureShape object.
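+
+        A minimal illustrative sketch (editor's example; the member name
+        is arbitrary)::
+
+            shape = DenormalizedStructureBuilder().with_members({
+                'Name': {'type': 'string'},
+            }).build_model()
+            assert shape.members['Name'].type_name == 'string'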
+ + """ + shapes = OrderedDict() + denormalized = { + 'type': 'structure', + 'members': self._members, + } + self._build_model(denormalized, shapes, self.name) + resolver = ShapeResolver(shape_map=shapes) + return StructureShape( + shape_name=self.name, + shape_model=shapes[self.name], + shape_resolver=resolver, + ) + + def _build_model(self, model, shapes, shape_name): + if model['type'] == 'structure': + shapes[shape_name] = self._build_structure(model, shapes) + elif model['type'] == 'list': + shapes[shape_name] = self._build_list(model, shapes) + elif model['type'] == 'map': + shapes[shape_name] = self._build_map(model, shapes) + elif model['type'] in self.SCALAR_TYPES: + shapes[shape_name] = self._build_scalar(model) + else: + raise InvalidShapeError(f"Unknown shape type: {model['type']}") + + def _build_structure(self, model, shapes): + members = OrderedDict() + shape = self._build_initial_shape(model) + shape['members'] = members + + for name, member_model in model.get('members', OrderedDict()).items(): + member_shape_name = self._get_shape_name(member_model) + members[name] = {'shape': member_shape_name} + self._build_model(member_model, shapes, member_shape_name) + return shape + + def _build_list(self, model, shapes): + member_shape_name = self._get_shape_name(model) + shape = self._build_initial_shape(model) + shape['member'] = {'shape': member_shape_name} + self._build_model(model['member'], shapes, member_shape_name) + return shape + + def _build_map(self, model, shapes): + key_shape_name = self._get_shape_name(model['key']) + value_shape_name = self._get_shape_name(model['value']) + shape = self._build_initial_shape(model) + shape['key'] = {'shape': key_shape_name} + shape['value'] = {'shape': value_shape_name} + self._build_model(model['key'], shapes, key_shape_name) + self._build_model(model['value'], shapes, value_shape_name) + return shape + + def _build_initial_shape(self, model): + shape = { + 'type': model['type'], + } + if 'documentation' in model: + shape['documentation'] = model['documentation'] + for attr in Shape.METADATA_ATTRS: + if attr in model: + shape[attr] = model[attr] + return shape + + def _build_scalar(self, model): + return self._build_initial_shape(model) + + def _get_shape_name(self, model): + if 'shape_name' in model: + return model['shape_name'] + else: + return self._name_generator.new_shape_name(model['type']) + + +class ShapeNameGenerator: + """Generate unique shape names for a type. + + This class can be used in conjunction with the DenormalizedStructureBuilder + to generate unique shape names for a given type. + + """ + + def __init__(self): + self._name_cache = defaultdict(int) + + def new_shape_name(self, type_name): + """Generate a unique shape name. + + This method will guarantee a unique shape name each time it is + called with the same type. + + :: + + >>> s = ShapeNameGenerator() + >>> s.new_shape_name('structure') + 'StructureType1' + >>> s.new_shape_name('structure') + 'StructureType2' + >>> s.new_shape_name('list') + 'ListType1' + >>> s.new_shape_name('list') + 'ListType2' + + + :type type_name: string + :param type_name: The type name (structure, list, map, string, etc.) 
+
+        :rtype: string
+        :return: A unique shape name for the given type
+
+        """
+        self._name_cache[type_name] += 1
+        current_index = self._name_cache[type_name]
+        return f'{type_name.capitalize()}Type{current_index}'
diff --git a/venv/lib/python3.10/site-packages/botocore/monitoring.py b/venv/lib/python3.10/site-packages/botocore/monitoring.py
new file mode 100644
index 0000000000000000000000000000000000000000..71d7230246b034f1a66f69b7a050a433b0ab9d13
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/monitoring.py
@@ -0,0 +1,586 @@
+# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import json
+import logging
+import re
+import time
+
+from botocore.compat import ensure_bytes, ensure_unicode, urlparse
+from botocore.retryhandler import EXCEPTION_MAP as RETRYABLE_EXCEPTIONS
+
+logger = logging.getLogger(__name__)
+
+
+class Monitor:
+    _EVENTS_TO_REGISTER = [
+        'before-parameter-build',
+        'request-created',
+        'response-received',
+        'after-call',
+        'after-call-error',
+    ]
+
+    def __init__(self, adapter, publisher):
+        """Abstraction for monitoring a client's API calls
+
+        :param adapter: An adapter that takes event emitter events
+            and produces monitor events
+
+        :param publisher: A publisher for generated monitor events
+        """
+        self._adapter = adapter
+        self._publisher = publisher
+
+    def register(self, event_emitter):
+        """Register an event emitter to the monitor"""
+        for event_to_register in self._EVENTS_TO_REGISTER:
+            event_emitter.register_last(event_to_register, self.capture)
+
+    def capture(self, event_name, **payload):
+        """Captures an incoming event from the event emitter
+
+        It will feed an event emitter event to the monitor's adapter to create
+        a monitor event and then publish that event to the monitor's publisher.
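+
+        For example (an illustrative note based on the adapter handlers
+        below): feeding a ``response-received`` event produces an
+        ``APICallAttemptEvent`` that gets published, while a
+        ``before-parameter-build`` event only records state and produces
+        nothing to publish.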
+ """ + try: + monitor_event = self._adapter.feed(event_name, payload) + if monitor_event: + self._publisher.publish(monitor_event) + except Exception as e: + logger.debug( + 'Exception %s raised by client monitor in handling event %s', + e, + event_name, + exc_info=True, + ) + + +class MonitorEventAdapter: + def __init__(self, time=time.time): + """Adapts event emitter events to produce monitor events + + :type time: callable + :param time: A callable that produces the current time + """ + self._time = time + + def feed(self, emitter_event_name, emitter_payload): + """Feed an event emitter event to generate a monitor event + + :type emitter_event_name: str + :param emitter_event_name: The name of the event emitted + + :type emitter_payload: dict + :param emitter_payload: The payload to associated to the event + emitted + + :rtype: BaseMonitorEvent + :returns: A monitor event based on the event emitter events + fired + """ + return self._get_handler(emitter_event_name)(**emitter_payload) + + def _get_handler(self, event_name): + return getattr( + self, '_handle_' + event_name.split('.')[0].replace('-', '_') + ) + + def _handle_before_parameter_build(self, model, context, **kwargs): + context['current_api_call_event'] = APICallEvent( + service=model.service_model.service_id, + operation=model.wire_name, + timestamp=self._get_current_time(), + ) + + def _handle_request_created(self, request, **kwargs): + context = request.context + new_attempt_event = context[ + 'current_api_call_event' + ].new_api_call_attempt(timestamp=self._get_current_time()) + new_attempt_event.request_headers = request.headers + new_attempt_event.url = request.url + context['current_api_call_attempt_event'] = new_attempt_event + + def _handle_response_received( + self, parsed_response, context, exception, **kwargs + ): + attempt_event = context.pop('current_api_call_attempt_event') + attempt_event.latency = self._get_latency(attempt_event) + if parsed_response is not None: + attempt_event.http_status_code = parsed_response[ + 'ResponseMetadata' + ]['HTTPStatusCode'] + attempt_event.response_headers = parsed_response[ + 'ResponseMetadata' + ]['HTTPHeaders'] + attempt_event.parsed_error = parsed_response.get('Error') + else: + attempt_event.wire_exception = exception + return attempt_event + + def _handle_after_call(self, context, parsed, **kwargs): + context['current_api_call_event'].retries_exceeded = parsed[ + 'ResponseMetadata' + ].get('MaxAttemptsReached', False) + return self._complete_api_call(context) + + def _handle_after_call_error(self, context, exception, **kwargs): + # If the after-call-error was emitted and the error being raised + # was a retryable connection error, then the retries must have exceeded + # for that exception as this event gets emitted **after** retries + # happen. 
+        context[
+            'current_api_call_event'
+        ].retries_exceeded = self._is_retryable_exception(exception)
+        return self._complete_api_call(context)
+
+    def _is_retryable_exception(self, exception):
+        return isinstance(
+            exception, tuple(RETRYABLE_EXCEPTIONS['GENERAL_CONNECTION_ERROR'])
+        )
+
+    def _complete_api_call(self, context):
+        call_event = context.pop('current_api_call_event')
+        call_event.latency = self._get_latency(call_event)
+        return call_event
+
+    def _get_latency(self, event):
+        return self._get_current_time() - event.timestamp
+
+    def _get_current_time(self):
+        return int(self._time() * 1000)
+
+
+class BaseMonitorEvent:
+    def __init__(self, service, operation, timestamp):
+        """Base monitor event
+
+        :type service: str
+        :param service: A string identifying the service associated with
+            the event
+
+        :type operation: str
+        :param operation: A string identifying the operation of the service
+            associated with the event
+
+        :type timestamp: int
+        :param timestamp: Epoch time in milliseconds from when the event began
+        """
+        self.service = service
+        self.operation = operation
+        self.timestamp = timestamp
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}({self.__dict__!r})'
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self.__dict__ == other.__dict__
+        return False
+
+
+class APICallEvent(BaseMonitorEvent):
+    def __init__(
+        self,
+        service,
+        operation,
+        timestamp,
+        latency=None,
+        attempts=None,
+        retries_exceeded=False,
+    ):
+        """Monitor event for a single API call
+
+        This event corresponds to a single client method call, which includes
+        every HTTP request attempt made in order to complete the client call.
+
+        :type service: str
+        :param service: A string identifying the service associated with
+            the event
+
+        :type operation: str
+        :param operation: A string identifying the operation of the service
+            associated with the event
+
+        :type timestamp: int
+        :param timestamp: Epoch time in milliseconds from when the event began
+
+        :type latency: int
+        :param latency: The time in milliseconds to complete the client call
+
+        :type attempts: list
+        :param attempts: The list of APICallAttempts associated with the
+            APICall
+
+        :type retries_exceeded: bool
+        :param retries_exceeded: True if the API call exceeded retries. False
+            otherwise.
+        """
+        super().__init__(
+            service=service, operation=operation, timestamp=timestamp
+        )
+        self.latency = latency
+        self.attempts = attempts
+        if attempts is None:
+            self.attempts = []
+        self.retries_exceeded = retries_exceeded
+
+    def new_api_call_attempt(self, timestamp):
+        """Instantiates an APICallAttemptEvent associated with the APICallEvent
+
+        :type timestamp: int
+        :param timestamp: Epoch time in milliseconds to associate with the
+            APICallAttemptEvent
+        """
+        attempt_event = APICallAttemptEvent(
+            service=self.service, operation=self.operation, timestamp=timestamp
+        )
+        self.attempts.append(attempt_event)
+        return attempt_event
+
+
+class APICallAttemptEvent(BaseMonitorEvent):
+    def __init__(
+        self,
+        service,
+        operation,
+        timestamp,
+        latency=None,
+        url=None,
+        http_status_code=None,
+        request_headers=None,
+        response_headers=None,
+        parsed_error=None,
+        wire_exception=None,
+    ):
+        """Monitor event for a single API call attempt
+
+        This event corresponds to a single HTTP request attempt in completing
+        the entire client method call.
+
+        :type service: str
+        :param service: A string identifying the service associated with
+            the event
+
+        :type operation: str
+        :param operation: A string identifying the operation of the service
+            associated with the event
+
+        :type timestamp: int
+        :param timestamp: Epoch time in milliseconds from when the HTTP request
+            started
+
+        :type latency: int
+        :param latency: The time in milliseconds to complete the HTTP request
+            whether it succeeded or failed
+
+        :type url: str
+        :param url: The URL the attempt was sent to
+
+        :type http_status_code: int
+        :param http_status_code: The HTTP status code of the HTTP response
+            if there was a response
+
+        :type request_headers: dict
+        :param request_headers: The HTTP headers sent in making the HTTP
+            request
+
+        :type response_headers: dict
+        :param response_headers: The HTTP headers returned in the HTTP response
+            if there was a response
+
+        :type parsed_error: dict
+        :param parsed_error: The error parsed if the service returned an
+            error back
+
+        :type wire_exception: Exception
+        :param wire_exception: The exception raised in sending the HTTP
+            request (e.g. ConnectionError)
+        """
+        super().__init__(
+            service=service, operation=operation, timestamp=timestamp
+        )
+        self.latency = latency
+        self.url = url
+        self.http_status_code = http_status_code
+        self.request_headers = request_headers
+        self.response_headers = response_headers
+        self.parsed_error = parsed_error
+        self.wire_exception = wire_exception
+
+
+class CSMSerializer:
+    _MAX_CLIENT_ID_LENGTH = 255
+    _MAX_EXCEPTION_CLASS_LENGTH = 128
+    _MAX_ERROR_CODE_LENGTH = 128
+    _MAX_USER_AGENT_LENGTH = 256
+    _MAX_MESSAGE_LENGTH = 512
+    _RESPONSE_HEADERS_TO_EVENT_ENTRIES = {
+        'x-amzn-requestid': 'XAmznRequestId',
+        'x-amz-request-id': 'XAmzRequestId',
+        'x-amz-id-2': 'XAmzId2',
+    }
+    _AUTH_REGEXS = {
+        'v4': re.compile(
+            r'AWS4-HMAC-SHA256 '
+            r'Credential=(?P<access_key>\w+)/\d+/'
+            r'(?P<signing_region>[a-z0-9-]+)/'
+        ),
+        's3': re.compile(r'AWS (?P<access_key>\w+):'),
+    }
+    _SERIALIZEABLE_EVENT_PROPERTIES = [
+        'service',
+        'operation',
+        'timestamp',
+        'attempts',
+        'latency',
+        'retries_exceeded',
+        'url',
+        'request_headers',
+        'http_status_code',
+        'response_headers',
+        'parsed_error',
+        'wire_exception',
+    ]
+
+    def __init__(self, csm_client_id):
+        """Serializes monitor events to CSM (Client Side Monitoring) format
+
+        :type csm_client_id: str
+        :param csm_client_id: The application identifier to associate
+            with the serialized events
+        """
+        self._validate_client_id(csm_client_id)
+        self.csm_client_id = csm_client_id
+
+    def _validate_client_id(self, csm_client_id):
+        if len(csm_client_id) > self._MAX_CLIENT_ID_LENGTH:
+            raise ValueError(
+                f'The value provided for csm_client_id: {csm_client_id} exceeds '
+                f'the maximum length of {self._MAX_CLIENT_ID_LENGTH} characters'
+            )
+
+    def serialize(self, event):
+        """Serializes a monitor event to the CSM format
+
+        :type event: BaseMonitorEvent
+        :param event: The event to serialize to bytes
+
+        :rtype: bytes
+        :returns: The CSM serialized form of the event
+        """
+        event_dict = self._get_base_event_dict(event)
+        event_type = self._get_event_type(event)
+        event_dict['Type'] = event_type
+        for attr in self._SERIALIZEABLE_EVENT_PROPERTIES:
+            value = getattr(event, attr, None)
+            if value is not None:
+                getattr(self, '_serialize_' + attr)(
+                    value, event_dict, event_type=event_type
+                )
+        return ensure_bytes(json.dumps(event_dict, separators=(',', ':')))
+
+    def _get_base_event_dict(self, event):
+        return {
+            'Version': 1,
+            'ClientId': self.csm_client_id,
+        }
+
+    def _serialize_service(self,
service, event_dict, **kwargs): + event_dict['Service'] = service + + def _serialize_operation(self, operation, event_dict, **kwargs): + event_dict['Api'] = operation + + def _serialize_timestamp(self, timestamp, event_dict, **kwargs): + event_dict['Timestamp'] = timestamp + + def _serialize_attempts(self, attempts, event_dict, **kwargs): + event_dict['AttemptCount'] = len(attempts) + if attempts: + self._add_fields_from_last_attempt(event_dict, attempts[-1]) + + def _add_fields_from_last_attempt(self, event_dict, last_attempt): + if last_attempt.request_headers: + # It does not matter which attempt to use to grab the region + # for the ApiCall event, but SDKs typically do the last one. + region = self._get_region(last_attempt.request_headers) + if region is not None: + event_dict['Region'] = region + event_dict['UserAgent'] = self._get_user_agent( + last_attempt.request_headers + ) + if last_attempt.http_status_code is not None: + event_dict['FinalHttpStatusCode'] = last_attempt.http_status_code + if last_attempt.parsed_error is not None: + self._serialize_parsed_error( + last_attempt.parsed_error, event_dict, 'ApiCall' + ) + if last_attempt.wire_exception is not None: + self._serialize_wire_exception( + last_attempt.wire_exception, event_dict, 'ApiCall' + ) + + def _serialize_latency(self, latency, event_dict, event_type): + if event_type == 'ApiCall': + event_dict['Latency'] = latency + elif event_type == 'ApiCallAttempt': + event_dict['AttemptLatency'] = latency + + def _serialize_retries_exceeded( + self, retries_exceeded, event_dict, **kwargs + ): + event_dict['MaxRetriesExceeded'] = 1 if retries_exceeded else 0 + + def _serialize_url(self, url, event_dict, **kwargs): + event_dict['Fqdn'] = urlparse(url).netloc + + def _serialize_request_headers( + self, request_headers, event_dict, **kwargs + ): + event_dict['UserAgent'] = self._get_user_agent(request_headers) + if self._is_signed(request_headers): + event_dict['AccessKey'] = self._get_access_key(request_headers) + region = self._get_region(request_headers) + if region is not None: + event_dict['Region'] = region + if 'X-Amz-Security-Token' in request_headers: + event_dict['SessionToken'] = request_headers[ + 'X-Amz-Security-Token' + ] + + def _serialize_http_status_code( + self, http_status_code, event_dict, **kwargs + ): + event_dict['HttpStatusCode'] = http_status_code + + def _serialize_response_headers( + self, response_headers, event_dict, **kwargs + ): + for header, entry in self._RESPONSE_HEADERS_TO_EVENT_ENTRIES.items(): + if header in response_headers: + event_dict[entry] = response_headers[header] + + def _serialize_parsed_error( + self, parsed_error, event_dict, event_type, **kwargs + ): + field_prefix = 'Final' if event_type == 'ApiCall' else '' + event_dict[field_prefix + 'AwsException'] = self._truncate( + parsed_error['Code'], self._MAX_ERROR_CODE_LENGTH + ) + event_dict[field_prefix + 'AwsExceptionMessage'] = self._truncate( + parsed_error['Message'], self._MAX_MESSAGE_LENGTH + ) + + def _serialize_wire_exception( + self, wire_exception, event_dict, event_type, **kwargs + ): + field_prefix = 'Final' if event_type == 'ApiCall' else '' + event_dict[field_prefix + 'SdkException'] = self._truncate( + wire_exception.__class__.__name__, self._MAX_EXCEPTION_CLASS_LENGTH + ) + event_dict[field_prefix + 'SdkExceptionMessage'] = self._truncate( + str(wire_exception), self._MAX_MESSAGE_LENGTH + ) + + def _get_event_type(self, event): + if isinstance(event, APICallEvent): + return 'ApiCall' + elif isinstance(event, 
APICallAttemptEvent):
+            return 'ApiCallAttempt'
+
+    def _get_access_key(self, request_headers):
+        auth_val = self._get_auth_value(request_headers)
+        _, auth_match = self._get_auth_match(auth_val)
+        return auth_match.group('access_key')
+
+    def _get_region(self, request_headers):
+        if not self._is_signed(request_headers):
+            return None
+        auth_val = self._get_auth_value(request_headers)
+        signature_version, auth_match = self._get_auth_match(auth_val)
+        if signature_version != 'v4':
+            return None
+        return auth_match.group('signing_region')
+
+    def _get_user_agent(self, request_headers):
+        return self._truncate(
+            ensure_unicode(request_headers.get('User-Agent', '')),
+            self._MAX_USER_AGENT_LENGTH,
+        )
+
+    def _is_signed(self, request_headers):
+        return 'Authorization' in request_headers
+
+    def _get_auth_value(self, request_headers):
+        return ensure_unicode(request_headers['Authorization'])
+
+    def _get_auth_match(self, auth_val):
+        for signature_version, regex in self._AUTH_REGEXS.items():
+            match = regex.match(auth_val)
+            if match:
+                return signature_version, match
+        return None, None
+
+    def _truncate(self, text, max_length):
+        if len(text) > max_length:
+            logger.debug(
+                'Truncating following value to maximum length of %s: %s',
+                max_length,
+                text,
+            )
+            return text[:max_length]
+        return text
+
+
+class SocketPublisher:
+    _MAX_MONITOR_EVENT_LENGTH = 8 * 1024
+
+    def __init__(self, socket, host, port, serializer):
+        """Publishes monitor events to a socket
+
+        :type socket: socket.socket
+        :param socket: The socket object to use to publish events
+
+        :type host: string
+        :param host: The host to send events to
+
+        :type port: integer
+        :param port: The port on the host to send events to
+
+        :param serializer: The serializer to use to serialize the event
+            to a form that can be published to the socket. This must
+            have a `serialize()` method that accepts a monitor event
+            and returns bytes
+        """
+        self._socket = socket
+        self._address = (host, port)
+        self._serializer = serializer
+
+    def publish(self, event):
+        """Publishes a specified monitor event
+
+        :type event: BaseMonitorEvent
+        :param event: The monitor event to be sent
+            over the publisher's socket to the desired address.
+        """
+        serialized_event = self._serializer.serialize(event)
+        if len(serialized_event) > self._MAX_MONITOR_EVENT_LENGTH:
+            logger.debug(
+                'Serialized event of size %s exceeds the maximum length '
+                'allowed: %s. Not sending event to socket.',
+                len(serialized_event),
+                self._MAX_MONITOR_EVENT_LENGTH,
+            )
+            return
+        self._socket.sendto(serialized_event, self._address)
diff --git a/venv/lib/python3.10/site-packages/botocore/paginate.py b/venv/lib/python3.10/site-packages/botocore/paginate.py
new file mode 100644
index 0000000000000000000000000000000000000000..228cdd3cd277e348c7b738d38cbd5f93e237ee8d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/paginate.py
@@ -0,0 +1,720 @@
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+import base64
+import json
+import logging
+from itertools import tee
+
+import jmespath
+
+from botocore.exceptions import PaginationError
+from botocore.utils import merge_dicts, set_value_from_jmespath
+
+log = logging.getLogger(__name__)
+
+
+class TokenEncoder:
+    """Encodes dictionaries into opaque strings.
+
+    This is, for the most part, json dumps + base64 encoding, but it also
+    supports having bytes in the dictionary in addition to the types that
+    json can handle by default.
+
+    This is intended for use in encoding pagination tokens, which in some
+    cases can be complex structures and / or contain bytes.
+    """
+
+    def encode(self, token):
+        """Encodes a dictionary to an opaque string.
+
+        :type token: dict
+        :param token: A dictionary containing pagination information,
+            particularly the service pagination token(s) but also other boto
+            metadata.
+
+        :rtype: str
+        :returns: An opaque string
+        """
+        try:
+            # Try just using json dumps first to avoid having to traverse
+            # and encode the dict. In 99.9999% of cases this will work.
+            json_string = json.dumps(token)
+        except (TypeError, UnicodeDecodeError):
+            # If normal dumping failed, go through and base64 encode all bytes.
+            encoded_token, encoded_keys = self._encode(token, [])
+
+            # Save the list of all the encoded key paths. We can safely
+            # assume that no service will ever use this key.
+            encoded_token['boto_encoded_keys'] = encoded_keys
+
+            # Now that the bytes are all encoded, dump the json.
+            json_string = json.dumps(encoded_token)
+
+        # base64 encode the json string to produce an opaque token string.
+        return base64.b64encode(json_string.encode('utf-8')).decode('utf-8')
+
+    def _encode(self, data, path):
+        """Encode bytes in given data, keeping track of the path traversed."""
+        if isinstance(data, dict):
+            return self._encode_dict(data, path)
+        elif isinstance(data, list):
+            return self._encode_list(data, path)
+        elif isinstance(data, bytes):
+            return self._encode_bytes(data, path)
+        else:
+            return data, []
+
+    def _encode_list(self, data, path):
+        """Encode any bytes in a list, noting the index of what is encoded."""
+        new_data = []
+        encoded = []
+        for i, value in enumerate(data):
+            new_path = path + [i]
+            new_value, new_encoded = self._encode(value, new_path)
+            new_data.append(new_value)
+            encoded.extend(new_encoded)
+        return new_data, encoded
+
+    def _encode_dict(self, data, path):
+        """Encode any bytes in a dict, noting the index of what is encoded."""
+        new_data = {}
+        encoded = []
+        for key, value in data.items():
+            new_path = path + [key]
+            new_value, new_encoded = self._encode(value, new_path)
+            new_data[key] = new_value
+            encoded.extend(new_encoded)
+        return new_data, encoded
+
+    def _encode_bytes(self, data, path):
+        """Base64 encode a byte string."""
+        return base64.b64encode(data).decode('utf-8'), [path]
+
+
+class TokenDecoder:
+    """Decodes token strings back into dictionaries.
+
+    This performs the inverse operation to the TokenEncoder, accepting
+    opaque strings and decoding them into a usable form.
+    """
+
+    def decode(self, token):
+        """Decodes an opaque string to a dictionary.
+
+        :type token: str
+        :param token: A token string given by the botocore pagination
+            interface.
+
+        :rtype: dict
+        :returns: A dictionary containing pagination information,
+            particularly the service pagination token(s) but also other boto
+            metadata.
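+
+        An illustrative round trip (editor's example; the key name is
+        arbitrary)::
+
+            encoder = TokenEncoder()
+            opaque = encoder.encode({'Marker': b'\xff'})
+            TokenDecoder().decode(opaque)  # -> {'Marker': b'\xff'}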
+ """ + json_string = base64.b64decode(token.encode('utf-8')).decode('utf-8') + decoded_token = json.loads(json_string) + + # Remove the encoding metadata as it is read since it will no longer + # be needed. + encoded_keys = decoded_token.pop('boto_encoded_keys', None) + if encoded_keys is None: + return decoded_token + else: + return self._decode(decoded_token, encoded_keys) + + def _decode(self, token, encoded_keys): + """Find each encoded value and decode it.""" + for key in encoded_keys: + encoded = self._path_get(token, key) + decoded = base64.b64decode(encoded.encode('utf-8')) + self._path_set(token, key, decoded) + return token + + def _path_get(self, data, path): + """Return the nested data at the given path. + + For instance: + data = {'foo': ['bar', 'baz']} + path = ['foo', 0] + ==> 'bar' + """ + # jmespath isn't used here because it would be difficult to actually + # create the jmespath query when taking all of the unknowns of key + # structure into account. Gross though this is, it is simple and not + # very error prone. + d = data + for step in path: + d = d[step] + return d + + def _path_set(self, data, path, value): + """Set the value of a key in the given data. + + Example: + data = {'foo': ['bar', 'baz']} + path = ['foo', 1] + value = 'bin' + ==> data = {'foo': ['bar', 'bin']} + """ + container = self._path_get(data, path[:-1]) + container[path[-1]] = value + + +class PaginatorModel: + def __init__(self, paginator_config): + self._paginator_config = paginator_config['pagination'] + + def get_paginator(self, operation_name): + try: + single_paginator_config = self._paginator_config[operation_name] + except KeyError: + raise ValueError( + f"Paginator for operation does not exist: {operation_name}" + ) + return single_paginator_config + + +class PageIterator: + """An iterable object to paginate API results. + Please note it is NOT a python iterator. + Use ``iter`` to wrap this as a generator. 
+ """ + + def __init__( + self, + method, + input_token, + output_token, + more_results, + result_keys, + non_aggregate_keys, + limit_key, + max_items, + starting_token, + page_size, + op_kwargs, + ): + self._method = method + self._input_token = input_token + self._output_token = output_token + self._more_results = more_results + self._result_keys = result_keys + self._max_items = max_items + self._limit_key = limit_key + self._starting_token = starting_token + self._page_size = page_size + self._op_kwargs = op_kwargs + self._resume_token = None + self._non_aggregate_key_exprs = non_aggregate_keys + self._non_aggregate_part = {} + self._token_encoder = TokenEncoder() + self._token_decoder = TokenDecoder() + + @property + def result_keys(self): + return self._result_keys + + @property + def resume_token(self): + """Token to specify to resume pagination.""" + return self._resume_token + + @resume_token.setter + def resume_token(self, value): + if not isinstance(value, dict): + raise ValueError(f"Bad starting token: {value}") + + if 'boto_truncate_amount' in value: + token_keys = sorted(self._input_token + ['boto_truncate_amount']) + else: + token_keys = sorted(self._input_token) + dict_keys = sorted(value.keys()) + + if token_keys == dict_keys: + self._resume_token = self._token_encoder.encode(value) + else: + raise ValueError(f"Bad starting token: {value}") + + @property + def non_aggregate_part(self): + return self._non_aggregate_part + + def __iter__(self): + current_kwargs = self._op_kwargs + previous_next_token = None + next_token = {key: None for key in self._input_token} + if self._starting_token is not None: + # If the starting token exists, populate the next_token with the + # values inside it. This ensures that we have the service's + # pagination token on hand if we need to truncate after the + # first response. + next_token = self._parse_starting_token()[0] + # The number of items from result_key we've seen so far. + total_items = 0 + first_request = True + primary_result_key = self.result_keys[0] + starting_truncation = 0 + self._inject_starting_params(current_kwargs) + while True: + response = self._make_request(current_kwargs) + parsed = self._extract_parsed_response(response) + if first_request: + # The first request is handled differently. We could + # possibly have a resume/starting token that tells us where + # to index into the retrieved page. + if self._starting_token is not None: + starting_truncation = self._handle_first_request( + parsed, primary_result_key, starting_truncation + ) + first_request = False + self._record_non_aggregate_key_values(parsed) + else: + # If this isn't the first request, we have already sliced into + # the first request and had to make additional requests after. + # We no longer need to add this to truncation. 
+ starting_truncation = 0 + current_response = primary_result_key.search(parsed) + if current_response is None: + current_response = [] + num_current_response = len(current_response) + truncate_amount = 0 + if self._max_items is not None: + truncate_amount = ( + total_items + num_current_response - self._max_items + ) + if truncate_amount > 0: + self._truncate_response( + parsed, + primary_result_key, + truncate_amount, + starting_truncation, + next_token, + ) + yield response + break + else: + yield response + total_items += num_current_response + next_token = self._get_next_token(parsed) + if all(t is None for t in next_token.values()): + break + if ( + self._max_items is not None + and total_items == self._max_items + ): + # We're on a page boundary so we can set the current + # next token to be the resume token. + self.resume_token = next_token + break + if ( + previous_next_token is not None + and previous_next_token == next_token + ): + message = ( + f"The same next token was received " + f"twice: {next_token}" + ) + raise PaginationError(message=message) + self._inject_token_into_kwargs(current_kwargs, next_token) + previous_next_token = next_token + + def search(self, expression): + """Applies a JMESPath expression to a paginator + + Each page of results is searched using the provided JMESPath + expression. If the result is not a list, it is yielded + directly. If the result is a list, each element in the result + is yielded individually (essentially implementing a flatmap in + which the JMESPath search is the mapping function). + + :type expression: str + :param expression: JMESPath expression to apply to each page. + + :return: Returns an iterator that yields the individual + elements of applying a JMESPath expression to each page of + results. + """ + compiled = jmespath.compile(expression) + for page in self: + results = compiled.search(page) + if isinstance(results, list): + yield from results + else: + # Yield result directly if it is not a list. + yield results + + def _make_request(self, current_kwargs): + return self._method(**current_kwargs) + + def _extract_parsed_response(self, response): + return response + + def _record_non_aggregate_key_values(self, response): + non_aggregate_keys = {} + for expression in self._non_aggregate_key_exprs: + result = expression.search(response) + set_value_from_jmespath( + non_aggregate_keys, expression.expression, result + ) + self._non_aggregate_part = non_aggregate_keys + + def _inject_starting_params(self, op_kwargs): + # If the user has specified a starting token we need to + # inject that into the operation's kwargs. + if self._starting_token is not None: + # Don't need to do anything special if there is no starting + # token specified. + next_token = self._parse_starting_token()[0] + self._inject_token_into_kwargs(op_kwargs, next_token) + if self._page_size is not None: + # Pass the page size as the parameter name for limiting + # page size, also known as the limit_key. + op_kwargs[self._limit_key] = self._page_size + + def _inject_token_into_kwargs(self, op_kwargs, next_token): + for name, token in next_token.items(): + if (token is not None) and (token != 'None'): + op_kwargs[name] = token + elif name in op_kwargs: + del op_kwargs[name] + + def _handle_first_request( + self, parsed, primary_result_key, starting_truncation + ): + # If the payload is an array or string, we need to slice into it + # and only return the truncated amount. 
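+        # Illustrative example: with a starting token carrying
+        # boto_truncate_amount == 1 and a first page of ['a', 'b', 'c'],
+        # the primary result key is resliced to ['b', 'c'].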
+        starting_truncation = self._parse_starting_token()[1]
+        all_data = primary_result_key.search(parsed)
+        if isinstance(all_data, (list, str)):
+            data = all_data[starting_truncation:]
+        else:
+            data = None
+        set_value_from_jmespath(parsed, primary_result_key.expression, data)
+        # We also need to truncate any secondary result keys
+        # because they were not truncated in the previous last
+        # response.
+        for token in self.result_keys:
+            if token == primary_result_key:
+                continue
+            sample = token.search(parsed)
+            if isinstance(sample, list):
+                empty_value = []
+            elif isinstance(sample, str):
+                empty_value = ''
+            elif isinstance(sample, (int, float)):
+                empty_value = 0
+            else:
+                empty_value = None
+            set_value_from_jmespath(parsed, token.expression, empty_value)
+        return starting_truncation
+
+    def _truncate_response(
+        self,
+        parsed,
+        primary_result_key,
+        truncate_amount,
+        starting_truncation,
+        next_token,
+    ):
+        original = primary_result_key.search(parsed)
+        if original is None:
+            original = []
+        amount_to_keep = len(original) - truncate_amount
+        truncated = original[:amount_to_keep]
+        set_value_from_jmespath(
+            parsed, primary_result_key.expression, truncated
+        )
+        # The issue here is that even though we know how much we've truncated
+        # we need to account for this globally including any starting
+        # left truncation. For example:
+        # Raw response: [0,1,2,3]
+        # Starting index: 1
+        # Max items: 1
+        # Starting left truncation: [1, 2, 3]
+        # End right truncation for max items: [1]
+        # However, even though we only kept 1, this is post
+        # left truncation so the next starting index should be 2, not 1
+        # (left_truncation + amount_to_keep).
+        next_token['boto_truncate_amount'] = (
+            amount_to_keep + starting_truncation
+        )
+        self.resume_token = next_token
+
+    def _get_next_token(self, parsed):
+        if self._more_results is not None:
+            if not self._more_results.search(parsed):
+                return {}
+        next_tokens = {}
+        for output_token, input_key in zip(
+            self._output_token, self._input_token
+        ):
+            next_token = output_token.search(parsed)
+            # We do not want to include any empty strings as actual tokens.
+            # Treat them as None.
+            if next_token:
+                next_tokens[input_key] = next_token
+            else:
+                next_tokens[input_key] = None
+        return next_tokens
+
+    def result_key_iters(self):
+        teed_results = tee(self, len(self.result_keys))
+        return [
+            ResultKeyIterator(i, result_key)
+            for i, result_key in zip(teed_results, self.result_keys)
+        ]
+
+    def build_full_result(self):
+        complete_result = {}
+        for response in self:
+            page = response
+            # We want to try to catch operation object pagination
+            # and format correctly for those. They come in the form
+            # of a tuple of two elements: (http_response, parsed_response).
+            # We want the parsed_response as that is what the page iterator
+            # uses. We can remove it though once operation objects are removed.
+            if isinstance(response, tuple) and len(response) == 2:
+                page = response[1]
+            # We're incrementally building the full response page
+            # by page. For each page in the response we need to
+            # inject the necessary components from the page
+            # into the complete_result.
+            for result_expression in self.result_keys:
+                # In order to incrementally update a result key
+                # we need to search the existing value from complete_result,
+                # then we need to search the _current_ page for the
+                # current result key value. Then we append the current
+                # value onto the existing value, and re-set that value
+                # as the new value.
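+                # Illustrative example, for a result key 'Users':
+                #   page 1 -> {'Users': ['alice']}
+                #   page 2 -> {'Users': ['bob']}
+                # accumulates complete_result == {'Users': ['alice', 'bob']}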
+ result_value = result_expression.search(page) + if result_value is None: + continue + existing_value = result_expression.search(complete_result) + if existing_value is None: + # Set the initial result + set_value_from_jmespath( + complete_result, + result_expression.expression, + result_value, + ) + continue + # Now both result_value and existing_value contain something + if isinstance(result_value, list): + existing_value.extend(result_value) + elif isinstance(result_value, (int, float, str)): + # Modify the existing result with the sum or concatenation + set_value_from_jmespath( + complete_result, + result_expression.expression, + existing_value + result_value, + ) + merge_dicts(complete_result, self.non_aggregate_part) + if self.resume_token is not None: + complete_result['NextToken'] = self.resume_token + return complete_result + + def _parse_starting_token(self): + if self._starting_token is None: + return None + + # The starting token is a dict passed as a base64 encoded string. + next_token = self._starting_token + try: + next_token = self._token_decoder.decode(next_token) + index = 0 + if 'boto_truncate_amount' in next_token: + index = next_token.get('boto_truncate_amount') + del next_token['boto_truncate_amount'] + except (ValueError, TypeError): + next_token, index = self._parse_starting_token_deprecated() + return next_token, index + + def _parse_starting_token_deprecated(self): + """ + This handles parsing of old style starting tokens, and attempts to + coerce them into the new style. + """ + log.debug( + "Attempting to fall back to old starting token parser. For " + f"token: {self._starting_token}" + ) + if self._starting_token is None: + return None + + parts = self._starting_token.split('___') + next_token = [] + index = 0 + if len(parts) == len(self._input_token) + 1: + try: + index = int(parts.pop()) + except ValueError: + # This doesn't look like a valid old-style token, so we're + # passing it along as an opaque service token. + parts = [self._starting_token] + + for part in parts: + if part == 'None': + next_token.append(None) + else: + next_token.append(part) + return self._convert_deprecated_starting_token(next_token), index + + def _convert_deprecated_starting_token(self, deprecated_token): + """ + This attempts to convert a deprecated starting token into the new + style. + """ + len_deprecated_token = len(deprecated_token) + len_input_token = len(self._input_token) + if len_deprecated_token > len_input_token: + raise ValueError(f"Bad starting token: {self._starting_token}") + elif len_deprecated_token < len_input_token: + log.debug( + "Old format starting token does not contain all input " + "tokens. Setting the rest, in order, as None." 
+ ) + for i in range(len_input_token - len_deprecated_token): + deprecated_token.append(None) + return dict(zip(self._input_token, deprecated_token)) + + +class Paginator: + PAGE_ITERATOR_CLS = PageIterator + + def __init__(self, method, pagination_config, model): + self._model = model + self._method = method + self._pagination_cfg = pagination_config + self._output_token = self._get_output_tokens(self._pagination_cfg) + self._input_token = self._get_input_tokens(self._pagination_cfg) + self._more_results = self._get_more_results_token(self._pagination_cfg) + self._non_aggregate_keys = self._get_non_aggregate_keys( + self._pagination_cfg + ) + self._result_keys = self._get_result_keys(self._pagination_cfg) + self._limit_key = self._get_limit_key(self._pagination_cfg) + + @property + def result_keys(self): + return self._result_keys + + def _get_non_aggregate_keys(self, config): + keys = [] + for key in config.get('non_aggregate_keys', []): + keys.append(jmespath.compile(key)) + return keys + + def _get_output_tokens(self, config): + output = [] + output_token = config['output_token'] + if not isinstance(output_token, list): + output_token = [output_token] + for config in output_token: + output.append(jmespath.compile(config)) + return output + + def _get_input_tokens(self, config): + input_token = self._pagination_cfg['input_token'] + if not isinstance(input_token, list): + input_token = [input_token] + return input_token + + def _get_more_results_token(self, config): + more_results = config.get('more_results') + if more_results is not None: + return jmespath.compile(more_results) + + def _get_result_keys(self, config): + result_key = config.get('result_key') + if result_key is not None: + if not isinstance(result_key, list): + result_key = [result_key] + result_key = [jmespath.compile(rk) for rk in result_key] + return result_key + + def _get_limit_key(self, config): + return config.get('limit_key') + + def paginate(self, **kwargs): + """Create paginator object for an operation. + + This returns an iterable object. Iterating over + this object will yield a single page of a response + at a time. + + """ + page_params = self._extract_paging_params(kwargs) + return self.PAGE_ITERATOR_CLS( + self._method, + self._input_token, + self._output_token, + self._more_results, + self._result_keys, + self._non_aggregate_keys, + self._limit_key, + page_params['MaxItems'], + page_params['StartingToken'], + page_params['PageSize'], + kwargs, + ) + + def _extract_paging_params(self, kwargs): + pagination_config = kwargs.pop('PaginationConfig', {}) + max_items = pagination_config.get('MaxItems', None) + if max_items is not None: + max_items = int(max_items) + page_size = pagination_config.get('PageSize', None) + if page_size is not None: + if self._limit_key is None: + raise PaginationError( + message="PageSize parameter is not supported for the " + "pagination interface for this operation." + ) + input_members = self._model.input_shape.members + limit_key_shape = input_members.get(self._limit_key) + if limit_key_shape.type_name == 'string': + if not isinstance(page_size, str): + page_size = str(page_size) + else: + page_size = int(page_size) + return { + 'MaxItems': max_items, + 'StartingToken': pagination_config.get('StartingToken', None), + 'PageSize': page_size, + } + + +class ResultKeyIterator: + """Iterates over the results of paginated responses. + + Each iterator is associated with a single result key. + Iterating over this object will give you each element in + the result key list. 
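+
+    An illustrative sketch (``pages`` is an assumed ``PageIterator``)::
+
+        for user in ResultKeyIterator(pages, jmespath.compile('Users')):
+            print(user)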
+
+    :param pages_iterator: An iterator that will give you
+        pages of results (a ``PageIterator`` class).
+    :param result_key: The JMESPath expression representing
+        the result key.
+
+    """
+
+    def __init__(self, pages_iterator, result_key):
+        self._pages_iterator = pages_iterator
+        self.result_key = result_key
+
+    def __iter__(self):
+        for page in self._pages_iterator:
+            results = self.result_key.search(page)
+            if results is None:
+                results = []
+            yield from results
diff --git a/venv/lib/python3.10/site-packages/botocore/parsers.py b/venv/lib/python3.10/site-packages/botocore/parsers.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c7a34f218867b9e3882eddc7ecba952e1eea263
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/parsers.py
@@ -0,0 +1,1122 @@
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""Response parsers for the various protocol types.
+
+The module contains classes that can take an HTTP response, and given
+an output shape, parse the response into a dict according to the
+rules in the output shape.
+
+There are many similarities amongst the different protocols with regard
+to response parsing, and the code is structured in a way to avoid
+code duplication when possible. The diagram below shows the
+inheritance hierarchy of the response classes.
+
+::
+
+
+
+                                 +--------------+
+                                 |ResponseParser|
+                                 +--------------+
+                                    ^    ^    ^
+               +--------------------+    |    +-------------------+
+               |                         |                        |
+    +----------+----------+       +------+-------+        +-------+------+
+    |BaseXMLResponseParser|       |BaseRestParser|        |BaseJSONParser|
+    +---------------------+       +--------------+        +--------------+
+          ^                     ^        ^         ^          ^        ^
+          |                     |        |         |          |        |
+          |                     |        |         |          |        |
+          |                    ++----------+-+   +-+-----------++      |
+          |                    |RestXMLParser|   |RestJSONParser|      |
+    +-----+-----+              +-------------+   +--------------+ +----+-----+
+    |QueryParser|                                                 |JSONParser|
+    +-----------+                                                 +----------+
+
+
+The diagram above shows that there is a base class, ``ResponseParser``, that
+contains logic that is similar amongst all the different protocols (``query``,
+``json``, ``rest-json``, ``rest-xml``). Amongst the various protocols there
+is shared logic that can be grouped several ways:
+
+* The ``query`` and ``rest-xml`` both have XML bodies that are parsed in the
+  same way.
+* The ``json`` and ``rest-json`` protocols both have JSON bodies that are
+  parsed in the same way.
+* The ``rest-json`` and ``rest-xml`` protocols have additional attributes
+  besides body parameters that are parsed the same (headers, query string,
+  status code).
+
+This is reflected in the class diagram above. The ``BaseXMLResponseParser``
+and the ``BaseJSONParser`` contain logic for parsing the XML/JSON body,
+and the ``BaseRestParser`` contains logic for parsing out attributes that
+come from other parts of the HTTP response. Classes like the
+``RestXMLParser`` inherit from the ``BaseXMLResponseParser`` to get the
+XML body parsing logic and the ``BaseRestParser`` to get the HTTP
+header/status code/query string parsing.
+
+Additionally, there are event stream parsers that are used by the other parsers
+to wrap streaming bodies that represent a stream of events. The
+BaseEventStreamParser extends from ResponseParser and defines the logic for
+parsing values from the headers and payload of a message from the underlying
+binary encoding protocol. Currently, event streams support parsing bodies
+encoded as JSON and XML through the following hierarchy.
+
+
+                                  +--------------+
+                                  |ResponseParser|
+                                  +--------------+
+                                    ^    ^    ^
+               +--------------------+    |    +------------------+
+               |                         |                       |
+    +----------+----------+   +----------+----------+    +-------+------+
+    |BaseXMLResponseParser|   |BaseEventStreamParser|    |BaseJSONParser|
+    +---------------------+   +---------------------+    +--------------+
+                     ^                ^        ^                 ^
+                     |                |        |                 |
+                     |                |        |                 |
+                   +-+----------------+-+    +-+-----------------+-+
+                   |EventStreamXMLParser|    |EventStreamJSONParser|
+                   +--------------------+    +---------------------+
+
+Return Values
+=============
+
+Each call to ``parse()`` returns a dict that has this form::
+
+    Standard Response
+
+    {
+      "ResponseMetadata": {"RequestId": <requestid>}
+      <response keys>
+    }
+
+    Error response
+
+    {
+      "ResponseMetadata": {"RequestId": <requestid>}
+      "Error": {
+        "Code": <string>,
+        "Message": <string>,
+        "Type": <string>,
+        <additional keys>
+      }
+    }
+
+"""
+
+import base64
+import http.client
+import json
+import logging
+import re
+
+from botocore.compat import ETree, XMLParseError
+from botocore.eventstream import EventStream, NoInitialResponseError
+from botocore.utils import (
+    is_json_value_header,
+    lowercase_dict,
+    merge_dicts,
+    parse_timestamp,
+)
+
+LOG = logging.getLogger(__name__)
+
+DEFAULT_TIMESTAMP_PARSER = parse_timestamp
+
+
+class ResponseParserFactory:
+    def __init__(self):
+        self._defaults = {}
+
+    def set_parser_defaults(self, **kwargs):
+        """Set default arguments when a parser instance is created.
+
+        You can specify any kwargs that are allowed by a ResponseParser
+        class. There are currently two arguments:
+
+        * timestamp_parser - A callable that can parse a timestamp string
+        * blob_parser - A callable that can parse a blob type
+
+        """
+        self._defaults.update(kwargs)
+
+    def create_parser(self, protocol_name):
+        parser_cls = PROTOCOL_PARSERS[protocol_name]
+        return parser_cls(**self._defaults)
+
+
+def create_parser(protocol):
+    return ResponseParserFactory().create_parser(protocol)
+
+
+def _text_content(func):
+    # This decorator hides the difference between
+    # an XML node with text or a plain string. It's used
+    # to ensure that scalar processing operates only on text
+    # strings, which allows the same scalar handlers to be used
+    # for XML nodes from the body and HTTP headers.
+    def _get_text_content(self, shape, node_or_string):
+        if hasattr(node_or_string, 'text'):
+            text = node_or_string.text
+            if text is None:
+                # If an XML node is empty <foo></foo>,
+                # we want to parse that as an empty string,
+                # not as a null/None value.
+                text = ''
+        else:
+            text = node_or_string
+        return func(self, shape, text)
+
+    return _get_text_content
+
+
+class ResponseParserError(Exception):
+    pass
+
+
+class ResponseParser:
+    """Base class for response parsing.
+
+    This class represents the interface that all ResponseParsers for the
+    various protocols must implement.
+
+    This class will take an HTTP response and a model shape and parse the
+    HTTP response into a dictionary.
+
+    There is a single public method exposed: ``parse``. See the ``parse``
+    docstring for more info.
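+
+    A minimal sketch of the expected call shape (the body, headers, and
+    shape here are placeholders)::
+
+        parser = create_parser('query')
+        parsed = parser.parse(
+            {'body': b'<xml ...>', 'headers': {}, 'status_code': 200},
+            operation_model.output_shape,
+        )
+        # ``parsed`` is a dict modeled on the output shape, plus a
+        # ``ResponseMetadata`` key.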
+ + """ + + DEFAULT_ENCODING = 'utf-8' + EVENT_STREAM_PARSER_CLS = None + + def __init__(self, timestamp_parser=None, blob_parser=None): + if timestamp_parser is None: + timestamp_parser = DEFAULT_TIMESTAMP_PARSER + self._timestamp_parser = timestamp_parser + if blob_parser is None: + blob_parser = self._default_blob_parser + self._blob_parser = blob_parser + self._event_stream_parser = None + if self.EVENT_STREAM_PARSER_CLS is not None: + self._event_stream_parser = self.EVENT_STREAM_PARSER_CLS( + timestamp_parser, blob_parser + ) + + def _default_blob_parser(self, value): + # Blobs are always returned as bytes type (this matters on python3). + # We don't decode this to a str because it's entirely possible that the + # blob contains binary data that actually can't be decoded. + return base64.b64decode(value) + + def parse(self, response, shape): + """Parse the HTTP response given a shape. + + :param response: The HTTP response dictionary. This is a dictionary + that represents the HTTP request. The dictionary must have the + following keys, ``body``, ``headers``, and ``status_code``. + + :param shape: The model shape describing the expected output. + :return: Returns a dictionary representing the parsed response + described by the model. In addition to the shape described from + the model, each response will also have a ``ResponseMetadata`` + which contains metadata about the response, which contains at least + two keys containing ``RequestId`` and ``HTTPStatusCode``. Some + responses may populate additional keys, but ``RequestId`` will + always be present. + + """ + LOG.debug('Response headers: %r', response['headers']) + LOG.debug('Response body:\n%r', response['body']) + if response['status_code'] >= 301: + if self._is_generic_error_response(response): + parsed = self._do_generic_error_parse(response) + elif self._is_modeled_error_shape(shape): + parsed = self._do_modeled_error_parse(response, shape) + # We don't want to decorate the modeled fields with metadata + return parsed + else: + parsed = self._do_error_parse(response, shape) + else: + parsed = self._do_parse(response, shape) + + # We don't want to decorate event stream responses with metadata + if shape and shape.serialization.get('eventstream'): + return parsed + + # Add ResponseMetadata if it doesn't exist and inject the HTTP + # status code and headers from the response. + if isinstance(parsed, dict): + response_metadata = parsed.get('ResponseMetadata', {}) + response_metadata['HTTPStatusCode'] = response['status_code'] + # Ensure that the http header keys are all lower cased. Older + # versions of urllib3 (< 1.11) would unintentionally do this for us + # (see urllib3#633). We need to do this conversion manually now. 
+        headers = response['headers']
+        response_metadata['HTTPHeaders'] = lowercase_dict(headers)
+        parsed['ResponseMetadata'] = response_metadata
+        self._add_checksum_response_metadata(response, response_metadata)
+        return parsed
+
+    def _add_checksum_response_metadata(self, response, response_metadata):
+        checksum_context = response.get('context', {}).get('checksum', {})
+        algorithm = checksum_context.get('response_algorithm')
+        if algorithm:
+            response_metadata['ChecksumAlgorithm'] = algorithm
+
+    def _is_modeled_error_shape(self, shape):
+        return shape is not None and shape.metadata.get('exception', False)
+
+    def _is_generic_error_response(self, response):
+        # There are times when a service will respond with a generic
+        # error response such as:
+        # '<html><body><b>Http/1.1 Service Unavailable</b></body></html>'
+        #
+        # This can also happen if you're going through a proxy.
+        # In this case the protocol specific _do_error_parse will either
+        # fail to parse the response (in the best case) or silently succeed
+        # and treat the HTML above as an XML response and return
+        # nonsensical parsed data.
+        # To prevent this case from happening we first need to check
+        # whether or not this response looks like the generic response.
+        if response['status_code'] >= 500:
+            if 'body' not in response or response['body'] is None:
+                return True
+
+            body = response['body'].strip()
+            return body.startswith(b'<html>') or not body
+
+    def _do_generic_error_parse(self, response):
+        # There's not really much we can do when we get a generic
+        # html response.
+        LOG.debug(
+            "Received a non protocol specific error response from the "
+            "service, unable to populate error code and message."
+        )
+        return {
+            'Error': {
+                'Code': str(response['status_code']),
+                'Message': http.client.responses.get(
+                    response['status_code'], ''
+                ),
+            },
+            'ResponseMetadata': {},
+        }
+
+    def _do_parse(self, response, shape):
+        raise NotImplementedError(f"{self.__class__.__name__}._do_parse")
+
+    def _do_error_parse(self, response, shape):
+        raise NotImplementedError(f"{self.__class__.__name__}._do_error_parse")
+
+    def _do_modeled_error_parse(self, response, shape, parsed):
+        raise NotImplementedError(
+            f"{self.__class__.__name__}._do_modeled_error_parse"
+        )
+
+    def _parse_shape(self, shape, node):
+        handler = getattr(
+            self, f'_handle_{shape.type_name}', self._default_handle
+        )
+        return handler(shape, node)
+
+    def _handle_list(self, shape, node):
+        # Enough implementations share list serialization that it's moved
+        # up here in the base class.
+        parsed = []
+        member_shape = shape.member
+        for item in node:
+            parsed.append(self._parse_shape(member_shape, item))
+        return parsed
+
+    def _default_handle(self, shape, value):
+        return value
+
+    def _create_event_stream(self, response, shape):
+        parser = self._event_stream_parser
+        name = response['context'].get('operation_name')
+        return EventStream(response['body'], shape, parser, name)
+
+    def _get_first_key(self, value):
+        return list(value)[0]
+
+    def _has_unknown_tagged_union_member(self, shape, value):
+        if shape.is_tagged_union:
+            cleaned_value = value.copy()
+            cleaned_value.pop("__type", None)
+            if len(cleaned_value) != 1:
+                error_msg = (
+                    "Invalid service response: %s must have one and only "
+                    "one member set."
+                )
+                raise ResponseParserError(error_msg % shape.name)
+            tag = self._get_first_key(cleaned_value)
+            if tag not in shape.members:
+                msg = (
+                    "Received a tagged union response with member "
+                    "unknown to client: %s. Please upgrade SDK for full "
+                    "response support."
+                )
+                LOG.info(msg % tag)
+                return True
+        return False
+
+    def _handle_unknown_tagged_union_member(self, tag):
+        return {'SDK_UNKNOWN_MEMBER': {'name': tag}}
+
+
+class BaseXMLResponseParser(ResponseParser):
+    def __init__(self, timestamp_parser=None, blob_parser=None):
+        super().__init__(timestamp_parser, blob_parser)
+        self._namespace_re = re.compile('{.*}')
+
+    def _handle_map(self, shape, node):
+        parsed = {}
+        key_shape = shape.key
+        value_shape = shape.value
+        key_location_name = key_shape.serialization.get('name') or 'key'
+        value_location_name = value_shape.serialization.get('name') or 'value'
+        if shape.serialization.get('flattened') and not isinstance(node, list):
+            node = [node]
+        for keyval_node in node:
+            for single_pair in keyval_node:
+                # Within each <entry> there's a <key> and a <value>
+                tag_name = self._node_tag(single_pair)
+                if tag_name == key_location_name:
+                    key_name = self._parse_shape(key_shape, single_pair)
+                elif tag_name == value_location_name:
+                    val_name = self._parse_shape(value_shape, single_pair)
+                else:
+                    raise ResponseParserError(f"Unknown tag: {tag_name}")
+            parsed[key_name] = val_name
+        return parsed
+
+    def _node_tag(self, node):
+        return self._namespace_re.sub('', node.tag)
+
+    def _handle_list(self, shape, node):
+        # When we use _build_name_to_xml_node, repeated elements are aggregated
+        # into a list. However, we can't tell the difference between a scalar
+        # value and a single element flattened list. So before calling the
+        # real _handle_list, we know that "node" should actually be a list if
+        # it's flattened, and if it's not, then we make it a one element list.
+        if shape.serialization.get('flattened') and not isinstance(node, list):
+            node = [node]
+        return super()._handle_list(shape, node)
+
+    def _handle_structure(self, shape, node):
+        parsed = {}
+        members = shape.members
+        if shape.metadata.get('exception', False):
+            node = self._get_error_root(node)
+        xml_dict = self._build_name_to_xml_node(node)
+        if self._has_unknown_tagged_union_member(shape, xml_dict):
+            tag = self._get_first_key(xml_dict)
+            return self._handle_unknown_tagged_union_member(tag)
+        for member_name in members:
+            member_shape = members[member_name]
+            if (
+                'location' in member_shape.serialization
+                or member_shape.serialization.get('eventheader')
+            ):
+                # All members with locations have already been handled,
+                # so we don't need to parse these members.
+                continue
+            xml_name = self._member_key_name(member_shape, member_name)
+            member_node = xml_dict.get(xml_name)
+            if member_node is not None:
+                parsed[member_name] = self._parse_shape(
+                    member_shape, member_node
+                )
+            elif member_shape.serialization.get('xmlAttribute'):
+                attribs = {}
+                location_name = member_shape.serialization['name']
+                for key, value in node.attrib.items():
+                    new_key = self._namespace_re.sub(
+                        location_name.split(':')[0] + ':', key
+                    )
+                    attribs[new_key] = value
+                if location_name in attribs:
+                    parsed[member_name] = attribs[location_name]
+        return parsed
+
+    def _get_error_root(self, original_root):
+        if self._node_tag(original_root) == 'ErrorResponse':
+            for child in original_root:
+                if self._node_tag(child) == 'Error':
+                    return child
+        return original_root
+
+    def _member_key_name(self, shape, member_name):
+        # This method is needed because we have to special case flattened list
+        # with a serialization name. If this is the case we use the
+        # locationName from the list's member shape as the key name for the
+        # surrounding structure.
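+        # For example, a flattened list whose member has locationName "Item"
+        # appears in the XML as repeated <Item> elements, so "Item" (not the
+        # member's own name) is the key to look up in the parent structure.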
+        if shape.type_name == 'list' and shape.serialization.get('flattened'):
+            list_member_serialized_name = shape.member.serialization.get(
+                'name'
+            )
+            if list_member_serialized_name is not None:
+                return list_member_serialized_name
+        serialized_name = shape.serialization.get('name')
+        if serialized_name is not None:
+            return serialized_name
+        return member_name
+
+    def _build_name_to_xml_node(self, parent_node):
+        # If the parent node is actually a list, we should not be trying
+        # to serialize it to a dictionary. Instead, return the first element
+        # in the list.
+        if isinstance(parent_node, list):
+            return self._build_name_to_xml_node(parent_node[0])
+        xml_dict = {}
+        for item in parent_node:
+            key = self._node_tag(item)
+            if key in xml_dict:
+                # If the key already exists, the most natural
+                # way to handle this is to aggregate repeated
+                # keys into a single list.
+                # <foo>1</foo><foo>2</foo> -> {'foo': [Node(1), Node(2)]}
+                if isinstance(xml_dict[key], list):
+                    xml_dict[key].append(item)
+                else:
+                    # Convert from a scalar to a list.
+                    xml_dict[key] = [xml_dict[key], item]
+            else:
+                xml_dict[key] = item
+        return xml_dict
+
+    def _parse_xml_string_to_dom(self, xml_string):
+        try:
+            parser = ETree.XMLParser(
+                target=ETree.TreeBuilder(), encoding=self.DEFAULT_ENCODING
+            )
+            parser.feed(xml_string)
+            root = parser.close()
+        except XMLParseError as e:
+            raise ResponseParserError(
+                f"Unable to parse response ({e}), "
+                f"invalid XML received. Further retries may succeed:\n{xml_string}"
+            )
+        return root
+
+    def _replace_nodes(self, parsed):
+        for key, value in parsed.items():
+            if list(value):
+                sub_dict = self._build_name_to_xml_node(value)
+                parsed[key] = self._replace_nodes(sub_dict)
+            else:
+                parsed[key] = value.text
+        return parsed
+
+    @_text_content
+    def _handle_boolean(self, shape, text):
+        if text == 'true':
+            return True
+        else:
+            return False
+
+    @_text_content
+    def _handle_float(self, shape, text):
+        return float(text)
+
+    @_text_content
+    def _handle_timestamp(self, shape, text):
+        return self._timestamp_parser(text)
+
+    @_text_content
+    def _handle_integer(self, shape, text):
+        return int(text)
+
+    @_text_content
+    def _handle_string(self, shape, text):
+        return text
+
+    @_text_content
+    def _handle_blob(self, shape, text):
+        return self._blob_parser(text)
+
+    _handle_character = _handle_string
+    _handle_double = _handle_float
+    _handle_long = _handle_integer
+
+
+class QueryParser(BaseXMLResponseParser):
+    def _do_error_parse(self, response, shape):
+        xml_contents = response['body']
+        root = self._parse_xml_string_to_dom(xml_contents)
+        parsed = self._build_name_to_xml_node(root)
+        self._replace_nodes(parsed)
+        # Once we've converted xml->dict, we need to make one or two
+        # more adjustments to extract nested errors and to be consistent
+        # with ResponseMetadata for non-error responses:
+        # 1. {"Errors": {"Error": {...}}} -> {"Error": {...}}
+        # 2. {"RequestId": "id"} -> {"ResponseMetadata": {"RequestId": "id"}}
{"RequestId": "id"} -> {"ResponseMetadata": {"RequestId": "id"}} + if 'Errors' in parsed: + parsed.update(parsed.pop('Errors')) + if 'RequestId' in parsed: + parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')} + return parsed + + def _do_modeled_error_parse(self, response, shape): + return self._parse_body_as_xml(response, shape, inject_metadata=False) + + def _do_parse(self, response, shape): + return self._parse_body_as_xml(response, shape, inject_metadata=True) + + def _parse_body_as_xml(self, response, shape, inject_metadata=True): + xml_contents = response['body'] + root = self._parse_xml_string_to_dom(xml_contents) + parsed = {} + if shape is not None: + start = root + if 'resultWrapper' in shape.serialization: + start = self._find_result_wrapped_shape( + shape.serialization['resultWrapper'], root + ) + parsed = self._parse_shape(shape, start) + if inject_metadata: + self._inject_response_metadata(root, parsed) + return parsed + + def _find_result_wrapped_shape(self, element_name, xml_root_node): + mapping = self._build_name_to_xml_node(xml_root_node) + return mapping[element_name] + + def _inject_response_metadata(self, node, inject_into): + mapping = self._build_name_to_xml_node(node) + child_node = mapping.get('ResponseMetadata') + if child_node is not None: + sub_mapping = self._build_name_to_xml_node(child_node) + for key, value in sub_mapping.items(): + sub_mapping[key] = value.text + inject_into['ResponseMetadata'] = sub_mapping + + +class EC2QueryParser(QueryParser): + def _inject_response_metadata(self, node, inject_into): + mapping = self._build_name_to_xml_node(node) + child_node = mapping.get('requestId') + if child_node is not None: + inject_into['ResponseMetadata'] = {'RequestId': child_node.text} + + def _do_error_parse(self, response, shape): + # EC2 errors look like: + # + # + # + # InvalidInstanceID.Malformed + # Invalid id: "1343124" + # + # + # 12345 + # + # This is different from QueryParser in that it's RequestID, + # not RequestId + original = super()._do_error_parse(response, shape) + if 'RequestID' in original: + original['ResponseMetadata'] = { + 'RequestId': original.pop('RequestID') + } + return original + + def _get_error_root(self, original_root): + for child in original_root: + if self._node_tag(child) == 'Errors': + for errors_child in child: + if self._node_tag(errors_child) == 'Error': + return errors_child + return original_root + + +class BaseJSONParser(ResponseParser): + def _handle_structure(self, shape, value): + final_parsed = {} + if shape.is_document_type: + final_parsed = value + else: + member_shapes = shape.members + if value is None: + # If the comes across the wire as "null" (None in python), + # we should be returning this unchanged, instead of as an + # empty dict. 
+ return None + final_parsed = {} + if self._has_unknown_tagged_union_member(shape, value): + tag = self._get_first_key(value) + return self._handle_unknown_tagged_union_member(tag) + for member_name in member_shapes: + member_shape = member_shapes[member_name] + json_name = member_shape.serialization.get('name', member_name) + raw_value = value.get(json_name) + if raw_value is not None: + final_parsed[member_name] = self._parse_shape( + member_shapes[member_name], raw_value + ) + return final_parsed + + def _handle_map(self, shape, value): + parsed = {} + key_shape = shape.key + value_shape = shape.value + for key, value in value.items(): + actual_key = self._parse_shape(key_shape, key) + actual_value = self._parse_shape(value_shape, value) + parsed[actual_key] = actual_value + return parsed + + def _handle_blob(self, shape, value): + return self._blob_parser(value) + + def _handle_timestamp(self, shape, value): + return self._timestamp_parser(value) + + def _do_error_parse(self, response, shape): + body = self._parse_body_as_json(response['body']) + error = {"Error": {"Message": '', "Code": ''}, "ResponseMetadata": {}} + headers = response['headers'] + # Error responses can have slightly different structures for json. + # The basic structure is: + # + # {"__type":"ConnectClientException", + # "message":"The error message."} + + # The error message can either come in the 'message' or 'Message' key + # so we need to check for both. + error['Error']['Message'] = body.get( + 'message', body.get('Message', '') + ) + # if the message did not contain an error code + # include the response status code + response_code = response.get('status_code') + + code = body.get('__type', response_code and str(response_code)) + if code is not None: + # code has a couple forms as well: + # * "com.aws.dynamodb.vAPI#ProvisionedThroughputExceededException" + # * "ResourceNotFoundException" + if '#' in code: + code = code.rsplit('#', 1)[1] + if 'x-amzn-query-error' in headers: + code = self._do_query_compatible_error_parse( + code, headers, error + ) + error['Error']['Code'] = code + self._inject_response_metadata(error, response['headers']) + return error + + def _do_query_compatible_error_parse(self, code, headers, error): + """ + Error response may contain an x-amzn-query-error header to translate + errors codes from former `query` services into `json`. We use this to + do our lookup in the errorfactory for modeled errors. 
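+
+        For example (hypothetical header value), given
+        ``x-amzn-query-error: AWS.SimpleQueueService.NonExistentQueue;Sender``
+        the code becomes ``AWS.SimpleQueueService.NonExistentQueue``, the
+        error Type becomes ``Sender``, and the original json code is kept
+        as ``QueryErrorCode``.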
+ """ + query_error = headers['x-amzn-query-error'] + query_error_components = query_error.split(';') + + if len(query_error_components) == 2 and query_error_components[0]: + error['Error']['QueryErrorCode'] = code + error['Error']['Type'] = query_error_components[1] + return query_error_components[0] + return code + + def _inject_response_metadata(self, parsed, headers): + if 'x-amzn-requestid' in headers: + parsed.setdefault('ResponseMetadata', {})['RequestId'] = headers[ + 'x-amzn-requestid' + ] + + def _parse_body_as_json(self, body_contents): + if not body_contents: + return {} + body = body_contents.decode(self.DEFAULT_ENCODING) + try: + original_parsed = json.loads(body) + return original_parsed + except ValueError: + # if the body cannot be parsed, include + # the literal string as the message + return {'message': body} + + +class BaseEventStreamParser(ResponseParser): + def _do_parse(self, response, shape): + final_parsed = {} + if shape.serialization.get('eventstream'): + event_type = response['headers'].get(':event-type') + event_shape = shape.members.get(event_type) + if event_shape: + final_parsed[event_type] = self._do_parse( + response, event_shape + ) + else: + self._parse_non_payload_attrs( + response, shape, shape.members, final_parsed + ) + self._parse_payload(response, shape, shape.members, final_parsed) + return final_parsed + + def _do_error_parse(self, response, shape): + exception_type = response['headers'].get(':exception-type') + exception_shape = shape.members.get(exception_type) + if exception_shape is not None: + original_parsed = self._initial_body_parse(response['body']) + body = self._parse_shape(exception_shape, original_parsed) + error = { + 'Error': { + 'Code': exception_type, + 'Message': body.get('Message', body.get('message', '')), + } + } + else: + error = { + 'Error': { + 'Code': response['headers'].get(':error-code', ''), + 'Message': response['headers'].get(':error-message', ''), + } + } + return error + + def _parse_payload(self, response, shape, member_shapes, final_parsed): + if shape.serialization.get('event'): + for name in member_shapes: + member_shape = member_shapes[name] + if member_shape.serialization.get('eventpayload'): + body = response['body'] + if member_shape.type_name == 'blob': + parsed_body = body + elif member_shape.type_name == 'string': + parsed_body = body.decode(self.DEFAULT_ENCODING) + else: + raw_parse = self._initial_body_parse(body) + parsed_body = self._parse_shape( + member_shape, raw_parse + ) + final_parsed[name] = parsed_body + return + # If we didn't find an explicit payload, use the current shape + original_parsed = self._initial_body_parse(response['body']) + body_parsed = self._parse_shape(shape, original_parsed) + final_parsed.update(body_parsed) + + def _parse_non_payload_attrs( + self, response, shape, member_shapes, final_parsed + ): + headers = response['headers'] + for name in member_shapes: + member_shape = member_shapes[name] + if member_shape.serialization.get('eventheader'): + if name in headers: + value = headers[name] + if member_shape.type_name == 'timestamp': + # Event stream timestamps are an in milleseconds so we + # divide by 1000 to convert to seconds. + value = self._timestamp_parser(value / 1000.0) + final_parsed[name] = value + + def _initial_body_parse(self, body_contents): + # This method should do the initial xml/json parsing of the + # body. We we still need to walk the parsed body in order + # to convert types, but this method will do the first round + # of parsing. 
+ raise NotImplementedError("_initial_body_parse") + + +class EventStreamJSONParser(BaseEventStreamParser, BaseJSONParser): + def _initial_body_parse(self, body_contents): + return self._parse_body_as_json(body_contents) + + +class EventStreamXMLParser(BaseEventStreamParser, BaseXMLResponseParser): + def _initial_body_parse(self, xml_string): + if not xml_string: + return ETree.Element('') + return self._parse_xml_string_to_dom(xml_string) + + +class JSONParser(BaseJSONParser): + EVENT_STREAM_PARSER_CLS = EventStreamJSONParser + + """Response parser for the "json" protocol.""" + + def _do_parse(self, response, shape): + parsed = {} + if shape is not None: + event_name = shape.event_stream_name + if event_name: + parsed = self._handle_event_stream(response, shape, event_name) + else: + parsed = self._handle_json_body(response['body'], shape) + self._inject_response_metadata(parsed, response['headers']) + return parsed + + def _do_modeled_error_parse(self, response, shape): + return self._handle_json_body(response['body'], shape) + + def _handle_event_stream(self, response, shape, event_name): + event_stream_shape = shape.members[event_name] + event_stream = self._create_event_stream(response, event_stream_shape) + try: + event = event_stream.get_initial_response() + except NoInitialResponseError: + error_msg = 'First event was not of type initial-response' + raise ResponseParserError(error_msg) + parsed = self._handle_json_body(event.payload, shape) + parsed[event_name] = event_stream + return parsed + + def _handle_json_body(self, raw_body, shape): + # The json.loads() gives us the primitive JSON types, + # but we need to traverse the parsed JSON data to convert + # to richer types (blobs, timestamps, etc. + parsed_json = self._parse_body_as_json(raw_body) + return self._parse_shape(shape, parsed_json) + + +class BaseRestParser(ResponseParser): + def _do_parse(self, response, shape): + final_parsed = {} + final_parsed['ResponseMetadata'] = self._populate_response_metadata( + response + ) + self._add_modeled_parse(response, shape, final_parsed) + return final_parsed + + def _add_modeled_parse(self, response, shape, final_parsed): + if shape is None: + return final_parsed + member_shapes = shape.members + self._parse_non_payload_attrs( + response, shape, member_shapes, final_parsed + ) + self._parse_payload(response, shape, member_shapes, final_parsed) + + def _do_modeled_error_parse(self, response, shape): + final_parsed = {} + self._add_modeled_parse(response, shape, final_parsed) + return final_parsed + + def _populate_response_metadata(self, response): + metadata = {} + headers = response['headers'] + if 'x-amzn-requestid' in headers: + metadata['RequestId'] = headers['x-amzn-requestid'] + elif 'x-amz-request-id' in headers: + metadata['RequestId'] = headers['x-amz-request-id'] + # HostId is what it's called whenever this value is returned + # in an XML response body, so to be consistent, we'll always + # call is HostId. + metadata['HostId'] = headers.get('x-amz-id-2', '') + return metadata + + def _parse_payload(self, response, shape, member_shapes, final_parsed): + if 'payload' in shape.serialization: + # If a payload is specified in the output shape, then only that + # shape is used for the body payload. 
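+            # For example, a shape with serialization {'payload': 'Body'}
+            # parses only the 'Body' member from the HTTP body; other
+            # members must come from headers or the status line.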
+ payload_member_name = shape.serialization['payload'] + body_shape = member_shapes[payload_member_name] + if body_shape.serialization.get('eventstream'): + body = self._create_event_stream(response, body_shape) + final_parsed[payload_member_name] = body + elif body_shape.type_name in ['string', 'blob']: + # This is a stream + body = response['body'] + if isinstance(body, bytes): + body = body.decode(self.DEFAULT_ENCODING) + final_parsed[payload_member_name] = body + else: + original_parsed = self._initial_body_parse(response['body']) + final_parsed[payload_member_name] = self._parse_shape( + body_shape, original_parsed + ) + else: + original_parsed = self._initial_body_parse(response['body']) + body_parsed = self._parse_shape(shape, original_parsed) + final_parsed.update(body_parsed) + + def _parse_non_payload_attrs( + self, response, shape, member_shapes, final_parsed + ): + headers = response['headers'] + for name in member_shapes: + member_shape = member_shapes[name] + location = member_shape.serialization.get('location') + if location is None: + continue + elif location == 'statusCode': + final_parsed[name] = self._parse_shape( + member_shape, response['status_code'] + ) + elif location == 'headers': + final_parsed[name] = self._parse_header_map( + member_shape, headers + ) + elif location == 'header': + header_name = member_shape.serialization.get('name', name) + if header_name in headers: + final_parsed[name] = self._parse_shape( + member_shape, headers[header_name] + ) + + def _parse_header_map(self, shape, headers): + # Note that headers are case insensitive, so we .lower() + # all header names and header prefixes. + parsed = {} + prefix = shape.serialization.get('name', '').lower() + for header_name in headers: + if header_name.lower().startswith(prefix): + # The key name inserted into the parsed hash + # strips off the prefix. + name = header_name[len(prefix) :] + parsed[name] = headers[header_name] + return parsed + + def _initial_body_parse(self, body_contents): + # This method should do the initial xml/json parsing of the + # body. We we still need to walk the parsed body in order + # to convert types, but this method will do the first round + # of parsing. + raise NotImplementedError("_initial_body_parse") + + def _handle_string(self, shape, value): + parsed = value + if is_json_value_header(shape): + decoded = base64.b64decode(value).decode(self.DEFAULT_ENCODING) + parsed = json.loads(decoded) + return parsed + + def _handle_list(self, shape, node): + location = shape.serialization.get('location') + if location == 'header' and not isinstance(node, list): + # List in headers may be a comma separated string as per RFC7230 + node = [e.strip() for e in node.split(',')] + return super()._handle_list(shape, node) + + +class RestJSONParser(BaseRestParser, BaseJSONParser): + EVENT_STREAM_PARSER_CLS = EventStreamJSONParser + + def _initial_body_parse(self, body_contents): + return self._parse_body_as_json(body_contents) + + def _do_error_parse(self, response, shape): + error = super()._do_error_parse(response, shape) + self._inject_error_code(error, response) + return error + + def _inject_error_code(self, error, response): + # The "Code" value can come from either a response + # header or a value in the JSON body. 
+        body = self._initial_body_parse(response['body'])
+        if 'x-amzn-errortype' in response['headers']:
+            code = response['headers']['x-amzn-errortype']
+            # Could be:
+            # x-amzn-errortype: ValidationException:
+            code = code.split(':')[0]
+            error['Error']['Code'] = code
+        elif 'code' in body or 'Code' in body:
+            error['Error']['Code'] = body.get('code', body.get('Code', ''))
+
+    def _handle_integer(self, shape, value):
+        return int(value)
+
+    _handle_long = _handle_integer
+
+
+class RestXMLParser(BaseRestParser, BaseXMLResponseParser):
+    EVENT_STREAM_PARSER_CLS = EventStreamXMLParser
+
+    def _initial_body_parse(self, xml_string):
+        if not xml_string:
+            return ETree.Element('')
+        return self._parse_xml_string_to_dom(xml_string)
+
+    def _do_error_parse(self, response, shape):
+        # We're trying to be service agnostic here, but S3 does have a slightly
+        # different response structure for its errors compared to other
+        # rest-xml services (route53/cloudfront). We handle this by just
+        # trying to parse both forms.
+        # First:
+        # <ErrorResponse xmlns="...">
+        #   <Error>
+        #     <Type>Sender</Type>
+        #     <Code>InvalidInput</Code>
+        #     <Message>Invalid resource type: foo</Message>
+        #   </Error>
+        #   <RequestId>request-id</RequestId>
+        # </ErrorResponse>
+        if response['body']:
+            # If the body ends up being invalid xml, the xml parser should not
+            # blow up. It should at least try to pull information about the
+            # error response from other sources like the HTTP status code.
+            try:
+                return self._parse_error_from_body(response)
+            except ResponseParserError:
+                LOG.debug(
+                    'Exception caught when parsing error response body:',
+                    exc_info=True,
+                )
+        return self._parse_error_from_http_status(response)
+
+    def _parse_error_from_http_status(self, response):
+        return {
+            'Error': {
+                'Code': str(response['status_code']),
+                'Message': http.client.responses.get(
+                    response['status_code'], ''
+                ),
+            },
+            'ResponseMetadata': {
+                'RequestId': response['headers'].get('x-amz-request-id', ''),
+                'HostId': response['headers'].get('x-amz-id-2', ''),
+            },
+        }
+
+    def _parse_error_from_body(self, response):
+        xml_contents = response['body']
+        root = self._parse_xml_string_to_dom(xml_contents)
+        parsed = self._build_name_to_xml_node(root)
+        self._replace_nodes(parsed)
+        if root.tag == 'Error':
+            # This is an S3 error response. First we'll populate the
+            # response metadata.
+            metadata = self._populate_response_metadata(response)
+            # The RequestId and the HostId are already in the
+            # ResponseMetadata, but are also duplicated in the XML
+            # body. We don't need these values in both places,
+            # we'll just remove them from the parsed XML body.
+            parsed.pop('RequestId', '')
+            parsed.pop('HostId', '')
+            return {'Error': parsed, 'ResponseMetadata': metadata}
+        elif 'RequestId' in parsed:
+            # Other rest-xml services:
+            parsed['ResponseMetadata'] = {'RequestId': parsed.pop('RequestId')}
+        default = {'Error': {'Message': '', 'Code': ''}}
+        merge_dicts(default, parsed)
+        return default
+
+    @_text_content
+    def _handle_string(self, shape, text):
+        text = super()._handle_string(shape, text)
+        return text
+
+
+PROTOCOL_PARSERS = {
+    'ec2': EC2QueryParser,
+    'query': QueryParser,
+    'json': JSONParser,
+    'rest-json': RestJSONParser,
+    'rest-xml': RestXMLParser,
+}
diff --git a/venv/lib/python3.10/site-packages/botocore/regions.py b/venv/lib/python3.10/site-packages/botocore/regions.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab20130304a723552d046147f0be8951dfd40182
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/regions.py
@@ -0,0 +1,830 @@
+# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Resolves regions and endpoints. + +This module implements endpoint resolution, including resolving endpoints for a +given service and region and resolving the available endpoints for a service +in a specific AWS partition. +""" + +import copy +import logging +import re +from enum import Enum + +from botocore import UNSIGNED, xform_name +from botocore.auth import AUTH_TYPE_MAPS, HAS_CRT +from botocore.crt import CRT_SUPPORTED_AUTH_TYPES +from botocore.endpoint_provider import EndpointProvider +from botocore.exceptions import ( + EndpointProviderError, + EndpointVariantError, + InvalidEndpointConfigurationError, + InvalidHostLabelError, + MissingDependencyException, + NoRegionError, + ParamValidationError, + UnknownEndpointResolutionBuiltInName, + UnknownRegionError, + UnknownSignatureVersionError, + UnsupportedS3AccesspointConfigurationError, + UnsupportedS3ConfigurationError, + UnsupportedS3ControlArnError, + UnsupportedS3ControlConfigurationError, +) +from botocore.utils import ensure_boolean, instance_cache + +LOG = logging.getLogger(__name__) +DEFAULT_URI_TEMPLATE = '{service}.{region}.{dnsSuffix}' # noqa +DEFAULT_SERVICE_DATA = {'endpoints': {}} + + +class BaseEndpointResolver: + """Resolves regions and endpoints. Must be subclassed.""" + + def construct_endpoint(self, service_name, region_name=None): + """Resolves an endpoint for a service and region combination. + + :type service_name: string + :param service_name: Name of the service to resolve an endpoint for + (e.g., s3) + + :type region_name: string + :param region_name: Region/endpoint name to resolve (e.g., us-east-1) + if no region is provided, the first found partition-wide endpoint + will be used if available. + + :rtype: dict + :return: Returns a dict containing the following keys: + - partition: (string, required) Resolved partition name + - endpointName: (string, required) Resolved endpoint name + - hostname: (string, required) Hostname to use for this endpoint + - sslCommonName: (string) sslCommonName to use for this endpoint. + - credentialScope: (dict) Signature version 4 credential scope + - region: (string) region name override when signing. + - service: (string) service name override when signing. + - signatureVersions: (list) A list of possible signature + versions, including s3, v4, v2, and s3v4 + - protocols: (list) A list of supported protocols + (e.g., http, https) + - ...: Other keys may be included as well based on the metadata + """ + raise NotImplementedError + + def get_available_partitions(self): + """Lists the partitions available to the endpoint resolver. + + :return: Returns a list of partition names (e.g., ["aws", "aws-cn"]). + """ + raise NotImplementedError + + def get_available_endpoints( + self, service_name, partition_name='aws', allow_non_regional=False + ): + """Lists the endpoint names of a particular partition. 
+ + :type service_name: string + :param service_name: Name of a service to list endpoint for (e.g., s3) + + :type partition_name: string + :param partition_name: Name of the partition to limit endpoints to. + (e.g., aws for the public AWS endpoints, aws-cn for AWS China + endpoints, aws-us-gov for AWS GovCloud (US) Endpoints, etc. + + :type allow_non_regional: bool + :param allow_non_regional: Set to True to include endpoints that are + not regional endpoints (e.g., s3-external-1, + fips-us-gov-west-1, etc). + :return: Returns a list of endpoint names (e.g., ["us-east-1"]). + """ + raise NotImplementedError + + +class EndpointResolver(BaseEndpointResolver): + """Resolves endpoints based on partition endpoint metadata""" + + _UNSUPPORTED_DUALSTACK_PARTITIONS = ['aws-iso', 'aws-iso-b'] + + def __init__(self, endpoint_data, uses_builtin_data=False): + """ + :type endpoint_data: dict + :param endpoint_data: A dict of partition data. + + :type uses_builtin_data: boolean + :param uses_builtin_data: Whether the endpoint data originates in the + package's data directory. + """ + if 'partitions' not in endpoint_data: + raise ValueError('Missing "partitions" in endpoint data') + self._endpoint_data = endpoint_data + self.uses_builtin_data = uses_builtin_data + + def get_service_endpoints_data(self, service_name, partition_name='aws'): + for partition in self._endpoint_data['partitions']: + if partition['partition'] != partition_name: + continue + services = partition['services'] + if service_name not in services: + continue + return services[service_name]['endpoints'] + + def get_available_partitions(self): + result = [] + for partition in self._endpoint_data['partitions']: + result.append(partition['partition']) + return result + + def get_available_endpoints( + self, + service_name, + partition_name='aws', + allow_non_regional=False, + endpoint_variant_tags=None, + ): + result = [] + for partition in self._endpoint_data['partitions']: + if partition['partition'] != partition_name: + continue + services = partition['services'] + if service_name not in services: + continue + service_endpoints = services[service_name]['endpoints'] + for endpoint_name in service_endpoints: + is_regional_endpoint = endpoint_name in partition['regions'] + # Only regional endpoints can be modeled with variants + if endpoint_variant_tags and is_regional_endpoint: + variant_data = self._retrieve_variant_data( + service_endpoints[endpoint_name], endpoint_variant_tags + ) + if variant_data: + result.append(endpoint_name) + elif allow_non_regional or is_regional_endpoint: + result.append(endpoint_name) + return result + + def get_partition_dns_suffix( + self, partition_name, endpoint_variant_tags=None + ): + for partition in self._endpoint_data['partitions']: + if partition['partition'] == partition_name: + if endpoint_variant_tags: + variant = self._retrieve_variant_data( + partition.get('defaults'), endpoint_variant_tags + ) + if variant and 'dnsSuffix' in variant: + return variant['dnsSuffix'] + else: + return partition['dnsSuffix'] + return None + + def construct_endpoint( + self, + service_name, + region_name=None, + partition_name=None, + use_dualstack_endpoint=False, + use_fips_endpoint=False, + ): + if ( + service_name == 's3' + and use_dualstack_endpoint + and region_name is None + ): + region_name = 'us-east-1' + + if partition_name is not None: + valid_partition = None + for partition in self._endpoint_data['partitions']: + if partition['partition'] == partition_name: + valid_partition = partition + + if 
valid_partition is not None: + result = self._endpoint_for_partition( + valid_partition, + service_name, + region_name, + use_dualstack_endpoint, + use_fips_endpoint, + True, + ) + return result + return None + + # Iterate over each partition until a match is found. + for partition in self._endpoint_data['partitions']: + if use_dualstack_endpoint and ( + partition['partition'] + in self._UNSUPPORTED_DUALSTACK_PARTITIONS + ): + continue + result = self._endpoint_for_partition( + partition, + service_name, + region_name, + use_dualstack_endpoint, + use_fips_endpoint, + ) + if result: + return result + + def get_partition_for_region(self, region_name): + for partition in self._endpoint_data['partitions']: + if self._region_match(partition, region_name): + return partition['partition'] + raise UnknownRegionError( + region_name=region_name, + error_msg='No partition found for provided region_name.', + ) + + def _endpoint_for_partition( + self, + partition, + service_name, + region_name, + use_dualstack_endpoint, + use_fips_endpoint, + force_partition=False, + ): + partition_name = partition["partition"] + if ( + use_dualstack_endpoint + and partition_name in self._UNSUPPORTED_DUALSTACK_PARTITIONS + ): + error_msg = ( + "Dualstack endpoints are currently not supported" + f" for {partition_name} partition" + ) + raise EndpointVariantError(tags=['dualstack'], error_msg=error_msg) + + # Get the service from the partition, or an empty template. + service_data = partition['services'].get( + service_name, DEFAULT_SERVICE_DATA + ) + # Use the partition endpoint if no region is supplied. + if region_name is None: + if 'partitionEndpoint' in service_data: + region_name = service_data['partitionEndpoint'] + else: + raise NoRegionError() + + resolve_kwargs = { + 'partition': partition, + 'service_name': service_name, + 'service_data': service_data, + 'endpoint_name': region_name, + 'use_dualstack_endpoint': use_dualstack_endpoint, + 'use_fips_endpoint': use_fips_endpoint, + } + + # Attempt to resolve the exact region for this partition. + if region_name in service_data['endpoints']: + return self._resolve(**resolve_kwargs) + + # Check to see if the endpoint provided is valid for the partition. + if self._region_match(partition, region_name) or force_partition: + # Use the partition endpoint if set and not regionalized. 
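+            # For example, a global (non-regionalized) service may define a
+            # partitionEndpoint such as 'aws-global' with isRegionalized set
+            # to false, so every region in the partition resolves to that
+            # single endpoint.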
+ partition_endpoint = service_data.get('partitionEndpoint') + is_regionalized = service_data.get('isRegionalized', True) + if partition_endpoint and not is_regionalized: + LOG.debug( + 'Using partition endpoint for %s, %s: %s', + service_name, + region_name, + partition_endpoint, + ) + resolve_kwargs['endpoint_name'] = partition_endpoint + return self._resolve(**resolve_kwargs) + LOG.debug( + 'Creating a regex based endpoint for %s, %s', + service_name, + region_name, + ) + return self._resolve(**resolve_kwargs) + + def _region_match(self, partition, region_name): + if region_name in partition['regions']: + return True + if 'regionRegex' in partition: + return re.compile(partition['regionRegex']).match(region_name) + return False + + def _retrieve_variant_data(self, endpoint_data, tags): + variants = endpoint_data.get('variants', []) + for variant in variants: + if set(variant['tags']) == set(tags): + result = variant.copy() + return result + + def _create_tag_list(self, use_dualstack_endpoint, use_fips_endpoint): + tags = [] + if use_dualstack_endpoint: + tags.append('dualstack') + if use_fips_endpoint: + tags.append('fips') + return tags + + def _resolve_variant( + self, tags, endpoint_data, service_defaults, partition_defaults + ): + result = {} + for variants in [endpoint_data, service_defaults, partition_defaults]: + variant = self._retrieve_variant_data(variants, tags) + if variant: + self._merge_keys(variant, result) + return result + + def _resolve( + self, + partition, + service_name, + service_data, + endpoint_name, + use_dualstack_endpoint, + use_fips_endpoint, + ): + endpoint_data = service_data.get('endpoints', {}).get( + endpoint_name, {} + ) + + if endpoint_data.get('deprecated'): + LOG.warning( + f'Client is configured with the deprecated endpoint: {endpoint_name}' + ) + + service_defaults = service_data.get('defaults', {}) + partition_defaults = partition.get('defaults', {}) + tags = self._create_tag_list(use_dualstack_endpoint, use_fips_endpoint) + + if tags: + result = self._resolve_variant( + tags, endpoint_data, service_defaults, partition_defaults + ) + if result == {}: + error_msg = ( + f"Endpoint does not exist for {service_name} " + f"in region {endpoint_name}" + ) + raise EndpointVariantError(tags=tags, error_msg=error_msg) + self._merge_keys(endpoint_data, result) + else: + result = endpoint_data + + # If dnsSuffix has not already been consumed from a variant definition + if 'dnsSuffix' not in result: + result['dnsSuffix'] = partition['dnsSuffix'] + + result['partition'] = partition['partition'] + result['endpointName'] = endpoint_name + + # Merge in the service defaults then the partition defaults. 
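+        # Note that _merge_keys only fills in missing keys, so values
+        # already resolved from the endpoint (or its variant) take
+        # precedence over service defaults, which in turn take precedence
+        # over partition defaults.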
+ self._merge_keys(service_defaults, result) + self._merge_keys(partition_defaults, result) + + result['hostname'] = self._expand_template( + partition, + result['hostname'], + service_name, + endpoint_name, + result['dnsSuffix'], + ) + if 'sslCommonName' in result: + result['sslCommonName'] = self._expand_template( + partition, + result['sslCommonName'], + service_name, + endpoint_name, + result['dnsSuffix'], + ) + + return result + + def _merge_keys(self, from_data, result): + for key in from_data: + if key not in result: + result[key] = from_data[key] + + def _expand_template( + self, partition, template, service_name, endpoint_name, dnsSuffix + ): + return template.format( + service=service_name, region=endpoint_name, dnsSuffix=dnsSuffix + ) + + +class EndpointResolverBuiltins(str, Enum): + # The AWS Region configured for the SDK client (str) + AWS_REGION = "AWS::Region" + # Whether the UseFIPSEndpoint configuration option has been enabled for + # the SDK client (bool) + AWS_USE_FIPS = "AWS::UseFIPS" + # Whether the UseDualStackEndpoint configuration option has been enabled + # for the SDK client (bool) + AWS_USE_DUALSTACK = "AWS::UseDualStack" + # Whether the global endpoint should be used with STS, rather the the + # regional endpoint for us-east-1 (bool) + AWS_STS_USE_GLOBAL_ENDPOINT = "AWS::STS::UseGlobalEndpoint" + # Whether the global endpoint should be used with S3, rather then the + # regional endpoint for us-east-1 (bool) + AWS_S3_USE_GLOBAL_ENDPOINT = "AWS::S3::UseGlobalEndpoint" + # Whether S3 Transfer Acceleration has been requested (bool) + AWS_S3_ACCELERATE = "AWS::S3::Accelerate" + # Whether S3 Force Path Style has been enabled (bool) + AWS_S3_FORCE_PATH_STYLE = "AWS::S3::ForcePathStyle" + # Whether to use the ARN region or raise an error when ARN and client + # region differ (for s3 service only, bool) + AWS_S3_USE_ARN_REGION = "AWS::S3::UseArnRegion" + # Whether to use the ARN region or raise an error when ARN and client + # region differ (for s3-control service only, bool) + AWS_S3CONTROL_USE_ARN_REGION = 'AWS::S3Control::UseArnRegion' + # Whether multi-region access points (MRAP) should be disabled (bool) + AWS_S3_DISABLE_MRAP = "AWS::S3::DisableMultiRegionAccessPoints" + # Whether a custom endpoint has been configured (str) + SDK_ENDPOINT = "SDK::Endpoint" + + +class EndpointRulesetResolver: + """Resolves endpoints using a service's endpoint ruleset""" + + def __init__( + self, + endpoint_ruleset_data, + partition_data, + service_model, + builtins, + client_context, + event_emitter, + use_ssl=True, + requested_auth_scheme=None, + ): + self._provider = EndpointProvider( + ruleset_data=endpoint_ruleset_data, + partition_data=partition_data, + ) + self._param_definitions = self._provider.ruleset.parameters + self._service_model = service_model + self._builtins = builtins + self._client_context = client_context + self._event_emitter = event_emitter + self._use_ssl = use_ssl + self._requested_auth_scheme = requested_auth_scheme + self._instance_cache = {} + + def construct_endpoint( + self, + operation_model, + call_args, + request_context, + ): + """Invokes the provider with params defined in the service's ruleset""" + if call_args is None: + call_args = {} + + if request_context is None: + request_context = {} + + provider_params = self._get_provider_params( + operation_model, call_args, request_context + ) + LOG.debug( + f'Calling endpoint provider with parameters: {provider_params}' + ) + try: + provider_result = self._provider.resolve_endpoint( + **provider_params + 
) + except EndpointProviderError as ex: + botocore_exception = self.ruleset_error_to_botocore_exception( + ex, provider_params + ) + if botocore_exception is None: + raise + else: + raise botocore_exception from ex + LOG.debug(f'Endpoint provider result: {provider_result.url}') + + # The endpoint provider does not support non-secure transport. + if not self._use_ssl and provider_result.url.startswith('https://'): + provider_result = provider_result._replace( + url=f'http://{provider_result.url[8:]}' + ) + + # Multi-valued headers are not supported in botocore. Replace the list + # of values returned for each header with just its first entry, + # dropping any additionally entries. + provider_result = provider_result._replace( + headers={ + key: val[0] for key, val in provider_result.headers.items() + } + ) + + return provider_result + + def _get_provider_params( + self, operation_model, call_args, request_context + ): + """Resolve a value for each parameter defined in the service's ruleset + + The resolution order for parameter values is: + 1. Operation-specific static context values from the service definition + 2. Operation-specific dynamic context values from API parameters + 3. Client-specific context parameters + 4. Built-in values such as region, FIPS usage, ... + """ + provider_params = {} + # Builtin values can be customized for each operation by hooks + # subscribing to the ``before-endpoint-resolution.*`` event. + customized_builtins = self._get_customized_builtins( + operation_model, call_args, request_context + ) + for param_name, param_def in self._param_definitions.items(): + param_val = self._resolve_param_from_context( + param_name=param_name, + operation_model=operation_model, + call_args=call_args, + ) + if param_val is None and param_def.builtin is not None: + param_val = self._resolve_param_as_builtin( + builtin_name=param_def.builtin, + builtins=customized_builtins, + ) + if param_val is not None: + provider_params[param_name] = param_val + + return provider_params + + def _resolve_param_from_context( + self, param_name, operation_model, call_args + ): + static = self._resolve_param_as_static_context_param( + param_name, operation_model + ) + if static is not None: + return static + dynamic = self._resolve_param_as_dynamic_context_param( + param_name, operation_model, call_args + ) + if dynamic is not None: + return dynamic + return self._resolve_param_as_client_context_param(param_name) + + def _resolve_param_as_static_context_param( + self, param_name, operation_model + ): + static_ctx_params = self._get_static_context_params(operation_model) + return static_ctx_params.get(param_name) + + def _resolve_param_as_dynamic_context_param( + self, param_name, operation_model, call_args + ): + dynamic_ctx_params = self._get_dynamic_context_params(operation_model) + if param_name in dynamic_ctx_params: + member_name = dynamic_ctx_params[param_name] + return call_args.get(member_name) + + def _resolve_param_as_client_context_param(self, param_name): + client_ctx_params = self._get_client_context_params() + if param_name in client_ctx_params: + client_ctx_varname = client_ctx_params[param_name] + return self._client_context.get(client_ctx_varname) + + def _resolve_param_as_builtin(self, builtin_name, builtins): + if builtin_name not in EndpointResolverBuiltins.__members__.values(): + raise UnknownEndpointResolutionBuiltInName(name=builtin_name) + return builtins.get(builtin_name) + + @instance_cache + def _get_static_context_params(self, operation_model): + """Mapping of param 
names to static param value for an operation""" + return { + param.name: param.value + for param in operation_model.static_context_parameters + } + + @instance_cache + def _get_dynamic_context_params(self, operation_model): + """Mapping of param names to member names for an operation""" + return { + param.name: param.member_name + for param in operation_model.context_parameters + } + + @instance_cache + def _get_client_context_params(self): + """Mapping of param names to client configuration variable""" + return { + param.name: xform_name(param.name) + for param in self._service_model.client_context_parameters + } + + def _get_customized_builtins( + self, operation_model, call_args, request_context + ): + service_id = self._service_model.service_id.hyphenize() + customized_builtins = copy.copy(self._builtins) + # Handlers are expected to modify the builtins dict in place. + self._event_emitter.emit( + f'before-endpoint-resolution.{service_id}', + builtins=customized_builtins, + model=operation_model, + params=call_args, + context=request_context, + ) + return customized_builtins + + def auth_schemes_to_signing_ctx(self, auth_schemes): + """Convert an Endpoint's authSchemes property to a signing_context dict + + :type auth_schemes: list + :param auth_schemes: A list of dictionaries taken from the + ``authSchemes`` property of an Endpoint object returned by + ``EndpointProvider``. + + :rtype: str, dict + :return: Tuple of auth type string (to be used in + ``request_context['auth_type']``) and signing context dict (for use + in ``request_context['signing']``). + """ + if not isinstance(auth_schemes, list) or len(auth_schemes) == 0: + raise TypeError("auth_schemes must be a non-empty list.") + + LOG.debug( + 'Selecting from endpoint provider\'s list of auth schemes: %s. ' + 'User selected auth scheme is: "%s"', + ', '.join([f'"{s.get("name")}"' for s in auth_schemes]), + self._requested_auth_scheme, + ) + + if self._requested_auth_scheme == UNSIGNED: + return 'none', {} + + auth_schemes = [ + {**scheme, 'name': self._strip_sig_prefix(scheme['name'])} + for scheme in auth_schemes + ] + if self._requested_auth_scheme is not None: + try: + # Use the first scheme that matches the requested scheme, + # after accounting for naming differences between botocore and + # endpoint rulesets. Keep the requested name. + name, scheme = next( + (self._requested_auth_scheme, s) + for s in auth_schemes + if self._does_botocore_authname_match_ruleset_authname( + self._requested_auth_scheme, s['name'] + ) + ) + except StopIteration: + # For legacy signers, no match will be found. Do not raise an + # exception, instead default to the logic in botocore + # customizations. + return None, {} + else: + try: + name, scheme = next( + (s['name'], s) + for s in auth_schemes + if s['name'] in AUTH_TYPE_MAPS + ) + except StopIteration: + # If no auth scheme was specifically requested and an + # authSchemes list is present in the Endpoint object but none + # of the entries are supported, raise an exception. + fixable_with_crt = False + auth_type_options = [s['name'] for s in auth_schemes] + if not HAS_CRT: + fixable_with_crt = any( + scheme in CRT_SUPPORTED_AUTH_TYPES + for scheme in auth_type_options + ) + + if fixable_with_crt: + raise MissingDependencyException( + msg='This operation requires an additional dependency.' + ' Use pip install botocore[crt] before proceeding.' 
+ ) + else: + raise UnknownSignatureVersionError( + signature_version=', '.join(auth_type_options) + ) + + signing_context = {} + if 'signingRegion' in scheme: + signing_context['region'] = scheme['signingRegion'] + elif 'signingRegionSet' in scheme: + if len(scheme['signingRegionSet']) > 0: + signing_context['region'] = scheme['signingRegionSet'][0] + if 'signingName' in scheme: + signing_context.update(signing_name=scheme['signingName']) + if 'disableDoubleEncoding' in scheme: + signing_context['disableDoubleEncoding'] = ensure_boolean( + scheme['disableDoubleEncoding'] + ) + + LOG.debug( + 'Selected auth type "%s" as "%s" with signing context params: %s', + scheme['name'], # original name without "sig" + name, # chosen name can differ when `signature_version` is set + signing_context, + ) + return name, signing_context + + def _strip_sig_prefix(self, auth_name): + """Normalize auth type names by removing any "sig" prefix""" + return auth_name[3:] if auth_name.startswith('sig') else auth_name + + def _does_botocore_authname_match_ruleset_authname(self, botoname, rsname): + """ + Whether a valid string provided as signature_version parameter for + client construction refers to the same auth methods as a string + returned by the endpoint ruleset provider. This accounts for: + + * The ruleset prefixes auth names with "sig" + * The s3 and s3control rulesets don't distinguish between v4[a] and + s3v4[a] signers + * The v2, v3, and HMAC v1 based signers (s3, s3-*) are botocore legacy + features and do not exist in the rulesets + * Only characters up to the first dash are considered + + Example matches: + * v4, sigv4 + * v4, v4 + * s3v4, sigv4 + * s3v7, sigv7 (hypothetical example) + * s3v4a, sigv4a + * s3v4-query, sigv4 + + Example mismatches: + * v4a, sigv4 + * s3, sigv4 + * s3-presign-post, sigv4 + """ + rsname = self._strip_sig_prefix(rsname) + botoname = botoname.split('-')[0] + if botoname != 's3' and botoname.startswith('s3'): + botoname = botoname[2:] + return rsname == botoname + + def ruleset_error_to_botocore_exception(self, ruleset_exception, params): + """Attempts to translate ruleset errors to pre-existing botocore + exception types by string matching exception strings. 
+ """ + msg = ruleset_exception.kwargs.get('msg') + if msg is None: + return + + if msg.startswith('Invalid region in ARN: '): + # Example message: + # "Invalid region in ARN: `us-we$t-2` (invalid DNS name)" + try: + label = msg.split('`')[1] + except IndexError: + label = msg + return InvalidHostLabelError(label=label) + + service_name = self._service_model.service_name + if service_name == 's3': + if ( + msg == 'S3 Object Lambda does not support S3 Accelerate' + or msg == 'Accelerate cannot be used with FIPS' + ): + return UnsupportedS3ConfigurationError(msg=msg) + if ( + msg.startswith('S3 Outposts does not support') + or msg.startswith('S3 MRAP does not support') + or msg.startswith('S3 Object Lambda does not support') + or msg.startswith('Access Points do not support') + or msg.startswith('Invalid configuration:') + or msg.startswith('Client was configured for partition') + ): + return UnsupportedS3AccesspointConfigurationError(msg=msg) + if msg.lower().startswith('invalid arn:'): + return ParamValidationError(report=msg) + if service_name == 's3control': + if msg.startswith('Invalid ARN:'): + arn = params.get('Bucket') + return UnsupportedS3ControlArnError(arn=arn, msg=msg) + if msg.startswith('Invalid configuration:') or msg.startswith( + 'Client was configured for partition' + ): + return UnsupportedS3ControlConfigurationError(msg=msg) + if msg == "AccountId is required but not set": + return ParamValidationError(report=msg) + if service_name == 'events': + if msg.startswith( + 'Invalid Configuration: FIPS is not supported with ' + 'EventBridge multi-region endpoints.' + ): + return InvalidEndpointConfigurationError(msg=msg) + if msg == 'EndpointId must be a valid host label.': + return InvalidEndpointConfigurationError(msg=msg) + return None diff --git a/venv/lib/python3.10/site-packages/botocore/response.py b/venv/lib/python3.10/site-packages/botocore/response.py new file mode 100644 index 0000000000000000000000000000000000000000..ba3fac9bab816bb28e05891dcec84f26d449899b --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/response.py @@ -0,0 +1,201 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging +from io import IOBase + +from urllib3.exceptions import ProtocolError as URLLib3ProtocolError +from urllib3.exceptions import ReadTimeoutError as URLLib3ReadTimeoutError + +from botocore import parsers +from botocore.compat import set_socket_timeout +from botocore.exceptions import ( + IncompleteReadError, + ReadTimeoutError, + ResponseStreamingError, +) + +# Keep these imported. There's pre-existing code that uses them. +from botocore import ScalarTypes # noqa +from botocore.compat import XMLParseError # noqa +from botocore.hooks import first_non_none_response # noqa + + +logger = logging.getLogger(__name__) + + +class StreamingBody(IOBase): + """Wrapper class for an http response body. 
+ + This provides a few additional conveniences that do not exist + in the urllib3 model: + + * Set the timeout on the socket (i.e. read() timeouts) + * Auto validation of content length: if the number of bytes + we read does not match the content length, an exception + is raised. + + """ + + _DEFAULT_CHUNK_SIZE = 1024 + + def __init__(self, raw_stream, content_length): + self._raw_stream = raw_stream + self._content_length = content_length + self._amount_read = 0 + + def __del__(self): + # Extending destructor in order to preserve the underlying raw_stream. + # The ability to add custom cleanup logic was introduced in Python 3.4+. + # https://www.python.org/dev/peps/pep-0442/ + pass + + def set_socket_timeout(self, timeout): + """Set the timeout seconds on the socket.""" + # The problem we're trying to solve is to prevent .read() calls from + # hanging. This can happen in rare cases. What we'd ideally like to + # do is set a timeout on the .read() call so that callers can retry + # the request. + # Unfortunately, this isn't currently possible in requests. + # See: https://github.com/kennethreitz/requests/issues/1803 + # So what we're going to do is reach into the guts of the stream and + # grab the socket object, which we can set the timeout on. We're + # putting in a check here so in case this interface goes away, we'll + # know. + try: + set_socket_timeout(self._raw_stream, timeout) + except AttributeError: + logger.error( + "Cannot access the socket object of " + "a streaming response. It's possible " + "the interface has changed.", + exc_info=True, + ) + raise + + def readable(self): + try: + return self._raw_stream.readable() + except AttributeError: + return False + + def read(self, amt=None): + """Read at most amt bytes from the stream. + + If the amt argument is omitted, read all data. + """ + try: + chunk = self._raw_stream.read(amt) + except URLLib3ReadTimeoutError as e: + # TODO: the url will be None as urllib3 isn't setting it yet + raise ReadTimeoutError(endpoint_url=e.url, error=e) + except URLLib3ProtocolError as e: + raise ResponseStreamingError(error=e) + self._amount_read += len(chunk) + if amt is None or (not chunk and amt > 0): + # If the server sends empty contents or + # we ask to read all of the contents, then we know + # we need to verify the content length. + self._verify_content_length() + return chunk + + def readlines(self): + return self._raw_stream.readlines() + + def __iter__(self): + """Return an iterator to yield 1k chunks from the raw stream.""" + return self.iter_chunks(self._DEFAULT_CHUNK_SIZE) + + def __next__(self): + """Return the next 1k chunk from the raw stream.""" + current_chunk = self.read(self._DEFAULT_CHUNK_SIZE) + if current_chunk: + return current_chunk + raise StopIteration() + + def __enter__(self): + return self._raw_stream + + def __exit__(self, type, value, traceback): + self._raw_stream.close() + + next = __next__ + + def iter_lines(self, chunk_size=_DEFAULT_CHUNK_SIZE, keepends=False): + """Return an iterator to yield lines from the raw stream. + + This is achieved by reading chunks of bytes (of size chunk_size) at a + time from the raw stream, and then yielding lines from there. 
+ """ + pending = b'' + for chunk in self.iter_chunks(chunk_size): + lines = (pending + chunk).splitlines(True) + for line in lines[:-1]: + yield line.splitlines(keepends)[0] + pending = lines[-1] + if pending: + yield pending.splitlines(keepends)[0] + + def iter_chunks(self, chunk_size=_DEFAULT_CHUNK_SIZE): + """Return an iterator to yield chunks of chunk_size bytes from the raw + stream. + """ + while True: + current_chunk = self.read(chunk_size) + if current_chunk == b"": + break + yield current_chunk + + def _verify_content_length(self): + # See: https://github.com/kennethreitz/requests/issues/1855 + # Basically, our http library doesn't do this for us, so we have + # to do this ourself. + if self._content_length is not None and self._amount_read != int( + self._content_length + ): + raise IncompleteReadError( + actual_bytes=self._amount_read, + expected_bytes=int(self._content_length), + ) + + def tell(self): + return self._raw_stream.tell() + + def close(self): + """Close the underlying http response stream.""" + self._raw_stream.close() + + +def get_response(operation_model, http_response): + protocol = operation_model.metadata['protocol'] + response_dict = { + 'headers': http_response.headers, + 'status_code': http_response.status_code, + } + # TODO: Unfortunately, we have to have error logic here. + # If it looks like an error, in the streaming response case we + # need to actually grab the contents. + if response_dict['status_code'] >= 300: + response_dict['body'] = http_response.content + elif operation_model.has_streaming_output: + response_dict['body'] = StreamingBody( + http_response.raw, response_dict['headers'].get('content-length') + ) + else: + response_dict['body'] = http_response.content + + parser = parsers.create_parser(protocol) + return http_response, parser.parse( + response_dict, operation_model.output_shape + ) diff --git a/venv/lib/python3.10/site-packages/botocore/retryhandler.py b/venv/lib/python3.10/site-packages/botocore/retryhandler.py new file mode 100644 index 0000000000000000000000000000000000000000..c2eed1d9d37607b5fa6961ec523d72aceec994aa --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/retryhandler.py @@ -0,0 +1,416 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import functools +import logging +import random +from binascii import crc32 + +from botocore.exceptions import ( + ChecksumError, + ConnectionClosedError, + ConnectionError, + EndpointConnectionError, + ReadTimeoutError, +) + +logger = logging.getLogger(__name__) +# The only supported error for now is GENERAL_CONNECTION_ERROR +# which maps to requests generic ConnectionError. If we're able +# to get more specific exceptions from requests we can update +# this mapping with more specific exceptions. 
+EXCEPTION_MAP = { + 'GENERAL_CONNECTION_ERROR': [ + ConnectionError, + ConnectionClosedError, + ReadTimeoutError, + EndpointConnectionError, + ], +} + + +def delay_exponential(base, growth_factor, attempts): + """Calculate time to sleep based on exponential function. + + The format is:: + + base * growth_factor ^ (attempts - 1) + + If ``base`` is set to 'rand' then a random number between + 0 and 1 will be used as the base. + Base must be greater than 0, otherwise a ValueError will be + raised. + + """ + if base == 'rand': + base = random.random() + elif base <= 0: + raise ValueError( + f"The 'base' param must be greater than 0, got: {base}" + ) + time_to_sleep = base * (growth_factor ** (attempts - 1)) + return time_to_sleep + + +def create_exponential_delay_function(base, growth_factor): + """Create an exponential delay function based on the attempts. + + This is used so that you only have to pass it the attempts + parameter to calculate the delay. + + """ + return functools.partial( + delay_exponential, base=base, growth_factor=growth_factor + ) + + +def create_retry_handler(config, operation_name=None): + checker = create_checker_from_retry_config( + config, operation_name=operation_name + ) + action = create_retry_action_from_config( + config, operation_name=operation_name + ) + return RetryHandler(checker=checker, action=action) + + +def create_retry_action_from_config(config, operation_name=None): + # The spec has the possibility of supporting per policy + # actions, but right now, we assume this comes from the + # default section, which means that delay functions apply + # for every policy in the retry config (per service). + delay_config = config['__default__']['delay'] + if delay_config['type'] == 'exponential': + return create_exponential_delay_function( + base=delay_config['base'], + growth_factor=delay_config['growth_factor'], + ) + + +def create_checker_from_retry_config(config, operation_name=None): + checkers = [] + max_attempts = None + retryable_exceptions = [] + if '__default__' in config: + policies = config['__default__'].get('policies', []) + max_attempts = config['__default__']['max_attempts'] + for key in policies: + current_config = policies[key] + checkers.append(_create_single_checker(current_config)) + retry_exception = _extract_retryable_exception(current_config) + if retry_exception is not None: + retryable_exceptions.extend(retry_exception) + if operation_name is not None and config.get(operation_name) is not None: + operation_policies = config[operation_name]['policies'] + for key in operation_policies: + checkers.append(_create_single_checker(operation_policies[key])) + retry_exception = _extract_retryable_exception( + operation_policies[key] + ) + if retry_exception is not None: + retryable_exceptions.extend(retry_exception) + if len(checkers) == 1: + # Don't need to use a MultiChecker + return MaxAttemptsDecorator(checkers[0], max_attempts=max_attempts) + else: + multi_checker = MultiChecker(checkers) + return MaxAttemptsDecorator( + multi_checker, + max_attempts=max_attempts, + retryable_exceptions=tuple(retryable_exceptions), + ) + + +def _create_single_checker(config): + if 'response' in config['applies_when']: + return _create_single_response_checker( + config['applies_when']['response'] + ) + elif 'socket_errors' in config['applies_when']: + return ExceptionRaiser() + + +def _create_single_response_checker(response): + if 'service_error_code' in response: + checker = ServiceErrorCodeChecker( + status_code=response['http_status_code'], + 
error_code=response['service_error_code'], + ) + elif 'http_status_code' in response: + checker = HTTPStatusCodeChecker( + status_code=response['http_status_code'] + ) + elif 'crc32body' in response: + checker = CRC32Checker(header=response['crc32body']) + else: + # TODO: send a signal. + raise ValueError("Unknown retry policy") + return checker + + +def _extract_retryable_exception(config): + applies_when = config['applies_when'] + if 'crc32body' in applies_when.get('response', {}): + return [ChecksumError] + elif 'socket_errors' in applies_when: + exceptions = [] + for name in applies_when['socket_errors']: + exceptions.extend(EXCEPTION_MAP[name]) + return exceptions + + +class RetryHandler: + """Retry handler. + + The retry handler takes two params, a ``checker`` object + and an ``action`` object. + + The ``checker`` object must be a callable object and based on a response + and an attempt number, determines whether or not sufficient criteria for + a retry have been met. If this is the case then the ``action`` object + (which also is a callable) determines what needs to happen in the event + of a retry. + + """ + + def __init__(self, checker, action): + self._checker = checker + self._action = action + + def __call__(self, attempts, response, caught_exception, **kwargs): + """Handler for a retry. + + Intended to be hooked up to an event handler (hence the **kwargs), + this will process retries appropriately. + + """ + checker_kwargs = { + 'attempt_number': attempts, + 'response': response, + 'caught_exception': caught_exception, + } + if isinstance(self._checker, MaxAttemptsDecorator): + retries_context = kwargs['request_dict']['context'].get('retries') + checker_kwargs.update({'retries_context': retries_context}) + + if self._checker(**checker_kwargs): + result = self._action(attempts=attempts) + logger.debug("Retry needed, action of: %s", result) + return result + logger.debug("No retry needed.") + + +class BaseChecker: + """Base class for retry checkers. + + Each class is responsible for checking a single criterion that determines + whether or not a retry should happen. + + """ + + def __call__(self, attempt_number, response, caught_exception): + """Determine if retry criteria matches. + + Note that either ``response`` is not None and ``caught_exception`` is + None or ``response`` is None and ``caught_exception`` is not None. + + :type attempt_number: int + :param attempt_number: The total number of times we've attempted + to send the request. + + :param response: The HTTP response (if one was received). + + :type caught_exception: Exception + :param caught_exception: Any exception that was caught while trying to + send the HTTP request. + + :return: True, if the retry criteria matches (and therefore a retry + should occur). False if the criteria does not match. + + """ + # The default implementation allows subclasses to not have to check + # whether response is None or not. + if response is not None: + return self._check_response(attempt_number, response) + elif caught_exception is not None: + return self._check_caught_exception( + attempt_number, caught_exception + ) + else: + raise ValueError("Both response and caught_exception are None.") + + def _check_response(self, attempt_number, response): + pass + + def _check_caught_exception(self, attempt_number, caught_exception): + pass + + +class MaxAttemptsDecorator(BaseChecker): + """Allow retries up to a maximum number of attempts. 
+ + This will pass through calls to the decorated retry checker, provided + that the number of attempts does not exceed max_attempts. It will + also catch any retryable_exceptions passed in. Once max_attempts has + been exceeded, then False will be returned or the retryable_exceptions + that was previously being caught will be raised. + + """ + + def __init__(self, checker, max_attempts, retryable_exceptions=None): + self._checker = checker + self._max_attempts = max_attempts + self._retryable_exceptions = retryable_exceptions + + def __call__( + self, attempt_number, response, caught_exception, retries_context + ): + if retries_context: + retries_context['max'] = max( + retries_context.get('max', 0), self._max_attempts + ) + + should_retry = self._should_retry( + attempt_number, response, caught_exception + ) + if should_retry: + if attempt_number >= self._max_attempts: + # explicitly set MaxAttemptsReached + if response is not None and 'ResponseMetadata' in response[1]: + response[1]['ResponseMetadata']['MaxAttemptsReached'] = ( + True + ) + logger.debug( + "Reached the maximum number of retry attempts: %s", + attempt_number, + ) + return False + else: + return should_retry + else: + return False + + def _should_retry(self, attempt_number, response, caught_exception): + if self._retryable_exceptions and attempt_number < self._max_attempts: + try: + return self._checker( + attempt_number, response, caught_exception + ) + except self._retryable_exceptions as e: + logger.debug( + "retry needed, retryable exception caught: %s", + e, + exc_info=True, + ) + return True + else: + # If we've exceeded the max attempts we just let the exception + # propagate if one has occurred. + return self._checker(attempt_number, response, caught_exception) + + +class HTTPStatusCodeChecker(BaseChecker): + def __init__(self, status_code): + self._status_code = status_code + + def _check_response(self, attempt_number, response): + if response[0].status_code == self._status_code: + logger.debug( + "retry needed: retryable HTTP status code received: %s", + self._status_code, + ) + return True + else: + return False + + +class ServiceErrorCodeChecker(BaseChecker): + def __init__(self, status_code, error_code): + self._status_code = status_code + self._error_code = error_code + + def _check_response(self, attempt_number, response): + if response[0].status_code == self._status_code: + actual_error_code = response[1].get('Error', {}).get('Code') + if actual_error_code == self._error_code: + logger.debug( + "retry needed: matching HTTP status and error code seen: " + "%s, %s", + self._status_code, + self._error_code, + ) + return True + return False + + +class MultiChecker(BaseChecker): + def __init__(self, checkers): + self._checkers = checkers + + def __call__(self, attempt_number, response, caught_exception): + for checker in self._checkers: + checker_response = checker( + attempt_number, response, caught_exception + ) + if checker_response: + return checker_response + return False + + +class CRC32Checker(BaseChecker): + def __init__(self, header): + # The header where the expected crc32 is located. 
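+ # (For example 'x-amz-crc32'; the header name is service specific. + # The check below masks the computed checksum with & 0xFFFFFFFF so it + # is unsigned on all platforms, e.g. + # crc32(b'hello world') & 0xFFFFFFFF == 222957957.)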
+ self._header_name = header + + def _check_response(self, attempt_number, response): + http_response = response[0] + expected_crc = http_response.headers.get(self._header_name) + if expected_crc is None: + logger.debug( + "crc32 check skipped, the %s header is not " + "in the http response.", + self._header_name, + ) + else: + actual_crc32 = crc32(response[0].content) & 0xFFFFFFFF + if not actual_crc32 == int(expected_crc): + logger.debug( + "retry needed: crc32 check failed, expected != actual: " + "%s != %s", + int(expected_crc), + actual_crc32, + ) + raise ChecksumError( + checksum_type='crc32', + expected_checksum=int(expected_crc), + actual_checksum=actual_crc32, + ) + + +class ExceptionRaiser(BaseChecker): + """Raise any caught exceptions. + + This class will raise any non-None ``caught_exception``. + + """ + + def _check_caught_exception(self, attempt_number, caught_exception): + # This is implementation specific, but this class is useful when + # coordinating with the MaxAttemptsDecorator. + # The MaxAttemptsDecorator has a list of exceptions it should catch + # and retry, but something needs to come along and actually raise the + # caught_exception. That's what this class is being used for. If + # the MaxAttemptsDecorator is not interested in retrying the exception + # then this exception just propagates out past the retry code. + raise caught_exception diff --git a/venv/lib/python3.10/site-packages/botocore/serialize.py b/venv/lib/python3.10/site-packages/botocore/serialize.py new file mode 100644 index 0000000000000000000000000000000000000000..82ed58d09a704e02550a605fa64441f3dd4564b4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/serialize.py @@ -0,0 +1,811 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +"""Protocol input serializers. + +This module contains classes that implement input serialization +for the various AWS protocol types. + +These classes essentially take user input, a model object that +represents what the expected input should look like, and return +a dictionary that contains the various parts of a request. A few +high level design decisions: + + +* Each protocol type maps to a separate class, all inherit from + ``Serializer``. +* The return value for ``serialize_to_request`` (the main entry + point) returns a dictionary that represents a request. This + will have keys like ``url_path``, ``query_string``, etc. This + is done so that it's a) easy to test and b) not tied to a + particular HTTP library. See the ``serialize_to_request`` docstring + for more details. + +Unicode +------- + +The input to the serializers should be text (str/unicode), not bytes, +with the exception of blob types. Those are assumed to be binary, +and if a str/unicode type is passed in, it will be encoded as utf-8. 
+""" + +import base64 +import calendar +import datetime +import json +import re +from xml.etree import ElementTree + +from botocore import validate +from botocore.compat import formatdate +from botocore.exceptions import ParamValidationError +from botocore.utils import ( + has_header, + is_json_value_header, + parse_to_aware_datetime, + percent_encode, +) + +# From the spec, the default timestamp format if not specified is iso8601. +DEFAULT_TIMESTAMP_FORMAT = 'iso8601' +ISO8601 = '%Y-%m-%dT%H:%M:%SZ' +# Same as ISO8601, but with microsecond precision. +ISO8601_MICRO = '%Y-%m-%dT%H:%M:%S.%fZ' +HOST_PREFIX_RE = re.compile(r"^[A-Za-z0-9\.\-]+$") + + +def create_serializer(protocol_name, include_validation=True): + # TODO: Unknown protocols. + serializer = SERIALIZERS[protocol_name]() + if include_validation: + validator = validate.ParamValidator() + serializer = validate.ParamValidationDecorator(validator, serializer) + return serializer + + +class Serializer: + DEFAULT_METHOD = 'POST' + # Clients can change this to a different MutableMapping + # (i.e OrderedDict) if they want. This is used in the + # compliance test to match the hash ordering used in the + # tests. + MAP_TYPE = dict + DEFAULT_ENCODING = 'utf-8' + + def serialize_to_request(self, parameters, operation_model): + """Serialize parameters into an HTTP request. + + This method takes user provided parameters and a shape + model and serializes the parameters to an HTTP request. + More specifically, this method returns information about + parts of the HTTP request, it does not enforce a particular + interface or standard for an HTTP request. It instead returns + a dictionary of: + + * 'url_path' + * 'host_prefix' + * 'query_string' + * 'headers' + * 'body' + * 'method' + + It is then up to consumers to decide how to map this to a Request + object of their HTTP library of choice. Below is an example + return value:: + + {'body': {'Action': 'OperationName', + 'Bar': 'val2', + 'Foo': 'val1', + 'Version': '2014-01-01'}, + 'headers': {}, + 'method': 'POST', + 'query_string': '', + 'host_prefix': 'value.', + 'url_path': '/'} + + :param parameters: The dictionary input parameters for the + operation (i.e the user input). + :param operation_model: The OperationModel object that describes + the operation. + """ + raise NotImplementedError("serialize_to_request") + + def _create_default_request(self): + # Creates a boilerplate default request dict that subclasses + # can use as a starting point. + serialized = { + 'url_path': '/', + 'query_string': '', + 'method': self.DEFAULT_METHOD, + 'headers': {}, + # An empty body is represented as an empty byte string. + 'body': b'', + } + return serialized + + # Some extra utility methods subclasses can use. 
+ + def _timestamp_iso8601(self, value): + if value.microsecond > 0: + timestamp_format = ISO8601_MICRO + else: + timestamp_format = ISO8601 + return value.strftime(timestamp_format) + + def _timestamp_unixtimestamp(self, value): + return int(calendar.timegm(value.timetuple())) + + def _timestamp_rfc822(self, value): + if isinstance(value, datetime.datetime): + value = self._timestamp_unixtimestamp(value) + return formatdate(value, usegmt=True) + + def _convert_timestamp_to_str(self, value, timestamp_format=None): + if timestamp_format is None: + timestamp_format = self.TIMESTAMP_FORMAT + timestamp_format = timestamp_format.lower() + datetime_obj = parse_to_aware_datetime(value) + converter = getattr(self, f'_timestamp_{timestamp_format}') + final_value = converter(datetime_obj) + return final_value + + def _get_serialized_name(self, shape, default_name): + # Returns the serialized name for the shape if it exists. + # Otherwise it will return the passed in default_name. + return shape.serialization.get('name', default_name) + + def _get_base64(self, value): + # Returns the base64-encoded version of value, handling + # both strings and bytes. The returned value is a string + # via the default encoding. + if isinstance(value, str): + value = value.encode(self.DEFAULT_ENCODING) + return base64.b64encode(value).strip().decode(self.DEFAULT_ENCODING) + + def _expand_host_prefix(self, parameters, operation_model): + operation_endpoint = operation_model.endpoint + if ( + operation_endpoint is None + or 'hostPrefix' not in operation_endpoint + ): + return None + + host_prefix_expression = operation_endpoint['hostPrefix'] + input_members = operation_model.input_shape.members + host_labels = [ + member + for member, shape in input_members.items() + if shape.serialization.get('hostLabel') + ] + format_kwargs = {} + bad_labels = [] + for name in host_labels: + param = parameters[name] + if not HOST_PREFIX_RE.match(param): + bad_labels.append(name) + format_kwargs[name] = param + if bad_labels: + raise ParamValidationError( + report=( + f"Invalid value for parameter(s): {', '.join(bad_labels)}. " + "Must contain only alphanumeric characters, hyphen, " + "or period." + ) + ) + return host_prefix_expression.format(**format_kwargs) + + +class QuerySerializer(Serializer): + TIMESTAMP_FORMAT = 'iso8601' + + def serialize_to_request(self, parameters, operation_model): + shape = operation_model.input_shape + serialized = self._create_default_request() + serialized['method'] = operation_model.http.get( + 'method', self.DEFAULT_METHOD + ) + serialized['headers'] = { + 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8' + } + # The query serializer only deals with body params so + # that's what we hand off the _serialize_* methods. + body_params = self.MAP_TYPE() + body_params['Action'] = operation_model.name + body_params['Version'] = operation_model.metadata['apiVersion'] + if shape is not None: + self._serialize(body_params, parameters, shape) + serialized['body'] = body_params + + host_prefix = self._expand_host_prefix(parameters, operation_model) + if host_prefix is not None: + serialized['host_prefix'] = host_prefix + + return serialized + + def _serialize(self, serialized, value, shape, prefix=''): + # serialized: The dict that is incrementally added to with the + # final serialized parameters. + # value: The current user input value. + # shape: The shape object that describes the structure of the + # input. 
# prefix: The incrementally built up prefix for the serialized + # key (i.e. Foo.bar.members.1). + method = getattr( + self, + f'_serialize_type_{shape.type_name}', + self._default_serialize, + ) + method(serialized, value, shape, prefix=prefix) + + def _serialize_type_structure(self, serialized, value, shape, prefix=''): + members = shape.members + for key, value in value.items(): + member_shape = members[key] + member_prefix = self._get_serialized_name(member_shape, key) + if prefix: + member_prefix = f'{prefix}.{member_prefix}' + self._serialize(serialized, value, member_shape, member_prefix) + + def _serialize_type_list(self, serialized, value, shape, prefix=''): + if not value: + # The query protocol serializes empty lists. + serialized[prefix] = '' + return + if self._is_shape_flattened(shape): + list_prefix = prefix + if shape.member.serialization.get('name'): + name = self._get_serialized_name(shape.member, default_name='') + # Replace '.Original' with '.{name}'. + list_prefix = '.'.join(prefix.split('.')[:-1] + [name]) + else: + list_name = shape.member.serialization.get('name', 'member') + list_prefix = f'{prefix}.{list_name}' + for i, element in enumerate(value, 1): + element_prefix = f'{list_prefix}.{i}' + element_shape = shape.member + self._serialize(serialized, element, element_shape, element_prefix) + + def _serialize_type_map(self, serialized, value, shape, prefix=''): + if self._is_shape_flattened(shape): + full_prefix = prefix + else: + full_prefix = f'{prefix}.entry' + template = full_prefix + '.{i}.{suffix}' + key_shape = shape.key + value_shape = shape.value + key_suffix = self._get_serialized_name(key_shape, default_name='key') + value_suffix = self._get_serialized_name(value_shape, 'value') + for i, key in enumerate(value, 1): + key_prefix = template.format(i=i, suffix=key_suffix) + value_prefix = template.format(i=i, suffix=value_suffix) + self._serialize(serialized, key, key_shape, key_prefix) + self._serialize(serialized, value[key], value_shape, value_prefix) + + def _serialize_type_blob(self, serialized, value, shape, prefix=''): + # Blob args must be base64 encoded. + serialized[prefix] = self._get_base64(value) + + def _serialize_type_timestamp(self, serialized, value, shape, prefix=''): + serialized[prefix] = self._convert_timestamp_to_str( + value, shape.serialization.get('timestampFormat') + ) + + def _serialize_type_boolean(self, serialized, value, shape, prefix=''): + if value: + serialized[prefix] = 'true' + else: + serialized[prefix] = 'false' + + def _default_serialize(self, serialized, value, shape, prefix=''): + serialized[prefix] = value + + def _is_shape_flattened(self, shape): + return shape.serialization.get('flattened') + + +class EC2Serializer(QuerySerializer): + """EC2 specific customizations to the query protocol serializers. + + The EC2 protocol is almost, but not exactly, the same as the query + protocol. This class encapsulates those differences. The model + will be marked with a ``protocol`` of ``ec2``, so you don't need + to worry about wiring this class up correctly. + + """ + + def _get_serialized_name(self, shape, default_name): + # Returns the serialized name for the shape if it exists. + # Otherwise it will return the passed in default_name. + if 'queryName' in shape.serialization: + return shape.serialization['queryName'] + elif 'name' in shape.serialization: + # A locationName is always capitalized + # on input for the ec2 protocol. 
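+ # (e.g. a locationName of 'instanceId' is serialized as + # 'InstanceId', while a queryName is always used verbatim.)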
+ name = shape.serialization['name'] + return name[0].upper() + name[1:] + else: + return default_name + + def _serialize_type_list(self, serialized, value, shape, prefix=''): + for i, element in enumerate(value, 1): + element_prefix = f'{prefix}.{i}' + element_shape = shape.member + self._serialize(serialized, element, element_shape, element_prefix) + + +class JSONSerializer(Serializer): + TIMESTAMP_FORMAT = 'unixtimestamp' + + def serialize_to_request(self, parameters, operation_model): + target = '{}.{}'.format( + operation_model.metadata['targetPrefix'], + operation_model.name, + ) + json_version = operation_model.metadata['jsonVersion'] + serialized = self._create_default_request() + serialized['method'] = operation_model.http.get( + 'method', self.DEFAULT_METHOD + ) + serialized['headers'] = { + 'X-Amz-Target': target, + 'Content-Type': f'application/x-amz-json-{json_version}', + } + body = self.MAP_TYPE() + input_shape = operation_model.input_shape + if input_shape is not None: + self._serialize(body, parameters, input_shape) + serialized['body'] = json.dumps(body).encode(self.DEFAULT_ENCODING) + + host_prefix = self._expand_host_prefix(parameters, operation_model) + if host_prefix is not None: + serialized['host_prefix'] = host_prefix + + return serialized + + def _serialize(self, serialized, value, shape, key=None): + method = getattr( + self, + f'_serialize_type_{shape.type_name}', + self._default_serialize, + ) + method(serialized, value, shape, key) + + def _serialize_type_structure(self, serialized, value, shape, key): + if shape.is_document_type: + serialized[key] = value + else: + if key is not None: + # If a key is provided, this is a result of a recursive + # call so we need to add a new child dict as the value + # of the passed in serialized dict. We'll then add + # all the structure members as key/vals in the new serialized + # dictionary we just created. + new_serialized = self.MAP_TYPE() + serialized[key] = new_serialized + serialized = new_serialized + members = shape.members + for member_key, member_value in value.items(): + member_shape = members[member_key] + if 'name' in member_shape.serialization: + member_key = member_shape.serialization['name'] + self._serialize( + serialized, member_value, member_shape, member_key + ) + + def _serialize_type_map(self, serialized, value, shape, key): + map_obj = self.MAP_TYPE() + serialized[key] = map_obj + for sub_key, sub_value in value.items(): + self._serialize(map_obj, sub_value, shape.value, sub_key) + + def _serialize_type_list(self, serialized, value, shape, key): + list_obj = [] + serialized[key] = list_obj + for list_item in value: + wrapper = {} + # The JSON list serialization is the only case where we aren't + # setting a key on a dict. We handle this by using + # a __current__ key on a wrapper dict to serialize each + # list item before appending it to the serialized list. + self._serialize(wrapper, list_item, shape.member, "__current__") + list_obj.append(wrapper["__current__"]) + + def _default_serialize(self, serialized, value, shape, key): + serialized[key] = value + + def _serialize_type_timestamp(self, serialized, value, shape, key): + serialized[key] = self._convert_timestamp_to_str( + value, shape.serialization.get('timestampFormat') + ) + + def _serialize_type_blob(self, serialized, value, shape, key): + serialized[key] = self._get_base64(value) + + +class BaseRestSerializer(Serializer): + """Base class for rest protocols. 
+ + The only variance between the various rest protocols is the + way that the body is serialized. All other aspects (headers, uri, etc.) + are the same and logic for serializing those aspects lives here. + + Subclasses must implement the ``_serialize_body_params`` method. + + """ + + QUERY_STRING_TIMESTAMP_FORMAT = 'iso8601' + HEADER_TIMESTAMP_FORMAT = 'rfc822' + # This is a list of known values for the "location" key in the + # serialization dict. The location key tells us where on the request + # to put the serialized value. + KNOWN_LOCATIONS = ['uri', 'querystring', 'header', 'headers'] + + def serialize_to_request(self, parameters, operation_model): + serialized = self._create_default_request() + serialized['method'] = operation_model.http.get( + 'method', self.DEFAULT_METHOD + ) + shape = operation_model.input_shape + if shape is None: + serialized['url_path'] = operation_model.http['requestUri'] + return serialized + shape_members = shape.members + # While the ``serialized`` key holds the final serialized request + # data, we need interim dicts for the various locations of the + # request. We need this for the uri_path_kwargs and the + # query_string_kwargs because they are templated, so we need + # to gather all the needed data for the string template, + # then we render the template. The body_kwargs is needed + # because once we've collected them all, we run them through + # _serialize_body_params, which for rest-json, creates JSON, + # and for rest-xml, will create XML. This is what the + # ``partitioned`` dict below is for. + partitioned = { + 'uri_path_kwargs': self.MAP_TYPE(), + 'query_string_kwargs': self.MAP_TYPE(), + 'body_kwargs': self.MAP_TYPE(), + 'headers': self.MAP_TYPE(), + } + for param_name, param_value in parameters.items(): + if param_value is None: + # Don't serialize any parameter with a None value. + continue + self._partition_parameters( + partitioned, param_name, param_value, shape_members + ) + serialized['url_path'] = self._render_uri_template( + operation_model.http['requestUri'], partitioned['uri_path_kwargs'] + ) + + if 'authPath' in operation_model.http: + serialized['auth_path'] = self._render_uri_template( + operation_model.http['authPath'], + partitioned['uri_path_kwargs'], + ) + # Note that we lean on the http implementation to handle the case + # where the requestUri path already has query parameters. + # The bundled http client, requests, already supports this. + serialized['query_string'] = partitioned['query_string_kwargs'] + if partitioned['headers']: + serialized['headers'] = partitioned['headers'] + self._serialize_payload( + partitioned, parameters, serialized, shape, shape_members + ) + self._serialize_content_type(serialized, shape, shape_members) + + host_prefix = self._expand_host_prefix(parameters, operation_model) + if host_prefix is not None: + serialized['host_prefix'] = host_prefix + + return serialized + + def _render_uri_template(self, uri_template, params): + # We need to handle two cases:: + # + # /{Bucket}/foo + # /{Key+}/bar + # A label ending with '+' is greedy. There can only + # be one greedy key. 
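+ # A minimal sketch with hypothetical params: given + # {'Bucket': 'mybucket', 'Key': 'photos/2021/img.png'}, the template + # '/{Bucket}/{Key+}' renders as '/mybucket/photos/2021/img.png'. + # The greedy '{Key+}' label is percent encoded with safe='/~' so its + # embedded slashes survive, while non-greedy labels like '{Bucket}' + # have '/' percent encoded.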
+ encoded_params = {} + for template_param in re.findall(r'{(.*?)}', uri_template): + if template_param.endswith('+'): + encoded_params[template_param] = percent_encode( + params[template_param[:-1]], safe='/~' + ) + else: + encoded_params[template_param] = percent_encode( + params[template_param] + ) + return uri_template.format(**encoded_params) + + def _serialize_payload( + self, partitioned, parameters, serialized, shape, shape_members + ): + # partitioned - The user input params partitioned by location. + # parameters - The user input params. + # serialized - The final serialized request dict. + # shape - Describes the expected input shape + # shape_members - The members of the input struct shape + payload_member = shape.serialization.get('payload') + if self._has_streaming_payload(payload_member, shape_members): + # If it's streaming, then the body is just the + # value of the payload. + body_payload = parameters.get(payload_member, b'') + body_payload = self._encode_payload(body_payload) + serialized['body'] = body_payload + elif payload_member is not None: + # If there's a payload member, we serialize that + # member to the body. + body_params = parameters.get(payload_member) + if body_params is not None: + serialized['body'] = self._serialize_body_params( + body_params, shape_members[payload_member] + ) + else: + serialized['body'] = self._serialize_empty_body() + elif partitioned['body_kwargs']: + serialized['body'] = self._serialize_body_params( + partitioned['body_kwargs'], shape + ) + elif self._requires_empty_body(shape): + serialized['body'] = self._serialize_empty_body() + + def _serialize_empty_body(self): + return b'' + + def _serialize_content_type(self, serialized, shape, shape_members): + """ + Some protocols require varied Content-Type headers + depending on user input. This allows subclasses to apply + this conditionally. + """ + pass + + def _requires_empty_body(self, shape): + """ + Some protocols require a specific body to represent an empty + payload. This allows subclasses to apply this conditionally. + """ + return False + + def _has_streaming_payload(self, payload, shape_members): + """Determine if payload is streaming (a blob or string).""" + return payload is not None and shape_members[payload].type_name in ( + 'blob', + 'string', + ) + + def _encode_payload(self, body): + if isinstance(body, str): + return body.encode(self.DEFAULT_ENCODING) + return body + + def _partition_parameters( + self, partitioned, param_name, param_value, shape_members + ): + # This takes the user provided input parameter (``param``) + # and figures out where it goes in the request dict. + # Some params are HTTP headers, some are used in the URI, some + # are in the request body. This method deals with this. 
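+ # (Sketch: a hypothetical member whose serialization dict is + # {'location': 'header', 'name': 'x-amz-meta-foo'} lands in + # partitioned['headers']['x-amz-meta-foo']; a member with no + # 'location' key falls through to partitioned['body_kwargs'] below.)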
+ member = shape_members[param_name] + location = member.serialization.get('location') + key_name = member.serialization.get('name', param_name) + if location == 'uri': + partitioned['uri_path_kwargs'][key_name] = param_value + elif location == 'querystring': + if isinstance(param_value, dict): + partitioned['query_string_kwargs'].update(param_value) + elif isinstance(param_value, bool): + bool_str = str(param_value).lower() + partitioned['query_string_kwargs'][key_name] = bool_str + elif member.type_name == 'timestamp': + timestamp_format = member.serialization.get( + 'timestampFormat', self.QUERY_STRING_TIMESTAMP_FORMAT + ) + timestamp = self._convert_timestamp_to_str( + param_value, timestamp_format + ) + partitioned['query_string_kwargs'][key_name] = timestamp + else: + partitioned['query_string_kwargs'][key_name] = param_value + elif location == 'header': + shape = shape_members[param_name] + if not param_value and shape.type_name == 'list': + # Empty lists should not be set on the headers + return + value = self._convert_header_value(shape, param_value) + partitioned['headers'][key_name] = str(value) + elif location == 'headers': + # 'headers' is a bit of an oddball. The ``key_name`` + # is actually really a prefix for the header names: + header_prefix = key_name + # The value provided by the user is a dict so we'll be + # creating multiple header key/val pairs. The key + # name to use for each header is the header_prefix (``key_name``) + # plus the key provided by the user. + self._do_serialize_header_map( + header_prefix, partitioned['headers'], param_value + ) + else: + partitioned['body_kwargs'][param_name] = param_value + + def _do_serialize_header_map(self, header_prefix, headers, user_input): + for key, val in user_input.items(): + full_key = header_prefix + key + headers[full_key] = val + + def _serialize_body_params(self, params, shape): + raise NotImplementedError('_serialize_body_params') + + def _convert_header_value(self, shape, value): + if shape.type_name == 'timestamp': + datetime_obj = parse_to_aware_datetime(value) + timestamp = calendar.timegm(datetime_obj.utctimetuple()) + timestamp_format = shape.serialization.get( + 'timestampFormat', self.HEADER_TIMESTAMP_FORMAT + ) + return self._convert_timestamp_to_str(timestamp, timestamp_format) + elif shape.type_name == 'list': + converted_value = [ + self._convert_header_value(shape.member, v) + for v in value + if v is not None + ] + return ",".join(converted_value) + elif is_json_value_header(shape): + # Serialize with no spaces after separators to save space in + # the header. + return self._get_base64(json.dumps(value, separators=(',', ':'))) + else: + return value + + +class RestJSONSerializer(BaseRestSerializer, JSONSerializer): + def _serialize_empty_body(self): + return b'{}' + + def _requires_empty_body(self, shape): + """ + Serialize an empty JSON object whenever the shape has + members not targeting a location. 
+ """ + for member, val in shape.members.items(): + if 'location' not in val.serialization: + return True + return False + + def _serialize_content_type(self, serialized, shape, shape_members): + """Set Content-Type to application/json for all structured bodies.""" + payload = shape.serialization.get('payload') + if self._has_streaming_payload(payload, shape_members): + # Don't apply content-type to streaming bodies + return + + has_body = serialized['body'] != b'' + has_content_type = has_header('Content-Type', serialized['headers']) + if has_body and not has_content_type: + serialized['headers']['Content-Type'] = 'application/json' + + def _serialize_body_params(self, params, shape): + serialized_body = self.MAP_TYPE() + self._serialize(serialized_body, params, shape) + return json.dumps(serialized_body).encode(self.DEFAULT_ENCODING) + + +class RestXMLSerializer(BaseRestSerializer): + TIMESTAMP_FORMAT = 'iso8601' + + def _serialize_body_params(self, params, shape): + root_name = shape.serialization['name'] + pseudo_root = ElementTree.Element('') + self._serialize(shape, params, pseudo_root, root_name) + real_root = list(pseudo_root)[0] + return ElementTree.tostring(real_root, encoding=self.DEFAULT_ENCODING) + + def _serialize(self, shape, params, xmlnode, name): + method = getattr( + self, + f'_serialize_type_{shape.type_name}', + self._default_serialize, + ) + method(xmlnode, params, shape, name) + + def _serialize_type_structure(self, xmlnode, params, shape, name): + structure_node = ElementTree.SubElement(xmlnode, name) + + if 'xmlNamespace' in shape.serialization: + namespace_metadata = shape.serialization['xmlNamespace'] + attribute_name = 'xmlns' + if namespace_metadata.get('prefix'): + attribute_name += f":{namespace_metadata['prefix']}" + structure_node.attrib[attribute_name] = namespace_metadata['uri'] + for key, value in params.items(): + member_shape = shape.members[key] + member_name = member_shape.serialization.get('name', key) + # We need to special case member shapes that are marked as an + # xmlAttribute. Rather than serializing into an XML child node, + # we instead serialize the shape to an XML attribute of the + # *current* node. + if value is None: + # Don't serialize any param whose value is None. + return + if member_shape.serialization.get('xmlAttribute'): + # xmlAttributes must have a serialization name. + xml_attribute_name = member_shape.serialization['name'] + structure_node.attrib[xml_attribute_name] = value + continue + self._serialize(member_shape, value, structure_node, member_name) + + def _serialize_type_list(self, xmlnode, params, shape, name): + member_shape = shape.member + if shape.serialization.get('flattened'): + element_name = name + list_node = xmlnode + else: + element_name = member_shape.serialization.get('name', 'member') + list_node = ElementTree.SubElement(xmlnode, name) + for item in params: + self._serialize(member_shape, item, list_node, element_name) + + def _serialize_type_map(self, xmlnode, params, shape, name): + # Given the ``name`` of MyMap, and input of {"key1": "val1"} + # we serialize this as: + # + # + # key1 + # val1 + # + # + node = ElementTree.SubElement(xmlnode, name) + # TODO: handle flattened maps. 
+ for key, value in params.items(): + entry_node = ElementTree.SubElement(node, 'entry') + key_name = self._get_serialized_name(shape.key, default_name='key') + val_name = self._get_serialized_name( + shape.value, default_name='value' + ) + self._serialize(shape.key, key, entry_node, key_name) + self._serialize(shape.value, value, entry_node, val_name) + + def _serialize_type_boolean(self, xmlnode, params, shape, name): + # For scalar types, the 'params' attr is actually just a scalar + # value representing the data we need to serialize as a boolean. + # It will either be 'true' or 'false' + node = ElementTree.SubElement(xmlnode, name) + if params: + str_value = 'true' + else: + str_value = 'false' + node.text = str_value + + def _serialize_type_blob(self, xmlnode, params, shape, name): + node = ElementTree.SubElement(xmlnode, name) + node.text = self._get_base64(params) + + def _serialize_type_timestamp(self, xmlnode, params, shape, name): + node = ElementTree.SubElement(xmlnode, name) + node.text = self._convert_timestamp_to_str( + params, shape.serialization.get('timestampFormat') + ) + + def _default_serialize(self, xmlnode, params, shape, name): + node = ElementTree.SubElement(xmlnode, name) + node.text = str(params) + + +SERIALIZERS = { + 'ec2': EC2Serializer, + 'query': QuerySerializer, + 'json': JSONSerializer, + 'rest-json': RestJSONSerializer, + 'rest-xml': RestXMLSerializer, +} diff --git a/venv/lib/python3.10/site-packages/botocore/session.py b/venv/lib/python3.10/site-packages/botocore/session.py new file mode 100644 index 0000000000000000000000000000000000000000..93d020757ab5d48cedf5b5051928cf8e31cca71e --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/session.py @@ -0,0 +1,1269 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +""" +This module contains the main interface to the botocore package, the +Session object. 
+""" + +import copy +import logging +import os +import platform +import socket +import warnings + +import botocore.client +import botocore.configloader +import botocore.credentials +import botocore.tokens +from botocore import ( + UNSIGNED, + __version__, + handlers, + invoke_initializers, + monitoring, + paginate, + retryhandler, + translate, + waiter, +) +from botocore.compat import HAS_CRT, MutableMapping +from botocore.configprovider import ( + BOTOCORE_DEFAUT_SESSION_VARIABLES, + ConfigChainFactory, + ConfiguredEndpointProvider, + ConfigValueStore, + DefaultConfigResolver, + SmartDefaultsConfigStoreFactory, + create_botocore_default_config_mapping, +) +from botocore.errorfactory import ClientExceptionsFactory +from botocore.exceptions import ( + ConfigNotFound, + InvalidDefaultsMode, + PartialCredentialsError, + ProfileNotFound, + UnknownServiceError, +) +from botocore.hooks import ( + EventAliaser, + HierarchicalEmitter, + first_non_none_response, +) +from botocore.loaders import create_loader +from botocore.model import ServiceModel +from botocore.parsers import ResponseParserFactory +from botocore.regions import EndpointResolver +from botocore.useragent import UserAgentString +from botocore.utils import ( + EVENT_ALIASES, + IMDSRegionProvider, + validate_region_name, +) + +from botocore.compat import HAS_CRT # noqa + + +logger = logging.getLogger(__name__) + + +class Session: + """ + The Session object collects together useful functionality + from `botocore` as well as important data such as configuration + information and credentials into a single, easy-to-use object. + + :ivar available_profiles: A list of profiles defined in the config + file associated with this session. + :ivar profile: The current profile. + """ + + SESSION_VARIABLES = copy.copy(BOTOCORE_DEFAUT_SESSION_VARIABLES) + + #: The default format string to use when configuring the botocore logger. + LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' + + def __init__( + self, + session_vars=None, + event_hooks=None, + include_builtin_handlers=True, + profile=None, + ): + """ + Create a new Session object. + + :type session_vars: dict + :param session_vars: A dictionary that is used to override some or all + of the environment variables associated with this session. The + key/value pairs defined in this dictionary will override the + corresponding variables defined in ``SESSION_VARIABLES``. + + :type event_hooks: BaseEventHooks + :param event_hooks: The event hooks object to use. If one is not + provided, an event hooks object will be automatically created + for you. + + :type include_builtin_handlers: bool + :param include_builtin_handlers: Indicates whether or not to + automatically register builtin handlers. + + :type profile: str + :param profile: The name of the profile to use for this + session. Note that the profile can only be set when + the session is created. + + """ + if event_hooks is None: + self._original_handler = HierarchicalEmitter() + else: + self._original_handler = event_hooks + self._events = EventAliaser(self._original_handler) + if include_builtin_handlers: + self._register_builtin_handlers(self._events) + self.user_agent_name = 'Botocore' + self.user_agent_version = __version__ + self.user_agent_extra = '' + # The _profile attribute is just used to cache the value + # of the current profile to avoid going through the normal + # config lookup process each access time. 
+ self._profile = None + self._config = None + self._credentials = None + self._auth_token = None + self._profile_map = None + # This is a dict that stores per session specific config variable + # overrides via set_config_variable(). + self._session_instance_vars = {} + if profile is not None: + self._session_instance_vars['profile'] = profile + self._client_config = None + self._last_client_region_used = None + self._components = ComponentLocator() + self._internal_components = ComponentLocator() + self._register_components() + self.session_var_map = SessionVarDict(self, self.SESSION_VARIABLES) + if session_vars is not None: + self.session_var_map.update(session_vars) + invoke_initializers(self) + + def _register_components(self): + self._register_credential_provider() + self._register_token_provider() + self._register_data_loader() + self._register_endpoint_resolver() + self._register_event_emitter() + self._register_response_parser_factory() + self._register_exceptions_factory() + self._register_config_store() + self._register_monitor() + self._register_default_config_resolver() + self._register_smart_defaults_factory() + self._register_user_agent_creator() + + def _register_event_emitter(self): + self._components.register_component('event_emitter', self._events) + + def _register_token_provider(self): + self._components.lazy_register_component( + 'token_provider', self._create_token_resolver + ) + + def _create_token_resolver(self): + return botocore.tokens.create_token_resolver(self) + + def _register_credential_provider(self): + self._components.lazy_register_component( + 'credential_provider', self._create_credential_resolver + ) + + def _create_credential_resolver(self): + return botocore.credentials.create_credential_resolver( + self, region_name=self._last_client_region_used + ) + + def _register_data_loader(self): + self._components.lazy_register_component( + 'data_loader', + lambda: create_loader(self.get_config_variable('data_path')), + ) + + def _register_endpoint_resolver(self): + def create_default_resolver(): + loader = self.get_component('data_loader') + endpoints, path = loader.load_data_with_path('endpoints') + uses_builtin = loader.is_builtin_path(path) + return EndpointResolver(endpoints, uses_builtin_data=uses_builtin) + + self._internal_components.lazy_register_component( + 'endpoint_resolver', create_default_resolver + ) + + def _register_default_config_resolver(self): + def create_default_config_resolver(): + loader = self.get_component('data_loader') + defaults = loader.load_data('sdk-default-configuration') + return DefaultConfigResolver(defaults) + + self._internal_components.lazy_register_component( + 'default_config_resolver', create_default_config_resolver + ) + + def _register_smart_defaults_factory(self): + def create_smart_defaults_factory(): + default_config_resolver = self._get_internal_component( + 'default_config_resolver' + ) + imds_region_provider = IMDSRegionProvider(session=self) + return SmartDefaultsConfigStoreFactory( + default_config_resolver, imds_region_provider + ) + + self._internal_components.lazy_register_component( + 'smart_defaults_factory', create_smart_defaults_factory + ) + + def _register_response_parser_factory(self): + self._components.register_component( + 'response_parser_factory', ResponseParserFactory() + ) + + def _register_exceptions_factory(self): + self._internal_components.register_component( + 'exceptions_factory', ClientExceptionsFactory() + ) + + def _register_builtin_handlers(self, events): + for spec in 
handlers.BUILTIN_HANDLERS:
+            if len(spec) == 2:
+                event_name, handler = spec
+                self.register(event_name, handler)
+            else:
+                event_name, handler, register_type = spec
+                if register_type is handlers.REGISTER_FIRST:
+                    self._events.register_first(event_name, handler)
+                elif register_type is handlers.REGISTER_LAST:
+                    self._events.register_last(event_name, handler)
+
+    def _register_config_store(self):
+        config_store_component = ConfigValueStore(
+            mapping=create_botocore_default_config_mapping(self)
+        )
+        self._components.register_component(
+            'config_store', config_store_component
+        )
+
+    def _register_monitor(self):
+        self._internal_components.lazy_register_component(
+            'monitor', self._create_csm_monitor
+        )
+
+    def _register_user_agent_creator(self):
+        uas = UserAgentString.from_environment()
+        self._components.register_component('user_agent_creator', uas)
+
+    def _create_csm_monitor(self):
+        if self.get_config_variable('csm_enabled'):
+            client_id = self.get_config_variable('csm_client_id')
+            host = self.get_config_variable('csm_host')
+            port = self.get_config_variable('csm_port')
+            handler = monitoring.Monitor(
+                adapter=monitoring.MonitorEventAdapter(),
+                publisher=monitoring.SocketPublisher(
+                    socket=socket.socket(socket.AF_INET, socket.SOCK_DGRAM),
+                    host=host,
+                    port=port,
+                    serializer=monitoring.CSMSerializer(
+                        csm_client_id=client_id
+                    ),
+                ),
+            )
+            return handler
+        return None
+
+    def _get_crt_version(self):
+        user_agent_creator = self.get_component('user_agent_creator')
+        return user_agent_creator._crt_version or 'Unknown'
+
+    @property
+    def available_profiles(self):
+        return list(self._build_profile_map().keys())
+
+    def _build_profile_map(self):
+        # This will build the profile map if it has not been created,
+        # otherwise it will return the cached value. The profile map
+        # maps profile names to the config values for that profile.
+        if self._profile_map is None:
+            self._profile_map = self.full_config['profiles']
+        return self._profile_map
+
+    @property
+    def profile(self):
+        if self._profile is None:
+            profile = self.get_config_variable('profile')
+            self._profile = profile
+        return self._profile
+
+    def get_config_variable(self, logical_name, methods=None):
+        if methods is not None:
+            return self._get_config_variable_with_custom_methods(
+                logical_name, methods
+            )
+        return self.get_component('config_store').get_config_variable(
+            logical_name
+        )
+
+    def _get_config_variable_with_custom_methods(self, logical_name, methods):
+        # If a custom list of methods was supplied we need to preserve the
+        # behavior with the new system. To do so a new chain that is a copy of
+        # the old one will be constructed, but only with the supplied methods
+        # being added to the chain. This chain will be consulted for a value
+        # and then thrown out. This is not efficient, nor is the methods arg
+        # used in botocore; it exists only for backwards compatibility.
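+        # Illustrative call (hypothetical; botocore itself does not pass
+        # ``methods``):
+        #
+        #     session.get_config_variable('region', methods=('env', 'config'))
+        #
+        # consults only the environment-variable and config-file providers,
+        # skipping per-instance overrides.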
+        chain_builder = SubsetChainConfigFactory(session=self, methods=methods)
+        mapping = create_botocore_default_config_mapping(self)
+        for name, config_options in self.session_var_map.items():
+            config_name, env_vars, default, typecast = config_options
+            build_chain_config_args = {
+                'conversion_func': typecast,
+                'default': default,
+            }
+            if 'instance' in methods:
+                build_chain_config_args['instance_name'] = name
+            if 'env' in methods:
+                build_chain_config_args['env_var_names'] = env_vars
+            if 'config' in methods:
+                build_chain_config_args['config_property_name'] = config_name
+            mapping[name] = chain_builder.create_config_chain(
+                **build_chain_config_args
+            )
+        config_store_component = ConfigValueStore(mapping=mapping)
+        value = config_store_component.get_config_variable(logical_name)
+        return value
+
+    def set_config_variable(self, logical_name, value):
+        """Set a configuration variable to a specific value.
+
+        By using this method, you can override the normal lookup
+        process used in ``get_config_variable`` by explicitly setting
+        a value. Subsequent calls to ``get_config_variable`` will
+        use the ``value``. This gives you per-session specific
+        configuration values.
+
+        ::
+            >>> # Assume logical name 'foo' maps to env var 'FOO'
+            >>> os.environ['FOO'] = 'myvalue'
+            >>> s.get_config_variable('foo')
+            'myvalue'
+            >>> s.set_config_variable('foo', 'othervalue')
+            >>> s.get_config_variable('foo')
+            'othervalue'
+
+        :type logical_name: str
+        :param logical_name: The logical name of the session variable
+            you want to set. These are the keys in ``SESSION_VARIABLES``.
+        :param value: The value to associate with the config variable.
+
+        """
+        logger.debug(
+            "Setting config variable for %s to %r",
+            logical_name,
+            value,
+        )
+        self._session_instance_vars[logical_name] = value
+
+    def instance_variables(self):
+        return copy.copy(self._session_instance_vars)
+
+    def get_scoped_config(self):
+        """
+        Returns the config values from the config file scoped to the current
+        profile.
+
+        The configuration data is loaded **only** from the config file.
+        It does not resolve variables based on different locations
+        (e.g. first from the session instance, then from environment
+        variables, then from the config file). If you want this lookup
+        behavior, use the ``get_config_variable`` method instead.
+
+        Note that this configuration is specific to a single profile (the
+        ``profile`` session variable).
+
+        If the ``profile`` session variable is set and the profile does
+        not exist in the config file, a ``ProfileNotFound`` exception
+        will be raised.
+
+        :raises: ConfigNotFound, ConfigParseError, ProfileNotFound
+        :rtype: dict
+
+        """
+        profile_name = self.get_config_variable('profile')
+        profile_map = self._build_profile_map()
+        # If a profile is not explicitly set return the default
+        # profile config or an empty config dict if we don't have
+        # a default profile.
+        if profile_name is None:
+            return profile_map.get('default', {})
+        elif profile_name not in profile_map:
+            # Otherwise if they specified a profile, it has to
+            # exist (even if it's the default profile) otherwise
+            # we complain.
+            raise ProfileNotFound(profile=profile_name)
+        else:
+            return profile_map[profile_name]
+
+    @property
+    def full_config(self):
+        """Return the parsed config file.
+
+        The ``get_scoped_config`` method returns the config associated with
+        the specified profile. This property returns the contents of the
+        **entire** config file.
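+
+        For example, a config file defining a ``dev`` profile parses to a
+        shape like this (illustration only)::
+
+            {'profiles': {'dev': {'region': 'us-west-2'}}}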
+
+        :rtype: dict
+        """
+        if self._config is None:
+            try:
+                config_file = self.get_config_variable('config_file')
+                self._config = botocore.configloader.load_config(config_file)
+            except ConfigNotFound:
+                self._config = {'profiles': {}}
+            try:
+                # Now we need to inject the profiles from the
+                # credentials file. We don't actually need the values
+                # in the creds file, only the profile names so that we
+                # can validate the user is not referring to a nonexistent
+                # profile.
+                cred_file = self.get_config_variable('credentials_file')
+                cred_profiles = botocore.configloader.raw_config_parse(
+                    cred_file
+                )
+                for profile in cred_profiles:
+                    cred_vars = cred_profiles[profile]
+                    if profile not in self._config['profiles']:
+                        self._config['profiles'][profile] = cred_vars
+                    else:
+                        self._config['profiles'][profile].update(cred_vars)
+            except ConfigNotFound:
+                pass
+        return self._config
+
+    def get_default_client_config(self):
+        """Retrieves the default config for creating clients
+
+        :rtype: botocore.client.Config
+        :returns: The default client config object when creating clients. If
+            the value is ``None`` then there is no default config object
+            attached to the session.
+        """
+        return self._client_config
+
+    def set_default_client_config(self, client_config):
+        """Sets the default config for creating clients
+
+        :type client_config: botocore.client.Config
+        :param client_config: The default client config object when creating
+            clients. If the value is ``None`` then there is no default config
+            object attached to the session.
+        """
+        self._client_config = client_config
+
+    def set_credentials(self, access_key, secret_key, token=None):
+        """
+        Manually create credentials for this session. If you would
+        prefer to use botocore without a config file, environment variables,
+        or IAM roles, you can pass explicit credentials into this
+        method to establish credentials for this session.
+
+        :type access_key: str
+        :param access_key: The access key part of the credentials.
+
+        :type secret_key: str
+        :param secret_key: The secret key part of the credentials.
+
+        :type token: str
+        :param token: An optional session token used by STS session
+            credentials.
+        """
+        self._credentials = botocore.credentials.Credentials(
+            access_key, secret_key, token
+        )
+
+    def get_credentials(self):
+        """
+        Return the :class:`botocore.credentials.Credentials` object
+        associated with this session. If the credentials have not
+        yet been loaded, this will attempt to load them. If they
+        have already been loaded, this will return the cached
+        credentials.
+
+        """
+        if self._credentials is None:
+            self._credentials = self._components.get_component(
+                'credential_provider'
+            ).load_credentials()
+        return self._credentials
+
+    def get_auth_token(self):
+        """
+        Return the :class:`botocore.tokens.AuthToken` object associated with
+        this session. If the authorization token has not yet been loaded, this
+        will attempt to load it. If it has already been loaded, this will
+        return the cached authorization token.
+
+        """
+        if self._auth_token is None:
+            provider = self._components.get_component('token_provider')
+            self._auth_token = provider.load_token()
+        return self._auth_token
+
+    def user_agent(self):
+        """
+        Return a string suitable for use as a User-Agent header.
+        The string will be of the form:
+
+        <agent_name>/<agent_version> Python/<py_ver> <plat_name>/<plat_ver> <exec_env>
+
+        Where:
+
+         - agent_name is the value of the `user_agent_name` attribute
+           of the session object (`Botocore` by default).
+         - agent_version is the value of the `user_agent_version`
+           attribute of the session object (the botocore version by default).
+         - py_ver is the version of the Python interpreter being used.
+         - plat_name is the name of the platform (e.g. Darwin)
+         - plat_ver is the version of the platform
+         - exec_env is exec-env/$AWS_EXECUTION_ENV
+
+        If ``user_agent_extra`` is not empty, then this value will be
+        appended to the end of the user agent string.
+
+        """
+        base = (
+            f'{self.user_agent_name}/{self.user_agent_version} '
+            f'Python/{platform.python_version()} '
+            f'{platform.system()}/{platform.release()}'
+        )
+        if HAS_CRT:
+            base += f' awscrt/{self._get_crt_version()}'
+        if os.environ.get('AWS_EXECUTION_ENV') is not None:
+            base += ' exec-env/{}'.format(os.environ.get('AWS_EXECUTION_ENV'))
+        if self.user_agent_extra:
+            base += f' {self.user_agent_extra}'
+
+        return base
+
+    def get_data(self, data_path):
+        """
+        Retrieve the data associated with `data_path`.
+
+        :type data_path: str
+        :param data_path: The path to the data you wish to retrieve.
+        """
+        return self.get_component('data_loader').load_data(data_path)
+
+    def get_service_model(self, service_name, api_version=None):
+        """Get the service model object.
+
+        :type service_name: string
+        :param service_name: The service name
+
+        :type api_version: string
+        :param api_version: The API version of the service. If none is
+            provided, then the latest API version will be used.
+
+        :rtype: L{botocore.model.ServiceModel}
+        :return: The botocore service model for the service.
+
+        """
+        service_description = self.get_service_data(service_name, api_version)
+        return ServiceModel(service_description, service_name=service_name)
+
+    def get_waiter_model(self, service_name, api_version=None):
+        loader = self.get_component('data_loader')
+        waiter_config = loader.load_service_model(
+            service_name, 'waiters-2', api_version
+        )
+        return waiter.WaiterModel(waiter_config)
+
+    def get_paginator_model(self, service_name, api_version=None):
+        loader = self.get_component('data_loader')
+        paginator_config = loader.load_service_model(
+            service_name, 'paginators-1', api_version
+        )
+        return paginate.PaginatorModel(paginator_config)
+
+    def get_service_data(self, service_name, api_version=None):
+        """
+        Retrieve the fully merged data associated with a service.
+        """
+        data_path = service_name
+        service_data = self.get_component('data_loader').load_service_model(
+            data_path, type_name='service-2', api_version=api_version
+        )
+        service_id = EVENT_ALIASES.get(service_name, service_name)
+        self._events.emit(
+            f'service-data-loaded.{service_id}',
+            service_data=service_data,
+            service_name=service_name,
+            session=self,
+        )
+        return service_data
+
+    def get_available_services(self):
+        """
+        Return a list of names of available services.
+        """
+        return self.get_component('data_loader').list_available_services(
+            type_name='service-2'
+        )
+
+    def set_debug_logger(self, logger_name='botocore'):
+        """
+        Convenience function to quickly configure full debug output
+        to go to the console.
+        """
+        self.set_stream_logger(logger_name, logging.DEBUG)
+
+    def set_stream_logger(
+        self, logger_name, log_level, stream=None, format_string=None
+    ):
+        """
+        Convenience method to configure a stream logger.
+
+        :type logger_name: str
+        :param logger_name: The name of the logger to configure
+
+        :type log_level: str
+        :param log_level: The log level to set for the logger. This
+            is any param supported by the ``.setLevel()`` method of
+            a ``Logger`` object.
+
+        :type stream: file
+        :param stream: A file like object to log to. If none is provided
+            then sys.stderr will be used.
+
+        :type format_string: str
+        :param format_string: The format string to use for the log
+            formatter. If none is provided this will default to
+            ``self.LOG_FORMAT``.
+
+        """
+        log = logging.getLogger(logger_name)
+        log.setLevel(logging.DEBUG)
+
+        ch = logging.StreamHandler(stream)
+        ch.setLevel(log_level)
+
+        # create formatter
+        if format_string is None:
+            format_string = self.LOG_FORMAT
+        formatter = logging.Formatter(format_string)
+
+        # add formatter to ch
+        ch.setFormatter(formatter)
+
+        # add ch to logger
+        log.addHandler(ch)
+
+    def set_file_logger(self, log_level, path, logger_name='botocore'):
+        """
+        Convenience function to quickly configure any level of logging
+        to a file.
+
+        :type log_level: int
+        :param log_level: A log level as specified in the `logging` module
+
+        :type path: string
+        :param path: Path to the log file. The file will be created
+            if it doesn't already exist.
+        """
+        log = logging.getLogger(logger_name)
+        log.setLevel(logging.DEBUG)
+
+        # create console handler and set level to debug
+        ch = logging.FileHandler(path)
+        ch.setLevel(log_level)
+
+        # create formatter
+        formatter = logging.Formatter(self.LOG_FORMAT)
+
+        # add formatter to ch
+        ch.setFormatter(formatter)
+
+        # add ch to logger
+        log.addHandler(ch)
+
+    def register(
+        self, event_name, handler, unique_id=None, unique_id_uses_count=False
+    ):
+        """Register a handler with an event.
+
+        :type event_name: str
+        :param event_name: The name of the event.
+
+        :type handler: callable
+        :param handler: The callback to invoke when the event
+            is emitted. This object must be callable, and must
+            accept ``**kwargs``. If either of these preconditions are
+            not met, a ``ValueError`` will be raised.
+
+        :type unique_id: str
+        :param unique_id: An optional identifier to associate with the
+            registration. A unique_id can only be used once for
+            the entire session registration (unless it is unregistered).
+            This can be used to prevent an event handler from being
+            registered twice.
+
+        :type unique_id_uses_count: boolean
+        :param unique_id_uses_count: Specifies if the event should maintain
+            a count when a ``unique_id`` is registered and unregistered. The
+            event can only be completely unregistered once every register call
+            using the unique id has been matched by an ``unregister`` call.
+            If ``unique_id`` is specified, subsequent ``register``
+            calls must use the same value for ``unique_id_uses_count``
+            as the ``register`` call that first registered the event.
+
+        :raises ValueError: If the call to ``register`` uses ``unique_id``
+            but the value for ``unique_id_uses_count`` differs from the
+            ``unique_id_uses_count`` value declared by the very first
+            ``register`` call for that ``unique_id``.
+        """
+        self._events.register(
+            event_name,
+            handler,
+            unique_id,
+            unique_id_uses_count=unique_id_uses_count,
+        )
+
+    def unregister(
+        self,
+        event_name,
+        handler=None,
+        unique_id=None,
+        unique_id_uses_count=False,
+    ):
+        """Unregister a handler with an event.
+
+        :type event_name: str
+        :param event_name: The name of the event.
+
+        :type handler: callable
+        :param handler: The callback to unregister.
+
+        :type unique_id: str
+        :param unique_id: A unique identifier identifying the callback
+            to unregister. You can provide either the handler or the
+            unique_id, you do not have to provide both.
+
+        :type unique_id_uses_count: boolean
+        :param unique_id_uses_count: Specifies if the event should maintain
+            a count when a ``unique_id`` is registered and unregistered. The
+            event can only be completely unregistered once every ``register``
+            call using the ``unique_id`` has been matched by an ``unregister``
+            call. If the ``unique_id`` is specified, subsequent
+            ``unregister`` calls must use the same value for
+            ``unique_id_uses_count`` as the ``register`` call that first
+            registered the event.
+
+        :raises ValueError: If the call to ``unregister`` uses ``unique_id``
+            but the value for ``unique_id_uses_count`` differs from the
+            ``unique_id_uses_count`` value declared by the very first
+            ``register`` call for that ``unique_id``.
+        """
+        self._events.unregister(
+            event_name,
+            handler=handler,
+            unique_id=unique_id,
+            unique_id_uses_count=unique_id_uses_count,
+        )
+
+    def emit(self, event_name, **kwargs):
+        return self._events.emit(event_name, **kwargs)
+
+    def emit_first_non_none_response(self, event_name, **kwargs):
+        responses = self._events.emit(event_name, **kwargs)
+        return first_non_none_response(responses)
+
+    def get_component(self, name):
+        try:
+            return self._components.get_component(name)
+        except ValueError:
+            if name in ['endpoint_resolver', 'exceptions_factory']:
+                warnings.warn(
+                    f'Fetching the {name} component with the get_component() '
+                    'method is deprecated as the component has always been '
+                    'considered an internal interface of botocore',
+                    DeprecationWarning,
+                )
+                return self._internal_components.get_component(name)
+            raise
+
+    def _get_internal_component(self, name):
+        # While this method may be called by botocore classes outside of the
+        # Session, this method should **never** be used by a class that lives
+        # outside of botocore.
+        return self._internal_components.get_component(name)
+
+    def _register_internal_component(self, name, component):
+        # While this method may be called by botocore classes outside of the
+        # Session, this method should **never** be used by a class that lives
+        # outside of botocore.
+        return self._internal_components.register_component(name, component)
+
+    def register_component(self, name, component):
+        self._components.register_component(name, component)
+
+    def lazy_register_component(self, name, component):
+        self._components.lazy_register_component(name, component)
+
+    def create_client(
+        self,
+        service_name,
+        region_name=None,
+        api_version=None,
+        use_ssl=True,
+        verify=None,
+        endpoint_url=None,
+        aws_access_key_id=None,
+        aws_secret_access_key=None,
+        aws_session_token=None,
+        config=None,
+    ):
+        """Create a botocore client.
+
+        :type service_name: string
+        :param service_name: The name of the service for which a client will
+            be created. You can use the ``Session.get_available_services()``
+            method to get a list of all available service names.
+
+        :type region_name: string
+        :param region_name: The name of the region associated with the client.
+            A client is associated with a single region.
+
+        :type api_version: string
+        :param api_version: The API version to use. By default, botocore will
+            use the latest API version when creating a client. You only need
+            to specify this parameter if you want to use a previous API version
+            of the client.
+
+        :type use_ssl: boolean
+        :param use_ssl: Whether or not to use SSL. By default, SSL is used.
+            Note that not all services support non-ssl connections.
+
+        :type verify: boolean/string
+        :param verify: Whether or not to verify SSL certificates.
+            By default SSL certificates are verified. You can provide the
+            following values:
+
+            * False - do not validate SSL certificates. SSL will still be
+              used (unless use_ssl is False), but SSL certificates
+              will not be verified.
+            * path/to/cert/bundle.pem - A filename of the CA cert bundle to
+              use. You can specify this argument if you want to use a
+              different CA cert bundle than the one used by botocore.
+
+        :type endpoint_url: string
+        :param endpoint_url: The complete URL to use for the constructed
+            client. Normally, botocore will automatically construct the
+            appropriate URL to use when communicating with a service. You can
+            specify a complete URL (including the "http/https" scheme) to
+            override this behavior. If this value is provided, then
+            ``use_ssl`` is ignored.
+
+        :type aws_access_key_id: string
+        :param aws_access_key_id: The access key to use when creating
+            the client. This is entirely optional, and if not provided,
+            the credentials configured for the session will automatically
+            be used. You only need to provide this argument if you want
+            to override the credentials used for this specific client.
+
+        :type aws_secret_access_key: string
+        :param aws_secret_access_key: The secret key to use when creating
+            the client. Same semantics as aws_access_key_id above.
+
+        :type aws_session_token: string
+        :param aws_session_token: The session token to use when creating
+            the client. Same semantics as aws_access_key_id above.
+
+        :type config: botocore.client.Config
+        :param config: Advanced client configuration options. If a value
+            is specified in the client config, its value will take precedence
+            over environment variables and configuration values, but not over
+            a value passed explicitly to the method. If a default config
+            object is set on the session, the config object used when creating
+            the client will be the result of calling ``merge()`` on the
+            default config with the config provided to this call.
+
+        :rtype: botocore.client.BaseClient
+        :return: A botocore client instance
+
+        """
+        default_client_config = self.get_default_client_config()
+        # If a config is provided and a default config is set, then
+        # use the config resulting from merging the two.
+        if config is not None and default_client_config is not None:
+            config = default_client_config.merge(config)
+        # If a config was not provided then use the default
+        # client config from the session
+        elif default_client_config is not None:
+            config = default_client_config
+
+        region_name = self._resolve_region_name(region_name, config)
+
+        # Figure out the verify value based on the various
+        # configuration options.
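+        # For illustration (hypothetical values): an explicit
+        # ``create_client('s3', verify='/path/to/ca.pem')`` argument is kept
+        # as-is, while ``create_client('s3')`` falls through to the
+        # 'ca_bundle' config variable consulted below.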
+ if verify is None: + verify = self.get_config_variable('ca_bundle') + + if api_version is None: + api_version = self.get_config_variable('api_versions').get( + service_name, None + ) + + loader = self.get_component('data_loader') + event_emitter = self.get_component('event_emitter') + response_parser_factory = self.get_component('response_parser_factory') + if config is not None and config.signature_version is UNSIGNED: + credentials = None + elif ( + aws_access_key_id is not None and aws_secret_access_key is not None + ): + credentials = botocore.credentials.Credentials( + access_key=aws_access_key_id, + secret_key=aws_secret_access_key, + token=aws_session_token, + ) + elif self._missing_cred_vars(aws_access_key_id, aws_secret_access_key): + raise PartialCredentialsError( + provider='explicit', + cred_var=self._missing_cred_vars( + aws_access_key_id, aws_secret_access_key + ), + ) + else: + credentials = self.get_credentials() + auth_token = self.get_auth_token() + endpoint_resolver = self._get_internal_component('endpoint_resolver') + exceptions_factory = self._get_internal_component('exceptions_factory') + config_store = copy.copy(self.get_component('config_store')) + user_agent_creator = self.get_component('user_agent_creator') + # Session configuration values for the user agent string are applied + # just before each client creation because they may have been modified + # at any time between session creation and client creation. + user_agent_creator.set_session_config( + session_user_agent_name=self.user_agent_name, + session_user_agent_version=self.user_agent_version, + session_user_agent_extra=self.user_agent_extra, + ) + defaults_mode = self._resolve_defaults_mode(config, config_store) + if defaults_mode != 'legacy': + smart_defaults_factory = self._get_internal_component( + 'smart_defaults_factory' + ) + smart_defaults_factory.merge_smart_defaults( + config_store, defaults_mode, region_name + ) + + self._add_configured_endpoint_provider( + client_name=service_name, + config_store=config_store, + ) + + client_creator = botocore.client.ClientCreator( + loader, + endpoint_resolver, + self.user_agent(), + event_emitter, + retryhandler, + translate, + response_parser_factory, + exceptions_factory, + config_store, + user_agent_creator=user_agent_creator, + ) + client = client_creator.create_client( + service_name=service_name, + region_name=region_name, + is_secure=use_ssl, + endpoint_url=endpoint_url, + verify=verify, + credentials=credentials, + scoped_config=self.get_scoped_config(), + client_config=config, + api_version=api_version, + auth_token=auth_token, + ) + monitor = self._get_internal_component('monitor') + if monitor is not None: + monitor.register(client.meta.events) + return client + + def _resolve_region_name(self, region_name, config): + # Figure out the user-provided region based on the various + # configuration options. + if region_name is None: + if config and config.region_name is not None: + region_name = config.region_name + else: + region_name = self.get_config_variable('region') + + validate_region_name(region_name) + # For any client that we create in retrieving credentials + # we want to create it using the same region as specified in + # creating this client. It is important to note though that the + # credentials client is only created once per session. So if a new + # client is created with a different region, its credential resolver + # will use the region of the first client. 
However, that is not an
+        # issue as of now because the credential resolver uses only STS and
+        # the credentials returned at regional endpoints are valid across
+        # all regions in the partition.
+        self._last_client_region_used = region_name
+        return region_name
+
+    def _resolve_defaults_mode(self, client_config, config_store):
+        mode = config_store.get_config_variable('defaults_mode')
+
+        if client_config and client_config.defaults_mode:
+            mode = client_config.defaults_mode
+
+        default_config_resolver = self._get_internal_component(
+            'default_config_resolver'
+        )
+        default_modes = default_config_resolver.get_default_modes()
+        lmode = mode.lower()
+        if lmode not in default_modes:
+            raise InvalidDefaultsMode(
+                mode=mode, valid_modes=', '.join(default_modes)
+            )
+
+        return lmode
+
+    def _add_configured_endpoint_provider(self, client_name, config_store):
+        chain = ConfiguredEndpointProvider(
+            full_config=self.full_config,
+            scoped_config=self.get_scoped_config(),
+            client_name=client_name,
+        )
+        config_store.set_config_provider(
+            logical_name='endpoint_url',
+            provider=chain,
+        )
+
+    def _missing_cred_vars(self, access_key, secret_key):
+        if access_key is not None and secret_key is None:
+            return 'aws_secret_access_key'
+        if secret_key is not None and access_key is None:
+            return 'aws_access_key_id'
+        return None
+
+    def get_available_partitions(self):
+        """Lists the available partitions found on disk
+
+        :rtype: list
+        :return: Returns a list of partition names (e.g., ["aws", "aws-cn"])
+        """
+        resolver = self._get_internal_component('endpoint_resolver')
+        return resolver.get_available_partitions()
+
+    def get_partition_for_region(self, region_name):
+        """Returns the partition name of a particular region.
+
+        :type region_name: string
+        :param region_name: Name of the region to look up the partition
+            for (e.g., us-east-1).
+
+        :rtype: string
+        :return: Returns the respective partition name (e.g., aws).
+        """
+        resolver = self._get_internal_component('endpoint_resolver')
+        return resolver.get_partition_for_region(region_name)
+
+    def get_available_regions(
+        self, service_name, partition_name='aws', allow_non_regional=False
+    ):
+        """Lists the region and endpoint names of a particular partition.
+
+        :type service_name: string
+        :param service_name: Name of a service to list endpoints for (e.g.,
+            s3). This parameter accepts a service name (e.g., "elb") or
+            endpoint prefix (e.g., "elasticloadbalancing").
+
+        :type partition_name: string
+        :param partition_name: Name of the partition to limit endpoints to
+            (e.g., aws for the public AWS endpoints, aws-cn for AWS China
+            endpoints, aws-us-gov for AWS GovCloud (US) endpoints, etc.).
+
+        :type allow_non_regional: bool
+        :param allow_non_regional: Set to True to include endpoints that are
+            not regional endpoints (e.g., s3-external-1,
+            fips-us-gov-west-1, etc).
+        :return: Returns a list of endpoint names (e.g., ["us-east-1"]).
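+
+        Example (illustrative only)::
+
+            session.get_available_regions('s3', partition_name='aws')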
+ """ + resolver = self._get_internal_component('endpoint_resolver') + results = [] + try: + service_data = self.get_service_data(service_name) + endpoint_prefix = service_data['metadata'].get( + 'endpointPrefix', service_name + ) + results = resolver.get_available_endpoints( + endpoint_prefix, partition_name, allow_non_regional + ) + except UnknownServiceError: + pass + return results + + +class ComponentLocator: + """Service locator for session components.""" + + def __init__(self): + self._components = {} + self._deferred = {} + + def get_component(self, name): + if name in self._deferred: + factory = self._deferred[name] + self._components[name] = factory() + # Only delete the component from the deferred dict after + # successfully creating the object from the factory as well as + # injecting the instantiated value into the _components dict. + try: + del self._deferred[name] + except KeyError: + # If we get here, it's likely that get_component was called + # concurrently from multiple threads, and another thread + # already deleted the entry. This means the factory was + # probably called twice, but cleaning up the deferred entry + # should not crash outright. + pass + try: + return self._components[name] + except KeyError: + raise ValueError(f"Unknown component: {name}") + + def register_component(self, name, component): + self._components[name] = component + try: + del self._deferred[name] + except KeyError: + pass + + def lazy_register_component(self, name, no_arg_factory): + self._deferred[name] = no_arg_factory + try: + del self._components[name] + except KeyError: + pass + + +class SessionVarDict(MutableMapping): + def __init__(self, session, session_vars): + self._session = session + self._store = copy.copy(session_vars) + + def __getitem__(self, key): + return self._store[key] + + def __setitem__(self, key, value): + self._store[key] = value + self._update_config_store_from_session_vars(key, value) + + def __delitem__(self, key): + del self._store[key] + + def __iter__(self): + return iter(self._store) + + def __len__(self): + return len(self._store) + + def _update_config_store_from_session_vars( + self, logical_name, config_options + ): + # This is for backwards compatibility. The new preferred way to + # modify configuration logic is to use the component system to get + # the config_store component from the session, and then update + # a key with a custom config provider(s). + # This backwards compatibility method takes the old session_vars + # list of tuples and and transforms that into a set of updates to + # the config_store component. + config_chain_builder = ConfigChainFactory(session=self._session) + config_name, env_vars, default, typecast = config_options + config_store = self._session.get_component('config_store') + config_store.set_config_provider( + logical_name, + config_chain_builder.create_config_chain( + instance_name=logical_name, + env_var_names=env_vars, + config_property_names=config_name, + default=default, + conversion_func=typecast, + ), + ) + + +class SubsetChainConfigFactory: + """A class for creating backwards compatible configuration chains. + + This class can be used instead of + :class:`botocore.configprovider.ConfigChainFactory` to make it honor the + methods argument to get_config_variable. This class can be used to filter + out providers that are not in the methods tuple when creating a new config + chain. 
+ """ + + def __init__(self, session, methods, environ=None): + self._factory = ConfigChainFactory(session, environ) + self._supported_methods = methods + + def create_config_chain( + self, + instance_name=None, + env_var_names=None, + config_property_name=None, + default=None, + conversion_func=None, + ): + """Build a config chain following the standard botocore pattern. + + This config chain factory will omit any providers not in the methods + tuple provided at initialization. For example if given the tuple + ('instance', 'config',) it will not inject the environment provider + into the standard config chain. This lets the botocore session support + the custom ``methods`` argument for all the default botocore config + variables when calling ``get_config_variable``. + """ + if 'instance' not in self._supported_methods: + instance_name = None + if 'env' not in self._supported_methods: + env_var_names = None + if 'config' not in self._supported_methods: + config_property_name = None + return self._factory.create_config_chain( + instance_name=instance_name, + env_var_names=env_var_names, + config_property_names=config_property_name, + default=default, + conversion_func=conversion_func, + ) + + +def get_session(env_vars=None): + """ + Return a new session object. + """ + return Session(env_vars) diff --git a/venv/lib/python3.10/site-packages/botocore/signers.py b/venv/lib/python3.10/site-packages/botocore/signers.py new file mode 100644 index 0000000000000000000000000000000000000000..89319af10b0c26832129eeacc15ee2a4cb1326f1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/signers.py @@ -0,0 +1,873 @@ +# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import base64 +import datetime +import json +import weakref + +import botocore +import botocore.auth +from botocore.awsrequest import create_request_object, prepare_request_dict +from botocore.compat import OrderedDict +from botocore.exceptions import ( + UnknownClientMethodError, + UnknownSignatureVersionError, + UnsupportedSignatureVersionError, +) +from botocore.utils import ArnParser, datetime2timestamp + +# Keep these imported. There's pre-existing code that uses them. +from botocore.utils import fix_s3_host # noqa + + +class RequestSigner: + """ + An object to sign requests before they go out over the wire using + one of the authentication mechanisms defined in ``auth.py``. This + class fires two events scoped to a service and operation name: + + * choose-signer: Allows overriding the auth signer name. + * before-sign: Allows mutating the request before signing. + + Together these events allow for customization of the request + signing pipeline, including overrides, request path manipulation, + and disabling signing per operation. + + + :type service_id: botocore.model.ServiceId + :param service_id: The service id for the service, e.g. ``S3`` + + :type region_name: string + :param region_name: Name of the service region, e.g. ``us-east-1`` + + :type signing_name: string + :param signing_name: Service signing name. 
This is usually the + same as the service name, but can differ. E.g. + ``emr`` vs. ``elasticmapreduce``. + + :type signature_version: string + :param signature_version: Signature name like ``v4``. + + :type credentials: :py:class:`~botocore.credentials.Credentials` + :param credentials: User credentials with which to sign requests. + + :type event_emitter: :py:class:`~botocore.hooks.BaseEventHooks` + :param event_emitter: Extension mechanism to fire events. + """ + + def __init__( + self, + service_id, + region_name, + signing_name, + signature_version, + credentials, + event_emitter, + auth_token=None, + ): + self._region_name = region_name + self._signing_name = signing_name + self._signature_version = signature_version + self._credentials = credentials + self._auth_token = auth_token + self._service_id = service_id + + # We need weakref to prevent leaking memory in Python 2.6 on Linux 2.6 + self._event_emitter = weakref.proxy(event_emitter) + + @property + def region_name(self): + return self._region_name + + @property + def signature_version(self): + return self._signature_version + + @property + def signing_name(self): + return self._signing_name + + def handler(self, operation_name=None, request=None, **kwargs): + # This is typically hooked up to the "request-created" event + # from a client's event emitter. When a new request is created + # this method is invoked to sign the request. + # Don't call this method directly. + return self.sign(operation_name, request) + + def sign( + self, + operation_name, + request, + region_name=None, + signing_type='standard', + expires_in=None, + signing_name=None, + ): + """Sign a request before it goes out over the wire. + + :type operation_name: string + :param operation_name: The name of the current operation, e.g. + ``ListBuckets``. + :type request: AWSRequest + :param request: The request object to be sent over the wire. + + :type region_name: str + :param region_name: The region to sign the request for. + + :type signing_type: str + :param signing_type: The type of signing to perform. This can be one of + three possible values: + + * 'standard' - This should be used for most requests. + * 'presign-url' - This should be used when pre-signing a request. + * 'presign-post' - This should be used when pre-signing an S3 post. + + :type expires_in: int + :param expires_in: The number of seconds the presigned url is valid + for. This parameter is only valid for signing type 'presign-url'. + + :type signing_name: str + :param signing_name: The name to use for the service when signing. 
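+
+        Example (illustrative only; ``request`` is an
+        :py:class:`~botocore.awsrequest.AWSRequest`)::
+
+            signer.sign('ListBuckets', request)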
+ """ + explicit_region_name = region_name + if region_name is None: + region_name = self._region_name + + if signing_name is None: + signing_name = self._signing_name + + signature_version = self._choose_signer( + operation_name, signing_type, request.context + ) + + # Allow mutating request before signing + self._event_emitter.emit( + f'before-sign.{self._service_id.hyphenize()}.{operation_name}', + request=request, + signing_name=signing_name, + region_name=self._region_name, + signature_version=signature_version, + request_signer=self, + operation_name=operation_name, + ) + + if signature_version != botocore.UNSIGNED: + kwargs = { + 'signing_name': signing_name, + 'region_name': region_name, + 'signature_version': signature_version, + } + if expires_in is not None: + kwargs['expires'] = expires_in + signing_context = request.context.get('signing', {}) + if not explicit_region_name and signing_context.get('region'): + kwargs['region_name'] = signing_context['region'] + if signing_context.get('signing_name'): + kwargs['signing_name'] = signing_context['signing_name'] + if signing_context.get('request_credentials'): + kwargs['request_credentials'] = signing_context[ + 'request_credentials' + ] + if signing_context.get('identity_cache') is not None: + self._resolve_identity_cache( + kwargs, + signing_context['identity_cache'], + signing_context['cache_key'], + ) + try: + auth = self.get_auth_instance(**kwargs) + except UnknownSignatureVersionError as e: + if signing_type != 'standard': + raise UnsupportedSignatureVersionError( + signature_version=signature_version + ) + else: + raise e + + auth.add_auth(request) + + def _resolve_identity_cache(self, kwargs, cache, cache_key): + kwargs['identity_cache'] = cache + kwargs['cache_key'] = cache_key + + def _choose_signer(self, operation_name, signing_type, context): + """ + Allow setting the signature version via the choose-signer event. + A value of `botocore.UNSIGNED` means no signing will be performed. + + :param operation_name: The operation to sign. + :param signing_type: The type of signing that the signer is to be used + for. + :return: The signature version to sign with. + """ + signing_type_suffix_map = { + 'presign-post': '-presign-post', + 'presign-url': '-query', + } + suffix = signing_type_suffix_map.get(signing_type, '') + + # operation specific signing context takes precedent over client-level + # defaults + signature_version = context.get('auth_type') or self._signature_version + signing = context.get('signing', {}) + signing_name = signing.get('signing_name', self._signing_name) + region_name = signing.get('region', self._region_name) + if ( + signature_version is not botocore.UNSIGNED + and not signature_version.endswith(suffix) + ): + signature_version += suffix + + handler, response = self._event_emitter.emit_until_response( + f'choose-signer.{self._service_id.hyphenize()}.{operation_name}', + signing_name=signing_name, + region_name=region_name, + signature_version=signature_version, + context=context, + ) + + if response is not None: + signature_version = response + # The suffix needs to be checked again in case we get an improper + # signature version from choose-signer. 
+ if ( + signature_version is not botocore.UNSIGNED + and not signature_version.endswith(suffix) + ): + signature_version += suffix + + return signature_version + + def get_auth_instance( + self, + signing_name, + region_name, + signature_version=None, + request_credentials=None, + **kwargs, + ): + """ + Get an auth instance which can be used to sign a request + using the given signature version. + + :type signing_name: string + :param signing_name: Service signing name. This is usually the + same as the service name, but can differ. E.g. + ``emr`` vs. ``elasticmapreduce``. + + :type region_name: string + :param region_name: Name of the service region, e.g. ``us-east-1`` + + :type signature_version: string + :param signature_version: Signature name like ``v4``. + + :rtype: :py:class:`~botocore.auth.BaseSigner` + :return: Auth instance to sign a request. + """ + if signature_version is None: + signature_version = self._signature_version + + cls = botocore.auth.AUTH_TYPE_MAPS.get(signature_version) + if cls is None: + raise UnknownSignatureVersionError( + signature_version=signature_version + ) + + if cls.REQUIRES_TOKEN is True: + frozen_token = None + if self._auth_token is not None: + frozen_token = self._auth_token.get_frozen_token() + auth = cls(frozen_token) + return auth + + credentials = request_credentials or self._credentials + if getattr(cls, "REQUIRES_IDENTITY_CACHE", None) is True: + cache = kwargs["identity_cache"] + key = kwargs["cache_key"] + credentials = cache.get_credentials(key) + del kwargs["cache_key"] + + # If there's no credentials provided (i.e credentials is None), + # then we'll pass a value of "None" over to the auth classes, + # which already handle the cases where no credentials have + # been provided. + frozen_credentials = None + if credentials is not None: + frozen_credentials = credentials.get_frozen_credentials() + kwargs['credentials'] = frozen_credentials + if cls.REQUIRES_REGION: + if self._region_name is None: + raise botocore.exceptions.NoRegionError() + kwargs['region_name'] = region_name + kwargs['service_name'] = signing_name + auth = cls(**kwargs) + return auth + + # Alias get_auth for backwards compatibility. + get_auth = get_auth_instance + + def generate_presigned_url( + self, + request_dict, + operation_name, + expires_in=3600, + region_name=None, + signing_name=None, + ): + """Generates a presigned url + + :type request_dict: dict + :param request_dict: The prepared request dictionary returned by + ``botocore.awsrequest.prepare_request_dict()`` + + :type operation_name: str + :param operation_name: The operation being signed. + + :type expires_in: int + :param expires_in: The number of seconds the presigned url is valid + for. By default it expires in an hour (3600 seconds) + + :type region_name: string + :param region_name: The region name to sign the presigned url. + + :type signing_name: str + :param signing_name: The name to use for the service when signing. + + :returns: The presigned url + """ + request = create_request_object(request_dict) + self.sign( + operation_name, + request, + region_name, + 'presign-url', + expires_in, + signing_name, + ) + + request.prepare() + return request.url + + +class CloudFrontSigner: + '''A signer to create a signed CloudFront URL. 
+ + First you create a cloudfront signer based on a normalized RSA signer:: + + import rsa + def rsa_signer(message): + private_key = open('private_key.pem', 'r').read() + return rsa.sign( + message, + rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')), + 'SHA-1') # CloudFront requires SHA-1 hash + cf_signer = CloudFrontSigner(key_id, rsa_signer) + + To sign with a canned policy:: + + signed_url = cf_signer.generate_signed_url( + url, date_less_than=datetime(2015, 12, 1)) + + To sign with a custom policy:: + + signed_url = cf_signer.generate_signed_url(url, policy=my_policy) + ''' + + def __init__(self, key_id, rsa_signer): + """Create a CloudFrontSigner. + + :type key_id: str + :param key_id: The CloudFront Key Pair ID + + :type rsa_signer: callable + :param rsa_signer: An RSA signer. + Its only input parameter will be the message to be signed, + and its output will be the signed content as a binary string. + The hash algorithm needed by CloudFront is SHA-1. + """ + self.key_id = key_id + self.rsa_signer = rsa_signer + + def generate_presigned_url(self, url, date_less_than=None, policy=None): + """Creates a signed CloudFront URL based on given parameters. + + :type url: str + :param url: The URL of the protected object + + :type date_less_than: datetime + :param date_less_than: The URL will expire after that date and time + + :type policy: str + :param policy: The custom policy, possibly built by self.build_policy() + + :rtype: str + :return: The signed URL. + """ + both_args_supplied = date_less_than is not None and policy is not None + neither_arg_supplied = date_less_than is None and policy is None + if both_args_supplied or neither_arg_supplied: + e = 'Need to provide either date_less_than or policy, but not both' + raise ValueError(e) + if date_less_than is not None: + # We still need to build a canned policy for signing purpose + policy = self.build_policy(url, date_less_than) + if isinstance(policy, str): + policy = policy.encode('utf8') + if date_less_than is not None: + params = [f'Expires={int(datetime2timestamp(date_less_than))}'] + else: + params = [f"Policy={self._url_b64encode(policy).decode('utf8')}"] + signature = self.rsa_signer(policy) + params.extend( + [ + f"Signature={self._url_b64encode(signature).decode('utf8')}", + f"Key-Pair-Id={self.key_id}", + ] + ) + return self._build_url(url, params) + + def _build_url(self, base_url, extra_params): + separator = '&' if '?' in base_url else '?' + return base_url + separator + '&'.join(extra_params) + + def build_policy( + self, resource, date_less_than, date_greater_than=None, ip_address=None + ): + """A helper to build policy. + + :type resource: str + :param resource: The URL or the stream filename of the protected object + + :type date_less_than: datetime + :param date_less_than: The URL will expire after the time has passed + + :type date_greater_than: datetime + :param date_greater_than: The URL will not be valid until this time + + :type ip_address: str + :param ip_address: Use 'x.x.x.x' for an IP, or 'x.x.x.x/x' for a subnet + + :rtype: str + :return: The policy in a compact string. + """ + # Note: + # 1. Order in canned policy is significant. Special care has been taken + # to ensure the output will match the order defined by the document. + # There is also a test case to ensure that order. + # SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-canned-policy.html#private-content-canned-policy-creating-policy-statement + # 2. 
Although the order in a custom policy is not required by CloudFront,
+        #    we still use OrderedDict internally to ensure the result is stable
+        #    and also matches the canned policy requirement.
+        #    SEE: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-creating-signed-url-custom-policy.html
+        moment = int(datetime2timestamp(date_less_than))
+        condition = OrderedDict({"DateLessThan": {"AWS:EpochTime": moment}})
+        if ip_address:
+            if '/' not in ip_address:
+                ip_address += '/32'
+            condition["IpAddress"] = {"AWS:SourceIp": ip_address}
+        if date_greater_than:
+            moment = int(datetime2timestamp(date_greater_than))
+            condition["DateGreaterThan"] = {"AWS:EpochTime": moment}
+        ordered_payload = [('Resource', resource), ('Condition', condition)]
+        custom_policy = {"Statement": [OrderedDict(ordered_payload)]}
+        return json.dumps(custom_policy, separators=(',', ':'))
+
+    def _url_b64encode(self, data):
+        # Required by CloudFront. See also:
+        # http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-linux-openssl.html
+        return (
+            base64.b64encode(data)
+            .replace(b'+', b'-')
+            .replace(b'=', b'_')
+            .replace(b'/', b'~')
+        )
+
+
+def add_generate_db_auth_token(class_attributes, **kwargs):
+    class_attributes['generate_db_auth_token'] = generate_db_auth_token
+
+
+def generate_db_auth_token(self, DBHostname, Port, DBUsername, Region=None):
+    """Generates an auth token used to connect to a db with IAM credentials.
+
+    :type DBHostname: str
+    :param DBHostname: The hostname of the database to connect to.
+
+    :type Port: int
+    :param Port: The port number the database is listening on.
+
+    :type DBUsername: str
+    :param DBUsername: The username to log in as.
+
+    :type Region: str
+    :param Region: The region the database is in. If None, the client
+        region will be used.
+
+    :return: A presigned url which can be used as an auth token.
+    """
+    region = Region
+    if region is None:
+        region = self.meta.region_name
+
+    params = {
+        'Action': 'connect',
+        'DBUser': DBUsername,
+    }
+
+    request_dict = {
+        'url_path': '/',
+        'query_string': '',
+        'headers': {},
+        'body': params,
+        'method': 'GET',
+    }
+
+    # RDS requires that the scheme not be set when sent over. This can cause
+    # issues when signing because the Python url parsing libraries follow
+    # RFC 1808 closely, which states that a netloc must be introduced by `//`.
+    # Otherwise the url is presumed to be relative, and thus the whole
+    # netloc would be treated as a path component. To work around this we
+    # introduce https here and remove it once we're done processing it.
+    scheme = 'https://'
+    endpoint_url = f'{scheme}{DBHostname}:{Port}'
+    prepare_request_dict(request_dict, endpoint_url)
+    presigned_url = self._request_signer.generate_presigned_url(
+        operation_name='connect',
+        request_dict=request_dict,
+        region_name=region,
+        expires_in=900,
+        signing_name='rds-db',
+    )
+    return presigned_url[len(scheme) :]
+
+
+class S3PostPresigner:
+    def __init__(self, request_signer):
+        self._request_signer = request_signer
+
+    def generate_presigned_post(
+        self,
+        request_dict,
+        fields=None,
+        conditions=None,
+        expires_in=3600,
+        region_name=None,
+    ):
+        """Generates the url and the form fields used for a presigned s3 post
+
+        :type request_dict: dict
+        :param request_dict: The prepared request dictionary returned by
+            ``botocore.awsrequest.prepare_request_dict()``
+
+        :type fields: dict
+        :param fields: A dictionary of prefilled form fields to build on top
+            of.
+
+        :type conditions: list
+        :param conditions: A list of conditions to include in the policy. Each
+            element can be either a list or a structure. For example:
+            [
+                {"acl": "public-read"},
+                {"bucket": "mybucket"},
+                ["starts-with", "$key", "mykey"]
+            ]
+
+        :type expires_in: int
+        :param expires_in: The number of seconds the presigned post is valid
+            for.
+
+        :type region_name: string
+        :param region_name: The region name to sign the presigned post to.
+
+        :rtype: dict
+        :returns: A dictionary with two elements: ``url`` and ``fields``.
+            Url is the url to post to. Fields is a dictionary filled with
+            the form fields and respective values to use when submitting the
+            post. For example:
+
+            {'url': 'https://mybucket.s3.amazonaws.com',
+             'fields': {'acl': 'public-read',
+                        'key': 'mykey',
+                        'signature': 'mysignature',
+                        'policy': 'mybase64 encoded policy'}
+            }
+        """
+        if fields is None:
+            fields = {}
+
+        if conditions is None:
+            conditions = []
+
+        # Create the policy for the post.
+        policy = {}
+
+        # Create an expiration date for the policy
+        datetime_now = datetime.datetime.utcnow()
+        expire_date = datetime_now + datetime.timedelta(seconds=expires_in)
+        policy['expiration'] = expire_date.strftime(botocore.auth.ISO8601)
+
+        # Append all of the conditions that the user supplied.
+        policy['conditions'] = []
+        for condition in conditions:
+            policy['conditions'].append(condition)
+
+        # Store the policy and the fields in the request for signing
+        request = create_request_object(request_dict)
+        request.context['s3-presign-post-fields'] = fields
+        request.context['s3-presign-post-policy'] = policy
+
+        self._request_signer.sign(
+            'PutObject', request, region_name, 'presign-post'
+        )
+        # Return the url and the fields for the form to post.
+        return {'url': request.url, 'fields': fields}
+
+
+def add_generate_presigned_url(class_attributes, **kwargs):
+    class_attributes['generate_presigned_url'] = generate_presigned_url
+
+
+def generate_presigned_url(
+    self, ClientMethod, Params=None, ExpiresIn=3600, HttpMethod=None
+):
+    """Generate a presigned url given a client, its method, and arguments
+
+    :type ClientMethod: string
+    :param ClientMethod: The client method to presign for
+
+    :type Params: dict
+    :param Params: The parameters normally passed to
+        ``ClientMethod``.
+
+    :type ExpiresIn: int
+    :param ExpiresIn: The number of seconds the presigned url is valid
+        for. By default it expires in an hour (3600 seconds)
+
+    :type HttpMethod: string
+    :param HttpMethod: The http method to use on the generated url. By
+        default, the http method is whatever is used in the method's model.
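+
+    Example (illustrative bucket and key names)::
+
+        url = client.generate_presigned_url(
+            'get_object',
+            Params={'Bucket': 'mybucket', 'Key': 'mykey'},
+            ExpiresIn=300,
+        )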
+
+    :returns: The presigned url
+    """
+    client_method = ClientMethod
+    params = Params
+    if params is None:
+        params = {}
+    expires_in = ExpiresIn
+    http_method = HttpMethod
+    context = {
+        'is_presign_request': True,
+        'use_global_endpoint': _should_use_global_endpoint(self),
+    }
+
+    request_signer = self._request_signer
+
+    try:
+        operation_name = self._PY_TO_OP_NAME[client_method]
+    except KeyError:
+        raise UnknownClientMethodError(method_name=client_method)
+
+    operation_model = self.meta.service_model.operation_model(operation_name)
+    params = self._emit_api_params(
+        api_params=params,
+        operation_model=operation_model,
+        context=context,
+    )
+    bucket_is_arn = ArnParser.is_arn(params.get('Bucket', ''))
+    (
+        endpoint_url,
+        additional_headers,
+        properties,
+    ) = self._resolve_endpoint_ruleset(
+        operation_model,
+        params,
+        context,
+        ignore_signing_region=(not bucket_is_arn),
+    )
+
+    request_dict = self._convert_to_request_dict(
+        api_params=params,
+        operation_model=operation_model,
+        endpoint_url=endpoint_url,
+        context=context,
+        headers=additional_headers,
+        set_user_agent_header=False,
+    )
+
+    # Switch out the http method if user specified it.
+    if http_method is not None:
+        request_dict['method'] = http_method
+
+    # Generate the presigned url.
+    return request_signer.generate_presigned_url(
+        request_dict=request_dict,
+        expires_in=expires_in,
+        operation_name=operation_name,
+    )
+
+
+def add_generate_presigned_post(class_attributes, **kwargs):
+    class_attributes['generate_presigned_post'] = generate_presigned_post
+
+
+def generate_presigned_post(
+    self, Bucket, Key, Fields=None, Conditions=None, ExpiresIn=3600
+):
+    """Builds the url and the form fields used for a presigned s3 post
+
+    :type Bucket: string
+    :param Bucket: The name of the bucket to presign the post to. Note that
+        bucket related conditions should not be included in the
+        ``conditions`` parameter.
+
+    :type Key: string
+    :param Key: Key name, optionally add ${filename} to the end to
+        attach the submitted filename. Note that key related conditions and
+        fields are filled out for you and should not be included in the
+        ``Fields`` or ``Conditions`` parameter.
+
+    :type Fields: dict
+    :param Fields: A dictionary of prefilled form fields to build on top
+        of. Elements that may be included are acl, Cache-Control,
+        Content-Type, Content-Disposition, Content-Encoding, Expires,
+        success_action_redirect, redirect, success_action_status,
+        and x-amz-meta-.
+
+        Note that if a particular element is included in the fields
+        dictionary it will not be automatically added to the conditions
+        list. You must specify a condition for the element as well.
+
+    :type Conditions: list
+    :param Conditions: A list of conditions to include in the policy. Each
+        element can be either a list or a structure. For example:
+
+        [
+            {"acl": "public-read"},
+            ["content-length-range", 2, 5],
+            ["starts-with", "$success_action_redirect", ""]
+        ]
+
+        Conditions that are included may pertain to acl,
+        content-length-range, Cache-Control, Content-Type,
+        Content-Disposition, Content-Encoding, Expires,
+        success_action_redirect, redirect, success_action_status,
+        and/or x-amz-meta-.
+
+        Note that if you include a condition, you must specify
+        a valid value in the fields dictionary as well. A value will
+        not be added automatically to the fields dictionary based on the
+        conditions.
+
+    :type ExpiresIn: int
+    :param ExpiresIn: The number of seconds the presigned post
+        is valid for.
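+
+    Example (illustrative bucket and key names)::
+
+        post = client.generate_presigned_post(
+            Bucket='mybucket',
+            Key='uploads/${filename}',
+            ExpiresIn=300,
+        )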
+
+    :rtype: dict
+    :returns: A dictionary with two elements: ``url`` and ``fields``.
+        Url is the url to post to. Fields is a dictionary filled with
+        the form fields and respective values to use when submitting the
+        post. For example:
+
+        {'url': 'https://mybucket.s3.amazonaws.com',
+         'fields': {'acl': 'public-read',
+                    'key': 'mykey',
+                    'signature': 'mysignature',
+                    'policy': 'mybase64 encoded policy'}
+        }
+    """
+    bucket = Bucket
+    key = Key
+    fields = Fields
+    conditions = Conditions
+    expires_in = ExpiresIn
+
+    if fields is None:
+        fields = {}
+    else:
+        fields = fields.copy()
+
+    if conditions is None:
+        conditions = []
+
+    context = {
+        'is_presign_request': True,
+        'use_global_endpoint': _should_use_global_endpoint(self),
+    }
+
+    post_presigner = S3PostPresigner(self._request_signer)
+
+    # We choose the CreateBucket operation model because its url gets
+    # serialized to what a presign post requires.
+    operation_model = self.meta.service_model.operation_model('CreateBucket')
+    params = self._emit_api_params(
+        api_params={'Bucket': bucket},
+        operation_model=operation_model,
+        context=context,
+    )
+    bucket_is_arn = ArnParser.is_arn(params.get('Bucket', ''))
+    (
+        endpoint_url,
+        additional_headers,
+        properties,
+    ) = self._resolve_endpoint_ruleset(
+        operation_model,
+        params,
+        context,
+        ignore_signing_region=(not bucket_is_arn),
+    )
+
+    request_dict = self._convert_to_request_dict(
+        api_params=params,
+        operation_model=operation_model,
+        endpoint_url=endpoint_url,
+        context=context,
+        headers=additional_headers,
+        set_user_agent_header=False,
+    )
+
+    # Append the bucket name to the list of conditions.
+    conditions.append({'bucket': bucket})
+
+    # If the key ends with filename, the only constraint that can be
+    # imposed is if it starts with the specified prefix.
+    if key.endswith('${filename}'):
+        conditions.append(["starts-with", '$key', key[: -len('${filename}')]])
+    else:
+        conditions.append({'key': key})
+
+    # Add the key to the fields.
+    fields['key'] = key
+
+    return post_presigner.generate_presigned_post(
+        request_dict=request_dict,
+        fields=fields,
+        conditions=conditions,
+        expires_in=expires_in,
+    )
+
+
+def _should_use_global_endpoint(client):
+    if client.meta.partition != 'aws':
+        return False
+    s3_config = client.meta.config.s3
+    if s3_config:
+        if s3_config.get('use_dualstack_endpoint', False):
+            return False
+        if (
+            s3_config.get('us_east_1_regional_endpoint') == 'regional'
+            and client.meta.config.region_name == 'us-east-1'
+        ):
+            return False
+        if s3_config.get('addressing_style') == 'virtual':
+            return False
+    return True
diff --git a/venv/lib/python3.10/site-packages/botocore/stub.py b/venv/lib/python3.10/site-packages/botocore/stub.py
new file mode 100644
index 0000000000000000000000000000000000000000..018fc087068b6496acb98ea2cd5589d3ab197d6d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/stub.py
@@ -0,0 +1,433 @@
+# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
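+
+# A minimal sketch of how the presigned-POST helpers defined above in
+# signers.py are typically consumed; the bucket, key, and file names are
+# hypothetical, and the `requests` package is an assumption of this example,
+# not a botocore dependency:
+#
+#     import boto3
+#     import requests
+#
+#     s3 = boto3.client('s3')
+#     post = s3.generate_presigned_post('mybucket', 'mykey', ExpiresIn=300)
+#     with open('local-file.txt', 'rb') as f:
+#         # The 'file' field must come after the presigned form fields.
+#         response = requests.post(
+#             post['url'], data=post['fields'], files={'file': f}
+#         )
+#     response.raise_for_status()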
+import copy
+from collections import deque
+from pprint import pformat
+
+from botocore.awsrequest import AWSResponse
+from botocore.exceptions import (
+    ParamValidationError,
+    StubAssertionError,
+    StubResponseError,
+    UnStubbedResponseError,
+)
+from botocore.validate import validate_parameters
+
+
+class _ANY:
+    """
+    A helper object that compares equal to everything. Copied from
+    unittest.mock
+    """
+
+    def __eq__(self, other):
+        return True
+
+    def __ne__(self, other):
+        return False
+
+    def __repr__(self):
+        return '<ANY>'
+
+
+ANY = _ANY()
+
+
+class Stubber:
+    """
+    This class will allow you to stub out requests so you don't have to hit
+    an endpoint to write tests. Responses are returned first in, first out.
+    If operations are called out of order, or are called with no remaining
+    queued responses, an error will be raised.
+
+    **Example:**
+    ::
+        import datetime
+        import botocore.session
+        from botocore.stub import Stubber
+
+
+        s3 = botocore.session.get_session().create_client('s3')
+        stubber = Stubber(s3)
+
+        response = {
+            'IsTruncated': False,
+            'Name': 'test-bucket',
+            'MaxKeys': 1000, 'Prefix': '',
+            'Contents': [{
+                'Key': 'test.txt',
+                'ETag': '"abc123"',
+                'StorageClass': 'STANDARD',
+                'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
+                'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
+                'Size': 14814
+            }],
+            'EncodingType': 'url',
+            'ResponseMetadata': {
+                'RequestId': 'abc123',
+                'HTTPStatusCode': 200,
+                'HostId': 'abc123'
+            },
+            'Marker': ''
+        }
+
+        expected_params = {'Bucket': 'test-bucket'}
+
+        stubber.add_response('list_objects', response, expected_params)
+        stubber.activate()
+
+        service_response = s3.list_objects(Bucket='test-bucket')
+        assert service_response == response
+
+
+    This class can also be used as a context manager, which will handle
+    activation / deactivation for you.
+
+    **Example:**
+    ::
+        import datetime
+        import botocore.session
+        from botocore.stub import Stubber
+
+
+        s3 = botocore.session.get_session().create_client('s3')
+
+        response = {
+            "Owner": {
+                "ID": "foo",
+                "DisplayName": "bar"
+            },
+            "Buckets": [{
+                "CreationDate": datetime.datetime(2016, 1, 20, 22, 9),
+                "Name": "baz"
+            }]
+        }
+
+
+        with Stubber(s3) as stubber:
+            stubber.add_response('list_buckets', response, {})
+            service_response = s3.list_buckets()
+
+        assert service_response == response
+
+
+    If you have an input parameter that is a randomly generated value, or you
+    otherwise don't care about its value, you can use ``stub.ANY`` to ignore
+    it in validation.
+
+    **Example:**
+    ::
+        import datetime
+        import botocore.session
+        from botocore.stub import Stubber, ANY
+
+
+        s3 = botocore.session.get_session().create_client('s3')
+        stubber = Stubber(s3)
+
+        response = {
+            'IsTruncated': False,
+            'Name': 'test-bucket',
+            'MaxKeys': 1000, 'Prefix': '',
+            'Contents': [{
+                'Key': 'test.txt',
+                'ETag': '"abc123"',
+                'StorageClass': 'STANDARD',
+                'LastModified': datetime.datetime(2016, 1, 20, 22, 9),
+                'Owner': {'ID': 'abc123', 'DisplayName': 'myname'},
+                'Size': 14814
+            }],
+            'EncodingType': 'url',
+            'ResponseMetadata': {
+                'RequestId': 'abc123',
+                'HTTPStatusCode': 200,
+                'HostId': 'abc123'
+            },
+            'Marker': ''
+        }
+
+        expected_params = {'Bucket': ANY}
+        stubber.add_response('list_objects', response, expected_params)
+
+        with stubber:
+            service_response = s3.list_objects(Bucket='test-bucket')
+
+        assert service_response == response
+    """
+
+    def __init__(self, client):
+        """
+        :param client: The client to add your stubs to.
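+        :type client: botocore.client.BaseClient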
+ """ + self.client = client + self._event_id = 'boto_stubber' + self._expected_params_event_id = 'boto_stubber_expected_params' + self._queue = deque() + + def __enter__(self): + self.activate() + return self + + def __exit__(self, exception_type, exception_value, traceback): + self.deactivate() + + def activate(self): + """ + Activates the stubber on the client + """ + self.client.meta.events.register_first( + 'before-parameter-build.*.*', + self._assert_expected_params, + unique_id=self._expected_params_event_id, + ) + self.client.meta.events.register( + 'before-call.*.*', + self._get_response_handler, + unique_id=self._event_id, + ) + + def deactivate(self): + """ + Deactivates the stubber on the client + """ + self.client.meta.events.unregister( + 'before-parameter-build.*.*', + self._assert_expected_params, + unique_id=self._expected_params_event_id, + ) + self.client.meta.events.unregister( + 'before-call.*.*', + self._get_response_handler, + unique_id=self._event_id, + ) + + def add_response(self, method, service_response, expected_params=None): + """ + Adds a service response to the response queue. This will be validated + against the service model to ensure correctness. It should be noted, + however, that while missing attributes are often considered correct, + your code may not function properly if you leave them out. Therefore + you should always fill in every value you see in a typical response for + your particular request. + + :param method: The name of the client method to stub. + :type method: str + + :param service_response: A dict response stub. Provided parameters will + be validated against the service model. + :type service_response: dict + + :param expected_params: A dictionary of the expected parameters to + be called for the provided service response. The parameters match + the names of keyword arguments passed to that client call. If + any of the parameters differ a ``StubResponseError`` is thrown. + You can use stub.ANY to indicate a particular parameter to ignore + in validation. stub.ANY is only valid for top level params. + """ + self._add_response(method, service_response, expected_params) + + def _add_response(self, method, service_response, expected_params): + if not hasattr(self.client, method): + raise ValueError( + f"Client {self.client.meta.service_model.service_name} " + f"does not have method: {method}" + ) + + # Create a successful http response + http_response = AWSResponse(None, 200, {}, None) + + operation_name = self.client.meta.method_to_api_mapping.get(method) + self._validate_operation_response(operation_name, service_response) + + # Add the service_response to the queue for returning responses + response = { + 'operation_name': operation_name, + 'response': (http_response, service_response), + 'expected_params': expected_params, + } + self._queue.append(response) + + def add_client_error( + self, + method, + service_error_code='', + service_message='', + http_status_code=400, + service_error_meta=None, + expected_params=None, + response_meta=None, + modeled_fields=None, + ): + """ + Adds a ``ClientError`` to the response queue. + + :param method: The name of the service method to return the error on. + :type method: str + + :param service_error_code: The service error code to return, + e.g. ``NoSuchBucket`` + :type service_error_code: str + + :param service_message: The service message to return, e.g. + 'The specified bucket does not exist.' + :type service_message: str + + :param http_status_code: The HTTP status code to return, e.g. 
+        :type http_status_code: int
+
+        :param service_error_meta: Additional keys to be added to the
+            service Error.
+        :type service_error_meta: dict
+
+        :param expected_params: A dictionary of the expected parameters to
+            be called for the provided service response. The parameters match
+            the names of keyword arguments passed to that client call. If
+            any of the parameters differ a ``StubResponseError`` is thrown.
+            You can use stub.ANY to indicate a particular parameter to ignore
+            in validation.
+
+        :param response_meta: Additional keys to be added to the
+            response's ResponseMetadata.
+        :type response_meta: dict
+
+        :param modeled_fields: Additional keys to be added to the response
+            based on fields that are modeled for the particular error code.
+            These keys will be validated against the particular error shape
+            designated by the error code.
+        :type modeled_fields: dict
+
+        """
+        http_response = AWSResponse(None, http_status_code, {}, None)
+
+        # We don't look to the model to build this because the caller would
+        # need to know the details of what the HTTP body would need to
+        # look like.
+        parsed_response = {
+            'ResponseMetadata': {'HTTPStatusCode': http_status_code},
+            'Error': {'Message': service_message, 'Code': service_error_code},
+        }
+
+        if service_error_meta is not None:
+            parsed_response['Error'].update(service_error_meta)
+
+        if response_meta is not None:
+            parsed_response['ResponseMetadata'].update(response_meta)
+
+        if modeled_fields is not None:
+            service_model = self.client.meta.service_model
+            shape = service_model.shape_for_error_code(service_error_code)
+            self._validate_response(shape, modeled_fields)
+            parsed_response.update(modeled_fields)
+
+        operation_name = self.client.meta.method_to_api_mapping.get(method)
+        # Note that we do not allow for expected_params while
+        # adding errors into the queue yet.
+        response = {
+            'operation_name': operation_name,
+            'response': (http_response, parsed_response),
+            'expected_params': expected_params,
+        }
+        self._queue.append(response)
+
+    def assert_no_pending_responses(self):
+        """
+        Asserts that all expected calls were made.
+        """
+        remaining = len(self._queue)
+        if remaining != 0:
+            raise AssertionError(f"{remaining} responses remaining in queue.")
+
+    def _assert_expected_call_order(self, model, params):
+        if not self._queue:
+            raise UnStubbedResponseError(
+                operation_name=model.name,
+                reason=(
+                    'Unexpected API Call: A call was made but no additional '
+                    'calls expected. Either the API Call was not stubbed or '
+                    'it was called multiple times.'
+                ),
+            )
+
+        name = self._queue[0]['operation_name']
+        if name != model.name:
+            raise StubResponseError(
+                operation_name=model.name,
+                reason=f'Operation mismatch: found response for {name}.',
+            )
+
+    def _get_response_handler(self, model, params, context, **kwargs):
+        self._assert_expected_call_order(model, params)
+        # Pop off the entire response once everything has been validated
+        return self._queue.popleft()['response']
+
+    def _assert_expected_params(self, model, params, context, **kwargs):
+        if self._should_not_stub(context):
+            return
+        self._assert_expected_call_order(model, params)
+        expected_params = self._queue[0]['expected_params']
+        if expected_params is None:
+            return
+
+        # Validate that the parameters are equal
+        for param, value in expected_params.items():
+            if param not in params or expected_params[param] != params[param]:
+                raise StubAssertionError(
+                    operation_name=model.name,
+                    reason=(
+                        f'Expected parameters:\n{pformat(expected_params)},\n'
+                        f'but received:\n{pformat(params)}'
+                    ),
+                )
+
+        # Ensure there are no extra params hanging around
+        if sorted(expected_params.keys()) != sorted(params.keys()):
+            raise StubAssertionError(
+                operation_name=model.name,
+                reason=(
+                    f'Expected parameters:\n{pformat(expected_params)},\n'
+                    f'but received:\n{pformat(params)}'
+                ),
+            )
+
+    def _should_not_stub(self, context):
+        # Do not include presign requests when processing stubbed client calls
+        # as a presign request will never have an HTTP request sent over the
+        # wire for it and therefore will not receive a response back.
+        if context and context.get('is_presign_request'):
+            return True
+
+    def _validate_operation_response(self, operation_name, service_response):
+        service_model = self.client.meta.service_model
+        operation_model = service_model.operation_model(operation_name)
+        output_shape = operation_model.output_shape
+
+        # Remove ResponseMetadata so that the validator doesn't attempt to
+        # perform validation on it.
+        response = service_response
+        if 'ResponseMetadata' in response:
+            response = copy.copy(service_response)
+            del response['ResponseMetadata']
+
+        self._validate_response(output_shape, response)
+
+    def _validate_response(self, shape, response):
+        if shape is not None:
+            validate_parameters(response, shape)
+        elif response:
+            # If the output shape is None, that means the response should be
+            # empty apart from ResponseMetadata
+            raise ParamValidationError(
+                report=(
+                    "Service response should only contain ResponseMetadata."
+                )
+            )
diff --git a/venv/lib/python3.10/site-packages/botocore/tokens.py b/venv/lib/python3.10/site-packages/botocore/tokens.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e61694611d384a02e34dae4a084d3ef2c76066c
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/tokens.py
@@ -0,0 +1,330 @@
+# Copyright 2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
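+
+# A minimal sketch of the error-path testing API provided by stub.py above;
+# the client, error code, and message below are hypothetical:
+#
+#     import botocore.session
+#     from botocore.stub import Stubber
+#
+#     s3 = botocore.session.get_session().create_client('s3')
+#     stubber = Stubber(s3)
+#     stubber.add_client_error(
+#         'head_object',
+#         service_error_code='404',
+#         service_message='Not Found',
+#         http_status_code=404,
+#     )
+#     with stubber:
+#         try:
+#             s3.head_object(Bucket='mybucket', Key='missing')
+#         except s3.exceptions.ClientError:
+#             pass
+#     stubber.assert_no_pending_responses()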
+import json +import logging +import os +import threading +from datetime import datetime, timedelta +from typing import NamedTuple, Optional + +import dateutil.parser +from dateutil.tz import tzutc + +from botocore import UNSIGNED +from botocore.compat import total_seconds +from botocore.config import Config +from botocore.exceptions import ( + ClientError, + InvalidConfigError, + TokenRetrievalError, +) +from botocore.utils import CachedProperty, JSONFileCache, SSOTokenLoader + +logger = logging.getLogger(__name__) + + +def _utc_now(): + return datetime.now(tzutc()) + + +def create_token_resolver(session): + providers = [ + SSOTokenProvider(session), + ] + return TokenProviderChain(providers=providers) + + +def _serialize_utc_timestamp(obj): + if isinstance(obj, datetime): + return obj.strftime("%Y-%m-%dT%H:%M:%SZ") + return obj + + +def _sso_json_dumps(obj): + return json.dumps(obj, default=_serialize_utc_timestamp) + + +class FrozenAuthToken(NamedTuple): + token: str + expiration: Optional[datetime] = None + + +class DeferredRefreshableToken: + # The time at which we'll attempt to refresh, but not block if someone else + # is refreshing. + _advisory_refresh_timeout = 15 * 60 + # The time at which all threads will block waiting for a refreshed token + _mandatory_refresh_timeout = 10 * 60 + # Refresh at most once every minute to avoid blocking every request + _attempt_timeout = 60 + + def __init__(self, method, refresh_using, time_fetcher=_utc_now): + self._time_fetcher = time_fetcher + self._refresh_using = refresh_using + self.method = method + + # The frozen token is protected by this lock + self._refresh_lock = threading.Lock() + self._frozen_token = None + self._next_refresh = None + + def get_frozen_token(self): + self._refresh() + return self._frozen_token + + def _refresh(self): + # If we don't need to refresh just return + refresh_type = self._should_refresh() + if not refresh_type: + return None + + # Block for refresh if we're in the mandatory refresh window + block_for_refresh = refresh_type == "mandatory" + if self._refresh_lock.acquire(block_for_refresh): + try: + self._protected_refresh() + finally: + self._refresh_lock.release() + + def _protected_refresh(self): + # This should only be called after acquiring the refresh lock + # Another thread may have already refreshed, double check refresh + refresh_type = self._should_refresh() + if not refresh_type: + return None + + try: + now = self._time_fetcher() + self._next_refresh = now + timedelta(seconds=self._attempt_timeout) + self._frozen_token = self._refresh_using() + except Exception: + logger.warning( + "Refreshing token failed during the %s refresh period.", + refresh_type, + exc_info=True, + ) + if refresh_type == "mandatory": + # This refresh was mandatory, error must be propagated back + raise + + if self._is_expired(): + # Fresh credentials should never be expired + raise TokenRetrievalError( + provider=self.method, + error_msg="Token has expired and refresh failed", + ) + + def _is_expired(self): + if self._frozen_token is None: + return False + + expiration = self._frozen_token.expiration + remaining = total_seconds(expiration - self._time_fetcher()) + return remaining <= 0 + + def _should_refresh(self): + if self._frozen_token is None: + # We don't have a token yet, mandatory refresh + return "mandatory" + + expiration = self._frozen_token.expiration + if expiration is None: + # No expiration, so assume we don't need to refresh. 
+ return None + + now = self._time_fetcher() + if now < self._next_refresh: + return None + + remaining = total_seconds(expiration - now) + + if remaining < self._mandatory_refresh_timeout: + return "mandatory" + elif remaining < self._advisory_refresh_timeout: + return "advisory" + + return None + + +class TokenProviderChain: + def __init__(self, providers=None): + if providers is None: + providers = [] + self._providers = providers + + def load_token(self): + for provider in self._providers: + token = provider.load_token() + if token is not None: + return token + return None + + +class SSOTokenProvider: + METHOD = "sso" + _REFRESH_WINDOW = 15 * 60 + _SSO_TOKEN_CACHE_DIR = os.path.expanduser( + os.path.join("~", ".aws", "sso", "cache") + ) + _SSO_CONFIG_VARS = [ + "sso_start_url", + "sso_region", + ] + _GRANT_TYPE = "refresh_token" + DEFAULT_CACHE_CLS = JSONFileCache + + def __init__( + self, session, cache=None, time_fetcher=_utc_now, profile_name=None + ): + self._session = session + if cache is None: + cache = self.DEFAULT_CACHE_CLS( + self._SSO_TOKEN_CACHE_DIR, + dumps_func=_sso_json_dumps, + ) + self._now = time_fetcher + self._cache = cache + self._token_loader = SSOTokenLoader(cache=self._cache) + self._profile_name = ( + profile_name + or self._session.get_config_variable("profile") + or 'default' + ) + + def _load_sso_config(self): + loaded_config = self._session.full_config + profiles = loaded_config.get("profiles", {}) + sso_sessions = loaded_config.get("sso_sessions", {}) + profile_config = profiles.get(self._profile_name, {}) + + if "sso_session" not in profile_config: + return + + sso_session_name = profile_config["sso_session"] + sso_config = sso_sessions.get(sso_session_name, None) + + if not sso_config: + error_msg = ( + f'The profile "{self._profile_name}" is configured to use the SSO ' + f'token provider but the "{sso_session_name}" sso_session ' + f"configuration does not exist." + ) + raise InvalidConfigError(error_msg=error_msg) + + missing_configs = [] + for var in self._SSO_CONFIG_VARS: + if var not in sso_config: + missing_configs.append(var) + + if missing_configs: + error_msg = ( + f'The profile "{self._profile_name}" is configured to use the SSO ' + f"token provider but is missing the following configuration: " + f"{missing_configs}." 
+ ) + raise InvalidConfigError(error_msg=error_msg) + + return { + "session_name": sso_session_name, + "sso_region": sso_config["sso_region"], + "sso_start_url": sso_config["sso_start_url"], + } + + @CachedProperty + def _sso_config(self): + return self._load_sso_config() + + @CachedProperty + def _client(self): + config = Config( + region_name=self._sso_config["sso_region"], + signature_version=UNSIGNED, + ) + return self._session.create_client("sso-oidc", config=config) + + def _attempt_create_token(self, token): + response = self._client.create_token( + grantType=self._GRANT_TYPE, + clientId=token["clientId"], + clientSecret=token["clientSecret"], + refreshToken=token["refreshToken"], + ) + expires_in = timedelta(seconds=response["expiresIn"]) + new_token = { + "startUrl": self._sso_config["sso_start_url"], + "region": self._sso_config["sso_region"], + "accessToken": response["accessToken"], + "expiresAt": self._now() + expires_in, + # Cache the registration alongside the token + "clientId": token["clientId"], + "clientSecret": token["clientSecret"], + "registrationExpiresAt": token["registrationExpiresAt"], + } + if "refreshToken" in response: + new_token["refreshToken"] = response["refreshToken"] + logger.info("SSO Token refresh succeeded") + return new_token + + def _refresh_access_token(self, token): + keys = ( + "refreshToken", + "clientId", + "clientSecret", + "registrationExpiresAt", + ) + missing_keys = [k for k in keys if k not in token] + if missing_keys: + msg = f"Unable to refresh SSO token: missing keys: {missing_keys}" + logger.info(msg) + return None + + expiry = dateutil.parser.parse(token["registrationExpiresAt"]) + if total_seconds(expiry - self._now()) <= 0: + logger.info(f"SSO token registration expired at {expiry}") + return None + + try: + return self._attempt_create_token(token) + except ClientError: + logger.warning("SSO token refresh attempt failed", exc_info=True) + return None + + def _refresher(self): + start_url = self._sso_config["sso_start_url"] + session_name = self._sso_config["session_name"] + logger.info(f"Loading cached SSO token for {session_name}") + token_dict = self._token_loader(start_url, session_name=session_name) + expiration = dateutil.parser.parse(token_dict["expiresAt"]) + logger.debug(f"Cached SSO token expires at {expiration}") + + remaining = total_seconds(expiration - self._now()) + if remaining < self._REFRESH_WINDOW: + new_token_dict = self._refresh_access_token(token_dict) + if new_token_dict is not None: + token_dict = new_token_dict + expiration = token_dict["expiresAt"] + self._token_loader.save_token( + start_url, token_dict, session_name=session_name + ) + + return FrozenAuthToken( + token_dict["accessToken"], expiration=expiration + ) + + def load_token(self): + if self._sso_config is None: + return None + + return DeferredRefreshableToken( + self.METHOD, self._refresher, time_fetcher=self._now + ) diff --git a/venv/lib/python3.10/site-packages/botocore/translate.py b/venv/lib/python3.10/site-packages/botocore/translate.py new file mode 100644 index 0000000000000000000000000000000000000000..ecfe3bcaf46629a3d75bef81a60ac45e76cfa4db --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/translate.py @@ -0,0 +1,78 @@ +# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/ +# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+import copy
+
+from botocore.utils import merge_dicts
+
+
+def build_retry_config(
+    endpoint_prefix, retry_model, definitions, client_retry_config=None
+):
+    service_config = retry_model.get(endpoint_prefix, {})
+    resolve_references(service_config, definitions)
+    # We want to merge the global defaults with the service specific
+    # defaults, with the service specific defaults taking precedence.
+    # So we use the global defaults as the base.
+    #
+    # A deepcopy is done on the retry defaults because it ensures the
+    # retry model has no chance of getting mutated when the service specific
+    # configuration or client retry config is merged in.
+    final_retry_config = {
+        '__default__': copy.deepcopy(retry_model.get('__default__', {}))
+    }
+    resolve_references(final_retry_config, definitions)
+    # Then merge the service specific config on top.
+    merge_dicts(final_retry_config, service_config)
+    if client_retry_config is not None:
+        _merge_client_retry_config(final_retry_config, client_retry_config)
+    return final_retry_config
+
+
+def _merge_client_retry_config(retry_config, client_retry_config):
+    max_retry_attempts_override = client_retry_config.get('max_attempts')
+    if max_retry_attempts_override is not None:
+        # In the retry config, max_attempts refers to the maximum number
+        # of requests that will be made in general. However, for the
+        # client's retry config it refers to how many retry attempts will
+        # be made at most. So to translate this number from the client
+        # config, one is added to convert it to the maximum number of
+        # requests that will be made by including the initial request.
+        #
+        # It is also important to note that if we ever support per operation
+        # configuration in the retry model via the client, we will need to
+        # revisit this logic to make sure max_attempts gets applied
+        # per operation.
+        retry_config['__default__']['max_attempts'] = (
+            max_retry_attempts_override + 1
+        )
+
+
+def resolve_references(config, definitions):
+    """Recursively replace $ref keys.
+
+    To cut down on duplication, common definitions can be declared
+    (and passed in via the ``definitions`` argument) and then
+    referenced as {"$ref": "name"}; when this happens, the reference
+    dict is replaced with the value from the ``definitions`` dict.
+
+    This is done recursively.
+
+    """
+    for key, value in config.items():
+        if isinstance(value, dict):
+            if len(value) == 1 and list(value.keys())[0] == '$ref':
+                # Then we need to resolve this reference.
+                config[key] = definitions[list(value.values())[0]]
+            else:
+                resolve_references(value, definitions)
diff --git a/venv/lib/python3.10/site-packages/botocore/useragent.py b/venv/lib/python3.10/site-packages/botocore/useragent.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9a611910b4b2ecd1bf646f14a7b30d008bb11fe
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/useragent.py
@@ -0,0 +1,504 @@
+# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License.
A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+"""
+NOTE: All classes and functions in this module are considered private and are
+subject to abrupt breaking changes. Please do not use them directly.
+
+To modify the User-Agent header sent by botocore, use one of these
+configuration options:
+* The ``AWS_SDK_UA_APP_ID`` environment variable.
+* The ``sdk_ua_app_id`` setting in the shared AWS config file.
+* The ``user_agent_appid`` field in the :py:class:`botocore.config.Config`.
+* The ``user_agent_extra`` field in the :py:class:`botocore.config.Config`.
+
+"""
+
+import os
+import platform
+from copy import copy
+from string import ascii_letters, digits
+from typing import NamedTuple, Optional
+
+from botocore import __version__ as botocore_version
+from botocore.compat import HAS_CRT
+
+_USERAGENT_ALLOWED_CHARACTERS = ascii_letters + digits + "!$%&'*+-.^_`|~"
+_USERAGENT_ALLOWED_OS_NAMES = (
+    'windows',
+    'linux',
+    'macos',
+    'android',
+    'ios',
+    'watchos',
+    'tvos',
+    'other',
+)
+_USERAGENT_PLATFORM_NAME_MAPPINGS = {'darwin': 'macos'}
+# The name by which botocore is identified in the User-Agent header. While most
+# AWS SDKs follow a naming pattern of "aws-sdk-*", botocore and boto3 continue
+# using their existing values. Uses uppercase "B" with all other characters
+# lowercase.
+_USERAGENT_SDK_NAME = 'Botocore'
+
+
+def sanitize_user_agent_string_component(raw_str, allow_hash):
+    """Replaces all disallowed characters in the string with a dash ("-").
+
+    Allowed characters are ASCII alphanumerics and ``!$%&'*+-.^_`|~``. If
+    ``allow_hash`` is ``True``, "#" is also allowed.
+
+    :type raw_str: str
+    :param raw_str: The input string to be sanitized.
+
+    :type allow_hash: bool
+    :param allow_hash: Whether "#" is considered an allowed character.
+    """
+    return ''.join(
+        c
+        if c in _USERAGENT_ALLOWED_CHARACTERS or (allow_hash and c == '#')
+        else '-'
+        for c in raw_str
+    )
+
+
+class UserAgentComponent(NamedTuple):
+    """
+    Component of a Botocore User-Agent header string in the standard format.
+
+    Each component consists of a prefix, a name, and a value. In the string
+    representation these are combined in the format ``prefix/name#value``.
+
+    This class is considered private and is subject to abrupt breaking changes.
+    """
+
+    prefix: str
+    name: str
+    value: Optional[str] = None
+
+    def to_string(self):
+        """Create string like 'prefix/name#value' from a UserAgentComponent."""
+        clean_prefix = sanitize_user_agent_string_component(
+            self.prefix, allow_hash=True
+        )
+        clean_name = sanitize_user_agent_string_component(
+            self.name, allow_hash=False
+        )
+        if self.value is None or self.value == '':
+            return f'{clean_prefix}/{clean_name}'
+        clean_value = sanitize_user_agent_string_component(
+            self.value, allow_hash=True
+        )
+        return f'{clean_prefix}/{clean_name}#{clean_value}'
+
+
+class RawStringUserAgentComponent:
+    """
+    UserAgentComponent interface wrapper around ``str``.
+
+    Use for User-Agent header components that are not constructed from
+    prefix+name+value but instead are provided as strings. No sanitization is
+    performed.
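+
+    For example, ``RawStringUserAgentComponent('ua/2.0').to_string()``
+    returns ``'ua/2.0'`` unchanged.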
+ """ + + def __init__(self, value): + self._value = value + + def to_string(self): + return self._value + + +# This is not a public interface and is subject to abrupt breaking changes. +# Any usage is not advised or supported in external code bases. +try: + from botocore.customizations.useragent import modify_components +except ImportError: + # Default implementation that returns unmodified User-Agent components. + def modify_components(components): + return components + + +class UserAgentString: + """ + Generator for AWS SDK User-Agent header strings. + + The User-Agent header format contains information from session, client, and + request context. ``UserAgentString`` provides methods for collecting the + information and ``to_string`` for assembling it into the standardized + string format. + + Example usage: + + ua_session = UserAgentString.from_environment() + ua_session.set_session_config(...) + ua_client = ua_session.with_client_config(Config(...)) + ua_string = ua_request.to_string() + + For testing or when information from all sources is available at the same + time, the methods can be chained: + + ua_string = ( + UserAgentString + .from_environment() + .set_session_config(...) + .with_client_config(Config(...)) + .to_string() + ) + + """ + + def __init__( + self, + platform_name, + platform_version, + platform_machine, + python_version, + python_implementation, + execution_env, + crt_version=None, + ): + """ + :type platform_name: str + :param platform_name: Name of the operating system or equivalent + platform name. Should be sourced from :py:meth:`platform.system`. + :type platform_version: str + :param platform_version: Version of the operating system or equivalent + platform name. Should be sourced from :py:meth:`platform.version`. + :type platform_machine: str + :param platform_version: Processor architecture or machine type. For + example "x86_64". Should be sourced from :py:meth:`platform.machine`. + :type python_version: str + :param python_version: Version of the python implementation as str. + Should be sourced from :py:meth:`platform.python_version`. + :type python_implementation: str + :param python_implementation: Name of the python implementation. + Should be sourced from :py:meth:`platform.python_implementation`. + :type execution_env: str + :param execution_env: The value of the AWS execution environment. + Should be sourced from the ``AWS_EXECUTION_ENV` environment + variable. + :type crt_version: str + :param crt_version: Version string of awscrt package, if installed. 
+ """ + self._platform_name = platform_name + self._platform_version = platform_version + self._platform_machine = platform_machine + self._python_version = python_version + self._python_implementation = python_implementation + self._execution_env = execution_env + self._crt_version = crt_version + + # Components that can be added with ``set_session_config()`` + self._session_user_agent_name = None + self._session_user_agent_version = None + self._session_user_agent_extra = None + + self._client_config = None + self._uses_paginator = None + self._uses_waiter = None + self._uses_resource = None + + @classmethod + def from_environment(cls): + crt_version = None + if HAS_CRT: + crt_version = _get_crt_version() or 'Unknown' + return cls( + platform_name=platform.system(), + platform_version=platform.release(), + platform_machine=platform.machine(), + python_version=platform.python_version(), + python_implementation=platform.python_implementation(), + execution_env=os.environ.get('AWS_EXECUTION_ENV'), + crt_version=crt_version, + ) + + def set_session_config( + self, + session_user_agent_name, + session_user_agent_version, + session_user_agent_extra, + ): + """ + Set the user agent configuration values that apply at session level. + + :param user_agent_name: The user agent name configured in the + :py:class:`botocore.session.Session` object. For backwards + compatibility, this will always be at the beginning of the + User-Agent string, together with ``user_agent_version``. + :param user_agent_version: The user agent version configured in the + :py:class:`botocore.session.Session` object. + :param user_agent_extra: The user agent "extra" configured in the + :py:class:`botocore.session.Session` object. + """ + self._session_user_agent_name = session_user_agent_name + self._session_user_agent_version = session_user_agent_version + self._session_user_agent_extra = session_user_agent_extra + return self + + def with_client_config(self, client_config): + """ + Create a copy with all original values and client-specific values. + + :type client_config: botocore.config.Config + :param client_config: The client configuration object. + """ + cp = copy(self) + cp._client_config = client_config + return cp + + def to_string(self): + """ + Build User-Agent header string from the object's properties. + """ + config_ua_override = None + if self._client_config: + if hasattr(self._client_config, '_supplied_user_agent'): + config_ua_override = self._client_config._supplied_user_agent + else: + config_ua_override = self._client_config.user_agent + + if config_ua_override is not None: + return self._build_legacy_ua_string(config_ua_override) + + components = [ + *self._build_sdk_metadata(), + RawStringUserAgentComponent('ua/2.0'), + *self._build_os_metadata(), + *self._build_architecture_metadata(), + *self._build_language_metadata(), + *self._build_execution_env_metadata(), + *self._build_feature_metadata(), + *self._build_config_metadata(), + *self._build_app_id(), + *self._build_extra(), + ] + + components = modify_components(components) + + return ' '.join([comp.to_string() for comp in components]) + + def _build_sdk_metadata(self): + """ + Build the SDK name and version component of the User-Agent header. + + For backwards-compatibility both session-level and client-level config + of custom tool names are honored. If this removes the Botocore + information from the start of the string, Botocore's name and version + are included as a separate field with "md" prefix. 
+ """ + sdk_md = [] + if ( + self._session_user_agent_name + and self._session_user_agent_version + and ( + self._session_user_agent_name != _USERAGENT_SDK_NAME + or self._session_user_agent_version != botocore_version + ) + ): + sdk_md.extend( + [ + UserAgentComponent( + self._session_user_agent_name, + self._session_user_agent_version, + ), + UserAgentComponent( + 'md', _USERAGENT_SDK_NAME, botocore_version + ), + ] + ) + else: + sdk_md.append( + UserAgentComponent(_USERAGENT_SDK_NAME, botocore_version) + ) + + if self._crt_version is not None: + sdk_md.append( + UserAgentComponent('md', 'awscrt', self._crt_version) + ) + + return sdk_md + + def _build_os_metadata(self): + """ + Build the OS/platform components of the User-Agent header string. + + For recognized platform names that match or map to an entry in the list + of standardized OS names, a single component with prefix "os" is + returned. Otherwise, one component "os/other" is returned and a second + with prefix "md" and the raw platform name. + + String representations of example return values: + * ``os/macos#10.13.6`` + * ``os/linux`` + * ``os/other`` + * ``os/other md/foobar#1.2.3`` + """ + if self._platform_name is None: + return [UserAgentComponent('os', 'other')] + + plt_name_lower = self._platform_name.lower() + if plt_name_lower in _USERAGENT_ALLOWED_OS_NAMES: + os_family = plt_name_lower + elif plt_name_lower in _USERAGENT_PLATFORM_NAME_MAPPINGS: + os_family = _USERAGENT_PLATFORM_NAME_MAPPINGS[plt_name_lower] + else: + os_family = None + + if os_family is not None: + return [ + UserAgentComponent('os', os_family, self._platform_version) + ] + else: + return [ + UserAgentComponent('os', 'other'), + UserAgentComponent( + 'md', self._platform_name, self._platform_version + ), + ] + + def _build_architecture_metadata(self): + """ + Build architecture component of the User-Agent header string. + + Returns the machine type with prefix "md" and name "arch", if one is + available. Common values include "x86_64", "arm64", "i386". + """ + if self._platform_machine: + return [ + UserAgentComponent( + 'md', 'arch', self._platform_machine.lower() + ) + ] + return [] + + def _build_language_metadata(self): + """ + Build the language components of the User-Agent header string. + + Returns the Python version in a component with prefix "lang" and name + "python". The Python implementation (e.g. CPython, PyPy) is returned as + separate metadata component with prefix "md" and name "pyimpl". + + String representation of an example return value: + ``lang/python#3.10.4 md/pyimpl#CPython`` + """ + lang_md = [ + UserAgentComponent('lang', 'python', self._python_version), + ] + if self._python_implementation: + lang_md.append( + UserAgentComponent('md', 'pyimpl', self._python_implementation) + ) + return lang_md + + def _build_execution_env_metadata(self): + """ + Build the execution environment component of the User-Agent header. + + Returns a single component prefixed with "exec-env", usually sourced + from the environment variable AWS_EXECUTION_ENV. + """ + if self._execution_env: + return [UserAgentComponent('exec-env', self._execution_env)] + else: + return [] + + def _build_feature_metadata(self): + """ + Build the features components of the User-Agent header string. + + Botocore currently does not report any features. This may change in a + future version. + """ + return [] + + def _build_config_metadata(self): + """ + Build the configuration components of the User-Agent header string. 
+
+        Returns a list of components with prefix "cfg" followed by the config
+        setting name and its value. Tracked configuration settings may be
+        added or removed in future versions.
+        """
+        if not self._client_config or not self._client_config.retries:
+            return []
+        retry_mode = self._client_config.retries.get('mode')
+        cfg_md = [UserAgentComponent('cfg', 'retry-mode', retry_mode)]
+        if self._client_config.endpoint_discovery_enabled:
+            cfg_md.append(UserAgentComponent('cfg', 'endpoint-discovery'))
+        return cfg_md
+
+    def _build_app_id(self):
+        """
+        Build the app component of the User-Agent header string.
+
+        Returns a single component with prefix "app" and value sourced from
+        the ``user_agent_appid`` field in :py:class:`botocore.config.Config`,
+        the ``sdk_ua_app_id`` setting in the shared configuration file, or the
+        ``AWS_SDK_UA_APP_ID`` environment variable. These are the recommended
+        ways for apps built with Botocore to insert their identifier into the
+        User-Agent header.
+        """
+        if self._client_config and self._client_config.user_agent_appid:
+            return [
+                UserAgentComponent('app', self._client_config.user_agent_appid)
+            ]
+        else:
+            return []
+
+    def _build_extra(self):
+        """User agent string components based on legacy "extra" settings.
+
+        Creates components from the session-level and client-level
+        ``user_agent_extra`` setting, if present. Both are passed through
+        verbatim and should be appended at the end of the string.
+
+        The preferred way to inject application-specific information into
+        botocore's User-Agent header string is the ``user_agent_appid`` field
+        in :py:class:`botocore.config.Config`. The ``AWS_SDK_UA_APP_ID``
+        environment variable and the ``sdk_ua_app_id`` configuration file
+        setting are alternative ways to set the ``user_agent_appid`` config.
+        """
+        extra = []
+        if self._session_user_agent_extra:
+            extra.append(
+                RawStringUserAgentComponent(self._session_user_agent_extra)
+            )
+        if self._client_config and self._client_config.user_agent_extra:
+            extra.append(
+                RawStringUserAgentComponent(
+                    self._client_config.user_agent_extra
+                )
+            )
+        return extra
+
+    def _build_legacy_ua_string(self, config_ua_override):
+        components = [config_ua_override]
+        if self._session_user_agent_extra:
+            components.append(self._session_user_agent_extra)
+        if self._client_config.user_agent_extra:
+            components.append(self._client_config.user_agent_extra)
+        return ' '.join(components)
+
+
+def _get_crt_version():
+    """
+    This function is considered private and is subject to abrupt breaking
+    changes.
+    """
+    try:
+        import awscrt
+
+        return awscrt.__version__
+    except AttributeError:
+        return None
diff --git a/venv/lib/python3.10/site-packages/botocore/utils.py b/venv/lib/python3.10/site-packages/botocore/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..0efd7ef32597ac7e9a970383eb685cfe5a74d49e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/botocore/utils.py
@@ -0,0 +1,3622 @@
+# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+# http://aws.amazon.com/apache2.0/
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
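+
+# Illustrative sketch of how the User-Agent pieces in useragent.py above
+# render; the component values below are hypothetical:
+#
+#     from botocore.useragent import UserAgentComponent
+#
+#     UserAgentComponent('md', 'pyimpl', 'CPython').to_string()
+#     # -> 'md/pyimpl#CPython'
+#     UserAgentComponent('app', 'my app').to_string()
+#     # -> 'app/my-app' (the space is sanitized to a dash)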
+import base64 +import binascii +import datetime +import email.message +import functools +import hashlib +import io +import logging +import os +import random +import re +import socket +import time +import warnings +import weakref +from datetime import datetime as _DatetimeClass +from ipaddress import ip_address +from pathlib import Path +from urllib.request import getproxies, proxy_bypass + +import dateutil.parser +from dateutil.tz import tzutc +from urllib3.exceptions import LocationParseError + +import botocore +import botocore.awsrequest +import botocore.httpsession + +# IP Regexes retained for backwards compatibility +from botocore.compat import HEX_PAT # noqa: F401 +from botocore.compat import IPV4_PAT # noqa: F401 +from botocore.compat import IPV6_ADDRZ_PAT # noqa: F401 +from botocore.compat import IPV6_PAT # noqa: F401 +from botocore.compat import LS32_PAT # noqa: F401 +from botocore.compat import UNRESERVED_PAT # noqa: F401 +from botocore.compat import ZONE_ID_PAT # noqa: F401 +from botocore.compat import ( + HAS_CRT, + IPV4_RE, + IPV6_ADDRZ_RE, + MD5_AVAILABLE, + UNSAFE_URL_CHARS, + OrderedDict, + get_md5, + get_tzinfo_options, + json, + quote, + urlparse, + urlsplit, + urlunsplit, + zip_longest, +) +from botocore.exceptions import ( + ClientError, + ConfigNotFound, + ConnectionClosedError, + ConnectTimeoutError, + EndpointConnectionError, + HTTPClientError, + InvalidDNSNameError, + InvalidEndpointConfigurationError, + InvalidExpressionError, + InvalidHostLabelError, + InvalidIMDSEndpointError, + InvalidIMDSEndpointModeError, + InvalidRegionError, + MetadataRetrievalError, + MissingDependencyException, + ReadTimeoutError, + SSOTokenLoadError, + UnsupportedOutpostResourceError, + UnsupportedS3AccesspointConfigurationError, + UnsupportedS3ArnError, + UnsupportedS3ConfigurationError, + UnsupportedS3ControlArnError, + UnsupportedS3ControlConfigurationError, +) + +logger = logging.getLogger(__name__) +DEFAULT_METADATA_SERVICE_TIMEOUT = 1 +METADATA_BASE_URL = 'http://169.254.169.254/' +METADATA_BASE_URL_IPv6 = 'http://[fd00:ec2::254]/' +METADATA_ENDPOINT_MODES = ('ipv4', 'ipv6') + +# These are chars that do not need to be urlencoded. +# Based on rfc2986, section 2.3 +SAFE_CHARS = '-._~' +LABEL_RE = re.compile(r'[a-z0-9][a-z0-9\-]*[a-z0-9]') +RETRYABLE_HTTP_ERRORS = ( + ReadTimeoutError, + EndpointConnectionError, + ConnectionClosedError, + ConnectTimeoutError, +) +S3_ACCELERATE_WHITELIST = ['dualstack'] +# In switching events from using service name / endpoint prefix to service +# id, we have to preserve compatibility. This maps the instances where either +# is different than the transformed service id. 
+EVENT_ALIASES = { + "api.mediatailor": "mediatailor", + "api.pricing": "pricing", + "api.sagemaker": "sagemaker", + "apigateway": "api-gateway", + "application-autoscaling": "application-auto-scaling", + "appstream2": "appstream", + "autoscaling": "auto-scaling", + "autoscaling-plans": "auto-scaling-plans", + "ce": "cost-explorer", + "cloudhsmv2": "cloudhsm-v2", + "cloudsearchdomain": "cloudsearch-domain", + "cognito-idp": "cognito-identity-provider", + "config": "config-service", + "cur": "cost-and-usage-report-service", + "data.iot": "iot-data-plane", + "data.jobs.iot": "iot-jobs-data-plane", + "data.mediastore": "mediastore-data", + "datapipeline": "data-pipeline", + "devicefarm": "device-farm", + "devices.iot1click": "iot-1click-devices-service", + "directconnect": "direct-connect", + "discovery": "application-discovery-service", + "dms": "database-migration-service", + "ds": "directory-service", + "dynamodbstreams": "dynamodb-streams", + "elasticbeanstalk": "elastic-beanstalk", + "elasticfilesystem": "efs", + "elasticloadbalancing": "elastic-load-balancing", + "elasticmapreduce": "emr", + "elastictranscoder": "elastic-transcoder", + "elb": "elastic-load-balancing", + "elbv2": "elastic-load-balancing-v2", + "email": "ses", + "entitlement.marketplace": "marketplace-entitlement-service", + "es": "elasticsearch-service", + "events": "eventbridge", + "cloudwatch-events": "eventbridge", + "iot-data": "iot-data-plane", + "iot-jobs-data": "iot-jobs-data-plane", + "iot1click-devices": "iot-1click-devices-service", + "iot1click-projects": "iot-1click-projects", + "kinesisanalytics": "kinesis-analytics", + "kinesisvideo": "kinesis-video", + "lex-models": "lex-model-building-service", + "lex-runtime": "lex-runtime-service", + "logs": "cloudwatch-logs", + "machinelearning": "machine-learning", + "marketplace-entitlement": "marketplace-entitlement-service", + "marketplacecommerceanalytics": "marketplace-commerce-analytics", + "metering.marketplace": "marketplace-metering", + "meteringmarketplace": "marketplace-metering", + "mgh": "migration-hub", + "models.lex": "lex-model-building-service", + "monitoring": "cloudwatch", + "mturk-requester": "mturk", + "opsworks-cm": "opsworkscm", + "projects.iot1click": "iot-1click-projects", + "resourcegroupstaggingapi": "resource-groups-tagging-api", + "route53": "route-53", + "route53domains": "route-53-domains", + "runtime.lex": "lex-runtime-service", + "runtime.sagemaker": "sagemaker-runtime", + "sdb": "simpledb", + "secretsmanager": "secrets-manager", + "serverlessrepo": "serverlessapplicationrepository", + "servicecatalog": "service-catalog", + "states": "sfn", + "stepfunctions": "sfn", + "storagegateway": "storage-gateway", + "streams.dynamodb": "dynamodb-streams", + "tagging": "resource-groups-tagging-api", +} + + +# This pattern can be used to detect if a header is a flexible checksum header +CHECKSUM_HEADER_PATTERN = re.compile( + r'^X-Amz-Checksum-([a-z0-9]*)$', + flags=re.IGNORECASE, +) + + +def ensure_boolean(val): + """Ensures a boolean value if a string or boolean is provided + + For strings, the value for True/False is case insensitive + """ + if isinstance(val, bool): + return val + elif isinstance(val, str): + return val.lower() == 'true' + else: + return False + + +def resolve_imds_endpoint_mode(session): + """Resolving IMDS endpoint mode to either IPv6 or IPv4. + + ec2_metadata_service_endpoint_mode takes precedence over imds_use_ipv6. 
+ """ + endpoint_mode = session.get_config_variable( + 'ec2_metadata_service_endpoint_mode' + ) + if endpoint_mode is not None: + lendpoint_mode = endpoint_mode.lower() + if lendpoint_mode not in METADATA_ENDPOINT_MODES: + error_msg_kwargs = { + 'mode': endpoint_mode, + 'valid_modes': METADATA_ENDPOINT_MODES, + } + raise InvalidIMDSEndpointModeError(**error_msg_kwargs) + return lendpoint_mode + elif session.get_config_variable('imds_use_ipv6'): + return 'ipv6' + return 'ipv4' + + +def is_json_value_header(shape): + """Determines if the provided shape is the special header type jsonvalue. + + :type shape: botocore.shape + :param shape: Shape to be inspected for the jsonvalue trait. + + :return: True if this type is a jsonvalue, False otherwise + :rtype: Bool + """ + return ( + hasattr(shape, 'serialization') + and shape.serialization.get('jsonvalue', False) + and shape.serialization.get('location') == 'header' + and shape.type_name == 'string' + ) + + +def has_header(header_name, headers): + """Case-insensitive check for header key.""" + if header_name is None: + return False + elif isinstance(headers, botocore.awsrequest.HeadersDict): + return header_name in headers + else: + return header_name.lower() in [key.lower() for key in headers.keys()] + + +def get_service_module_name(service_model): + """Returns the module name for a service + + This is the value used in both the documentation and client class name + """ + name = service_model.metadata.get( + 'serviceAbbreviation', + service_model.metadata.get( + 'serviceFullName', service_model.service_name + ), + ) + name = name.replace('Amazon', '') + name = name.replace('AWS', '') + name = re.sub(r'\W+', '', name) + return name + + +def normalize_url_path(path): + if not path: + return '/' + return remove_dot_segments(path) + + +def normalize_boolean(val): + """Returns None if val is None, otherwise ensure value + converted to boolean""" + if val is None: + return val + else: + return ensure_boolean(val) + + +def remove_dot_segments(url): + # RFC 3986, section 5.2.4 "Remove Dot Segments" + # Also, AWS services require consecutive slashes to be removed, + # so that's done here as well + if not url: + return '' + input_url = url.split('/') + output_list = [] + for x in input_url: + if x and x != '.': + if x == '..': + if output_list: + output_list.pop() + else: + output_list.append(x) + + if url[0] == '/': + first = '/' + else: + first = '' + if url[-1] == '/' and output_list: + last = '/' + else: + last = '' + return first + '/'.join(output_list) + last + + +def validate_jmespath_for_set(expression): + # Validates a limited jmespath expression to determine if we can set a + # value based on it. Only works with dotted paths. + if not expression or expression == '.': + raise InvalidExpressionError(expression=expression) + + for invalid in ['[', ']', '*']: + if invalid in expression: + raise InvalidExpressionError(expression=expression) + + +def set_value_from_jmespath(source, expression, value, is_first=True): + # This takes a (limited) jmespath-like expression & can set a value based + # on it. + # Limitations: + # * Only handles dotted lookups + # * No offsets/wildcards/slices/etc. 
+ if is_first: + validate_jmespath_for_set(expression) + + bits = expression.split('.', 1) + current_key, remainder = bits[0], bits[1] if len(bits) > 1 else '' + + if not current_key: + raise InvalidExpressionError(expression=expression) + + if remainder: + if current_key not in source: + # We've got something in the expression that's not present in the + # source (new key). If there's any more bits, we'll set the key + # with an empty dictionary. + source[current_key] = {} + + return set_value_from_jmespath( + source[current_key], remainder, value, is_first=False + ) + + # If we're down to a single key, set it. + source[current_key] = value + + +def is_global_accesspoint(context): + """Determine if request is intended for an MRAP accesspoint.""" + s3_accesspoint = context.get('s3_accesspoint', {}) + is_global = s3_accesspoint.get('region') == '' + return is_global + + +class _RetriesExceededError(Exception): + """Internal exception used when the number of retries are exceeded.""" + + pass + + +class BadIMDSRequestError(Exception): + def __init__(self, request): + self.request = request + + +class IMDSFetcher: + _RETRIES_EXCEEDED_ERROR_CLS = _RetriesExceededError + _TOKEN_PATH = 'latest/api/token' + _TOKEN_TTL = '21600' + + def __init__( + self, + timeout=DEFAULT_METADATA_SERVICE_TIMEOUT, + num_attempts=1, + base_url=METADATA_BASE_URL, + env=None, + user_agent=None, + config=None, + ): + self._timeout = timeout + self._num_attempts = num_attempts + if config is None: + config = {} + self._base_url = self._select_base_url(base_url, config) + self._config = config + + if env is None: + env = os.environ.copy() + self._disabled = ( + env.get('AWS_EC2_METADATA_DISABLED', 'false').lower() == 'true' + ) + self._imds_v1_disabled = config.get('ec2_metadata_v1_disabled') + self._user_agent = user_agent + self._session = botocore.httpsession.URLLib3Session( + timeout=self._timeout, + proxies=get_environ_proxies(self._base_url), + ) + + def get_base_url(self): + return self._base_url + + def _select_base_url(self, base_url, config): + if config is None: + config = {} + + requires_ipv6 = ( + config.get('ec2_metadata_service_endpoint_mode') == 'ipv6' + ) + custom_metadata_endpoint = config.get('ec2_metadata_service_endpoint') + + if requires_ipv6 and custom_metadata_endpoint: + logger.warning( + "Custom endpoint and IMDS_USE_IPV6 are both set. Using custom endpoint." 
+ ) + + chosen_base_url = None + + if base_url != METADATA_BASE_URL: + chosen_base_url = base_url + elif custom_metadata_endpoint: + chosen_base_url = custom_metadata_endpoint + elif requires_ipv6: + chosen_base_url = METADATA_BASE_URL_IPv6 + else: + chosen_base_url = METADATA_BASE_URL + + logger.debug(f"IMDS ENDPOINT: {chosen_base_url}") + if not is_valid_uri(chosen_base_url): + raise InvalidIMDSEndpointError(endpoint=chosen_base_url) + + return chosen_base_url + + def _construct_url(self, path): + sep = '' + if self._base_url and not self._base_url.endswith('/'): + sep = '/' + return f'{self._base_url}{sep}{path}' + + def _fetch_metadata_token(self): + self._assert_enabled() + url = self._construct_url(self._TOKEN_PATH) + headers = { + 'x-aws-ec2-metadata-token-ttl-seconds': self._TOKEN_TTL, + } + self._add_user_agent(headers) + request = botocore.awsrequest.AWSRequest( + method='PUT', url=url, headers=headers + ) + for i in range(self._num_attempts): + try: + response = self._session.send(request.prepare()) + if response.status_code == 200: + return response.text + elif response.status_code in (404, 403, 405): + return None + elif response.status_code in (400,): + raise BadIMDSRequestError(request) + except ReadTimeoutError: + return None + except RETRYABLE_HTTP_ERRORS as e: + logger.debug( + "Caught retryable HTTP exception while making metadata " + "service request to %s: %s", + url, + e, + exc_info=True, + ) + except HTTPClientError as e: + if isinstance(e.kwargs.get('error'), LocationParseError): + raise InvalidIMDSEndpointError(endpoint=url, error=e) + else: + raise + return None + + def _get_request(self, url_path, retry_func, token=None): + """Make a get request to the Instance Metadata Service. + + :type url_path: str + :param url_path: The path component of the URL to make a get request. + This arg is appended to the base_url that was provided in the + initializer. + + :type retry_func: callable + :param retry_func: A function that takes the response as an argument + and determines if it needs to retry. By default empty and non + 200 OK responses are retried. + + :type token: str + :param token: Metadata token to send along with GET requests to IMDS. 
+ """ + self._assert_enabled() + if not token: + self._assert_v1_enabled() + if retry_func is None: + retry_func = self._default_retry + url = self._construct_url(url_path) + headers = {} + if token is not None: + headers['x-aws-ec2-metadata-token'] = token + self._add_user_agent(headers) + for i in range(self._num_attempts): + try: + request = botocore.awsrequest.AWSRequest( + method='GET', url=url, headers=headers + ) + response = self._session.send(request.prepare()) + if not retry_func(response): + return response + except RETRYABLE_HTTP_ERRORS as e: + logger.debug( + "Caught retryable HTTP exception while making metadata " + "service request to %s: %s", + url, + e, + exc_info=True, + ) + raise self._RETRIES_EXCEEDED_ERROR_CLS() + + def _add_user_agent(self, headers): + if self._user_agent is not None: + headers['User-Agent'] = self._user_agent + + def _assert_enabled(self): + if self._disabled: + logger.debug("Access to EC2 metadata has been disabled.") + raise self._RETRIES_EXCEEDED_ERROR_CLS() + + def _assert_v1_enabled(self): + if self._imds_v1_disabled: + raise MetadataRetrievalError( + error_msg="Unable to retrieve token for use in IMDSv2 call and IMDSv1 has been disabled" + ) + + def _default_retry(self, response): + return self._is_non_ok_response(response) or self._is_empty(response) + + def _is_non_ok_response(self, response): + if response.status_code != 200: + self._log_imds_response(response, 'non-200', log_body=True) + return True + return False + + def _is_empty(self, response): + if not response.content: + self._log_imds_response(response, 'no body', log_body=True) + return True + return False + + def _log_imds_response(self, response, reason_to_log, log_body=False): + statement = ( + "Metadata service returned %s response " + "with status code of %s for url: %s" + ) + logger_args = [reason_to_log, response.status_code, response.url] + if log_body: + statement += ", content body: %s" + logger_args.append(response.content) + logger.debug(statement, *logger_args) + + +class InstanceMetadataFetcher(IMDSFetcher): + _URL_PATH = 'latest/meta-data/iam/security-credentials/' + _REQUIRED_CREDENTIAL_FIELDS = [ + 'AccessKeyId', + 'SecretAccessKey', + 'Token', + 'Expiration', + ] + + def retrieve_iam_role_credentials(self): + try: + token = self._fetch_metadata_token() + role_name = self._get_iam_role(token) + credentials = self._get_credentials(role_name, token) + if self._contains_all_credential_fields(credentials): + credentials = { + 'role_name': role_name, + 'access_key': credentials['AccessKeyId'], + 'secret_key': credentials['SecretAccessKey'], + 'token': credentials['Token'], + 'expiry_time': credentials['Expiration'], + } + self._evaluate_expiration(credentials) + return credentials + else: + # IMDS can return a 200 response that has a JSON formatted + # error message (i.e. if ec2 is not trusted entity for the + # attached role). We do not necessarily want to retry for + # these and we also do not necessarily want to raise a key + # error. So at least log the problematic response and return + # an empty dictionary to signal that it was not able to + # retrieve credentials. These error will contain both a + # Code and Message key. 
+ if 'Code' in credentials and 'Message' in credentials: + logger.debug( + 'Error response received when retrieving ' + 'credentials: %s.', + credentials, + ) + return {} + except self._RETRIES_EXCEEDED_ERROR_CLS: + logger.debug( + "Max number of attempts exceeded (%s) when " + "attempting to retrieve data from metadata service.", + self._num_attempts, + ) + except BadIMDSRequestError as e: + logger.debug("Bad IMDS request: %s", e.request) + return {} + + def _get_iam_role(self, token=None): + return self._get_request( + url_path=self._URL_PATH, + retry_func=self._needs_retry_for_role_name, + token=token, + ).text + + def _get_credentials(self, role_name, token=None): + r = self._get_request( + url_path=self._URL_PATH + role_name, + retry_func=self._needs_retry_for_credentials, + token=token, + ) + return json.loads(r.text) + + def _is_invalid_json(self, response): + try: + json.loads(response.text) + return False + except ValueError: + self._log_imds_response(response, 'invalid json') + return True + + def _needs_retry_for_role_name(self, response): + return self._is_non_ok_response(response) or self._is_empty(response) + + def _needs_retry_for_credentials(self, response): + return ( + self._is_non_ok_response(response) + or self._is_empty(response) + or self._is_invalid_json(response) + ) + + def _contains_all_credential_fields(self, credentials): + for field in self._REQUIRED_CREDENTIAL_FIELDS: + if field not in credentials: + logger.debug( + 'Retrieved credentials are missing required field: %s', + field, + ) + return False + return True + + def _evaluate_expiration(self, credentials): + expiration = credentials.get("expiry_time") + if expiration is None: + return + try: + expiration = datetime.datetime.strptime( + expiration, "%Y-%m-%dT%H:%M:%SZ" + ) + refresh_interval = self._config.get( + "ec2_credential_refresh_window", 60 * 10 + ) + jitter = random.randint(120, 600) # Between 2 and 10 minutes + refresh_interval_with_jitter = refresh_interval + jitter + current_time = datetime.datetime.utcnow() + refresh_offset = datetime.timedelta( + seconds=refresh_interval_with_jitter + ) + extension_time = expiration - refresh_offset + if current_time >= extension_time: + new_time = current_time + refresh_offset + credentials["expiry_time"] = new_time.strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + logger.info( + f"Attempting credential expiration extension due to a " + f"credential service availability issue. A refresh of " + f"these credentials will be attempted again within " + f"the next {refresh_interval_with_jitter/60:.0f} minutes." + ) + except ValueError: + logger.debug( + f"Unable to parse expiry_time in {credentials['expiry_time']}" + ) + + +class IMDSRegionProvider: + def __init__(self, session, environ=None, fetcher=None): + """Initialize IMDSRegionProvider. + :type session: :class:`botocore.session.Session` + :param session: The session is needed to look up configuration for + how to contact the instance metadata service. Specifically, + whether or not it should use the IMDS region at all, and if so how + to configure the timeout and number of attempts to reach the + service. + :type environ: None or dict + :param environ: A dictionary of environment variables to use. If + ``None`` is the argument then ``os.environ`` will be used by + default. + :type fetcher: :class:`botocore.utils.InstanceMetadataRegionFetcher` + :param fetcher: The class to actually handle the fetching of the region + from the IMDS. If not provided, a default one will be created.
+ """ + self._session = session + if environ is None: + environ = os.environ + self._environ = environ + self._fetcher = fetcher + + def provide(self): + """Provide the region value from IMDS.""" + instance_region = self._get_instance_metadata_region() + return instance_region + + def _get_instance_metadata_region(self): + fetcher = self._get_fetcher() + region = fetcher.retrieve_region() + return region + + def _get_fetcher(self): + if self._fetcher is None: + self._fetcher = self._create_fetcher() + return self._fetcher + + def _create_fetcher(self): + metadata_timeout = self._session.get_config_variable( + 'metadata_service_timeout' + ) + metadata_num_attempts = self._session.get_config_variable( + 'metadata_service_num_attempts' + ) + imds_config = { + 'ec2_metadata_service_endpoint': self._session.get_config_variable( + 'ec2_metadata_service_endpoint' + ), + 'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode( + self._session + ), + 'ec2_metadata_v1_disabled': self._session.get_config_variable( + 'ec2_metadata_v1_disabled' + ), + } + fetcher = InstanceMetadataRegionFetcher( + timeout=metadata_timeout, + num_attempts=metadata_num_attempts, + env=self._environ, + user_agent=self._session.user_agent(), + config=imds_config, + ) + return fetcher + + +class InstanceMetadataRegionFetcher(IMDSFetcher): + _URL_PATH = 'latest/meta-data/placement/availability-zone/' + + def retrieve_region(self): + """Get the current region from the instance metadata service. + :rvalue: str + :returns: The region the current instance is running in or None + if the instance metadata service cannot be contacted or does not + give a valid response. + :rtype: None or str + :returns: Returns the region as a string if it is configured to use + IMDS as a region source. Otherwise returns ``None``. It will also + return ``None`` if it fails to get the region from IMDS due to + exhausting its retries or not being able to connect. + """ + try: + region = self._get_region() + return region + except self._RETRIES_EXCEEDED_ERROR_CLS: + logger.debug( + "Max number of attempts exceeded (%s) when " + "attempting to retrieve data from metadata service.", + self._num_attempts, + ) + return None + + def _get_region(self): + token = self._fetch_metadata_token() + response = self._get_request( + url_path=self._URL_PATH, + retry_func=self._default_retry, + token=token, + ) + availability_zone = response.text + region = availability_zone[:-1] + return region + + +def merge_dicts(dict1, dict2, append_lists=False): + """Given two dict, merge the second dict into the first. + + The dicts can have arbitrary nesting. + + :param append_lists: If true, instead of clobbering a list with the new + value, append all of the new values onto the original list. + """ + for key in dict2: + if isinstance(dict2[key], dict): + if key in dict1 and key in dict2: + merge_dicts(dict1[key], dict2[key]) + else: + dict1[key] = dict2[key] + # If the value is a list and the ``append_lists`` flag is set, + # append the new values onto the original list + elif isinstance(dict2[key], list) and append_lists: + # The value in dict1 must be a list in order to append new + # values onto it. + if key in dict1 and isinstance(dict1[key], list): + dict1[key].extend(dict2[key]) + else: + dict1[key] = dict2[key] + else: + # At scalar types, we iterate and merge the + # current dict that we're on. 
+ dict1[key] = dict2[key] + + +def lowercase_dict(original): + """Copies the given dictionary ensuring all keys are lowercase strings.""" + copy = {} + for key in original: + copy[key.lower()] = original[key] + return copy + + +def parse_key_val_file(filename, _open=open): + try: + with _open(filename) as f: + contents = f.read() + return parse_key_val_file_contents(contents) + except OSError: + raise ConfigNotFound(path=filename) + + +def parse_key_val_file_contents(contents): + # This was originally extracted from the EC2 credential provider, which was + # fairly lenient in its parsing. We only try to parse key/val pairs if + # there's a '=' in the line. + final = {} + for line in contents.splitlines(): + if '=' not in line: + continue + key, val = line.split('=', 1) + key = key.strip() + val = val.strip() + final[key] = val + return final + + +def percent_encode_sequence(mapping, safe=SAFE_CHARS): + """Urlencode a dict or list into a string. + + This is similar to urllib.urlencode except that: + + * It uses quote, and not quote_plus + * It has a default list of safe chars that don't need + to be encoded, which matches what AWS services expect. + + If any value in the input ``mapping`` is a list type, + then each list element will be serialized. This is the equivalent + of ``urlencode``'s ``doseq=True`` argument. + + This function should be preferred over the stdlib + ``urlencode()`` function. + + :param mapping: Either a dict to urlencode or a list of + ``(key, value)`` pairs. + + """ + encoded_pairs = [] + if hasattr(mapping, 'items'): + pairs = mapping.items() + else: + pairs = mapping + for key, value in pairs: + if isinstance(value, list): + for element in value: + encoded_pairs.append( + f'{percent_encode(key)}={percent_encode(element)}' + ) + else: + encoded_pairs.append( + f'{percent_encode(key)}={percent_encode(value)}' + ) + return '&'.join(encoded_pairs) + + +def percent_encode(input_str, safe=SAFE_CHARS): + """Urlencodes a string. + + Whereas percent_encode_sequence handles taking a dict/sequence and + producing a percent encoded string, this function deals only with + taking a string (not a dict/sequence) and percent encoding it. + + If given the binary type, will simply URL encode it. If given the + text type, will produce the binary type by UTF-8 encoding the + text. If given something else, will convert it to the text type + first. + """ + # If it's not a binary or text string, make it a text string. + if not isinstance(input_str, (bytes, str)): + input_str = str(input_str) + # If it's not bytes, make it bytes by UTF-8 encoding it. + if not isinstance(input_str, bytes): + input_str = input_str.encode('utf-8') + return quote(input_str, safe=safe) + + +def _epoch_seconds_to_datetime(value, tzinfo): + """Parse numerical epoch timestamps (seconds since 1970) into a + ``datetime.datetime`` in UTC using ``datetime.timedelta``. This is intended + as a fallback when ``fromtimestamp`` raises ``OverflowError`` or ``OSError``. + + :type value: float or int + :param value: The Unix timestamp as a number. + + :type tzinfo: callable + :param tzinfo: A ``datetime.tzinfo`` class or compatible callable. + """ + epoch_zero = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc()) + epoch_zero_localized = epoch_zero.astimezone(tzinfo()) + return epoch_zero_localized + datetime.timedelta(seconds=value) + + +def _parse_timestamp_with_tzinfo(value, tzinfo): + """Parse timestamp with pluggable tzinfo options.""" + if isinstance(value, (int, float)): + # Possibly an epoch time.
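+ # e.g. (illustrative): _parse_timestamp_with_tzinfo(0, tzutc) returns + # datetime.datetime(1970, 1, 1, 0, 0, tzinfo=tzutc())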
+ return datetime.datetime.fromtimestamp(value, tzinfo()) + else: + try: + return datetime.datetime.fromtimestamp(float(value), tzinfo()) + except (TypeError, ValueError): + pass + try: + # In certain cases, a timestamp marked with GMT can be parsed into a + # different time zone, so here we provide a context which will + # enforce that GMT == UTC. + return dateutil.parser.parse(value, tzinfos={'GMT': tzutc()}) + except (TypeError, ValueError) as e: + raise ValueError(f'Invalid timestamp "{value}": {e}') + + +def parse_timestamp(value): + """Parse a timestamp into a datetime object. + + Supported formats: + + * iso8601 + * rfc822 + * epoch (value is an integer) + + This will return a ``datetime.datetime`` object. + + """ + tzinfo_options = get_tzinfo_options() + for tzinfo in tzinfo_options: + try: + return _parse_timestamp_with_tzinfo(value, tzinfo) + except (OSError, OverflowError) as e: + logger.debug( + 'Unable to parse timestamp with "%s" timezone info.', + tzinfo.__name__, + exc_info=e, + ) + # For numeric values, attempt a fallback to a fromtimestamp-free method. + # From Python's ``datetime.datetime.fromtimestamp`` documentation: "This + # may raise ``OverflowError``, if the timestamp is out of the range of + # values supported by the platform C localtime() function, and ``OSError`` + # on localtime() failure. It's common for this to be restricted to years + # from 1970 through 2038." + try: + numeric_value = float(value) + except (TypeError, ValueError): + pass + else: + for tzinfo in tzinfo_options: + try: + return _epoch_seconds_to_datetime(numeric_value, tzinfo=tzinfo) + except (OSError, OverflowError) as e: + logger.debug( + 'Unable to parse timestamp using fallback method with "%s" ' + 'timezone info.', + tzinfo.__name__, + exc_info=e, + ) + raise RuntimeError( + f'Unable to calculate correct timezone offset for "{value}"' + ) + + +def parse_to_aware_datetime(value): + """Convert the passed-in value to a datetime object with tzinfo. + + This function can be used to normalize all timestamp inputs. This + function accepts a number of different types of inputs, but + will always return a datetime.datetime object with time zone + information. + + The input param ``value`` can be one of several types: + + * A datetime object (both naive and aware) + * An integer representing the epoch time (can also be a string + of the integer, i.e. '0', instead of 0). The epoch time is + considered to be UTC. + * An iso8601 formatted timestamp. This does not need to be + a complete timestamp, it can contain just the date portion + without the time component. + + The returned value will be a datetime object that will have tzinfo. + If no timezone info was provided in the input value, then UTC is + assumed, not local time. + + """ + # This is a general purpose method that handles several cases of + # converting the provided value to a string timestamp suitable to be + # serialized to an http request. It can handle: + # 1) A datetime.datetime object. + if isinstance(value, _DatetimeClass): + datetime_obj = value + else: + # 2) A string object that's formatted as a timestamp. + # We document this as being an iso8601 timestamp, although + # parse_timestamp is a bit more flexible. + datetime_obj = parse_timestamp(value) + if datetime_obj.tzinfo is None: + # A case could be made that if no time zone is provided, + # we should use the local time. However, to preserve backwards + # compatibility, the previous behavior was to assume UTC, which is + # what we're going to do here.
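+ # e.g. (illustrative): parse_to_aware_datetime('2015-10-21') returns + # datetime.datetime(2015, 10, 21, 0, 0, tzinfo=tzutc())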
+ datetime_obj = datetime_obj.replace(tzinfo=tzutc()) + else: + datetime_obj = datetime_obj.astimezone(tzutc()) + return datetime_obj + + +def datetime2timestamp(dt, default_timezone=None): + """Calculate the timestamp based on the given datetime instance. + + :type dt: datetime + :param dt: A datetime object to be converted into a timestamp + :type default_timezone: tzinfo + :param default_timezone: If None, it is treated as tzutc(). It is + only used when dt is a naive datetime. + :returns: The timestamp + """ + epoch = datetime.datetime(1970, 1, 1) + if dt.tzinfo is None: + if default_timezone is None: + default_timezone = tzutc() + dt = dt.replace(tzinfo=default_timezone) + d = dt.replace(tzinfo=None) - dt.utcoffset() - epoch + return d.total_seconds() + + +def calculate_sha256(body, as_hex=False): + """Calculate a sha256 checksum. + + This method will calculate the sha256 checksum of a file like + object. Note that this method will iterate through the entire + file contents. The caller is responsible for ensuring the proper + starting position of the file and ``seek()``'ing the file back + to its starting location if other consumers need to read from + the file like object. + + :param body: Any file like object. The file must be opened + in binary mode such that a ``.read()`` call returns bytes. + :param as_hex: If True, then the hex digest is returned. + If False, then the digest (as binary bytes) is returned. + + :returns: The sha256 checksum + + """ + checksum = hashlib.sha256() + for chunk in iter(lambda: body.read(1024 * 1024), b''): + checksum.update(chunk) + if as_hex: + return checksum.hexdigest() + else: + return checksum.digest() + + +def calculate_tree_hash(body): + """Calculate a tree hash checksum. + + For more information see: + + http://docs.aws.amazon.com/amazonglacier/latest/dev/checksum-calculations.html + + :param body: Any file like object. This has the same constraints as + the ``body`` param in calculate_sha256 + + :rtype: str + :returns: The hex version of the calculated tree hash + + """ + chunks = [] + required_chunk_size = 1024 * 1024 + sha256 = hashlib.sha256 + for chunk in iter(lambda: body.read(required_chunk_size), b''): + chunks.append(sha256(chunk).digest()) + if not chunks: + return sha256(b'').hexdigest() + while len(chunks) > 1: + new_chunks = [] + for first, second in _in_pairs(chunks): + if second is not None: + new_chunks.append(sha256(first + second).digest()) + else: + # We're at the end of the list and there's no pair left. + new_chunks.append(first) + chunks = new_chunks + return binascii.hexlify(chunks[0]).decode('ascii') + + +def _in_pairs(iterable): + # Creates an iterator that iterates over the list in pairs: + # for a, b in _in_pairs([0, 1, 2, 3, 4]): + # print(a, b) + # + # will print: + # 0, 1 + # 2, 3 + # 4, None + shared_iter = iter(iterable) + # Note that zip_longest is a compat import that resolves to + # itertools.zip_longest. This creates an iterator; the + # call below does _not_ immediately create the list + # of pairs. + return zip_longest(shared_iter, shared_iter) + + +class CachedProperty: + """A read-only property that caches the initially computed value. + + This descriptor will only call the provided ``fget`` function once. + Subsequent access to this property will return the cached value.
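+ + Illustrative usage (editor's sketch; ``Dataset`` and the helper are + hypothetical):: + + class Dataset: + @CachedProperty + def summary(self): + return expensive_computation() # hypothetical; runs once per instance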
+ + """ + + def __init__(self, fget): + self._fget = fget + + def __get__(self, obj, cls): + if obj is None: + return self + else: + computed_value = self._fget(obj) + obj.__dict__[self._fget.__name__] = computed_value + return computed_value + + +class ArgumentGenerator: + """Generate sample input based on a shape model. + + This class contains a ``generate_skeleton`` method that will take + an input/output shape (created from ``botocore.model``) and generate + a sample dictionary corresponding to the input/output shape. + + The specific values used are place holder values. For strings either an + empty string or the member name can be used, for numbers 0 or 0.0 is used. + The intended usage of this class is to generate the *shape* of the input + structure. + + This can be useful for operations that have complex input shapes. + This allows a user to just fill in the necessary data instead of + worrying about the specific structure of the input arguments. + + Example usage:: + + s = botocore.session.get_session() + ddb = s.get_service_model('dynamodb') + arg_gen = ArgumentGenerator() + sample_input = arg_gen.generate_skeleton( + ddb.operation_model('CreateTable').input_shape) + print("Sample input for dynamodb.CreateTable: %s" % sample_input) + + """ + + def __init__(self, use_member_names=False): + self._use_member_names = use_member_names + + def generate_skeleton(self, shape): + """Generate a sample input. + + :type shape: ``botocore.model.Shape`` + :param shape: The input shape. + + :return: The generated skeleton input corresponding to the + provided input shape. + + """ + stack = [] + return self._generate_skeleton(shape, stack) + + def _generate_skeleton(self, shape, stack, name=''): + stack.append(shape.name) + try: + if shape.type_name == 'structure': + return self._generate_type_structure(shape, stack) + elif shape.type_name == 'list': + return self._generate_type_list(shape, stack) + elif shape.type_name == 'map': + return self._generate_type_map(shape, stack) + elif shape.type_name == 'string': + if self._use_member_names: + return name + if shape.enum: + return random.choice(shape.enum) + return '' + elif shape.type_name in ['integer', 'long']: + return 0 + elif shape.type_name in ['float', 'double']: + return 0.0 + elif shape.type_name == 'boolean': + return True + elif shape.type_name == 'timestamp': + return datetime.datetime(1970, 1, 1, 0, 0, 0) + finally: + stack.pop() + + def _generate_type_structure(self, shape, stack): + if stack.count(shape.name) > 1: + return {} + skeleton = OrderedDict() + for member_name, member_shape in shape.members.items(): + skeleton[member_name] = self._generate_skeleton( + member_shape, stack, name=member_name + ) + return skeleton + + def _generate_type_list(self, shape, stack): + # For list elements we've arbitrarily decided to + # return two elements for the skeleton list. 
+ name = '' + if self._use_member_names: + name = shape.member.name + return [ + self._generate_skeleton(shape.member, stack, name), + ] + + def _generate_type_map(self, shape, stack): + key_shape = shape.key + value_shape = shape.value + assert key_shape.type_name == 'string' + return OrderedDict( + [ + ('KeyName', self._generate_skeleton(value_shape, stack)), + ] + ) + + +def is_valid_ipv6_endpoint_url(endpoint_url): + if UNSAFE_URL_CHARS.intersection(endpoint_url): + return False + hostname = f'[{urlparse(endpoint_url).hostname}]' + return IPV6_ADDRZ_RE.match(hostname) is not None + + +def is_valid_ipv4_endpoint_url(endpoint_url): + hostname = urlparse(endpoint_url).hostname + return IPV4_RE.match(hostname) is not None + + +def is_valid_endpoint_url(endpoint_url): + """Verify the endpoint_url is valid. + + :type endpoint_url: string + :param endpoint_url: An endpoint_url. Must have at least a scheme + and a hostname. + + :return: True if the endpoint url is valid. False otherwise. + + """ + # post-bpo-43882 urlsplit() strips unsafe characters from URL, causing + # it to pass hostname validation below. Detect them early to fix that. + if UNSAFE_URL_CHARS.intersection(endpoint_url): + return False + parts = urlsplit(endpoint_url) + hostname = parts.hostname + if hostname is None: + return False + if len(hostname) > 255: + return False + if hostname[-1] == ".": + hostname = hostname[:-1] + allowed = re.compile( + r"^((?!-)[A-Z\d-]{1,63}(?<!-)\.)*((?!-)[A-Z\d-]{1,63}(?<!-))$", + re.IGNORECASE, + ) + return allowed.match(hostname) + + +def is_valid_uri(endpoint_url): + return is_valid_endpoint_url(endpoint_url) or is_valid_ipv6_endpoint_url( + endpoint_url + ) + + +def check_dns_name(bucket_name): + """Check to see if the ``bucket_name`` complies with the + restricted DNS naming conventions necessary to allow + access via virtual-hosting style. + + Even though "." characters are perfectly valid in this DNS + naming scheme, we are going to punt on any name containing a + "." character because these will cause SSL cert validation + problems if we try to use virtual-hosting style addressing. + """ + if '.' in bucket_name: + return False + n = len(bucket_name) + if n < 3 or n > 63: + # Wrong length + return False + match = LABEL_RE.match(bucket_name) + if match is None or match.end() != len(bucket_name): + return False + return True + + +def fix_s3_host( + request, + signature_version, + region_name, + default_endpoint_url=None, + **kwargs, +): + """ + This handler looks at S3 requests just before they are signed. + If there is a bucket name on the path (true for everything except + ListAllBuckets) it checks to see if that bucket name conforms to + the DNS naming conventions. If it does, it alters the request to + use ``virtual hosting`` style addressing rather than ``path-style`` + addressing. + + """ + if request.context.get('use_global_endpoint', False): + default_endpoint_url = 's3.amazonaws.com' + try: + switch_to_virtual_host_style( + request, signature_version, default_endpoint_url + ) + except InvalidDNSNameError as e: + bucket_name = e.kwargs['bucket_name'] + logger.debug( + 'Not changing URI, bucket is not DNS compatible: %s', bucket_name + ) + + +def switch_to_virtual_host_style( + request, signature_version, default_endpoint_url=None, **kwargs +): + """ + This is a handler to force virtual host style s3 addressing no matter + the signature version (which is taken into consideration for the default + case). If the bucket is not DNS compatible, an InvalidDNSNameError is + raised. + + :param request: An AWSRequest object that is about to be sent. + :param signature_version: The signature version to sign with + :param default_endpoint_url: The endpoint to use when switching to a + virtual style. If None is supplied, the virtual host will be + constructed from the url of the request. + """ + if request.auth_path is not None: + # The auth_path has already been applied (this may be a + # retried request). We don't need to perform this + # customization again. + return + elif _is_get_bucket_location_request(request): + # For the GetBucketLocation response, we should not be using + # the virtual host style addressing so we can avoid any sigv4 + # issues.
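+ # (Editor's illustration: when the switch below does apply, a URL like + # https://s3.amazonaws.com/mybucket/key becomes + # https://mybucket.s3.amazonaws.com/key.)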
+ logger.debug( + "Request is GetBucketLocation operation, not checking " + "for DNS compatibility." + ) + return + parts = urlsplit(request.url) + request.auth_path = parts.path + path_parts = parts.path.split('/') + + # Retrieve the endpoint we will be prepending the bucket name to. + if default_endpoint_url is None: + default_endpoint_url = parts.netloc + + if len(path_parts) > 1: + bucket_name = path_parts[1] + if not bucket_name: + # If the bucket name is empty we should not be checking for + # dns compatibility. + return + logger.debug('Checking for DNS compatible bucket for: %s', request.url) + if check_dns_name(bucket_name): + # If the operation is on a bucket, the auth_path must be + # terminated with a '/' character. + if len(path_parts) == 2: + if request.auth_path[-1] != '/': + request.auth_path += '/' + path_parts.remove(bucket_name) + # At the very least the path must be a '/', such as with the + # CreateBucket operation when DNS style is being used. If this + # is not done you will get an empty path which is incorrect. + path = '/'.join(path_parts) or '/' + global_endpoint = default_endpoint_url + host = bucket_name + '.' + global_endpoint + new_tuple = (parts.scheme, host, path, parts.query, '') + new_uri = urlunsplit(new_tuple) + request.url = new_uri + logger.debug('URI updated to: %s', new_uri) + else: + raise InvalidDNSNameError(bucket_name=bucket_name) + + +def _is_get_bucket_location_request(request): + return request.url.endswith('?location') + + +def instance_cache(func): + """Method decorator for caching method calls to a single instance. + + **This is not a general purpose caching decorator.** + + In order to use this, you *must* provide an ``_instance_cache`` + attribute on the instance. + + This decorator is used to cache method calls. The cache is only + scoped to a single instance, such that multiple instances + will maintain their own cache. + + """ + func_name = func.__name__ + + @functools.wraps(func) + def _cache_guard(self, *args, **kwargs): + cache_key = (func_name, args) + if kwargs: + kwarg_items = tuple(sorted(kwargs.items())) + cache_key = (func_name, args, kwarg_items) + result = self._instance_cache.get(cache_key) + if result is not None: + return result + result = func(self, *args, **kwargs) + self._instance_cache[cache_key] = result + return result + + return _cache_guard + + +def lru_cache_weakref(*cache_args, **cache_kwargs): + """ + Version of functools.lru_cache that stores a weak reference to ``self``. + + Serves the same purpose as :py:func:`instance_cache` but uses Python's + functools implementation which offers ``maxsize`` and ``typed`` properties. + + lru_cache is a global cache even when used on a method. The cache's + reference to ``self`` will prevent garbage collection of the object. This + wrapper around functools.lru_cache replaces the reference to ``self`` with + a weak reference to not interfere with garbage collection.
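+ + Illustrative usage (editor's sketch; ``Resolver`` and the helper are + hypothetical):: + + class Resolver: + @lru_cache_weakref(maxsize=100) + def lookup(self, name): + return do_expensive_lookup(name) # hypothetical helper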
+ """ + + def wrapper(func): + @functools.lru_cache(*cache_args, **cache_kwargs) + def func_with_weakref(weakref_to_self, *args, **kwargs): + return func(weakref_to_self(), *args, **kwargs) + + @functools.wraps(func) + def inner(self, *args, **kwargs): + return func_with_weakref(weakref.ref(self), *args, **kwargs) + + inner.cache_info = func_with_weakref.cache_info + return inner + + return wrapper + + +def switch_host_s3_accelerate(request, operation_name, **kwargs): + """Switches the current s3 endpoint with an S3 Accelerate endpoint""" + + # Note that when registered the switching of the s3 host happens + # before it gets changed to virtual. So we are not concerned with ensuring + # that the bucket name is translated to the virtual style here and we + # can hard code the Accelerate endpoint. + parts = urlsplit(request.url).netloc.split('.') + parts = [p for p in parts if p in S3_ACCELERATE_WHITELIST] + endpoint = 'https://s3-accelerate.' + if len(parts) > 0: + endpoint += '.'.join(parts) + '.' + endpoint += 'amazonaws.com' + + if operation_name in ['ListBuckets', 'CreateBucket', 'DeleteBucket']: + return + _switch_hosts(request, endpoint, use_new_scheme=False) + + +def switch_host_with_param(request, param_name): + """Switches the host using a parameter value from a JSON request body""" + request_json = json.loads(request.data.decode('utf-8')) + if request_json.get(param_name): + new_endpoint = request_json[param_name] + _switch_hosts(request, new_endpoint) + + +def _switch_hosts(request, new_endpoint, use_new_scheme=True): + final_endpoint = _get_new_endpoint( + request.url, new_endpoint, use_new_scheme + ) + request.url = final_endpoint + + +def _get_new_endpoint(original_endpoint, new_endpoint, use_new_scheme=True): + new_endpoint_components = urlsplit(new_endpoint) + original_endpoint_components = urlsplit(original_endpoint) + scheme = original_endpoint_components.scheme + if use_new_scheme: + scheme = new_endpoint_components.scheme + final_endpoint_components = ( + scheme, + new_endpoint_components.netloc, + original_endpoint_components.path, + original_endpoint_components.query, + '', + ) + final_endpoint = urlunsplit(final_endpoint_components) + logger.debug(f'Updating URI from {original_endpoint} to {final_endpoint}') + return final_endpoint + + +def deep_merge(base, extra): + """Deeply two dictionaries, overriding existing keys in the base. + + :param base: The base dictionary which will be merged into. + :param extra: The dictionary to merge into the base. Keys from this + dictionary will take precedence. + """ + for key in extra: + # If the key represents a dict on both given dicts, merge the sub-dicts + if ( + key in base + and isinstance(base[key], dict) + and isinstance(extra[key], dict) + ): + deep_merge(base[key], extra[key]) + continue + + # Otherwise, set the key on the base to be the value of the extra. + base[key] = extra[key] + + +def hyphenize_service_id(service_id): + """Translate the form used for event emitters. + + :param service_id: The service_id to convert. + """ + return service_id.replace(' ', '-').lower() + + +class IdentityCache: + """Base IdentityCache implementation for storing and retrieving + highly accessed credentials. + + This class is not intended to be instantiated in user code. 
+ """ + + METHOD = "base_identity_cache" + + def __init__(self, client, credential_cls): + self._client = client + self._credential_cls = credential_cls + + def get_credentials(self, **kwargs): + callback = self.build_refresh_callback(**kwargs) + metadata = callback() + credential_entry = self._credential_cls.create_from_metadata( + metadata=metadata, + refresh_using=callback, + method=self.METHOD, + advisory_timeout=45, + mandatory_timeout=10, + ) + return credential_entry + + def build_refresh_callback(**kwargs): + """Callback to be implemented by subclasses. + + Returns a set of metadata to be converted into a new + credential instance. + """ + raise NotImplementedError() + + +class S3ExpressIdentityCache(IdentityCache): + """S3Express IdentityCache for retrieving and storing + credentials from CreateSession calls. + + This class is not intended to be instantiated in user code. + """ + + METHOD = "s3express" + + def __init__(self, client, credential_cls): + self._client = client + self._credential_cls = credential_cls + + @functools.lru_cache(maxsize=100) + def get_credentials(self, bucket): + return super().get_credentials(bucket=bucket) + + def build_refresh_callback(self, bucket): + def refresher(): + response = self._client.create_session(Bucket=bucket) + creds = response['Credentials'] + expiration = self._serialize_if_needed( + creds['Expiration'], iso=True + ) + return { + "access_key": creds['AccessKeyId'], + "secret_key": creds['SecretAccessKey'], + "token": creds['SessionToken'], + "expiry_time": expiration, + } + + return refresher + + def _serialize_if_needed(self, value, iso=False): + if isinstance(value, _DatetimeClass): + if iso: + return value.isoformat() + return value.strftime('%Y-%m-%dT%H:%M:%S%Z') + return value + + +class S3ExpressIdentityResolver: + def __init__(self, client, credential_cls, cache=None): + self._client = weakref.proxy(client) + + if cache is None: + cache = S3ExpressIdentityCache(self._client, credential_cls) + self._cache = cache + + def register(self, event_emitter=None): + logger.debug('Registering S3Express Identity Resolver') + emitter = event_emitter or self._client.meta.events + emitter.register('before-call.s3', self.apply_signing_cache_key) + emitter.register('before-sign.s3', self.resolve_s3express_identity) + + def apply_signing_cache_key(self, params, context, **kwargs): + endpoint_properties = context.get('endpoint_properties', {}) + backend = endpoint_properties.get('backend', None) + + # Add cache key if Bucket supplied for s3express request + bucket_name = context.get('input_params', {}).get('Bucket') + if backend == 'S3Express' and bucket_name is not None: + context.setdefault('signing', {}) + context['signing']['cache_key'] = bucket_name + + def resolve_s3express_identity( + self, + request, + signing_name, + region_name, + signature_version, + request_signer, + operation_name, + **kwargs, + ): + signing_context = request.context.get('signing', {}) + signing_name = signing_context.get('signing_name') + if signing_name == 's3express' and signature_version.startswith( + 'v4-s3express' + ): + signing_context['identity_cache'] = self._cache + if 'cache_key' not in signing_context: + signing_context['cache_key'] = ( + request.context.get('s3_redirect', {}) + .get('params', {}) + .get('Bucket') + ) + + +class S3RegionRedirectorv2: + """Updated version of S3RegionRedirector for use when + EndpointRulesetResolver is in use for endpoint resolution. 
+ + This class is considered private and subject to abrupt breaking changes or + removal without prior announcement. Please do not use it directly. + """ + + def __init__(self, endpoint_bridge, client, cache=None): + self._cache = cache or {} + self._client = weakref.proxy(client) + + def register(self, event_emitter=None): + logger.debug('Registering S3 region redirector handler') + emitter = event_emitter or self._client.meta.events + emitter.register('needs-retry.s3', self.redirect_from_error) + emitter.register( + 'before-parameter-build.s3', self.annotate_request_context + ) + emitter.register( + 'before-endpoint-resolution.s3', self.redirect_from_cache + ) + + def redirect_from_error(self, request_dict, response, operation, **kwargs): + """ + An S3 request sent to the wrong region will return an error that + contains the endpoint the request should be sent to. This handler + will add the redirect information to the signing context and then + redirect the request. + """ + if response is None: + # This could be None if there was a ConnectionError or other + # transport error. + return + + redirect_ctx = request_dict.get('context', {}).get('s3_redirect', {}) + if ArnParser.is_arn(redirect_ctx.get('bucket')): + logger.debug( + 'S3 request was previously for an Accesspoint ARN, not ' + 'redirecting.' + ) + return + + if redirect_ctx.get('redirected'): + logger.debug( + 'S3 request was previously redirected, not redirecting.' + ) + return + + error = response[1].get('Error', {}) + error_code = error.get('Code') + response_metadata = response[1].get('ResponseMetadata', {}) + + # We have to account for 400 responses because + # if we sign a Head* request with the wrong region, + # we'll get a 400 Bad Request but we won't get a + # body saying it's an "AuthorizationHeaderMalformed". + is_special_head_object = ( + error_code in ('301', '400') and operation.name == 'HeadObject' + ) + is_special_head_bucket = ( + error_code in ('301', '400') + and operation.name == 'HeadBucket' + and 'x-amz-bucket-region' + in response_metadata.get('HTTPHeaders', {}) + ) + is_wrong_signing_region = ( + error_code == 'AuthorizationHeaderMalformed' and 'Region' in error + ) + is_redirect_status = response[0] is not None and response[ + 0 + ].status_code in (301, 302, 307) + is_permanent_redirect = error_code == 'PermanentRedirect' + if not any( + [ + is_special_head_object, + is_wrong_signing_region, + is_permanent_redirect, + is_special_head_bucket, + is_redirect_status, + ] + ): + return + + bucket = request_dict['context']['s3_redirect']['bucket'] + client_region = request_dict['context'].get('client_region') + new_region = self.get_bucket_region(bucket, response) + + if new_region is None: + logger.debug( + f"S3 client configured for region {client_region} but the " + f"bucket {bucket} is not in that region and the proper region " + "could not be automatically determined." + ) + return + + logger.debug( + f"S3 client configured for region {client_region} but the bucket {bucket} " + f"is in region {new_region}; please configure the proper region to " + f"avoid multiple unnecessary redirects and signing attempts." + ) + # Adding the new region to _cache will make construct_endpoint() + # use the new region as the value for the AWS::Region builtin parameter. + self._cache[bucket] = new_region + + # Re-resolve endpoint with new region and modify request_dict with + # the new URL, auth scheme, and signing context.
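+ # e.g. (illustrative): a us-east-1 client calling HeadObject on a bucket + # in eu-west-1 lands here, and the retry below is re-resolved against + # eu-west-1.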
+ ep_resolver = self._client._ruleset_resolver + ep_info = ep_resolver.construct_endpoint( + operation_model=operation, + call_args=request_dict['context']['s3_redirect']['params'], + request_context=request_dict['context'], + ) + request_dict['url'] = self.set_request_url( + request_dict['url'], ep_info.url + ) + request_dict['context']['s3_redirect']['redirected'] = True + auth_schemes = ep_info.properties.get('authSchemes') + if auth_schemes is not None: + auth_info = ep_resolver.auth_schemes_to_signing_ctx(auth_schemes) + auth_type, signing_context = auth_info + request_dict['context']['auth_type'] = auth_type + request_dict['context']['signing'] = { + **request_dict['context'].get('signing', {}), + **signing_context, + } + + # Return 0 so it doesn't wait to retry + return 0 + + def get_bucket_region(self, bucket, response): + """ + There are multiple potential sources for the new region to redirect to, + but they aren't all universally available for use. This will try to + find the region from response elements, but will fall back to calling + HEAD on the bucket if all else fails. + + :param bucket: The bucket to find the region for. This is necessary if + the region is not available in the error response. + :param response: A response representing a service request that failed + due to incorrect region configuration. + """ + # First try to source the region from the headers. + service_response = response[1] + response_headers = service_response['ResponseMetadata']['HTTPHeaders'] + if 'x-amz-bucket-region' in response_headers: + return response_headers['x-amz-bucket-region'] + + # Next, check the error body + region = service_response.get('Error', {}).get('Region', None) + if region is not None: + return region + + # Finally, HEAD the bucket. No other choice sadly. + try: + response = self._client.head_bucket(Bucket=bucket) + headers = response['ResponseMetadata']['HTTPHeaders'] + except ClientError as e: + headers = e.response['ResponseMetadata']['HTTPHeaders'] + + region = headers.get('x-amz-bucket-region', None) + return region + + def set_request_url(self, old_url, new_endpoint, **kwargs): + """ + Splice a new endpoint into an existing URL. Note that some endpoints + from the endpoint provider have a path component which will be + discarded by this function. + """ + return _get_new_endpoint(old_url, new_endpoint, False) + + def redirect_from_cache(self, builtins, params, **kwargs): + """ + If a bucket name has been redirected before, it is in the cache. This + handler will update the AWS::Region endpoint resolver builtin param + to use the region from cache instead of the client region to avoid the + redirect. + """ + bucket = params.get('Bucket') + if bucket is not None and bucket in self._cache: + new_region = self._cache.get(bucket) + builtins['AWS::Region'] = new_region + + def annotate_request_context(self, params, context, **kwargs): + """Store the bucket name in context for later use when redirecting. + The bucket name may be an access point ARN or alias. + """ + bucket = params.get('Bucket') + context['s3_redirect'] = { + 'redirected': False, + 'bucket': bucket, + 'params': params, + } + + +class S3RegionRedirector: + """This handler has been replaced by S3RegionRedirectorv2. The original + version remains in place for any third-party libraries that import it.
+ """ + + def __init__(self, endpoint_bridge, client, cache=None): + self._endpoint_resolver = endpoint_bridge + self._cache = cache + if self._cache is None: + self._cache = {} + + # This needs to be a weak ref in order to prevent memory leaks on + # python 2.6 + self._client = weakref.proxy(client) + + warnings.warn( + 'The S3RegionRedirector class has been deprecated for a new ' + 'internal replacement. A future version of botocore may remove ' + 'this class.', + category=FutureWarning, + ) + + def register(self, event_emitter=None): + emitter = event_emitter or self._client.meta.events + emitter.register('needs-retry.s3', self.redirect_from_error) + emitter.register('before-call.s3', self.set_request_url) + emitter.register('before-parameter-build.s3', self.redirect_from_cache) + + def redirect_from_error(self, request_dict, response, operation, **kwargs): + """ + An S3 request sent to the wrong region will return an error that + contains the endpoint the request should be sent to. This handler + will add the redirect information to the signing context and then + redirect the request. + """ + if response is None: + # This could be none if there was a ConnectionError or other + # transport error. + return + + if self._is_s3_accesspoint(request_dict.get('context', {})): + logger.debug( + 'S3 request was previously to an accesspoint, not redirecting.' + ) + return + + if request_dict.get('context', {}).get('s3_redirected'): + logger.debug( + 'S3 request was previously redirected, not redirecting.' + ) + return + + error = response[1].get('Error', {}) + error_code = error.get('Code') + response_metadata = response[1].get('ResponseMetadata', {}) + + # We have to account for 400 responses because + # if we sign a Head* request with the wrong region, + # we'll get a 400 Bad Request but we won't get a + # body saying it's an "AuthorizationHeaderMalformed". + is_special_head_object = ( + error_code in ('301', '400') and operation.name == 'HeadObject' + ) + is_special_head_bucket = ( + error_code in ('301', '400') + and operation.name == 'HeadBucket' + and 'x-amz-bucket-region' + in response_metadata.get('HTTPHeaders', {}) + ) + is_wrong_signing_region = ( + error_code == 'AuthorizationHeaderMalformed' and 'Region' in error + ) + is_redirect_status = response[0] is not None and response[ + 0 + ].status_code in (301, 302, 307) + is_permanent_redirect = error_code == 'PermanentRedirect' + if not any( + [ + is_special_head_object, + is_wrong_signing_region, + is_permanent_redirect, + is_special_head_bucket, + is_redirect_status, + ] + ): + return + + bucket = request_dict['context']['signing']['bucket'] + client_region = request_dict['context'].get('client_region') + new_region = self.get_bucket_region(bucket, response) + + if new_region is None: + logger.debug( + f"S3 client configured for region {client_region} but the bucket {bucket} is not " + "in that region and the proper region could not be " + "automatically determined." + ) + return + + logger.debug( + f"S3 client configured for region {client_region} but the bucket {bucket} is in region" + f" {new_region}; Please configure the proper region to avoid multiple " + "unnecessary redirects and signing attempts." 
+ ) + endpoint = self._endpoint_resolver.resolve('s3', new_region) + endpoint = endpoint['endpoint_url'] + + signing_context = { + 'region': new_region, + 'bucket': bucket, + 'endpoint': endpoint, + } + request_dict['context']['signing'] = signing_context + + self._cache[bucket] = signing_context + self.set_request_url(request_dict, request_dict['context']) + + request_dict['context']['s3_redirected'] = True + + # Return 0 so it doesn't wait to retry + return 0 + + def get_bucket_region(self, bucket, response): + """ + There are multiple potential sources for the new region to redirect to, + but they aren't all universally available for use. This will try to + find region from response elements, but will fall back to calling + HEAD on the bucket if all else fails. + + :param bucket: The bucket to find the region for. This is necessary if + the region is not available in the error response. + :param response: A response representing a service request that failed + due to incorrect region configuration. + """ + # First try to source the region from the headers. + service_response = response[1] + response_headers = service_response['ResponseMetadata']['HTTPHeaders'] + if 'x-amz-bucket-region' in response_headers: + return response_headers['x-amz-bucket-region'] + + # Next, check the error body + region = service_response.get('Error', {}).get('Region', None) + if region is not None: + return region + + # Finally, HEAD the bucket. No other choice sadly. + try: + response = self._client.head_bucket(Bucket=bucket) + headers = response['ResponseMetadata']['HTTPHeaders'] + except ClientError as e: + headers = e.response['ResponseMetadata']['HTTPHeaders'] + + region = headers.get('x-amz-bucket-region', None) + return region + + def set_request_url(self, params, context, **kwargs): + endpoint = context.get('signing', {}).get('endpoint', None) + if endpoint is not None: + params['url'] = _get_new_endpoint(params['url'], endpoint, False) + + def redirect_from_cache(self, params, context, **kwargs): + """ + This handler retrieves a given bucket's signing context from the cache + and adds it into the request context. 
+ """ + if self._is_s3_accesspoint(context): + return + bucket = params.get('Bucket') + signing_context = self._cache.get(bucket) + if signing_context is not None: + context['signing'] = signing_context + else: + context['signing'] = {'bucket': bucket} + + def _is_s3_accesspoint(self, context): + return 's3_accesspoint' in context + + +class InvalidArnException(ValueError): + pass + + +class ArnParser: + def parse_arn(self, arn): + arn_parts = arn.split(':', 5) + if len(arn_parts) < 6: + raise InvalidArnException( + f'Provided ARN: {arn} must be of the format: ' + 'arn:partition:service:region:account:resource' + ) + return { + 'partition': arn_parts[1], + 'service': arn_parts[2], + 'region': arn_parts[3], + 'account': arn_parts[4], + 'resource': arn_parts[5], + } + + @staticmethod + def is_arn(value): + if not isinstance(value, str) or not value.startswith('arn:'): + return False + arn_parser = ArnParser() + try: + arn_parser.parse_arn(value) + return True + except InvalidArnException: + return False + + +class S3ArnParamHandler: + _RESOURCE_REGEX = re.compile( + r'^(?Paccesspoint|outpost)[/:](?P.+)$' + ) + _OUTPOST_RESOURCE_REGEX = re.compile( + r'^(?P[a-zA-Z0-9\-]{1,63})[/:]accesspoint[/:]' + r'(?P[a-zA-Z0-9\-]{1,63}$)' + ) + _BLACKLISTED_OPERATIONS = ['CreateBucket'] + + def __init__(self, arn_parser=None): + self._arn_parser = arn_parser + if arn_parser is None: + self._arn_parser = ArnParser() + + def register(self, event_emitter): + event_emitter.register('before-parameter-build.s3', self.handle_arn) + + def handle_arn(self, params, model, context, **kwargs): + if model.name in self._BLACKLISTED_OPERATIONS: + return + arn_details = self._get_arn_details_from_bucket_param(params) + if arn_details is None: + return + if arn_details['resource_type'] == 'accesspoint': + self._store_accesspoint(params, context, arn_details) + elif arn_details['resource_type'] == 'outpost': + self._store_outpost(params, context, arn_details) + + def _get_arn_details_from_bucket_param(self, params): + if 'Bucket' in params: + try: + arn = params['Bucket'] + arn_details = self._arn_parser.parse_arn(arn) + self._add_resource_type_and_name(arn, arn_details) + return arn_details + except InvalidArnException: + pass + return None + + def _add_resource_type_and_name(self, arn, arn_details): + match = self._RESOURCE_REGEX.match(arn_details['resource']) + if match: + arn_details['resource_type'] = match.group('resource_type') + arn_details['resource_name'] = match.group('resource_name') + else: + raise UnsupportedS3ArnError(arn=arn) + + def _store_accesspoint(self, params, context, arn_details): + # Ideally the access-point would be stored as a parameter in the + # request where the serializer would then know how to serialize it, + # but access-points are not modeled in S3 operations so it would fail + # validation. Instead, we set the access-point to the bucket parameter + # to have some value set when serializing the request and additional + # information on the context from the arn to use in forming the + # access-point endpoint. 
+ params['Bucket'] = arn_details['resource_name'] + context['s3_accesspoint'] = { + 'name': arn_details['resource_name'], + 'account': arn_details['account'], + 'partition': arn_details['partition'], + 'region': arn_details['region'], + 'service': arn_details['service'], + } + + def _store_outpost(self, params, context, arn_details): + resource_name = arn_details['resource_name'] + match = self._OUTPOST_RESOURCE_REGEX.match(resource_name) + if not match: + raise UnsupportedOutpostResourceError(resource_name=resource_name) + # Because we need to set the bucket name to something to pass + # validation we're going to use the access point name to be consistent + # with normal access point arns. + accesspoint_name = match.group('accesspoint_name') + params['Bucket'] = accesspoint_name + context['s3_accesspoint'] = { + 'outpost_name': match.group('outpost_name'), + 'name': accesspoint_name, + 'account': arn_details['account'], + 'partition': arn_details['partition'], + 'region': arn_details['region'], + 'service': arn_details['service'], + } + + +class S3EndpointSetter: + _DEFAULT_PARTITION = 'aws' + _DEFAULT_DNS_SUFFIX = 'amazonaws.com' + + def __init__( + self, + endpoint_resolver, + region=None, + s3_config=None, + endpoint_url=None, + partition=None, + use_fips_endpoint=False, + ): + # This is calling the endpoint_resolver in regions.py + self._endpoint_resolver = endpoint_resolver + self._region = region + self._s3_config = s3_config + self._use_fips_endpoint = use_fips_endpoint + if s3_config is None: + self._s3_config = {} + self._endpoint_url = endpoint_url + self._partition = partition + if partition is None: + self._partition = self._DEFAULT_PARTITION + + def register(self, event_emitter): + event_emitter.register('before-sign.s3', self.set_endpoint) + event_emitter.register('choose-signer.s3', self.set_signer) + event_emitter.register( + 'before-call.s3.WriteGetObjectResponse', + self.update_endpoint_to_s3_object_lambda, + ) + + def update_endpoint_to_s3_object_lambda(self, params, context, **kwargs): + if self._use_accelerate_endpoint: + raise UnsupportedS3ConfigurationError( + msg='S3 client does not support accelerate endpoints for S3 Object Lambda operations', + ) + + self._override_signing_name(context, 's3-object-lambda') + if self._endpoint_url: + # Only update the url if an explicit url was not provided + return + + resolver = self._endpoint_resolver + # Constructing endpoints as s3-object-lambda as region + resolved = resolver.construct_endpoint( + 's3-object-lambda', self._region + ) + + # Ideally we would be able to replace the endpoint before + # serialization but there's no event to do that currently + # host_prefix is all the arn/bucket specs + new_endpoint = 'https://{host_prefix}{hostname}'.format( + host_prefix=params['host_prefix'], + hostname=resolved['hostname'], + ) + + params['url'] = _get_new_endpoint(params['url'], new_endpoint, False) + + def set_endpoint(self, request, **kwargs): + if self._use_accesspoint_endpoint(request): + self._validate_accesspoint_supported(request) + self._validate_fips_supported(request) + self._validate_global_regions(request) + region_name = self._resolve_region_for_accesspoint_endpoint( + request + ) + self._resolve_signing_name_for_accesspoint_endpoint(request) + self._switch_to_accesspoint_endpoint(request, region_name) + return + if self._use_accelerate_endpoint: + if self._use_fips_endpoint: + raise UnsupportedS3ConfigurationError( + msg=( + 'Client is configured to use the FIPS psuedo region ' + f'for "{self._region}", but S3 
Accelerate does not have any FIPS ' + 'compatible endpoints.' + ) + ) + switch_host_s3_accelerate(request=request, **kwargs) + if self._s3_addressing_handler: + self._s3_addressing_handler(request=request, **kwargs) + + def _use_accesspoint_endpoint(self, request): + return 's3_accesspoint' in request.context + + def _validate_fips_supported(self, request): + if not self._use_fips_endpoint: + return + if 'fips' in request.context['s3_accesspoint']['region']: + raise UnsupportedS3AccesspointConfigurationError( + msg='Invalid ARN, FIPS region not allowed in ARN.' + ) + if 'outpost_name' in request.context['s3_accesspoint']: + raise UnsupportedS3AccesspointConfigurationError( + msg=( + f'Client is configured to use the FIPS pseudo-region "{self._region}", ' + 'but outpost ARNs do not support FIPS endpoints.' + ) + ) + # Transform the pseudo region to the actual region + accesspoint_region = request.context['s3_accesspoint']['region'] + if accesspoint_region != self._region: + if not self._s3_config.get('use_arn_region', True): + # TODO: Update message to reflect use_arn_region + # is not set + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client is configured to use the FIPS pseudo-region ' + f'for "{self._region}", but the access-point ARN provided is for ' + f'the "{accesspoint_region}" region. For clients using a FIPS ' + 'pseudo-region, calls to access-point ARNs in another ' + 'region are not allowed.' + ) + ) + + def _validate_global_regions(self, request): + if self._s3_config.get('use_arn_region', True): + return + if self._region in ['aws-global', 's3-external-1']: + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client is configured to use the global pseudo-region ' + f'"{self._region}". When providing access-point ARNs, a regional ' + 'endpoint must be specified.' + ) + ) + + def _validate_accesspoint_supported(self, request): + if self._use_accelerate_endpoint: + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client does not support s3 accelerate configuration ' + 'when an access-point ARN is specified.' + ) + ) + request_partition = request.context['s3_accesspoint']['partition'] + if request_partition != self._partition: + raise UnsupportedS3AccesspointConfigurationError( + msg=( + f'Client is configured for "{self._partition}" partition, but access-point' + f' ARN provided is for "{request_partition}" partition. The client and ' + 'access-point partition must be the same.' + ) + ) + s3_service = request.context['s3_accesspoint'].get('service') + if s3_service == 's3-object-lambda' and self._s3_config.get( + 'use_dualstack_endpoint' + ): + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client does not support s3 dualstack configuration ' + 'when an S3 Object Lambda access point ARN is specified.' + ) + ) + outpost_name = request.context['s3_accesspoint'].get('outpost_name') + if outpost_name and self._s3_config.get('use_dualstack_endpoint'): + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client does not support s3 dualstack configuration ' + 'when an outpost ARN is specified.' + ) + ) + self._validate_mrap_s3_config(request) + + def _validate_mrap_s3_config(self, request): + if not is_global_accesspoint(request.context): + return + if self._s3_config.get('s3_disable_multiregion_access_points'): + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Invalid configuration, Multi-Region Access Point ' + 'ARNs are disabled.'
+ ) + ) + elif self._s3_config.get('use_dualstack_endpoint'): + raise UnsupportedS3AccesspointConfigurationError( + msg=( + 'Client does not support s3 dualstack configuration ' + 'when a Multi-Region Access Point ARN is specified.' + ) + ) + + def _resolve_region_for_accesspoint_endpoint(self, request): + if is_global_accesspoint(request.context): + # Requests going to MRAP endpoints MUST be set to any (*) region. + self._override_signing_region(request, '*') + elif self._s3_config.get('use_arn_region', True): + accesspoint_region = request.context['s3_accesspoint']['region'] + # If we are using the region from the access point, + # we will also want to make sure that we set it as the + # signing region as well + self._override_signing_region(request, accesspoint_region) + return accesspoint_region + return self._region + + def set_signer(self, context, **kwargs): + if is_global_accesspoint(context): + if HAS_CRT: + return 's3v4a' + else: + raise MissingDependencyException( + msg="Using S3 with an MRAP arn requires an additional " + "dependency. You will need to pip install " + "botocore[crt] before proceeding." + ) + + def _resolve_signing_name_for_accesspoint_endpoint(self, request): + accesspoint_service = request.context['s3_accesspoint']['service'] + self._override_signing_name(request.context, accesspoint_service) + + def _switch_to_accesspoint_endpoint(self, request, region_name): + original_components = urlsplit(request.url) + accesspoint_endpoint = urlunsplit( + ( + original_components.scheme, + self._get_netloc(request.context, region_name), + self._get_accesspoint_path( + original_components.path, request.context + ), + original_components.query, + '', + ) + ) + logger.debug( + f'Updating URI from {request.url} to {accesspoint_endpoint}' + ) + request.url = accesspoint_endpoint + + def _get_netloc(self, request_context, region_name): + if is_global_accesspoint(request_context): + return self._get_mrap_netloc(request_context) + else: + return self._get_accesspoint_netloc(request_context, region_name) + + def _get_mrap_netloc(self, request_context): + s3_accesspoint = request_context['s3_accesspoint'] + region_name = 's3-global' + mrap_netloc_components = [s3_accesspoint['name']] + if self._endpoint_url: + endpoint_url_netloc = urlsplit(self._endpoint_url).netloc + mrap_netloc_components.append(endpoint_url_netloc) + else: + partition = s3_accesspoint['partition'] + mrap_netloc_components.extend( + [ + 'accesspoint', + region_name, + self._get_partition_dns_suffix(partition), + ] + ) + return '.'.join(mrap_netloc_components) + + def _get_accesspoint_netloc(self, request_context, region_name): + s3_accesspoint = request_context['s3_accesspoint'] + accesspoint_netloc_components = [ + '{}-{}'.format(s3_accesspoint['name'], s3_accesspoint['account']), + ] + outpost_name = s3_accesspoint.get('outpost_name') + if self._endpoint_url: + if outpost_name: + accesspoint_netloc_components.append(outpost_name) + endpoint_url_netloc = urlsplit(self._endpoint_url).netloc + accesspoint_netloc_components.append(endpoint_url_netloc) + else: + if outpost_name: + outpost_host = [outpost_name, 's3-outposts'] + accesspoint_netloc_components.extend(outpost_host) + elif s3_accesspoint['service'] == 's3-object-lambda': + component = self._inject_fips_if_needed( + 's3-object-lambda', request_context + ) + accesspoint_netloc_components.append(component) + else: + component = self._inject_fips_if_needed( + 's3-accesspoint', request_context + ) + accesspoint_netloc_components.append(component) + if 
self._s3_config.get('use_dualstack_endpoint'): + accesspoint_netloc_components.append('dualstack') + accesspoint_netloc_components.extend( + [region_name, self._get_dns_suffix(region_name)] + ) + return '.'.join(accesspoint_netloc_components) + + def _inject_fips_if_needed(self, component, request_context): + if self._use_fips_endpoint: + return f'{component}-fips' + return component + + def _get_accesspoint_path(self, original_path, request_context): + # The Bucket parameter was substituted with the access-point name as + # some value was required in serializing the bucket name. Now that + # we are making the request directly to the access point, we will + # want to remove that access-point name from the path. + name = request_context['s3_accesspoint']['name'] + # All S3 operations require at least a / in their path. + return original_path.replace('/' + name, '', 1) or '/' + + def _get_partition_dns_suffix(self, partition_name): + dns_suffix = self._endpoint_resolver.get_partition_dns_suffix( + partition_name + ) + if dns_suffix is None: + dns_suffix = self._DEFAULT_DNS_SUFFIX + return dns_suffix + + def _get_dns_suffix(self, region_name): + resolved = self._endpoint_resolver.construct_endpoint( + 's3', region_name + ) + dns_suffix = self._DEFAULT_DNS_SUFFIX + if resolved and 'dnsSuffix' in resolved: + dns_suffix = resolved['dnsSuffix'] + return dns_suffix + + def _override_signing_region(self, request, region_name): + signing_context = request.context.get('signing', {}) + # S3SigV4Auth will use the context['signing']['region'] value to + # sign with if present. This is used by the Bucket redirector + # as well, but we should be fine because the redirector is never + # used in combination with the accesspoint setting logic. + signing_context['region'] = region_name + request.context['signing'] = signing_context + + def _override_signing_name(self, context, signing_name): + signing_context = context.get('signing', {}) + # S3SigV4Auth will use the context['signing']['signing_name'] value to + # sign with if present. This is used by the Bucket redirector + # as well, but we should be fine because the redirector is never + # used in combination with the accesspoint setting logic. + signing_context['signing_name'] = signing_name + context['signing'] = signing_context + + @CachedProperty + def _use_accelerate_endpoint(self): + # Enable accelerate if the configuration is set to true or the + # endpoint being used matches one of the accelerate endpoints. + + # Accelerate has been explicitly configured. + if self._s3_config.get('use_accelerate_endpoint'): + return True + + # Accelerate mode is turned on automatically if an endpoint url is + # provided that matches the accelerate scheme. + if self._endpoint_url is None: + return False + + # Accelerate is only valid for Amazon endpoints. + netloc = urlsplit(self._endpoint_url).netloc + if not netloc.endswith('amazonaws.com'): + return False + + # The first part of the URL should always be s3-accelerate. + parts = netloc.split('.') + if parts[0] != 's3-accelerate': + return False + + # URL parts between 's3-accelerate' and 'amazonaws.com', which + # represent different URL features. + feature_parts = parts[1:-2] + + # There should be no duplicate URL parts. + if len(feature_parts) != len(set(feature_parts)): + return False + + # Remaining parts must all be in the whitelist.
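+ # (Editor's illustrative note, not from the original source: with
+ # S3_ACCELERATE_WHITELIST == ['dualstack'], a netloc such as
+ # 's3-accelerate.dualstack.amazonaws.com' passes this check, while
+ # 'bucket.s3-accelerate.amazonaws.com' fails above because its first
+ # label is a bucket name rather than 's3-accelerate'.)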
+ return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts) + + @CachedProperty + def _addressing_style(self): + # Use virtual host style addressing if accelerate is enabled or if + # the given endpoint URL is an accelerate endpoint. + if self._use_accelerate_endpoint: + return 'virtual' + + # If a particular addressing style is configured, use it. + configured_addressing_style = self._s3_config.get('addressing_style') + if configured_addressing_style: + return configured_addressing_style + + @CachedProperty + def _s3_addressing_handler(self): + # If virtual host style was configured, use it regardless of whether + # or not the bucket looks DNS compatible. + if self._addressing_style == 'virtual': + logger.debug("Using S3 virtual host style addressing.") + return switch_to_virtual_host_style + + # If path style is configured, no additional steps are needed. If + # endpoint_url was specified, don't default to virtual. We could + # potentially default provided endpoint URLs to virtual hosted + # style, but for now it is avoided. + if self._addressing_style == 'path' or self._endpoint_url is not None: + logger.debug("Using S3 path style addressing.") + return None + + logger.debug( + "Defaulting to S3 virtual host style addressing with " + "path style addressing fallback." + ) + + # By default, try to use virtual style with path fallback. + return fix_s3_host + + +class S3ControlEndpointSetter: + _DEFAULT_PARTITION = 'aws' + _DEFAULT_DNS_SUFFIX = 'amazonaws.com' + _HOST_LABEL_REGEX = re.compile(r'^[a-zA-Z0-9\-]{1,63}$') + + def __init__( + self, + endpoint_resolver, + region=None, + s3_config=None, + endpoint_url=None, + partition=None, + use_fips_endpoint=False, + ): + self._endpoint_resolver = endpoint_resolver + self._region = region + self._s3_config = s3_config + self._use_fips_endpoint = use_fips_endpoint + if s3_config is None: + self._s3_config = {} + self._endpoint_url = endpoint_url + self._partition = partition + if partition is None: + self._partition = self._DEFAULT_PARTITION + + def register(self, event_emitter): + event_emitter.register('before-sign.s3-control', self.set_endpoint) + + def set_endpoint(self, request, **kwargs): + if self._use_endpoint_from_arn_details(request): + self._validate_endpoint_from_arn_details_supported(request) + region_name = self._resolve_region_from_arn_details(request) + self._resolve_signing_name_from_arn_details(request) + self._resolve_endpoint_from_arn_details(request, region_name) + self._add_headers_from_arn_details(request) + elif self._use_endpoint_from_outpost_id(request): + self._validate_outpost_redirection_valid(request) + self._override_signing_name(request, 's3-outposts') + new_netloc = self._construct_outpost_endpoint(self._region) + self._update_request_netloc(request, new_netloc) + + def _use_endpoint_from_arn_details(self, request): + return 'arn_details' in request.context + + def _use_endpoint_from_outpost_id(self, request): + return 'outpost_id' in request.context + + def _validate_endpoint_from_arn_details_supported(self, request): + if 'fips' in request.context['arn_details']['region']: + raise UnsupportedS3ControlArnError( + arn=request.context['arn_details']['original'], + msg='Invalid ARN, FIPS region not allowed in ARN.', + ) + if not self._s3_config.get('use_arn_region', False): + arn_region = request.context['arn_details']['region'] + if arn_region != self._region: + error_msg = ( + 'The use_arn_region configuration is disabled but ' + f'received an ARN for "{arn_region}" when the client is configured ' + f'to use 
"{self._region}"' + ) + raise UnsupportedS3ControlConfigurationError(msg=error_msg) + request_partion = request.context['arn_details']['partition'] + if request_partion != self._partition: + raise UnsupportedS3ControlConfigurationError( + msg=( + f'Client is configured for "{self._partition}" partition, but arn ' + f'provided is for "{request_partion}" partition. The client and ' + 'arn partition must be the same.' + ) + ) + if self._s3_config.get('use_accelerate_endpoint'): + raise UnsupportedS3ControlConfigurationError( + msg='S3 control client does not support accelerate endpoints', + ) + if 'outpost_name' in request.context['arn_details']: + self._validate_outpost_redirection_valid(request) + + def _validate_outpost_redirection_valid(self, request): + if self._s3_config.get('use_dualstack_endpoint'): + raise UnsupportedS3ControlConfigurationError( + msg=( + 'Client does not support s3 dualstack configuration ' + 'when an outpost is specified.' + ) + ) + + def _resolve_region_from_arn_details(self, request): + if self._s3_config.get('use_arn_region', False): + arn_region = request.context['arn_details']['region'] + # If we are using the region from the expanded arn, we will also + # want to make sure that we set it as the signing region as well + self._override_signing_region(request, arn_region) + return arn_region + return self._region + + def _resolve_signing_name_from_arn_details(self, request): + arn_service = request.context['arn_details']['service'] + self._override_signing_name(request, arn_service) + return arn_service + + def _resolve_endpoint_from_arn_details(self, request, region_name): + new_netloc = self._resolve_netloc_from_arn_details( + request, region_name + ) + self._update_request_netloc(request, new_netloc) + + def _update_request_netloc(self, request, new_netloc): + original_components = urlsplit(request.url) + arn_details_endpoint = urlunsplit( + ( + original_components.scheme, + new_netloc, + original_components.path, + original_components.query, + '', + ) + ) + logger.debug( + f'Updating URI from {request.url} to {arn_details_endpoint}' + ) + request.url = arn_details_endpoint + + def _resolve_netloc_from_arn_details(self, request, region_name): + arn_details = request.context['arn_details'] + if 'outpost_name' in arn_details: + return self._construct_outpost_endpoint(region_name) + account = arn_details['account'] + return self._construct_s3_control_endpoint(region_name, account) + + def _is_valid_host_label(self, label): + return self._HOST_LABEL_REGEX.match(label) + + def _validate_host_labels(self, *labels): + for label in labels: + if not self._is_valid_host_label(label): + raise InvalidHostLabelError(label=label) + + def _construct_s3_control_endpoint(self, region_name, account): + self._validate_host_labels(region_name, account) + if self._endpoint_url: + endpoint_url_netloc = urlsplit(self._endpoint_url).netloc + netloc = [account, endpoint_url_netloc] + else: + netloc = [ + account, + 's3-control', + ] + self._add_dualstack(netloc) + dns_suffix = self._get_dns_suffix(region_name) + netloc.extend([region_name, dns_suffix]) + return self._construct_netloc(netloc) + + def _construct_outpost_endpoint(self, region_name): + self._validate_host_labels(region_name) + if self._endpoint_url: + return urlsplit(self._endpoint_url).netloc + else: + netloc = [ + 's3-outposts', + region_name, + self._get_dns_suffix(region_name), + ] + self._add_fips(netloc) + return self._construct_netloc(netloc) + + def _construct_netloc(self, netloc): + return '.'.join(netloc) + + 
def _add_fips(self, netloc): + if self._use_fips_endpoint: + netloc[0] = netloc[0] + '-fips' + + def _add_dualstack(self, netloc): + if self._s3_config.get('use_dualstack_endpoint'): + netloc.append('dualstack') + + def _get_dns_suffix(self, region_name): + resolved = self._endpoint_resolver.construct_endpoint( + 's3', region_name + ) + dns_suffix = self._DEFAULT_DNS_SUFFIX + if resolved and 'dnsSuffix' in resolved: + dns_suffix = resolved['dnsSuffix'] + return dns_suffix + + def _override_signing_region(self, request, region_name): + signing_context = request.context.get('signing', {}) + # S3SigV4Auth will use the context['signing']['region'] value to + # sign with if present. This is used by the Bucket redirector + # as well, but we should be fine because the redirector is never + # used in combination with the accesspoint setting logic. + signing_context['region'] = region_name + request.context['signing'] = signing_context + + def _override_signing_name(self, request, signing_name): + signing_context = request.context.get('signing', {}) + # S3SigV4Auth will use the context['signing']['signing_name'] value to + # sign with if present. This is used by the Bucket redirector + # as well, but we should be fine because the redirector is never + # used in combination with the accesspoint setting logic. + signing_context['signing_name'] = signing_name + request.context['signing'] = signing_context + + def _add_headers_from_arn_details(self, request): + arn_details = request.context['arn_details'] + outpost_name = arn_details.get('outpost_name') + if outpost_name: + self._add_outpost_id_header(request, outpost_name) + + def _add_outpost_id_header(self, request, outpost_name): + request.headers['x-amz-outpost-id'] = outpost_name + + +class S3ControlArnParamHandler: + """This handler has been replaced by S3ControlArnParamHandlerv2. The + original version remains in place for any third-party importers. + """ + + _RESOURCE_SPLIT_REGEX = re.compile(r'[/:]') + + def __init__(self, arn_parser=None): + self._arn_parser = arn_parser + if arn_parser is None: + self._arn_parser = ArnParser() + warnings.warn( + 'The S3ControlArnParamHandler class has been deprecated for a new ' + 'internal replacement. 
A future version of botocore may remove ' + 'this class.', + category=FutureWarning, + ) + + def register(self, event_emitter): + event_emitter.register( + 'before-parameter-build.s3-control', + self.handle_arn, + ) + + def handle_arn(self, params, model, context, **kwargs): + if model.name in ('CreateBucket', 'ListRegionalBuckets'): + # CreateBucket and ListRegionalBuckets are special cases that do + # not obey ARN-based redirection but will redirect based on the + # presence of the OutpostId parameter + self._handle_outpost_id_param(params, model, context) + else: + self._handle_name_param(params, model, context) + self._handle_bucket_param(params, model, context) + + def _get_arn_details_from_param(self, params, param_name): + if param_name not in params: + return None + try: + arn = params[param_name] + arn_details = self._arn_parser.parse_arn(arn) + arn_details['original'] = arn + arn_details['resources'] = self._split_resource(arn_details) + return arn_details + except InvalidArnException: + return None + + def _split_resource(self, arn_details): + return self._RESOURCE_SPLIT_REGEX.split(arn_details['resource']) + + def _override_account_id_param(self, params, arn_details): + account_id = arn_details['account'] + if 'AccountId' in params and params['AccountId'] != account_id: + error_msg = ( + 'Account ID in ARN does not match the AccountId parameter ' + 'provided: "{}"' + ).format(params['AccountId']) + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg=error_msg, + ) + params['AccountId'] = account_id + + def _handle_outpost_id_param(self, params, model, context): + if 'OutpostId' not in params: + return + context['outpost_id'] = params['OutpostId'] + + def _handle_name_param(self, params, model, context): + # CreateAccessPoint is a special case that does not expand Name + if model.name == 'CreateAccessPoint': + return + arn_details = self._get_arn_details_from_param(params, 'Name') + if arn_details is None: + return + if self._is_outpost_accesspoint(arn_details): + self._store_outpost_accesspoint(params, context, arn_details) + else: + error_msg = 'The Name parameter does not support the provided ARN' + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg=error_msg, + ) + + def _is_outpost_accesspoint(self, arn_details): + if arn_details['service'] != 's3-outposts': + return False + resources = arn_details['resources'] + if len(resources) != 4: + return False + # Resource must be of the form outpost/op-123/accesspoint/name + return resources[0] == 'outpost' and resources[2] == 'accesspoint' + + def _store_outpost_accesspoint(self, params, context, arn_details): + self._override_account_id_param(params, arn_details) + accesspoint_name = arn_details['resources'][3] + params['Name'] = accesspoint_name + arn_details['accesspoint_name'] = accesspoint_name + arn_details['outpost_name'] = arn_details['resources'][1] + context['arn_details'] = arn_details + + def _handle_bucket_param(self, params, model, context): + arn_details = self._get_arn_details_from_param(params, 'Bucket') + if arn_details is None: + return + if self._is_outpost_bucket(arn_details): + self._store_outpost_bucket(params, context, arn_details) + else: + error_msg = ( + 'The Bucket parameter does not support the provided ARN' + ) + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg=error_msg, + ) + + def _is_outpost_bucket(self, arn_details): + if arn_details['service'] != 's3-outposts': + return False + resources = arn_details['resources'] + if 
len(resources) != 4: + return False + # Resource must be of the form outpost/op-123/bucket/name + return resources[0] == 'outpost' and resources[2] == 'bucket' + + def _store_outpost_bucket(self, params, context, arn_details): + self._override_account_id_param(params, arn_details) + bucket_name = arn_details['resources'][3] + params['Bucket'] = bucket_name + arn_details['bucket_name'] = bucket_name + arn_details['outpost_name'] = arn_details['resources'][1] + context['arn_details'] = arn_details + + +class S3ControlArnParamHandlerv2(S3ControlArnParamHandler): + """Updated version of S3ControlArnParamHandler for use when + EndpointRulesetResolver is in use for endpoint resolution. + + This class is considered private and subject to abrupt breaking changes or + removal without prior announcement. Please do not use it directly. + """ + + def __init__(self, arn_parser=None): + self._arn_parser = arn_parser + if arn_parser is None: + self._arn_parser = ArnParser() + + def register(self, event_emitter): + event_emitter.register( + 'before-endpoint-resolution.s3-control', + self.handle_arn, + ) + + def _handle_name_param(self, params, model, context): + # CreateAccessPoint is a special case that does not expand Name + if model.name == 'CreateAccessPoint': + return + arn_details = self._get_arn_details_from_param(params, 'Name') + if arn_details is None: + return + self._raise_for_fips_pseudo_region(arn_details) + self._raise_for_accelerate_endpoint(context) + if self._is_outpost_accesspoint(arn_details): + self._store_outpost_accesspoint(params, context, arn_details) + else: + error_msg = 'The Name parameter does not support the provided ARN' + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg=error_msg, + ) + + def _store_outpost_accesspoint(self, params, context, arn_details): + self._override_account_id_param(params, arn_details) + + def _handle_bucket_param(self, params, model, context): + arn_details = self._get_arn_details_from_param(params, 'Bucket') + if arn_details is None: + return + self._raise_for_fips_pseudo_region(arn_details) + self._raise_for_accelerate_endpoint(context) + if self._is_outpost_bucket(arn_details): + self._store_outpost_bucket(params, context, arn_details) + else: + error_msg = ( + 'The Bucket parameter does not support the provided ARN' + ) + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg=error_msg, + ) + + def _store_outpost_bucket(self, params, context, arn_details): + self._override_account_id_param(params, arn_details) + + def _raise_for_fips_pseudo_region(self, arn_details): + # FIPS pseudo region names cannot be used in ARNs + arn_region = arn_details['region'] + if arn_region.startswith('fips-') or arn_region.endswith('-fips'): + raise UnsupportedS3ControlArnError( + arn=arn_details['original'], + msg='Invalid ARN, FIPS region not allowed in ARN.', + ) + + def _raise_for_accelerate_endpoint(self, context): + s3_config = context['client_config'].s3 or {} + if s3_config.get('use_accelerate_endpoint'): + raise UnsupportedS3ControlConfigurationError( + msg='S3 control client does not support accelerate endpoints', + ) + + +class ContainerMetadataFetcher: + TIMEOUT_SECONDS = 2 + RETRY_ATTEMPTS = 3 + SLEEP_TIME = 1 + IP_ADDRESS = '169.254.170.2' + _ALLOWED_HOSTS = [ + IP_ADDRESS, + '169.254.170.23', + 'fd00:ec2::23', + 'localhost', + ] + + def __init__(self, session=None, sleep=time.sleep): + if session is None: + session = botocore.httpsession.URLLib3Session( + timeout=self.TIMEOUT_SECONDS + ) + self._session = 
session + self._sleep = sleep + + def retrieve_full_uri(self, full_url, headers=None): + """Retrieve JSON metadata from container metadata. + + :type full_url: str + :param full_url: The full URL of the metadata service. + This should include the scheme as well, e.g. + "http://localhost:123/foo" + + """ + self._validate_allowed_url(full_url) + return self._retrieve_credentials(full_url, headers) + + def _validate_allowed_url(self, full_url): + parsed = botocore.compat.urlparse(full_url) + if self._is_loopback_address(parsed.hostname): + return + is_whitelisted_host = self._check_if_whitelisted_host(parsed.hostname) + if not is_whitelisted_host: + raise ValueError( + f"Unsupported host '{parsed.hostname}'. Can only retrieve metadata " + f"from a loopback address or one of these hosts: {', '.join(self._ALLOWED_HOSTS)}" + ) + + def _is_loopback_address(self, hostname): + try: + ip = ip_address(hostname) + return ip.is_loopback + except ValueError: + return False + + def _check_if_whitelisted_host(self, host): + if host in self._ALLOWED_HOSTS: + return True + return False + + def retrieve_uri(self, relative_uri): + """Retrieve JSON metadata from container metadata. + + :type relative_uri: str + :param relative_uri: A relative URI, e.g. "/foo/bar?id=123" + + :return: The parsed JSON response. + + """ + full_url = self.full_url(relative_uri) + return self._retrieve_credentials(full_url) + + def _retrieve_credentials(self, full_url, extra_headers=None): + headers = {'Accept': 'application/json'} + if extra_headers is not None: + headers.update(extra_headers) + attempts = 0 + while True: + try: + return self._get_response( + full_url, headers, self.TIMEOUT_SECONDS + ) + except MetadataRetrievalError as e: + logger.debug( + "Received error when attempting to retrieve " + "container metadata: %s", + e, + exc_info=True, + ) + self._sleep(self.SLEEP_TIME) + attempts += 1 + if attempts >= self.RETRY_ATTEMPTS: + raise + + def _get_response(self, full_url, headers, timeout): + try: + AWSRequest = botocore.awsrequest.AWSRequest + request = AWSRequest(method='GET', url=full_url, headers=headers) + response = self._session.send(request.prepare()) + response_text = response.content.decode('utf-8') + if response.status_code != 200: + raise MetadataRetrievalError( + error_msg=( + f"Received non-200 response {response.status_code} " + f"from container metadata: {response_text}" + ) + ) + try: + return json.loads(response_text) + except ValueError: + error_msg = "Unable to parse JSON returned from container metadata services" + logger.debug('%s:%s', error_msg, response_text) + raise MetadataRetrievalError(error_msg=error_msg) + except RETRYABLE_HTTP_ERRORS as e: + error_msg = ( + "Received error when attempting to retrieve " + f"container metadata: {e}" + ) + raise MetadataRetrievalError(error_msg=error_msg) + + def full_url(self, relative_uri): + return f'http://{self.IP_ADDRESS}{relative_uri}' + + +def get_environ_proxies(url): + if should_bypass_proxies(url): + return {} + else: + return getproxies() + + +def should_bypass_proxies(url): + """ + Returns whether we should bypass proxies or not. + """ + # NOTE: requests allows IP/CIDR entries in the no_proxy env var, which + # we don't currently support, as urllib only checks the DNS suffix. + # If the system proxy settings indicate that this URL should be bypassed, + # don't proxy. + # The proxy_bypass function is incredibly buggy on OS X in early versions + # of Python 2.6, so allow this call to fail. 
Only catch the specific + # exceptions we've seen, though: this call failing in other ways can reveal + # legitimate problems. + try: + if proxy_bypass(urlparse(url).netloc): + return True + except (TypeError, socket.gaierror): + pass + + return False + + +def determine_content_length(body): + # No body, content length of 0 + if not body: + return 0 + + # Try asking the body for its length + try: + return len(body) + except (AttributeError, TypeError): + pass + + # Try getting the length from a seekable stream + if hasattr(body, 'seek') and hasattr(body, 'tell'): + try: + orig_pos = body.tell() + body.seek(0, 2) + end_file_pos = body.tell() + body.seek(orig_pos) + return end_file_pos - orig_pos + except io.UnsupportedOperation: + # A body such as an io.BufferedIOBase object may have a "seek" + # method that raises "UnsupportedOperation"; in that case we + # want to fall back to "chunked" encoding. + pass + # Failed to determine the length + return None + + +def get_encoding_from_headers(headers, default='ISO-8859-1'): + """Returns the encoding from a given HTTP header dict. + + :param headers: dictionary to extract encoding from. + :param default: default encoding if the content-type is text + """ + + content_type = headers.get('content-type') + + if not content_type: + return None + + message = email.message.Message() + message['content-type'] = content_type + charset = message.get_param("charset") + + if charset is not None: + return charset + + if 'text' in content_type: + return default + + +def calculate_md5(body, **kwargs): + if isinstance(body, (bytes, bytearray)): + binary_md5 = _calculate_md5_from_bytes(body) + else: + binary_md5 = _calculate_md5_from_file(body) + return base64.b64encode(binary_md5).decode('ascii') + + +def _calculate_md5_from_bytes(body_bytes): + md5 = get_md5(body_bytes) + return md5.digest() + + +def _calculate_md5_from_file(fileobj): + start_position = fileobj.tell() + md5 = get_md5() + for chunk in iter(lambda: fileobj.read(1024 * 1024), b''): + md5.update(chunk) + fileobj.seek(start_position) + return md5.digest() + + +def _is_s3express_request(params): + endpoint_properties = params.get('context', {}).get( + 'endpoint_properties', {} + ) + return endpoint_properties.get('backend') == 'S3Express' + + +def _has_checksum_header(params): + headers = params['headers'] + # If a user-provided Content-MD5 is present, + # don't try to compute a new one. 
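+ # (Editor's illustrative note, not from the original source: a header
+ # like 'x-amz-checksum-crc32' also counts as an existing checksum via
+ # the CHECKSUM_HEADER_PATTERN match below.)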
+ if 'Content-MD5' in headers: + return True + + # If a header matching the x-amz-checksum-* pattern is present, we + # assume a checksum has already been provided and an md5 is not needed + for header in headers: + if CHECKSUM_HEADER_PATTERN.match(header): + return True + + return False + + +def conditionally_calculate_checksum(params, **kwargs): + if not _has_checksum_header(params): + conditionally_calculate_md5(params, **kwargs) + conditionally_enable_crc32(params, **kwargs) + + +def conditionally_enable_crc32(params, **kwargs): + checksum_context = params.get('context', {}).get('checksum', {}) + checksum_algorithm = checksum_context.get('request_algorithm') + if ( + _is_s3express_request(params) + and params['body'] is not None + and checksum_algorithm in (None, "conditional-md5") + ): + params['context']['checksum'] = { + 'request_algorithm': { + 'algorithm': 'crc32', + 'in': 'header', + 'name': 'x-amz-checksum-crc32', + } + } + + +def conditionally_calculate_md5(params, **kwargs): + """Only add a Content-MD5 if the system supports it.""" + body = params['body'] + checksum_context = params.get('context', {}).get('checksum', {}) + checksum_algorithm = checksum_context.get('request_algorithm') + if checksum_algorithm and checksum_algorithm != 'conditional-md5': + # Skip for requests that will have a flexible checksum applied + return + + if _has_checksum_header(params): + # Don't add a new header if one is already available. + return + + if _is_s3express_request(params): + # S3Express doesn't support MD5 + return + + if MD5_AVAILABLE and body is not None: + md5_digest = calculate_md5(body, **kwargs) + params['headers']['Content-MD5'] = md5_digest + + +class FileWebIdentityTokenLoader: + def __init__(self, web_identity_token_path, _open=open): + self._web_identity_token_path = web_identity_token_path + self._open = _open + + def __call__(self): + with self._open(self._web_identity_token_path) as token_file: + return token_file.read() + + +class SSOTokenLoader: + def __init__(self, cache=None): + if cache is None: + cache = {} + self._cache = cache + + def _generate_cache_key(self, start_url, session_name): + input_str = start_url + if session_name is not None: + input_str = session_name + return hashlib.sha1(input_str.encode('utf-8')).hexdigest() + + def save_token(self, start_url, token, session_name=None): + cache_key = self._generate_cache_key(start_url, session_name) + self._cache[cache_key] = token + + def __call__(self, start_url, session_name=None): + cache_key = self._generate_cache_key(start_url, session_name) + logger.debug(f'Checking for cached token at: {cache_key}') + if cache_key not in self._cache: + name = start_url + if session_name is not None: + name = session_name + error_msg = f'Token for {name} does not exist' + raise SSOTokenLoadError(error_msg=error_msg) + + token = self._cache[cache_key] + if 'accessToken' not in token or 'expiresAt' not in token: + error_msg = f'Token for {start_url} is invalid' + raise SSOTokenLoadError(error_msg=error_msg) + return token + + +class EventbridgeSignerSetter: + _DEFAULT_PARTITION = 'aws' + _DEFAULT_DNS_SUFFIX = 'amazonaws.com' + + def __init__(self, endpoint_resolver, region=None, endpoint_url=None): + self._endpoint_resolver = endpoint_resolver + self._region = region + self._endpoint_url = endpoint_url + + def register(self, event_emitter): + event_emitter.register( + 'before-parameter-build.events.PutEvents', + self.check_for_global_endpoint, + ) + event_emitter.register( + 'before-call.events.PutEvents', 
self.set_endpoint_url + ) + + def set_endpoint_url(self, params, context, **kwargs): + if 'eventbridge_endpoint' in context: + endpoint = context['eventbridge_endpoint'] + logger.debug(f"Rewriting URL from {params['url']} to {endpoint}") + params['url'] = endpoint + + def check_for_global_endpoint(self, params, context, **kwargs): + endpoint = params.get('EndpointId') + if endpoint is None: + return + + if len(endpoint) == 0: + raise InvalidEndpointConfigurationError( + msg='EndpointId must not be a zero-length string' + ) + + if not HAS_CRT: + raise MissingDependencyException( + msg="Using EndpointId requires an additional " + "dependency. You will need to pip install " + "botocore[crt] before proceeding." + ) + + config = context.get('client_config') + endpoint_variant_tags = None + if config is not None: + if config.use_fips_endpoint: + raise InvalidEndpointConfigurationError( + msg="FIPS is not supported with EventBridge " + "multi-region endpoints." + ) + if config.use_dualstack_endpoint: + endpoint_variant_tags = ['dualstack'] + + if self._endpoint_url is None: + # Validate endpoint is a valid hostname component + parts = urlparse(f'https://{endpoint}') + if parts.hostname != endpoint: + raise InvalidEndpointConfigurationError( + msg='EndpointId is not a valid hostname component.' + ) + resolved_endpoint = self._get_global_endpoint( + endpoint, endpoint_variant_tags=endpoint_variant_tags + ) + else: + resolved_endpoint = self._endpoint_url + + context['eventbridge_endpoint'] = resolved_endpoint + context['auth_type'] = 'v4a' + + def _get_global_endpoint(self, endpoint, endpoint_variant_tags=None): + resolver = self._endpoint_resolver + + partition = resolver.get_partition_for_region(self._region) + if partition is None: + partition = self._DEFAULT_PARTITION + dns_suffix = resolver.get_partition_dns_suffix( + partition, endpoint_variant_tags=endpoint_variant_tags + ) + if dns_suffix is None: + dns_suffix = self._DEFAULT_DNS_SUFFIX + + return f"https://{endpoint}.endpoint.events.{dns_suffix}/" + + +def is_s3_accelerate_url(url): + """Does the URL match the S3 Accelerate endpoint scheme? + + Virtual host naming style, with bucket names in the netloc part of the + URL, is not allowed by this function. + """ + if url is None: + return False + + # Accelerate is only valid for Amazon endpoints. + url_parts = urlsplit(url) + if not url_parts.netloc.endswith( + 'amazonaws.com' + ) or url_parts.scheme not in ['https', 'http']: + return False + + # The first part of the URL must be s3-accelerate. + parts = url_parts.netloc.split('.') + if parts[0] != 's3-accelerate': + return False + + # URL parts between 's3-accelerate' and 'amazonaws.com', which + # represent different URL features. + feature_parts = parts[1:-2] + + # There should be no duplicate URL parts. + if len(feature_parts) != len(set(feature_parts)): + return False + + # Remaining parts must all be in the whitelist. + return all(p in S3_ACCELERATE_WHITELIST for p in feature_parts) + + +class JSONFileCache: + """JSON file cache. + This provides a dict-like interface that stores JSON serializable + objects. + The objects are serialized to JSON and stored in a file. These + values can be retrieved at a later time. 
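+
+ Illustrative usage (editor's sketch, not part of the original
+ docstring):
+
+ cache = JSONFileCache()
+ cache['mykey'] = {'foo': 'bar'}
+ assert 'mykey' in cache
+ assert cache['mykey'] == {'foo': 'bar'}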
+ """ + + CACHE_DIR = os.path.expanduser(os.path.join('~', '.aws', 'boto', 'cache')) + + def __init__(self, working_dir=CACHE_DIR, dumps_func=None): + self._working_dir = working_dir + if dumps_func is None: + dumps_func = self._default_dumps + self._dumps = dumps_func + + def _default_dumps(self, obj): + return json.dumps(obj, default=self._serialize_if_needed) + + def __contains__(self, cache_key): + actual_key = self._convert_cache_key(cache_key) + return os.path.isfile(actual_key) + + def __getitem__(self, cache_key): + """Retrieve value from a cache key.""" + actual_key = self._convert_cache_key(cache_key) + try: + with open(actual_key) as f: + return json.load(f) + except (OSError, ValueError): + raise KeyError(cache_key) + + def __delitem__(self, cache_key): + actual_key = self._convert_cache_key(cache_key) + try: + key_path = Path(actual_key) + key_path.unlink() + except FileNotFoundError: + raise KeyError(cache_key) + + def __setitem__(self, cache_key, value): + full_key = self._convert_cache_key(cache_key) + try: + file_content = self._dumps(value) + except (TypeError, ValueError): + raise ValueError( + f"Value cannot be cached, must be " + f"JSON serializable: {value}" + ) + if not os.path.isdir(self._working_dir): + os.makedirs(self._working_dir) + with os.fdopen( + os.open(full_key, os.O_WRONLY | os.O_CREAT, 0o600), 'w' + ) as f: + f.truncate() + f.write(file_content) + + def _convert_cache_key(self, cache_key): + full_path = os.path.join(self._working_dir, cache_key + '.json') + return full_path + + def _serialize_if_needed(self, value, iso=False): + if isinstance(value, _DatetimeClass): + if iso: + return value.isoformat() + return value.strftime('%Y-%m-%dT%H:%M:%S%Z') + return value + + +def is_s3express_bucket(bucket): + if bucket is None: + return False + return bucket.endswith('--x-s3') + + +# This parameter is not part of the public interface and is subject to abrupt +# breaking changes or removal without prior announcement. +# Mapping of services that have been renamed for backwards compatibility reasons. +# Keys are the previous name that should be allowed, values are the documented +# and preferred client name. +SERVICE_NAME_ALIASES = {'runtime.sagemaker': 'sagemaker-runtime'} + + +# This parameter is not part of the public interface and is subject to abrupt +# breaking changes or removal without prior announcement. +# Mapping to determine the service ID for services that do not use it as the +# model data directory name. The keys are the data directory name and the +# values are the transformed service IDs (lower case and hyphenated). +CLIENT_NAME_TO_HYPHENIZED_SERVICE_ID_OVERRIDES = { + # Actual service name we use -> Allowed computed service name. 
+ 'apigateway': 'api-gateway', + 'application-autoscaling': 'application-auto-scaling', + 'appmesh': 'app-mesh', + 'autoscaling': 'auto-scaling', + 'autoscaling-plans': 'auto-scaling-plans', + 'ce': 'cost-explorer', + 'cloudhsmv2': 'cloudhsm-v2', + 'cloudsearchdomain': 'cloudsearch-domain', + 'cognito-idp': 'cognito-identity-provider', + 'config': 'config-service', + 'cur': 'cost-and-usage-report-service', + 'datapipeline': 'data-pipeline', + 'directconnect': 'direct-connect', + 'devicefarm': 'device-farm', + 'discovery': 'application-discovery-service', + 'dms': 'database-migration-service', + 'ds': 'directory-service', + 'dynamodbstreams': 'dynamodb-streams', + 'elasticbeanstalk': 'elastic-beanstalk', + 'elastictranscoder': 'elastic-transcoder', + 'elb': 'elastic-load-balancing', + 'elbv2': 'elastic-load-balancing-v2', + 'es': 'elasticsearch-service', + 'events': 'eventbridge', + 'globalaccelerator': 'global-accelerator', + 'iot-data': 'iot-data-plane', + 'iot-jobs-data': 'iot-jobs-data-plane', + 'iot1click-devices': 'iot-1click-devices-service', + 'iot1click-projects': 'iot-1click-projects', + 'iotevents-data': 'iot-events-data', + 'iotevents': 'iot-events', + 'iotwireless': 'iot-wireless', + 'kinesisanalytics': 'kinesis-analytics', + 'kinesisanalyticsv2': 'kinesis-analytics-v2', + 'kinesisvideo': 'kinesis-video', + 'lex-models': 'lex-model-building-service', + 'lexv2-models': 'lex-models-v2', + 'lex-runtime': 'lex-runtime-service', + 'lexv2-runtime': 'lex-runtime-v2', + 'logs': 'cloudwatch-logs', + 'machinelearning': 'machine-learning', + 'marketplacecommerceanalytics': 'marketplace-commerce-analytics', + 'marketplace-entitlement': 'marketplace-entitlement-service', + 'meteringmarketplace': 'marketplace-metering', + 'mgh': 'migration-hub', + 'sms-voice': 'pinpoint-sms-voice', + 'resourcegroupstaggingapi': 'resource-groups-tagging-api', + 'route53': 'route-53', + 'route53domains': 'route-53-domains', + 's3control': 's3-control', + 'sdb': 'simpledb', + 'secretsmanager': 'secrets-manager', + 'serverlessrepo': 'serverlessapplicationrepository', + 'servicecatalog': 'service-catalog', + 'servicecatalog-appregistry': 'service-catalog-appregistry', + 'stepfunctions': 'sfn', + 'storagegateway': 'storage-gateway', +} diff --git a/venv/lib/python3.10/site-packages/botocore/validate.py b/venv/lib/python3.10/site-packages/botocore/validate.py new file mode 100644 index 0000000000000000000000000000000000000000..82aabd66e4de9de89dee0c30bafa1d80b151037d --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/validate.py @@ -0,0 +1,384 @@ +"""User input parameter validation. + +This module handles user input parameter validation +against a provided input model. + +Note that the objects in this module do *not* mutate any +arguments. No type conversion happens here. It is up to another +layer to properly convert arguments to any required types. + +Validation Errors +----------------- + + +""" + +import decimal +import json +from datetime import datetime + +from botocore.exceptions import ParamValidationError +from botocore.utils import is_json_value_header, parse_to_aware_datetime + + +def validate_parameters(params, shape): + """Validates input parameters against a schema. + + This is a convenience function that validates parameters against a schema. + You can also instantiate and use the ParamValidator class directly if you + want more control. + + If there are any validation errors then a ParamValidationError + will be raised. 
If there are no validation errors then no exception + is raised and a value of None is returned. + + :param params: The user provided input parameters. + + :type shape: botocore.model.Shape + :param shape: The schema which the input parameters should + adhere to. + + :raise: ParamValidationError + + """ + validator = ParamValidator() + report = validator.validate(params, shape) + if report.has_errors(): + raise ParamValidationError(report=report.generate_report()) + + +def type_check(valid_types): + def _create_type_check_guard(func): + def _on_passes_type_check(self, param, shape, errors, name): + if _type_check(param, errors, name): + return func(self, param, shape, errors, name) + + def _type_check(param, errors, name): + if not isinstance(param, valid_types): + valid_type_names = [str(t) for t in valid_types] + errors.report( + name, + 'invalid type', + param=param, + valid_types=valid_type_names, + ) + return False + return True + + return _on_passes_type_check + + return _create_type_check_guard + + +def range_check(name, value, shape, error_type, errors): + failed = False + min_allowed = float('-inf') + if 'min' in shape.metadata: + min_allowed = shape.metadata['min'] + if value < min_allowed: + failed = True + elif hasattr(shape, 'serialization'): + # Members that can be bound to the host have an implicit min of 1 + if shape.serialization.get('hostLabel'): + min_allowed = 1 + if value < min_allowed: + failed = True + if failed: + errors.report(name, error_type, param=value, min_allowed=min_allowed) + + +class ValidationErrors: + def __init__(self): + self._errors = [] + + def has_errors(self): + if self._errors: + return True + return False + + def generate_report(self): + error_messages = [] + for error in self._errors: + error_messages.append(self._format_error(error)) + return '\n'.join(error_messages) + + def _format_error(self, error): + error_type, name, additional = error + name = self._get_name(name) + if error_type == 'missing required field': + return ( + f"Missing required parameter in {name}: " + f"\"{additional['required_name']}\"" + ) + elif error_type == 'unknown field': + unknown_param = additional['unknown_param'] + valid_names = ', '.join(additional['valid_names']) + return ( + f'Unknown parameter in {name}: "{unknown_param}", ' + f'must be one of: {valid_names}' + ) + elif error_type == 'invalid type': + param = additional['param'] + param_type = type(param) + valid_types = ', '.join(additional['valid_types']) + return ( + f'Invalid type for parameter {name}, value: {param}, ' + f'type: {param_type}, valid types: {valid_types}' + ) + elif error_type == 'invalid range': + param = additional['param'] + min_allowed = additional['min_allowed'] + return ( + f'Invalid value for parameter {name}, value: {param}, ' + f'valid min value: {min_allowed}' + ) + elif error_type == 'invalid length': + param = additional['param'] + min_allowed = additional['min_allowed'] + return ( + f'Invalid length for parameter {name}, value: {param}, ' + f'valid min length: {min_allowed}' + ) + elif error_type == 'unable to encode to json': + return 'Invalid parameter {} must be json serializable: {}'.format( + name, + additional['type_error'], + ) + elif error_type == 'invalid type for document': + param = additional['param'] + param_type = type(param) + valid_types = ', '.join(additional['valid_types']) + return ( + f'Invalid type for document parameter {name}, value: {param}, ' + f'type: {param_type}, valid types: {valid_types}' + ) + elif error_type == 'more than one input': + members = 
', '.join(additional['members']) + return ( + f'Invalid number of parameters set for tagged union structure ' + f'{name}. Can only set one of the following keys: ' + f'{members}.' + ) + elif error_type == 'empty input': + members = ', '.join(additional['members']) + return ( + f'Must set one of the following keys for tagged union ' + f'structure {name}: {members}.' + ) + + def _get_name(self, name): + if not name: + return 'input' + elif name.startswith('.'): + return name[1:] + else: + return name + + def report(self, name, reason, **kwargs): + self._errors.append((reason, name, kwargs)) + + +class ParamValidator: + """Validates parameters against a shape model.""" + + def validate(self, params, shape): + """Validate parameters against a shape model. + + This method will validate the parameters against a provided shape model. + All errors will be collected before returning to the caller. This means + that this method will not stop at the first error; it will return all + possible errors. + + :param params: User provided dict of parameters + :param shape: A shape model describing the expected input. + + :return: A ValidationErrors object containing any validation errors. + + """ + errors = ValidationErrors() + self._validate(params, shape, errors, name='') + return errors + + def _check_special_validation_cases(self, shape): + if is_json_value_header(shape): + return self._validate_jsonvalue_string + if shape.type_name == 'structure' and shape.is_document_type: + return self._validate_document + + def _validate(self, params, shape, errors, name): + special_validator = self._check_special_validation_cases(shape) + if special_validator: + special_validator(params, shape, errors, name) + else: + getattr(self, f'_validate_{shape.type_name}')( + params, shape, errors, name + ) + + def _validate_jsonvalue_string(self, params, shape, errors, name): + # Check to see if a value marked as a jsonvalue can be dumped to + # a json string. + try: + json.dumps(params) + except (ValueError, TypeError) as e: + errors.report(name, 'unable to encode to json', type_error=e) + + def _validate_document(self, params, shape, errors, name): + if params is None: + return + + if isinstance(params, dict): + for key in params: + self._validate_document(params[key], shape, errors, key) + elif isinstance(params, list): + for index, entity in enumerate(params): + self._validate_document( + entity, shape, errors, '%s[%d]' % (name, index) + ) + elif not isinstance(params, (str, int, bool, float)): + valid_types = (str, int, bool, float, list, dict) + valid_type_names = [str(t) for t in valid_types] + errors.report( + name, + 'invalid type for document', + param=params, + param_type=type(params), + valid_types=valid_type_names, + ) + + @type_check(valid_types=(dict,)) + def _validate_structure(self, params, shape, errors, name): + if shape.is_tagged_union: + if len(params) == 0: + errors.report(name, 'empty input', members=shape.members) + elif len(params) > 1: + errors.report( + name, 'more than one input', members=shape.members + ) + + # Validate required fields. + for required_member in shape.metadata.get('required', []): + if required_member not in params: + errors.report( + name, + 'missing required field', + required_name=required_member, + user_params=params, + ) + members = shape.members + known_params = [] + # Validate known params. + for param in params: + if param not in members: + errors.report( + name, + 'unknown field', + unknown_param=param, + valid_names=list(members), + ) + else: + known_params.append(param) + # Validate structure members. 
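+ # (Editor's illustrative note, not from the original source: nested
+ # member names are reported dotted, so a bad value deep in the input
+ # surfaces as e.g. 'Invalid type for parameter Config.Rules[0].Name'.)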
+ for param in known_params: + self._validate( + params[param], + shape.members[param], + errors, + f'{name}.{param}', + ) + + @type_check(valid_types=(str,)) + def _validate_string(self, param, shape, errors, name): + # Validate range. For a string, the min/max constraints + # are of the string length. + # Looks like: + # "WorkflowId":{ + # "type":"string", + # "min":1, + # "max":256 + # } + range_check(name, len(param), shape, 'invalid length', errors) + + @type_check(valid_types=(list, tuple)) + def _validate_list(self, param, shape, errors, name): + member_shape = shape.member + range_check(name, len(param), shape, 'invalid length', errors) + for i, item in enumerate(param): + self._validate(item, member_shape, errors, f'{name}[{i}]') + + @type_check(valid_types=(dict,)) + def _validate_map(self, param, shape, errors, name): + key_shape = shape.key + value_shape = shape.value + for key, value in param.items(): + self._validate(key, key_shape, errors, f"{name} (key: {key})") + self._validate(value, value_shape, errors, f'{name}.{key}') + + @type_check(valid_types=(int,)) + def _validate_integer(self, param, shape, errors, name): + range_check(name, param, shape, 'invalid range', errors) + + def _validate_blob(self, param, shape, errors, name): + if isinstance(param, (bytes, bytearray, str)): + return + elif hasattr(param, 'read'): + # File like objects are also allowed for blob types. + return + else: + errors.report( + name, + 'invalid type', + param=param, + valid_types=[str(bytes), str(bytearray), 'file-like object'], + ) + + @type_check(valid_types=(bool,)) + def _validate_boolean(self, param, shape, errors, name): + pass + + @type_check(valid_types=(float, decimal.Decimal) + (int,)) + def _validate_double(self, param, shape, errors, name): + range_check(name, param, shape, 'invalid range', errors) + + _validate_float = _validate_double + + @type_check(valid_types=(int,)) + def _validate_long(self, param, shape, errors, name): + range_check(name, param, shape, 'invalid range', errors) + + def _validate_timestamp(self, param, shape, errors, name): + # We don't use @type_check because datetimes are a bit + # more flexible. You can either provide a datetime + # object, or a string that parses to a datetime. + is_valid_type = self._type_check_datetime(param) + if not is_valid_type: + valid_type_names = [str(datetime), 'timestamp-string'] + errors.report( + name, 'invalid type', param=param, valid_types=valid_type_names + ) + + def _type_check_datetime(self, value): + try: + parse_to_aware_datetime(value) + return True + except (TypeError, ValueError, AttributeError): + # Yes, dateutil can sometimes raise an AttributeError + # when parsing timestamps. 
+ return False + + +class ParamValidationDecorator: + def __init__(self, param_validator, serializer): + self._param_validator = param_validator + self._serializer = serializer + + def serialize_to_request(self, parameters, operation_model): + input_shape = operation_model.input_shape + if input_shape is not None: + report = self._param_validator.validate( + parameters, operation_model.input_shape + ) + if report.has_errors(): + raise ParamValidationError(report=report.generate_report()) + return self._serializer.serialize_to_request( + parameters, operation_model + ) diff --git a/venv/lib/python3.10/site-packages/botocore/waiter.py b/venv/lib/python3.10/site-packages/botocore/waiter.py new file mode 100644 index 0000000000000000000000000000000000000000..ebac2c9f82e71e18bfc4ec2793ef3bd5b0ebf647 --- /dev/null +++ b/venv/lib/python3.10/site-packages/botocore/waiter.py @@ -0,0 +1,392 @@ +# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. +import logging +import time + +import jmespath + +from botocore.docs.docstring import WaiterDocstring +from botocore.utils import get_service_module_name + +from . import xform_name +from .exceptions import ClientError, WaiterConfigError, WaiterError + +logger = logging.getLogger(__name__) + + +def create_waiter_with_client(waiter_name, waiter_model, client): + """ + + :type waiter_name: str + :param waiter_name: The name of the waiter. The name should match + the name (including the casing) of the key name in the waiter + model file (typically this is CamelCasing). + + :type waiter_model: botocore.waiter.WaiterModel + :param waiter_model: The model for the waiter configuration. + + :type client: botocore.client.BaseClient + :param client: The botocore client associated with the service. + + :rtype: botocore.waiter.Waiter + :return: The waiter object. + + """ + single_waiter_config = waiter_model.get_waiter(waiter_name) + operation_name = xform_name(single_waiter_config.operation) + operation_method = NormalizedOperationMethod( + getattr(client, operation_name) + ) + + # Create a new wait method that will serve as a proxy to the underlying + # Waiter.wait method. This is needed to attach a docstring to the + # method. + def wait(self, **kwargs): + Waiter.wait(self, **kwargs) + + wait.__doc__ = WaiterDocstring( + waiter_name=waiter_name, + event_emitter=client.meta.events, + service_model=client.meta.service_model, + service_waiter_model=waiter_model, + include_signature=False, + ) + + # Rename the waiter class based on the type of waiter. + waiter_class_name = str( + f'{get_service_module_name(client.meta.service_model)}.Waiter.{waiter_name}' + ) + + # Create the new waiter class + documented_waiter_cls = type(waiter_class_name, (Waiter,), {'wait': wait}) + + # Return an instance of the new waiter class. 
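+ # (Editor's illustrative sketch, not from the original source; the
+ # 'loader' and 's3_client' names are hypothetical:
+ # model = WaiterModel(loader.load_service_model('s3', 'waiters-2'))
+ # waiter = create_waiter_with_client('BucketExists', model, s3_client)
+ # waiter.wait(Bucket='my-bucket'))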
+ return documented_waiter_cls( + waiter_name, single_waiter_config, operation_method + ) + + +def is_valid_waiter_error(response): + error = response.get('Error') + if isinstance(error, dict) and 'Code' in error: + return True + return False + + +class NormalizedOperationMethod: + def __init__(self, client_method): + self._client_method = client_method + + def __call__(self, **kwargs): + try: + return self._client_method(**kwargs) + except ClientError as e: + return e.response + + +class WaiterModel: + SUPPORTED_VERSION = 2 + + def __init__(self, waiter_config): + """ + + Note that the WaiterModel takes ownership of the waiter_config. + It may or may not mutate the waiter_config. If this is a concern, + it is best to make a copy of the waiter config before passing it to + the WaiterModel. + + :type waiter_config: dict + :param waiter_config: The loaded waiter config + from the *.waiters.json file. This can be + obtained from a botocore Loader object as well. + + """ + self._waiter_config = waiter_config['waiters'] + + # These are part of the public API. Changing these + # will result in having to update the consuming code, + # so don't change unless you really need to. + version = waiter_config.get('version', 'unknown') + self._verify_supported_version(version) + self.version = version + self.waiter_names = list(sorted(waiter_config['waiters'].keys())) + + def _verify_supported_version(self, version): + if version != self.SUPPORTED_VERSION: + raise WaiterConfigError( + error_msg=( + "Unsupported waiter version, supported version " + f"must be: {self.SUPPORTED_VERSION}, but version " + f"of waiter config is: {version}" + ) + ) + + def get_waiter(self, waiter_name): + try: + single_waiter_config = self._waiter_config[waiter_name] + except KeyError: + raise ValueError(f"Waiter does not exist: {waiter_name}") + return SingleWaiterConfig(single_waiter_config) + + +class SingleWaiterConfig: + """Represents the waiter configuration for a single waiter. + + A single waiter is considered the configuration for a single + value associated with a named waiter (i.e TableExists). + + """ + + def __init__(self, single_waiter_config): + self._config = single_waiter_config + + # These attributes are part of the public API. + self.description = single_waiter_config.get('description', '') + # Per the spec, these three fields are required. 
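+ # Illustrative shape of a single waiter config from a *.waiters.json + # file (hypothetical values): + # "TableExists": { + # "operation": "DescribeTable", + # "delay": 20, + # "maxAttempts": 25, + # "acceptors": [...] + # }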
+ self.operation = single_waiter_config['operation'] + self.delay = single_waiter_config['delay'] + self.max_attempts = single_waiter_config['maxAttempts'] + + @property + def acceptors(self): + acceptors = [] + for acceptor_config in self._config['acceptors']: + acceptor = AcceptorConfig(acceptor_config) + acceptors.append(acceptor) + return acceptors + + +class AcceptorConfig: + def __init__(self, config): + self.state = config['state'] + self.matcher = config['matcher'] + self.expected = config['expected'] + self.argument = config.get('argument') + self.matcher_func = self._create_matcher_func() + + @property + def explanation(self): + if self.matcher == 'path': + return f'For expression "{self.argument}" we matched expected path: "{self.expected}"' + elif self.matcher == 'pathAll': + return ( + f'For expression "{self.argument}" all members matched ' + f'expected path: "{self.expected}"' + ) + elif self.matcher == 'pathAny': + return ( + f'For expression "{self.argument}" we matched expected ' + f'path: "{self.expected}" at least once' + ) + elif self.matcher == 'status': + return f'Matched expected HTTP status code: {self.expected}' + elif self.matcher == 'error': + return f'Matched expected service error code: {self.expected}' + else: + return f'No explanation for unknown waiter type: "{self.matcher}"' + + def _create_matcher_func(self): + # An acceptor function is a callable that takes a single value. The + # parsed AWS response. Note that the parsed error response is also + # provided in the case of errors, so it's entirely possible to + # handle all the available matcher capabilities in the future. + # There's only three supported matchers, so for now, this is all + # contained to a single method. If this grows, we can expand this + # out to separate methods or even objects. + + if self.matcher == 'path': + return self._create_path_matcher() + elif self.matcher == 'pathAll': + return self._create_path_all_matcher() + elif self.matcher == 'pathAny': + return self._create_path_any_matcher() + elif self.matcher == 'status': + return self._create_status_matcher() + elif self.matcher == 'error': + return self._create_error_matcher() + else: + raise WaiterConfigError( + error_msg=f"Unknown acceptor: {self.matcher}" + ) + + def _create_path_matcher(self): + expression = jmespath.compile(self.argument) + expected = self.expected + + def acceptor_matches(response): + if is_valid_waiter_error(response): + return + return expression.search(response) == expected + + return acceptor_matches + + def _create_path_all_matcher(self): + expression = jmespath.compile(self.argument) + expected = self.expected + + def acceptor_matches(response): + if is_valid_waiter_error(response): + return + result = expression.search(response) + if not isinstance(result, list) or not result: + # pathAll matcher must result in a list. + # Also we require at least one element in the list, + # that is, an empty list should not result in this + # acceptor match. + return False + for element in result: + if element != expected: + return False + return True + + return acceptor_matches + + def _create_path_any_matcher(self): + expression = jmespath.compile(self.argument) + expected = self.expected + + def acceptor_matches(response): + if is_valid_waiter_error(response): + return + result = expression.search(response) + if not isinstance(result, list) or not result: + # pathAny matcher must result in a list. 
+ # Also we require at least one element in the list, + # that is, an empty list should not result in this + # acceptor match. + return False + for element in result: + if element == expected: + return True + return False + + return acceptor_matches + + def _create_status_matcher(self): + expected = self.expected + + def acceptor_matches(response): + # We don't have any requirements on the expected incoming data + # other than it is a dict, so we don't assume there's + # a ResponseMetadata.HTTPStatusCode. + status_code = response.get('ResponseMetadata', {}).get( + 'HTTPStatusCode' + ) + return status_code == expected + + return acceptor_matches + + def _create_error_matcher(self): + expected = self.expected + + def acceptor_matches(response): + # When the client encounters an error, it will normally raise + # an exception. However, the waiter implementation will catch + # this exception, and instead send us the parsed error + # response. So response is still a dictionary, and in the case + # of an error response will contain the "Error" and + # "ResponseMetadata" key. + # When expected is True, accept any error code. + # When expected is False, check if any errors were encountered. + # Otherwise, check for a specific AWS error code. + if expected is True: + return "Error" in response and "Code" in response["Error"] + elif expected is False: + return "Error" not in response + else: + return response.get("Error", {}).get("Code", "") == expected + + return acceptor_matches + + +class Waiter: + def __init__(self, name, config, operation_method): + """ + + :type name: string + :param name: The name of the waiter + + :type config: botocore.waiter.SingleWaiterConfig + :param config: The configuration for the waiter. + + :type operation_method: callable + :param operation_method: A callable that accepts **kwargs + and returns a response. For example, this can be + a method from a botocore client. + + """ + self._operation_method = operation_method + # The two attributes are exposed to allow for introspection + # and documentation. + self.name = name + self.config = config + + def wait(self, **kwargs): + acceptors = list(self.config.acceptors) + current_state = 'waiting' + # pop the invocation specific config + config = kwargs.pop('WaiterConfig', {}) + sleep_amount = config.get('Delay', self.config.delay) + max_attempts = config.get('MaxAttempts', self.config.max_attempts) + last_matched_acceptor = None + num_attempts = 0 + + while True: + response = self._operation_method(**kwargs) + num_attempts += 1 + for acceptor in acceptors: + if acceptor.matcher_func(response): + last_matched_acceptor = acceptor + current_state = acceptor.state + break + else: + # If none of the acceptors matched, we should + # transition to the failure state if an error + # response was received. + if is_valid_waiter_error(response): + # Transition to a failure state, which we + # can just handle here by raising an exception. + raise WaiterError( + name=self.name, + reason='An error occurred ({}): {}'.format( + response['Error'].get('Code', 'Unknown'), + response['Error'].get('Message', 'Unknown'), + ), + last_response=response, + ) + if current_state == 'success': + logger.debug( + "Waiting complete, waiter matched the " "success state." 
+ ) + return + if current_state == 'failure': + reason = f'Waiter encountered a terminal failure state: {acceptor.explanation}' + raise WaiterError( + name=self.name, + reason=reason, + last_response=response, + ) + if num_attempts >= max_attempts: + if last_matched_acceptor is None: + reason = 'Max attempts exceeded' + else: + reason = ( + f'Max attempts exceeded. Previously accepted state: ' + f'{last_matched_acceptor.explanation}' + ) + raise WaiterError( + name=self.name, + reason=reason, + last_response=response, + ) + time.sleep(sleep_amount) diff --git a/venv/lib/python3.10/site-packages/cachetools/__init__.py b/venv/lib/python3.10/site-packages/cachetools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d3051c1049f6a82a4a8b01a8b69941e26550f2b9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/cachetools/__init__.py @@ -0,0 +1,738 @@ +"""Extensible memoizing collections and decorators.""" + +__all__ = ( + "Cache", + "FIFOCache", + "LFUCache", + "LRUCache", + "MRUCache", + "RRCache", + "TLRUCache", + "TTLCache", + "cached", + "cachedmethod", +) + +__version__ = "5.5.2" + +import collections +import collections.abc +import functools +import heapq +import random +import time + +from . import keys +from ._decorators import _cached_wrapper + + +class _DefaultSize: + __slots__ = () + + def __getitem__(self, _): + return 1 + + def __setitem__(self, _, value): + assert value == 1 + + def pop(self, _): + return 1 + + +class Cache(collections.abc.MutableMapping): + """Mutable mapping to serve as a simple cache or cache base class.""" + + __marker = object() + + __size = _DefaultSize() + + def __init__(self, maxsize, getsizeof=None): + if getsizeof: + self.getsizeof = getsizeof + if self.getsizeof is not Cache.getsizeof: + self.__size = dict() + self.__data = dict() + self.__currsize = 0 + self.__maxsize = maxsize + + def __repr__(self): + return "%s(%s, maxsize=%r, currsize=%r)" % ( + self.__class__.__name__, + repr(self.__data), + self.__maxsize, + self.__currsize, + ) + + def __getitem__(self, key): + try: + return self.__data[key] + except KeyError: + return self.__missing__(key) + + def __setitem__(self, key, value): + maxsize = self.__maxsize + size = self.getsizeof(value) + if size > maxsize: + raise ValueError("value too large") + if key not in self.__data or self.__size[key] < size: + while self.__currsize + size > maxsize: + self.popitem() + if key in self.__data: + diffsize = size - self.__size[key] + else: + diffsize = size + self.__data[key] = value + self.__size[key] = size + self.__currsize += diffsize + + def __delitem__(self, key): + size = self.__size.pop(key) + del self.__data[key] + self.__currsize -= size + + def __contains__(self, key): + return key in self.__data + + def __missing__(self, key): + raise KeyError(key) + + def __iter__(self): + return iter(self.__data) + + def __len__(self): + return len(self.__data) + + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def pop(self, key, default=__marker): + if key in self: + value = self[key] + del self[key] + elif default is self.__marker: + raise KeyError(key) + else: + value = default + return value + + def setdefault(self, key, default=None): + if key in self: + value = self[key] + else: + self[key] = value = default + return value + + @property + def maxsize(self): + """The maximum size of the cache.""" + return self.__maxsize + + @property + def currsize(self): + """The current size of the cache.""" + return self.__currsize + + @staticmethod +
def getsizeof(value): + """Return the size of a cache element's value.""" + return 1 + + +class FIFOCache(Cache): + """First In First Out (FIFO) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + try: + self.__order.move_to_end(key) + except KeyError: + self.__order[key] = None + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair first inserted.""" + try: + key = next(iter(self.__order)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class LFUCache(Cache): + """Least Frequently Used (LFU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__counter = collections.Counter() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__counter[key] -= 1 + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__counter[key] -= 1 + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__counter[key] + + def popitem(self): + """Remove and return the `(key, value)` pair least frequently used.""" + try: + ((key, _),) = self.__counter.most_common(1) + except ValueError: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class LRUCache(Cache): + """Least Recently Used (LRU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__update(key) + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__update(key) + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair least recently used.""" + try: + key = next(iter(self.__order)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __update(self, key): + try: + self.__order.move_to_end(key) + except KeyError: + self.__order[key] = None + + +class MRUCache(Cache): + """Most Recently Used (MRU) cache implementation.""" + + def __init__(self, maxsize, getsizeof=None): + from warnings import warn + + warn("MRUCache is deprecated", DeprecationWarning, stacklevel=2) + + Cache.__init__(self, maxsize, getsizeof) + self.__order = collections.OrderedDict() + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + value = cache_getitem(self, key) + if key in self: # __missing__ may not store item + self.__update(key) + return value + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + cache_setitem(self, key, value) + self.__update(key) + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + del 
self.__order[key] + + def popitem(self): + """Remove and return the `(key, value)` pair most recently used.""" + try: + key = next(iter(self.__order)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __update(self, key): + try: + self.__order.move_to_end(key, last=False) + except KeyError: + self.__order[key] = None + + +class RRCache(Cache): + """Random Replacement (RR) cache implementation.""" + + def __init__(self, maxsize, choice=random.choice, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__choice = choice + + @property + def choice(self): + """The `choice` function used by the cache.""" + return self.__choice + + def popitem(self): + """Remove and return a random `(key, value)` pair.""" + try: + key = self.__choice(list(self)) + except IndexError: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + +class _TimedCache(Cache): + """Base class for time aware cache implementations.""" + + class _Timer: + def __init__(self, timer): + self.__timer = timer + self.__nesting = 0 + + def __call__(self): + if self.__nesting == 0: + return self.__timer() + else: + return self.__time + + def __enter__(self): + if self.__nesting == 0: + self.__time = time = self.__timer() + else: + time = self.__time + self.__nesting += 1 + return time + + def __exit__(self, *exc): + self.__nesting -= 1 + + def __reduce__(self): + return _TimedCache._Timer, (self.__timer,) + + def __getattr__(self, name): + return getattr(self.__timer, name) + + def __init__(self, maxsize, timer=time.monotonic, getsizeof=None): + Cache.__init__(self, maxsize, getsizeof) + self.__timer = _TimedCache._Timer(timer) + + def __repr__(self, cache_repr=Cache.__repr__): + with self.__timer as time: + self.expire(time) + return cache_repr(self) + + def __len__(self, cache_len=Cache.__len__): + with self.__timer as time: + self.expire(time) + return cache_len(self) + + @property + def currsize(self): + with self.__timer as time: + self.expire(time) + return super().currsize + + @property + def timer(self): + """The timer function used by the cache.""" + return self.__timer + + def clear(self): + with self.__timer as time: + self.expire(time) + Cache.clear(self) + + def get(self, *args, **kwargs): + with self.__timer: + return Cache.get(self, *args, **kwargs) + + def pop(self, *args, **kwargs): + with self.__timer: + return Cache.pop(self, *args, **kwargs) + + def setdefault(self, *args, **kwargs): + with self.__timer: + return Cache.setdefault(self, *args, **kwargs) + + +class TTLCache(_TimedCache): + """LRU Cache implementation with per-item time-to-live (TTL) value.""" + + class _Link: + __slots__ = ("key", "expires", "next", "prev") + + def __init__(self, key=None, expires=None): + self.key = key + self.expires = expires + + def __reduce__(self): + return TTLCache._Link, (self.key, self.expires) + + def unlink(self): + next = self.next + prev = self.prev + prev.next = next + next.prev = prev + + def __init__(self, maxsize, ttl, timer=time.monotonic, getsizeof=None): + _TimedCache.__init__(self, maxsize, timer, getsizeof) + self.__root = root = TTLCache._Link() + root.prev = root.next = root + self.__links = collections.OrderedDict() + self.__ttl = ttl + + def __contains__(self, key): + try: + link = self.__links[key] # no reordering + except KeyError: + return False + else: + return self.timer() < link.expires + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + try: 
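+ # Note: __getlink below also moves the key to the end of the internal + # ordering, so a successful lookup doubles as the LRU update.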
+ link = self.__getlink(key) + except KeyError: + expired = False + else: + expired = not (self.timer() < link.expires) + if expired: + return self.__missing__(key) + else: + return cache_getitem(self, key) + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + with self.timer as time: + self.expire(time) + cache_setitem(self, key, value) + try: + link = self.__getlink(key) + except KeyError: + self.__links[key] = link = TTLCache._Link(key) + else: + link.unlink() + link.expires = time + self.__ttl + link.next = root = self.__root + link.prev = prev = root.prev + prev.next = root.prev = link + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + cache_delitem(self, key) + link = self.__links.pop(key) + link.unlink() + if not (self.timer() < link.expires): + raise KeyError(key) + + def __iter__(self): + root = self.__root + curr = root.next + while curr is not root: + # "freeze" time for iterator access + with self.timer as time: + if time < curr.expires: + yield curr.key + curr = curr.next + + def __setstate__(self, state): + self.__dict__.update(state) + root = self.__root + root.prev = root.next = root + for link in sorted(self.__links.values(), key=lambda obj: obj.expires): + link.next = root + link.prev = prev = root.prev + prev.next = root.prev = link + self.expire(self.timer()) + + @property + def ttl(self): + """The time-to-live value of the cache's items.""" + return self.__ttl + + def expire(self, time=None): + """Remove expired items from the cache and return an iterable of the + expired `(key, value)` pairs. + + """ + if time is None: + time = self.timer() + root = self.__root + curr = root.next + links = self.__links + expired = [] + cache_delitem = Cache.__delitem__ + cache_getitem = Cache.__getitem__ + while curr is not root and not (time < curr.expires): + expired.append((curr.key, cache_getitem(self, curr.key))) + cache_delitem(self, curr.key) + del links[curr.key] + next = curr.next + curr.unlink() + curr = next + return expired + + def popitem(self): + """Remove and return the `(key, value)` pair least recently used that + has not already expired. 
+ + """ + with self.timer as time: + self.expire(time) + try: + key = next(iter(self.__links)) + except StopIteration: + raise KeyError("%s is empty" % type(self).__name__) from None + else: + return (key, self.pop(key)) + + def __getlink(self, key): + value = self.__links[key] + self.__links.move_to_end(key) + return value + + +class TLRUCache(_TimedCache): + """Time aware Least Recently Used (TLRU) cache implementation.""" + + @functools.total_ordering + class _Item: + __slots__ = ("key", "expires", "removed") + + def __init__(self, key=None, expires=None): + self.key = key + self.expires = expires + self.removed = False + + def __lt__(self, other): + return self.expires < other.expires + + def __init__(self, maxsize, ttu, timer=time.monotonic, getsizeof=None): + _TimedCache.__init__(self, maxsize, timer, getsizeof) + self.__items = collections.OrderedDict() + self.__order = [] + self.__ttu = ttu + + def __contains__(self, key): + try: + item = self.__items[key] # no reordering + except KeyError: + return False + else: + return self.timer() < item.expires + + def __getitem__(self, key, cache_getitem=Cache.__getitem__): + try: + item = self.__getitem(key) + except KeyError: + expired = False + else: + expired = not (self.timer() < item.expires) + if expired: + return self.__missing__(key) + else: + return cache_getitem(self, key) + + def __setitem__(self, key, value, cache_setitem=Cache.__setitem__): + with self.timer as time: + expires = self.__ttu(key, value, time) + if not (time < expires): + return # skip expired items + self.expire(time) + cache_setitem(self, key, value) + # removing an existing item would break the heap structure, so + # only mark it as removed for now + try: + self.__getitem(key).removed = True + except KeyError: + pass + self.__items[key] = item = TLRUCache._Item(key, expires) + heapq.heappush(self.__order, item) + + def __delitem__(self, key, cache_delitem=Cache.__delitem__): + with self.timer as time: + # no self.expire() for performance reasons, e.g. self.clear() [#67] + cache_delitem(self, key) + item = self.__items.pop(key) + item.removed = True + if not (time < item.expires): + raise KeyError(key) + + def __iter__(self): + for curr in self.__order: + # "freeze" time for iterator access + with self.timer as time: + if time < curr.expires and not curr.removed: + yield curr.key + + @property + def ttu(self): + """The local time-to-use function used by the cache.""" + return self.__ttu + + def expire(self, time=None): + """Remove expired items from the cache and return an iterable of the + expired `(key, value)` pairs. + + """ + if time is None: + time = self.timer() + items = self.__items + order = self.__order + # clean up the heap if too many items are marked as removed + if len(order) > len(items) * 2: + self.__order = order = [item for item in order if not item.removed] + heapq.heapify(order) + expired = [] + cache_delitem = Cache.__delitem__ + cache_getitem = Cache.__getitem__ + while order and (order[0].removed or not (time < order[0].expires)): + item = heapq.heappop(order) + if not item.removed: + expired.append((item.key, cache_getitem(self, item.key))) + cache_delitem(self, item.key) + del items[item.key] + return expired + + def popitem(self): + """Remove and return the `(key, value)` pair least recently used that + has not already expired. 
+ + """ + with self.timer as time: + self.expire(time) + try: + key = next(iter(self.__items)) + except StopIteration: + raise KeyError("%s is empty" % self.__class__.__name__) from None + else: + return (key, self.pop(key)) + + def __getitem(self, key): + value = self.__items[key] + self.__items.move_to_end(key) + return value + + +_CacheInfo = collections.namedtuple( + "CacheInfo", ["hits", "misses", "maxsize", "currsize"] +) + + +def cached(cache, key=keys.hashkey, lock=None, info=False): + """Decorator to wrap a function with a memoizing callable that saves + results in a cache. + + """ + + def decorator(func): + if info: + if isinstance(cache, Cache): + + def make_info(hits, misses): + return _CacheInfo(hits, misses, cache.maxsize, cache.currsize) + + elif isinstance(cache, collections.abc.Mapping): + + def make_info(hits, misses): + return _CacheInfo(hits, misses, None, len(cache)) + + else: + + def make_info(hits, misses): + return _CacheInfo(hits, misses, 0, 0) + + wrapper = _cached_wrapper(func, cache, key, lock, make_info) + else: + wrapper = _cached_wrapper(func, cache, key, lock, None) + + wrapper.cache = cache + wrapper.cache_key = key + wrapper.cache_lock = lock + + return functools.update_wrapper(wrapper, func) + + return decorator + + +def cachedmethod(cache, key=keys.methodkey, lock=None): + """Decorator to wrap a class or instance method with a memoizing + callable that saves results in a cache. + + """ + + def decorator(method): + if lock is None: + + def wrapper(self, *args, **kwargs): + c = cache(self) + if c is None: + return method(self, *args, **kwargs) + k = key(self, *args, **kwargs) + try: + return c[k] + except KeyError: + pass # key not found + v = method(self, *args, **kwargs) + try: + c[k] = v + except ValueError: + pass # value too large + return v + + def clear(self): + c = cache(self) + if c is not None: + c.clear() + + else: + + def wrapper(self, *args, **kwargs): + c = cache(self) + if c is None: + return method(self, *args, **kwargs) + k = key(self, *args, **kwargs) + try: + with lock(self): + return c[k] + except KeyError: + pass # key not found + v = method(self, *args, **kwargs) + # in case of a race, prefer the item already in the cache + try: + with lock(self): + return c.setdefault(k, v) + except ValueError: + return v # value too large + + def clear(self): + c = cache(self) + if c is not None: + with lock(self): + c.clear() + + wrapper.cache = cache + wrapper.cache_key = key + wrapper.cache_lock = lock + wrapper.cache_clear = clear + + return functools.update_wrapper(wrapper, method) + + return decorator diff --git a/venv/lib/python3.10/site-packages/cachetools/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/cachetools/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9cf573b333255114c335ce739244c4ca9bf7b96 Binary files /dev/null and b/venv/lib/python3.10/site-packages/cachetools/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/cachetools/__pycache__/_decorators.cpython-310.pyc b/venv/lib/python3.10/site-packages/cachetools/__pycache__/_decorators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2d3c33ff2e691e6983011e652abbf0b87b78290 Binary files /dev/null and b/venv/lib/python3.10/site-packages/cachetools/__pycache__/_decorators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/cachetools/__pycache__/func.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/cachetools/__pycache__/func.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f8123236ef8e0077ff0f8c8ea6d4d82b4eaecfc Binary files /dev/null and b/venv/lib/python3.10/site-packages/cachetools/__pycache__/func.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/cachetools/__pycache__/keys.cpython-310.pyc b/venv/lib/python3.10/site-packages/cachetools/__pycache__/keys.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f43eb186510279cab3f45e10bedcb4a5d2aa5d7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/cachetools/__pycache__/keys.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/cachetools/_decorators.py b/venv/lib/python3.10/site-packages/cachetools/_decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..fcac1e01b9abfee703ca4414e429c7ea033e6cd1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/cachetools/_decorators.py @@ -0,0 +1,152 @@ +"""Extensible memoizing decorator helpers.""" + + +def _cached_locked_info(func, cache, key, lock, info): + hits = misses = 0 + + def wrapper(*args, **kwargs): + nonlocal hits, misses + k = key(*args, **kwargs) + with lock: + try: + result = cache[k] + hits += 1 + return result + except KeyError: + misses += 1 + v = func(*args, **kwargs) + with lock: + try: + # in case of a race, prefer the item already in the cache + return cache.setdefault(k, v) + except ValueError: + return v # value too large + + def cache_clear(): + nonlocal hits, misses + with lock: + cache.clear() + hits = misses = 0 + + def cache_info(): + with lock: + return info(hits, misses) + + wrapper.cache_clear = cache_clear + wrapper.cache_info = cache_info + return wrapper + + +def _cached_unlocked_info(func, cache, key, info): + hits = misses = 0 + + def wrapper(*args, **kwargs): + nonlocal hits, misses + k = key(*args, **kwargs) + try: + result = cache[k] + hits += 1 + return result + except KeyError: + misses += 1 + v = func(*args, **kwargs) + try: + cache[k] = v + except ValueError: + pass # value too large + return v + + def cache_clear(): + nonlocal hits, misses + cache.clear() + hits = misses = 0 + + wrapper.cache_clear = cache_clear + wrapper.cache_info = lambda: info(hits, misses) + return wrapper + + +def _uncached_info(func, info): + misses = 0 + + def wrapper(*args, **kwargs): + nonlocal misses + misses += 1 + return func(*args, **kwargs) + + def cache_clear(): + nonlocal misses + misses = 0 + + wrapper.cache_clear = cache_clear + wrapper.cache_info = lambda: info(0, misses) + return wrapper + + +def _cached_locked(func, cache, key, lock): + def wrapper(*args, **kwargs): + k = key(*args, **kwargs) + with lock: + try: + return cache[k] + except KeyError: + pass # key not found + v = func(*args, **kwargs) + with lock: + try: + # in case of a race, prefer the item already in the cache + return cache.setdefault(k, v) + except ValueError: + return v # value too large + + def cache_clear(): + with lock: + cache.clear() + + wrapper.cache_clear = cache_clear + return wrapper + + +def _cached_unlocked(func, cache, key): + def wrapper(*args, **kwargs): + k = key(*args, **kwargs) + try: + return cache[k] + except KeyError: + pass # key not found + v = func(*args, **kwargs) + try: + cache[k] = v + except ValueError: + pass # value too large + return v + + wrapper.cache_clear = lambda: cache.clear() + return wrapper + + +def _uncached(func): + def wrapper(*args, **kwargs): + return func(*args, 
**kwargs) + + wrapper.cache_clear = lambda: None + return wrapper + + +def _cached_wrapper(func, cache, key, lock, info): + if info is not None: + if cache is None: + wrapper = _uncached_info(func, info) + elif lock is None: + wrapper = _cached_unlocked_info(func, cache, key, info) + else: + wrapper = _cached_locked_info(func, cache, key, lock, info) + else: + if cache is None: + wrapper = _uncached(func) + elif lock is None: + wrapper = _cached_unlocked(func, cache, key) + else: + wrapper = _cached_locked(func, cache, key, lock) + wrapper.cache_info = None + return wrapper diff --git a/venv/lib/python3.10/site-packages/cachetools/func.py b/venv/lib/python3.10/site-packages/cachetools/func.py new file mode 100644 index 0000000000000000000000000000000000000000..3eafddf1135444be1dcce7ad2e546e174a6c97e4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/cachetools/func.py @@ -0,0 +1,121 @@ +"""`functools.lru_cache` compatible memoizing function decorators.""" + +__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache") + +import math +import random +import time + +try: + from threading import RLock +except ImportError: # pragma: no cover + from dummy_threading import RLock + +from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache +from . import cached +from . import keys + + +class _UnboundTTLCache(TTLCache): + def __init__(self, ttl, timer): + TTLCache.__init__(self, math.inf, ttl, timer) + + @property + def maxsize(self): + return None + + +def _cache(cache, maxsize, typed): + def decorator(func): + key = keys.typedkey if typed else keys.hashkey + wrapper = cached(cache=cache, key=key, lock=RLock(), info=True)(func) + wrapper.cache_parameters = lambda: {"maxsize": maxsize, "typed": typed} + return wrapper + + return decorator + + +def fifo_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a First In First Out (FIFO) + algorithm. + + """ + if maxsize is None: + return _cache({}, None, typed) + elif callable(maxsize): + return _cache(FIFOCache(128), 128, typed)(maxsize) + else: + return _cache(FIFOCache(maxsize), maxsize, typed) + + +def lfu_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Frequently Used (LFU) + algorithm. + + """ + if maxsize is None: + return _cache({}, None, typed) + elif callable(maxsize): + return _cache(LFUCache(128), 128, typed)(maxsize) + else: + return _cache(LFUCache(maxsize), maxsize, typed) + + +def lru_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Recently Used (LRU) + algorithm. + + """ + if maxsize is None: + return _cache({}, None, typed) + elif callable(maxsize): + return _cache(LRUCache(128), 128, typed)(maxsize) + else: + return _cache(LRUCache(maxsize), maxsize, typed) + + +def mru_cache(maxsize=128, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Most Recently Used (MRU) + algorithm. 
+ """ + from warnings import warn + + warn("@mru_cache is deprecated", DeprecationWarning, stacklevel=2) + + if maxsize is None: + return _cache({}, None, typed) + elif callable(maxsize): + return _cache(MRUCache(128), 128, typed)(maxsize) + else: + return _cache(MRUCache(maxsize), maxsize, typed) + + +def rr_cache(maxsize=128, choice=random.choice, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Random Replacement (RR) + algorithm. + + """ + if maxsize is None: + return _cache({}, None, typed) + elif callable(maxsize): + return _cache(RRCache(128, choice), 128, typed)(maxsize) + else: + return _cache(RRCache(maxsize, choice), maxsize, typed) + + +def ttl_cache(maxsize=128, ttl=600, timer=time.monotonic, typed=False): + """Decorator to wrap a function with a memoizing callable that saves + up to `maxsize` results based on a Least Recently Used (LRU) + algorithm with a per-item time-to-live (TTL) value. + """ + if maxsize is None: + return _cache(_UnboundTTLCache(ttl, timer), None, typed) + elif callable(maxsize): + return _cache(TTLCache(128, ttl, timer), 128, typed)(maxsize) + else: + return _cache(TTLCache(maxsize, ttl, timer), maxsize, typed) diff --git a/venv/lib/python3.10/site-packages/cachetools/keys.py b/venv/lib/python3.10/site-packages/cachetools/keys.py new file mode 100644 index 0000000000000000000000000000000000000000..8689b17b9da1040bcc3f4eda30c090e10bae4bc0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/cachetools/keys.py @@ -0,0 +1,62 @@ +"""Key functions for memoizing decorators.""" + +__all__ = ("hashkey", "methodkey", "typedkey", "typedmethodkey") + + +class _HashedTuple(tuple): + """A tuple that ensures that hash() will be called no more than once + per element, since cache decorators will hash the key multiple + times on a cache miss. See also _HashedSeq in the standard + library functools implementation. 
+ + """ + + __hashvalue = None + + def __hash__(self, hash=tuple.__hash__): + hashvalue = self.__hashvalue + if hashvalue is None: + self.__hashvalue = hashvalue = hash(self) + return hashvalue + + def __add__(self, other, add=tuple.__add__): + return _HashedTuple(add(self, other)) + + def __radd__(self, other, add=tuple.__add__): + return _HashedTuple(add(other, self)) + + def __getstate__(self): + return {} + + +# used for separating keyword arguments; we do not use an object +# instance here so identity is preserved when pickling/unpickling +_kwmark = (_HashedTuple,) + + +def hashkey(*args, **kwargs): + """Return a cache key for the specified hashable arguments.""" + + if kwargs: + return _HashedTuple(args + sum(sorted(kwargs.items()), _kwmark)) + else: + return _HashedTuple(args) + + +def methodkey(self, *args, **kwargs): + """Return a cache key for use with cached methods.""" + return hashkey(*args, **kwargs) + + +def typedkey(*args, **kwargs): + """Return a typed cache key for the specified hashable arguments.""" + + key = hashkey(*args, **kwargs) + key += tuple(type(v) for v in args) + key += tuple(type(v) for _, v in sorted(kwargs.items())) + return key + + +def typedmethodkey(self, *args, **kwargs): + """Return a typed cache key for use with cached methods.""" + return typedkey(*args, **kwargs) diff --git a/venv/lib/python3.10/site-packages/click_help_colors/__init__.py b/venv/lib/python3.10/site-packages/click_help_colors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..173924a48f4a63c2dd4e7b35a17f9de6aaba0ba3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click_help_colors/__init__.py @@ -0,0 +1,19 @@ +from .core import HelpColorsFormatter, HelpColorsMixin, HelpColorsGroup, \ + HelpColorsCommand, HelpColorsMultiCommand + +from .utils import _colorize, HelpColorsException + +from .decorators import version_option + + +__all__ = [ + 'HelpColorsFormatter', 'HelpColorsMixin', 'HelpColorsGroup', + 'HelpColorsCommand', 'HelpColorsMultiCommand', + + '_colorize', 'HelpColorsException', + + 'version_option' +] + + +__version__ = '0.9.4' diff --git a/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..551c7b539041c368743b751b9ddcdedbcd58b6dd Binary files /dev/null and b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1e7ad73e5ed79a0f162c2b61228ceb66eed4d12 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/decorators.cpython-310.pyc b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/decorators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d680dc89475b4e3b3999268d783d916dc6d08d8c Binary files /dev/null and b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/decorators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/utils.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4721f4d292b2df117ecf5a8ad8bc01536386d07 Binary files /dev/null and b/venv/lib/python3.10/site-packages/click_help_colors/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/click_help_colors/core.py b/venv/lib/python3.10/site-packages/click_help_colors/core.py new file mode 100644 index 0000000000000000000000000000000000000000..6d694e2e0e2a4fba87f382791a5abf475ea6810a --- /dev/null +++ b/venv/lib/python3.10/site-packages/click_help_colors/core.py @@ -0,0 +1,184 @@ +import re +import typing as t + +import click + +from .utils import _colorize, _extend_instance + + +class HelpColorsFormatter(click.HelpFormatter): + options_regex = re.compile(r'-{1,2}[\w\-]+') + + def __init__(self, + headers_color: t.Optional[str] = None, + options_color: t.Optional[str] = None, + options_custom_colors: t.Optional[t.Mapping[str, str]] = None, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None): + self.headers_color = headers_color + self.options_color = options_color + self.options_custom_colors = options_custom_colors + super().__init__(indent_increment, width, max_width) + + def _get_opt_names(self, option_name: str) -> t.List[str]: + opts = self.options_regex.findall(option_name) + if not opts: + return [option_name] + else: + # Include this for backwards compatibility + opts.append(option_name.split()[0]) + return opts + + def _pick_color(self, option_name: str) -> t.Optional[str]: + opts = self._get_opt_names(option_name) + for opt in opts: + if self.options_custom_colors and (opt in self.options_custom_colors.keys()): + return self.options_custom_colors[opt] + return self.options_color + + def write_usage(self, prog: str, args: str = '', prefix: t.Optional[str] = None) -> None: + if prefix is None: + prefix = 'Usage' + + colorized_prefix = _colorize(prefix, color=self.headers_color, suffix=": ") + super().write_usage(prog, args, prefix=colorized_prefix) + + def write_heading(self, heading: str) -> None: + colorized_heading = _colorize(heading, color=self.headers_color) + super().write_heading(colorized_heading) + + def write_dl(self, rows: t.Sequence[t.Tuple[str, str]], col_max: int = 30, col_spacing: int = 2) -> None: + colorized_rows = [(_colorize(row[0], self._pick_color(row[0])), row[1]) for row in rows] + super().write_dl(colorized_rows, col_max, col_spacing) + + +class HelpColorsMixin: + def __init__(self, + help_headers_color: t.Optional[str] = None, + help_options_color: t.Optional[str] = None, + help_options_custom_colors: t.Optional[t.Mapping[str, str]] = None, + *args: t.Any, + **kwargs: t.Any): + self.help_headers_color = help_headers_color + self.help_options_color = help_options_color + self.help_options_custom_colors = help_options_custom_colors + super().__init__(*args, **kwargs) + + def get_help(self, ctx: click.Context) -> str: + formatter = HelpColorsFormatter( + width=ctx.terminal_width, + max_width=ctx.max_content_width, + headers_color=self.help_headers_color, + options_color=self.help_options_color, + options_custom_colors=self.help_options_custom_colors) + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip('\n') + + format_help: t.Callable[[click.Context, click.HelpFormatter], None] + + +CommandType = t.TypeVar("CommandType", bound=click.Command) +GroupType = t.TypeVar("GroupType", bound=click.Group) + + +class 
HelpColorsGroup(HelpColorsMixin, click.Group): + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> 'HelpColorsCommand': ... + + @t.overload + def command(self, + name: t.Optional[str], + cls: t.Type[CommandType], + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], CommandType]: ... + + @t.overload + def command(self, + name: None = ..., + *, + cls: t.Type[CommandType], + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], CommandType]: ... + + @t.overload + def command(self, + name: t.Optional[str] = ..., + cls: None = ..., + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], 'HelpColorsCommand']: ... + + def command(self, + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], CommandType], 'HelpColorsCommand']: + kwargs.setdefault('cls', HelpColorsCommand) + kwargs.setdefault('help_headers_color', self.help_headers_color) + kwargs.setdefault('help_options_color', self.help_options_color) + kwargs.setdefault('help_options_custom_colors', self.help_options_custom_colors) + return super().command(*args, **kwargs) # type: ignore + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> 'HelpColorsGroup': ... + + @t.overload + def group(self, + name: t.Optional[str], + cls: t.Type[GroupType], + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], GroupType]: ... + + @t.overload + def group(self, + name: None = ..., + *, + cls: t.Type[GroupType], + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], GroupType]: ... + + @t.overload + def group(self, + name: t.Optional[str] = ..., + cls: None = ..., + **attrs: t.Any, + ) -> t.Callable[[t.Callable[..., t.Any]], 'HelpColorsGroup']: ... + + def group(self, + *args: t.Any, + **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], GroupType], 'HelpColorsGroup']: + kwargs.setdefault('cls', HelpColorsGroup) + kwargs.setdefault('help_headers_color', self.help_headers_color) + kwargs.setdefault('help_options_color', self.help_options_color) + kwargs.setdefault('help_options_custom_colors', self.help_options_custom_colors) + return super().group(*args, **kwargs) # type: ignore + + +class HelpColorsCommand(HelpColorsMixin, click.Command): + pass + + +class HelpColorsMultiCommand(HelpColorsMixin, click.MultiCommand): + def resolve_command(self, + ctx: click.Context, + args: t.List[str], + ) -> t.Tuple[t.Optional[str], t.Optional[click.Command], t.List[str]]: + cmd_name, cmd, args[1:] = super().resolve_command(ctx, args) + + if cmd is not None: + if not isinstance(cmd, HelpColorsMixin): + if isinstance(cmd, click.Group): + _extend_instance(cmd, HelpColorsGroup) + cmd = t.cast(HelpColorsGroup, cmd) + if isinstance(cmd, click.Command): + _extend_instance(cmd, HelpColorsCommand) + cmd = t.cast(HelpColorsCommand, cmd) + + if not getattr(cmd, 'help_headers_color', None): + cmd.help_headers_color = self.help_headers_color + if not getattr(cmd, 'help_options_color', None): + cmd.help_options_color = self.help_options_color + if not getattr(cmd, 'help_options_custom_colors', None): + cmd.help_options_custom_colors = self.help_options_custom_colors + + return cmd_name, cmd, args[1:] diff --git a/venv/lib/python3.10/site-packages/click_help_colors/decorators.py b/venv/lib/python3.10/site-packages/click_help_colors/decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..2a0e3929c2830c3dbaaee4cfc495e7eaa8283a47 --- /dev/null +++ b/venv/lib/python3.10/site-packages/click_help_colors/decorators.py @@ -0,0 +1,75 @@ +import re +import 
typing as t + +from .utils import _colorize + + +from click import version_option as click_version_option, Command + +FC = t.TypeVar("FC", bound=t.Union[t.Callable[..., t.Any], Command]) + + +@t.overload +def version_option( + version: str, + prog_name: str, + message: None = ..., + message_color: t.Optional[str] = ..., + prog_name_color: t.Optional[str] = ..., + version_color: t.Optional[str] = ..., + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: ... + + +@t.overload +def version_option( + version: t.Optional[str] = ..., + prog_name: t.Optional[str] = ..., + message: str = ..., + message_color: t.Optional[str] = ..., + prog_name_color: t.Optional[str] = ..., + version_color: t.Optional[str] = ..., + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: ... + + +def version_option( + version: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + message_color: t.Optional[str] = None, + prog_name_color: t.Optional[str] = None, + version_color: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """ + :param prog_name_color: color of the prog_name. + :param version_color: color of the version. + :param message_color: default color of the message. + + for other params see Click's version_option decorator: + https://click.palletsprojects.com/en/7.x/api/#click.version_option + """ + if message is None: + message = "%(prog)s, version %(version)s" + + msg_parts = [] + for s in re.split(r'(%\(version\)s|%\(prog\)s)', message): + if s == '%(prog)s': + if prog_name is None: + raise TypeError("version_option() missing required argument: 'prog_name'") + msg_parts.append(_colorize(prog_name, prog_name_color or message_color)) + elif s == '%(version)s': + if version is None: + raise TypeError("version_option() missing required argument: 'version'") + msg_parts.append(_colorize(version, version_color or message_color)) + else: + msg_parts.append(_colorize(s, message_color)) + message = ''.join(msg_parts) + + return click_version_option( + version=version, + prog_name=prog_name, + message=message, + **kwargs + ) diff --git a/venv/lib/python3.10/site-packages/click_help_colors/py.typed b/venv/lib/python3.10/site-packages/click_help_colors/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/click_help_colors/utils.py b/venv/lib/python3.10/site-packages/click_help_colors/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bcca8789348cdc92a57327f1ec96980a1355049b --- /dev/null +++ b/venv/lib/python3.10/site-packages/click_help_colors/utils.py @@ -0,0 +1,24 @@ +import os +import typing as t + +from click.termui import _ansi_colors, _ansi_reset_all + + +class HelpColorsException(Exception): + pass + + +def _colorize(text: str, color: t.Optional[str] = None, suffix: t.Optional[str] = None) -> str: + if not color or os.getenv("NO_COLOR"): + return text + (suffix or '') + try: + return '\033[%dm' % (_ansi_colors[color]) + text + _ansi_reset_all + (suffix or '') + except KeyError: + raise HelpColorsException('Unknown color %r' % color) + + +def _extend_instance(obj: object, cls: t.Type[object]) -> None: + """Apply mixin to a class instance after creation""" + base_cls = obj.__class__ + base_cls_name = obj.__class__.__name__ + obj.__class__ = type(base_cls_name, (cls, base_cls), {}) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/INSTALLER 
b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/METADATA b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..267688ffb482f74786840d815008608967f55f04 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/METADATA @@ -0,0 +1,169 @@ +Metadata-Version: 2.4 +Name: compressed-tensors +Version: 0.9.3 +Summary: Library for utilization of compressed safetensors of neural network models +Home-page: https://github.com/neuralmagic/compressed-tensors +Author: Neuralmagic, Inc. +Author-email: support@neuralmagic.com +License: Apache 2.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: torch>=1.7.0 +Requires-Dist: transformers +Requires-Dist: pydantic>=2.0 +Provides-Extra: dev +Requires-Dist: black==22.12.0; extra == "dev" +Requires-Dist: isort==5.8.0; extra == "dev" +Requires-Dist: wheel>=0.36.2; extra == "dev" +Requires-Dist: flake8>=3.8.3; extra == "dev" +Requires-Dist: pytest>=6.0.0; extra == "dev" +Requires-Dist: nbconvert>=7.16.3; extra == "dev" +Provides-Extra: accelerate +Requires-Dist: accelerate; extra == "accelerate" +Dynamic: author +Dynamic: author-email +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: summary + +# compressed-tensors + +The `compressed-tensors` library extends the [safetensors](https://github.com/huggingface/safetensors) format, providing a versatile and efficient way to store and manage compressed tensor data. This library supports various quantization and sparsity schemes, making it a unified format for handling different model optimizations like GPTQ, AWQ, SmoothQuant, INT8, FP8, SparseGPT, and more. + +## Why `compressed-tensors`? + +As model compression becomes increasingly important for efficient deployment of LLMs, the landscape of quantization and compression techniques has grown ever more fragmented. +Each method often comes with its own storage format and loading procedures, making it challenging to work with multiple techniques or switch between them. +`compressed-tensors` addresses this by providing a single, extensible format that can represent a wide variety of compression schemes. + +* **Unified Checkpoint Format**: Supports various compression schemes in a single, consistent format. +* **Wide Compatibility**: Works with popular quantization methods like GPTQ, SmoothQuant, and FP8. See [llm-compressor](https://github.com/vllm-project/llm-compressor). +* **Flexible Quantization Support**: + * Weight-only quantization (e.g., W4A16, W8A16, WnA16) + * Activation quantization (e.g., W8A8) + * KV cache quantization + * Non-uniform schemes (different layers can be quantized in different ways!) +* **Sparsity Support**: Handles both unstructured and semi-structured (e.g., 2:4) sparsity patterns. +* **Open-Source Integration**: Designed to work seamlessly with Hugging Face models and PyTorch.
+ +This allows developers and researchers to easily experiment with composing different quantization methods, simplify model deployment pipelines, and reduce the overhead of supporting multiple compression formats in inference engines. + +## Installation + +### From [PyPI](https://pypi.org/project/compressed-tensors) + +Stable release: +```bash +pip install compressed-tensors +``` + +Nightly release: +```bash +pip install compressed-tensors-nightly +``` + +### From Source + +```bash +git clone https://github.com/neuralmagic/compressed-tensors +cd compressed-tensors +pip install -e . +``` + +## Getting started + +### Saving/Loading Compressed Tensors (Bitmask Compression) + +The function `save_compressed` uses the `compression_format` argument to apply compression to tensors. +The function `load_compressed` reverses the process, converting the compressed weights on disk back into decompressed weights in device memory. + +```python +from compressed_tensors import save_compressed, load_compressed, BitmaskConfig +from torch import Tensor +from typing import Dict + +# the example BitmaskConfig method efficiently compresses +# tensors with a large number of zero entries +compression_config = BitmaskConfig() + +tensors: Dict[str, Tensor] = {"tensor_1": Tensor( + [[0.0, 0.0, 0.0], + [1.0, 1.0, 1.0]] +)} +# compress tensors using the BitmaskConfig compression format (save them efficiently on disk) +save_compressed(tensors, "model.safetensors", compression_format=compression_config.format) + +# decompress tensors (load_compressed returns a generator for memory efficiency) +decompressed_tensors = {} +for tensor_name, tensor in load_compressed("model.safetensors", compression_config=compression_config): + decompressed_tensors[tensor_name] = tensor +``` + +## Saving/Loading Compressed Models (Bitmask Compression) + +We can apply bitmask compression to a whole model. For a more detailed example, see the `examples` directory. +```python +from compressed_tensors import save_compressed_model, load_compressed, BitmaskConfig +from transformers import AutoModelForCausalLM + +model_name = "neuralmagic/llama2.c-stories110M-pruned50" +model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto") + +original_state_dict = model.state_dict() + +compression_config = BitmaskConfig() + +# save compressed model weights +save_compressed_model(model, "compressed_model.safetensors", compression_format=compression_config.format) + +# load compressed model weights (`dict` turns the generator into a dictionary) +state_dict = dict(load_compressed("compressed_model.safetensors", compression_config)) +``` + +For a more in-depth tutorial on bitmask compression, refer to the [notebook](https://github.com/neuralmagic/compressed-tensors/blob/d707c5b84bc3fef164aebdcd97cb6eaa571982f8/examples/bitmask_compression.ipynb).
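+ +As a quick sanity check of the round trip, the decompressed weights can be compared against the originals. This continues the snippet above (a sketch; `torch.equal` is standard PyTorch, everything else is defined in the example): + +```python +import torch + +# bitmask compression is lossless, so every decompressed tensor +# should match the original exactly +for name, tensor in original_state_dict.items(): + assert torch.equal(tensor, state_dict[name]), f"mismatch in {name}" +```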
+
+### Saving a Compressed Model with PTQ
+
+We can use compressed-tensors to run basic post-training quantization (PTQ) and save the quantized model compressed on disk.
+
+```python
+# NOTE: the snippet below spells out the imports the example relies on; the
+# exact import locations of the quantization lifecycle helpers vary between
+# compressed-tensors versions, so treat them as assumptions to adapt.
+import torch
+from datasets import load_dataset
+from torch.utils.data import DataLoader
+from tqdm import tqdm
+from transformers import AutoModelForCausalLM, AutoTokenizer, DefaultDataCollator
+
+from compressed_tensors import ModelCompressor
+from compressed_tensors.quantization import (
+    QuantizationConfig,
+    QuantizationStatus,
+    apply_quantization_config,
+    compress_quantized_weights,
+    freeze_module_quantization,
+)
+
+device = "cuda:0"
+
+model_name = "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"
+model = AutoModelForCausalLM.from_pretrained(model_name, device_map=device, torch_dtype="auto")
+
+config = QuantizationConfig.parse_file("./examples/bit_packing/int4_config.json")
+config.quantization_status = QuantizationStatus.CALIBRATION
+apply_quantization_config(model, config)
+
+dataset = load_dataset("ptb_text_only")["train"]
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+def tokenize_function(examples):
+    return tokenizer(examples["sentence"], padding=False, truncation=True, max_length=1024)
+
+# drop the raw text columns so the default collator only sees tensor features
+tokenized_dataset = dataset.map(tokenize_function, batched=True, remove_columns=dataset.column_names)
+data_loader = DataLoader(tokenized_dataset, batch_size=1, collate_fn=DefaultDataCollator())
+
+with torch.no_grad():
+    for idx, sample in tqdm(enumerate(data_loader), desc="Running calibration"):
+        sample = {key: value.to(device) for key, value in sample.items()}
+        _ = model(**sample)
+
+        if idx >= 512:
+            break
+
+model.apply(freeze_module_quantization)
+model.apply(compress_quantized_weights)
+
+output_dir = "./ex_llama1.1b_w4a16_packed_quantize"
+compressor = ModelCompressor(quantization_config=config)
+compressed_state_dict = compressor.compress(model)
+model.save_pretrained(output_dir, state_dict=compressed_state_dict)
+```
+
+For a more in-depth tutorial on quantization compression, refer to the [notebook](./examples/quantize_and_pack_int4.ipynb).
diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..f17c5733fdaf40cedbca730b6b32a38bc8ee8d12
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/RECORD
@@ -0,0 +1,100 @@
+compressed_tensors-0.9.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+compressed_tensors-0.9.3.dist-info/METADATA,sha256=zs3aFaG-BGV9hqJbW9Zwzex0TVcM5sPZhiaeVx2qjR0,6997
+compressed_tensors-0.9.3.dist-info/RECORD,,
+compressed_tensors-0.9.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+compressed_tensors-0.9.3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+compressed_tensors-0.9.3.dist-info/top_level.txt,sha256=w2i-GyPs2s1UwVxvutSvN_lM22SXC2hQFBmoMcPnV7Y,19
+compressed_tensors/__init__.py,sha256=UtKmifNeBCSE2TZSAfduVNNzHY-3V7bLjZ7n7RuXLOE,812
+compressed_tensors/__pycache__/__init__.cpython-310.pyc,,
+compressed_tensors/__pycache__/base.cpython-310.pyc,,
+compressed_tensors/__pycache__/version.cpython-310.pyc,,
+compressed_tensors/base.py,sha256=73HYH7HY7O2roC89yG_piPFnZwrBfn_i7HmKl90SKc0,875
+compressed_tensors/compressors/__init__.py,sha256=smSygTSfcfuujRrAXDc6uZm4L_ccV1tWZewqVnOb4lM,825
+compressed_tensors/compressors/__pycache__/__init__.cpython-310.pyc,,
+compressed_tensors/compressors/__pycache__/base.cpython-310.pyc,,
+compressed_tensors/compressors/__pycache__/helpers.cpython-310.pyc,,
+compressed_tensors/compressors/base.py,sha256=x8dQrWVEurynXw03yHJZTaAmrRTOsdZJoHjmvs0IKwk,7002
+compressed_tensors/compressors/helpers.py,sha256=OK6qxX9j3bHwF9JfIYSGMgBJe2PWjlTA3byXKCJaTIQ,5431
+compressed_tensors/compressors/model_compressors/__init__.py,sha256=5RGGPFu4YqEt_aOdFSQYFYFDjcZFJN0CsMqRtDZz3Js,666
+compressed_tensors/compressors/model_compressors/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/compressors/model_compressors/__pycache__/model_compressor.cpython-310.pyc,, +compressed_tensors/compressors/model_compressors/model_compressor.py,sha256=n0gcrKwefJuO6b4LNjCynJQf7NNqNHDcoLlzZgTCPGc,23080 +compressed_tensors/compressors/quantized_compressors/__init__.py,sha256=09UJq68Pht6Bf-4iP9xYl3tetKsncNPHD8IAGbePsr4,714 +compressed_tensors/compressors/quantized_compressors/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/compressors/quantized_compressors/__pycache__/base.cpython-310.pyc,, +compressed_tensors/compressors/quantized_compressors/__pycache__/naive_quantized.cpython-310.pyc,, +compressed_tensors/compressors/quantized_compressors/__pycache__/pack_quantized.cpython-310.pyc,, +compressed_tensors/compressors/quantized_compressors/base.py,sha256=GXTSWgFAhksbno94Ulpth9-YM4a7NsDlx4oQGGB0swQ,8567 +compressed_tensors/compressors/quantized_compressors/naive_quantized.py,sha256=fd0KlkSx6bvZ3xwIkK3jEUdPSUPs56Eua4dEDOtzKW0,5150 +compressed_tensors/compressors/quantized_compressors/pack_quantized.py,sha256=zH2PocRe_T5yt1-3kLdZH9AUQWQyaVOi4U9nEJiYaWA,8509 +compressed_tensors/compressors/sparse_compressors/__init__.py,sha256=Atuz-OdEgn8OCUhx7Ovd6gXdyImAI186uCR-uR0t_Nk,737 +compressed_tensors/compressors/sparse_compressors/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/compressors/sparse_compressors/__pycache__/base.cpython-310.pyc,, +compressed_tensors/compressors/sparse_compressors/__pycache__/dense.cpython-310.pyc,, +compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_24_bitmask.cpython-310.pyc,, +compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_bitmask.cpython-310.pyc,, +compressed_tensors/compressors/sparse_compressors/base.py,sha256=CVWbs3sd7GKJEoWOIKImABQ01VOTX8dlF2AQaEVPotw,5883 +compressed_tensors/compressors/sparse_compressors/dense.py,sha256=_uW_HISeDNz4yboSZWoh6GwrkUE6HFibzPQSKrHOCkg,1505 +compressed_tensors/compressors/sparse_compressors/sparse_24_bitmask.py,sha256=mEKSSgpXookqYSJw3mlyP6cYYKD-eaIvpQMvi4JO6TY,8807 +compressed_tensors/compressors/sparse_compressors/sparse_bitmask.py,sha256=S8vW0FI9ep_XtUQOxj0P5utJt3vKEYOHjWEPp-Xd9aY,5820 +compressed_tensors/compressors/sparse_quantized_compressors/__init__.py,sha256=4f_cwcKXB1nVVMoiKgTFAc8jAPjPLElo-Df_EDm1_xw,675 +compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/marlin_24.cpython-310.pyc,, +compressed_tensors/compressors/sparse_quantized_compressors/marlin_24.py,sha256=xY0CdHXAzVHeDeSCD_I-5UZKcntKzd3FiKSP-ZqcSBs,9614 +compressed_tensors/config/__init__.py,sha256=8sOoZ6xvYSC79mBvEtO8l6xk4PC80d29AnnJiGMrY2M,737 +compressed_tensors/config/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/config/__pycache__/base.cpython-310.pyc,, +compressed_tensors/config/__pycache__/dense.cpython-310.pyc,, +compressed_tensors/config/__pycache__/sparse_24_bitmask.cpython-310.pyc,, +compressed_tensors/config/__pycache__/sparse_bitmask.cpython-310.pyc,, +compressed_tensors/config/base.py,sha256=R3iUmFf1MslEjin5LgwQbmfJHIsS7Uw0UIxfn780uqY,3479 +compressed_tensors/config/dense.py,sha256=NgSxnFCnckU9-iunxEaqiFwqgdO7YYxlWKR74jNbjks,1317 +compressed_tensors/config/sparse_24_bitmask.py,sha256=Lhj39zT2V1hxftprvxvneyhv45ShlXOKd75DBbDTyTE,1401 +compressed_tensors/config/sparse_bitmask.py,sha256=pZUboRNZTu6NajGOQEFExoPknak5ynVAUeiiYpS1Gt8,1308 
+compressed_tensors/linear/__init__.py,sha256=fH6rjBYAxuwrTzBTlTjTgCYNyh6TCvCqajCz4Im4YrA,617 +compressed_tensors/linear/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/linear/__pycache__/compressed_linear.cpython-310.pyc,, +compressed_tensors/linear/compressed_linear.py,sha256=_m6XpNcI53eeSHO8VdiuAM6UBTdpDhn5Ivd8iRMwEKc,3980 +compressed_tensors/quantization/__init__.py,sha256=83J5bPB7PavN2TfCoW7_vEDhfYpm4TDrqYO9vdSQ5bk,760 +compressed_tensors/quantization/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/quantization/__pycache__/quant_args.cpython-310.pyc,, +compressed_tensors/quantization/__pycache__/quant_config.cpython-310.pyc,, +compressed_tensors/quantization/__pycache__/quant_scheme.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__init__.py,sha256=_uItzFWusyV74Zco_pHLOTdE9a83cL-R-ZdyQrBkIyw,772 +compressed_tensors/quantization/lifecycle/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__pycache__/apply.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__pycache__/compressed.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__pycache__/forward.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__pycache__/helpers.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/__pycache__/initialize.cpython-310.pyc,, +compressed_tensors/quantization/lifecycle/apply.py,sha256=lZmCCSm1_o79iUAy460w6Bv9FaOvntVisMdS-dN9fnk,16594 +compressed_tensors/quantization/lifecycle/compressed.py,sha256=Fj9n66IN0EWsOAkBHg3O0GlOQpxstqjCcs0ttzMXrJ0,2296 +compressed_tensors/quantization/lifecycle/forward.py,sha256=DOWouUqfaLA4Qhg-ojVVBdhhSAlgZqFC26vZARxE0ko,12961 +compressed_tensors/quantization/lifecycle/helpers.py,sha256=C0mhy2vJ0fCjVeN4kFNhw8Eq1wkteBGHiZ36RVLThRY,944 +compressed_tensors/quantization/lifecycle/initialize.py,sha256=sK3PLm69N91QepBuq-83Qd2Br6XcOmRDpD5qo_WWNJo,7469 +compressed_tensors/quantization/quant_args.py,sha256=sKpb8DcNObidjXjNol1Tn_Iih3ZXBycSp-fyz68TGhY,9117 +compressed_tensors/quantization/quant_config.py,sha256=vx06wBo91p4LCb3Vzd-2eCTUeIf_Sz2ZXRP263eQyjQ,10385 +compressed_tensors/quantization/quant_scheme.py,sha256=eQ0JrRZ80GX69fpwW87VzPzzhajhk4mUaJScjk82OY4,6010 +compressed_tensors/quantization/utils/__init__.py,sha256=VdtEmP0bvuND_IGQnyqUPc5lnFp-1_yD7StKSX4x80w,656 +compressed_tensors/quantization/utils/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/quantization/utils/__pycache__/helpers.cpython-310.pyc,, +compressed_tensors/quantization/utils/helpers.py,sha256=DBP-sGRpGAY01K0LFE7qqonNj4hkTYL_mXrMs2LtAD8,14100 +compressed_tensors/registry/__init__.py,sha256=FwLSNYqfIrb5JD_6OK_MT4_svvKTN_nEhpgQlQvGbjI,658 +compressed_tensors/registry/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/registry/__pycache__/registry.cpython-310.pyc,, +compressed_tensors/registry/registry.py,sha256=vRcjVB1ITfSbfYUaGndBBmqhip_5vsS62weorVg0iXo,11896 +compressed_tensors/utils/__init__.py,sha256=gS4gSU2pwcAbsKj-6YMaqhm25udFy6ISYaWBf-myRSM,808 +compressed_tensors/utils/__pycache__/__init__.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/helpers.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/offload.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/permutations_24.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/permute.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/safetensors_load.cpython-310.pyc,, +compressed_tensors/utils/__pycache__/semi_structured_conversions.cpython-310.pyc,, 
+compressed_tensors/utils/helpers.py,sha256=RrNvzD08naEjEiXdU-FdZjQVda1nQywu1hA_GCDj0vg,10415 +compressed_tensors/utils/offload.py,sha256=H4aAg21zUvJM2uwE6QCNYazX_p_o41yQUAgLLWBqR0w,14079 +compressed_tensors/utils/permutations_24.py,sha256=kx6fsfDHebx94zsSzhXGyCyuC9sVyah6BUUir_StT28,2530 +compressed_tensors/utils/permute.py,sha256=V6tJLKo3Syccj-viv4F7ZKZgJeCB-hl-dK8RKI_kBwI,2355 +compressed_tensors/utils/safetensors_load.py,sha256=5SeM2hzLh77Ne8Vk7qR6-km7cf8bhov41ExpWITqX3A,11470 +compressed_tensors/utils/semi_structured_conversions.py,sha256=XKNffPum54kPASgqKzgKvyeqWPAkair2XEQXjkp7ho8,13489 +compressed_tensors/version.py,sha256=X4y5lqlF1QFUgl25iumzagpg3dzyVoLP6i82HZEhCJA,1585 diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1eb3c49d99559863120cfb8433fc8738fba43ba9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a05844b986341fe812005800f6def048765cb58 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors-0.9.3.dist-info/top_level.txt @@ -0,0 +1 @@ +compressed_tensors diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..64a52ddac805e028276e702617ba9de24db6642f --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/__init__.py @@ -0,0 +1,22 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .base import * + +# flake8: noqa +from .compressors import * +from .config import * +from .quantization import QuantizationConfig, QuantizationStatus +from .utils import * +from .version import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9cad25dbff4c3a9a9b4b5e583b68846fed9dae0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ecb7242e1cd5b26ea6d807e4c14159ba38e1028 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f73d996e10fa37ccae2ec24898ac0aa73c801b3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/base.py b/venv/lib/python3.10/site-packages/compressed_tensors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..0e073262f85632636ecd8aedef9d2fa16c3b1648 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/base.py @@ -0,0 +1,20 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +SPARSITY_CONFIG_NAME = "sparsity_config" +QUANTIZATION_CONFIG_NAME = "quantization_config" +COMPRESSION_CONFIG_NAME = "compression_config" +KV_CACHE_SCHEME_NAME = "kv_cache_scheme" +COMPRESSION_VERSION_NAME = "version" +QUANTIZATION_METHOD_NAME = "quant_method" diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..138e3899bf47d1b22914781cfb43dc9598b5eb95 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__init__.py @@ -0,0 +1,22 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa + +from .base import * +from .helpers import * +from .model_compressors import * +from .quantized_compressors import * +from .sparse_compressors import * +from .sparse_quantized_compressors import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2746efccc7fea93b13ff504f7514b0ca6ab09c75 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f88e48a1a89db23bbe86071543ff61e616d3666a Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47777ccec6f557d21d0cc75b0a44803cca0dd63e Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/base.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..8573a0e16d74b89115e9a9c197003514857a044e --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/base.py @@ -0,0 +1,197 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABC, abstractmethod
+from typing import Dict, Generator, Optional, Tuple, Union
+
+import torch
+from compressed_tensors.config import SparsityCompressionConfig
+from compressed_tensors.quantization import QuantizationArgs, QuantizationConfig
+from compressed_tensors.registry import RegistryMixin
+from torch import Tensor
+from torch.nn import Module
+
+
+__all__ = ["BaseCompressor"]
+
+
+class BaseCompressor(RegistryMixin, ABC):
+    """
+    Base class representing a model compression algorithm. Each child class should
+    implement compression_param_info, compress_weight, and decompress_weight.
+
+    Compressors support compressing/decompressing a full module state dict or a single
+    quantized PyTorch leaf module.
+
+    Model Load Lifecycle (run_compressed=False):
+        - ModelCompressor.decompress()
+            - apply_quantization_config()
+            - BaseCompressor.decompress()
+
+    Model Save Lifecycle:
+        - ModelCompressor.compress()
+            - BaseCompressor.compress()
+
+    Module Lifecycle (run_compressed=True):
+        - apply_quantization_config()
+        - compressed_module = CompressedLinear(module)
+            - initialize_module_for_quantization()
+            - BaseCompressor.compression_param_info()
+            - register_parameters()
+        - compressed_module.forward()
+            - compressed_module.decompress()
+
+    :param config: config specifying compression parameters
+    """
+
+    def __init__(
+        self, config: Union[SparsityCompressionConfig, QuantizationConfig, None] = None
+    ):
+        self.config = config
+
+    def compression_param_info(
+        self,
+        weight_shape: torch.Size,
+        quantization_args: Optional[QuantizationArgs] = None,
+    ) -> Dict[str, Tuple[torch.Size, torch.dtype]]:
+        """
+        Creates a dictionary of expected shapes and dtypes for each compression
+        parameter used by the compressor
+
+        :param weight_shape: uncompressed weight shape
+        :param quantization_args: quantization parameters for the weight
+        :return: dictionary mapping compressed parameter names to shape and dtype
+        """
+        raise NotImplementedError()
+
+    @property
+    @abstractmethod
+    def compression_param_names(self) -> Tuple[str]:
+        """
+        Returns a tuple of compression parameter names introduced by
+        the compressor during compression
+        """
+        raise NotImplementedError()
+
+    @abstractmethod
+    def compress(
+        self,
+        model_state: Dict[str, Tensor],
+        **kwargs,
+    ) -> Dict[str, Tensor]:
+        """
+        Compresses a dense state dict
+
+        :param model_state: state dict of uncompressed model
+        :param kwargs: additional arguments for compression
+        :return: compressed state dict
+        """
+        raise NotImplementedError()
+
+    @abstractmethod
+    def decompress(
+        self,
+        path_to_model_or_tensors: str,
+        device: str = "cpu",
+        **kwargs,
+    ) -> Generator[Tuple[str, Tensor], None, None]:
+        """
+        Reads a compressed state dict located at path_to_model_or_tensors
+        and returns a generator for sequentially decompressing back to a
+        dense state dict
+
+        :param path_to_model_or_tensors: path to compressed safetensors model (directory
+            with one or more safetensors files) or compressed tensors file
+        :param names_to_scheme: quantization args for each quantized weight
+        :param device: optional device to load
intermediate weights into
+        :return: generator yielding (parameter name, dense tensor) pairs of the
+            decompressed state dict
+        """
+        raise NotImplementedError()
+
+    def compress_module(self, module: Module) -> Optional[Dict[str, torch.Tensor]]:
+        """
+        Compresses a single quantized leaf PyTorch module. If the module is not
+        quantized, this function has no effect.
+
+        :param module: PyTorch module to compress
+        :return: dictionary of compressed weight data, or None if module is not
+            quantized
+        """
+        if not hasattr(module, "quantization_scheme"):
+            return None  # module is not quantized
+        quantization_scheme = module.quantization_scheme
+        if not hasattr(quantization_scheme, "weights"):
+            return None  # weights are not quantized
+
+        quantization_args = quantization_scheme.weights
+        weight = getattr(module, "weight", None)
+        weight_scale = getattr(module, "weight_scale", None)
+        weight_zero_point = getattr(module, "weight_zero_point", None)
+
+        return self.compress_weight(
+            weight=weight,
+            scale=weight_scale,
+            zero_point=weight_zero_point,
+            quantization_args=quantization_args,
+        )
+
+    def compress_weight(
+        self,
+        weight: Tensor,
+        **kwargs,
+    ) -> Dict[str, torch.Tensor]:
+        """
+        Compresses a single uncompressed weight
+
+        :param weight: uncompressed weight tensor
+        :param kwargs: additional arguments for compression
+        :return: dictionary of compressed weight data
+        """
+        raise NotImplementedError()
+
+    def decompress_module(self, module: Module):
+        """
+        Decompresses a single compressed leaf PyTorch module. If the module is not
+        quantized, this function has no effect.
+
+        :param module: PyTorch module to decompress
+        :return: tensor of the decompressed weight, or None if module is not quantized
+        """
+        if not hasattr(module, "quantization_scheme"):
+            return None  # module is not quantized
+        quantization_scheme = module.quantization_scheme
+        if not hasattr(quantization_scheme, "weights"):
+            return None  # weights are not quantized
+
+        quantization_args = quantization_scheme.weights
+        compressed_data = {}
+        for name, parameter in module.named_parameters():
+            compressed_data[name] = parameter
+
+        return self.decompress_weight(
+            compressed_data=compressed_data, quantization_args=quantization_args
+        )
+
+    def decompress_weight(
+        self, compressed_data: Dict[str, Tensor], **kwargs
+    ) -> torch.Tensor:
+        """
+        Decompresses a single compressed weight
+
+        :param compressed_data: dictionary of data needed for decompression
+        :param kwargs: additional arguments for decompression
+        :return: tensor of the decompressed weight
+        """
+        raise NotImplementedError()
diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/helpers.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b03a9a150d9aecaea347ad91748c930edf497ef
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/helpers.py
@@ -0,0 +1,137 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+from typing import Dict, Generator, Optional, Tuple, Union
+
+import torch
+from compressed_tensors.compressors import BaseCompressor
+from compressed_tensors.config import CompressionFormat, SparsityCompressionConfig
+from compressed_tensors.utils.safetensors_load import get_weight_mappings
+from safetensors import safe_open
+from safetensors.torch import save_file
+from torch import Tensor
+
+
+__all__ = [
+    "load_compressed",
+    "save_compressed",
+    "save_compressed_model",
+]
+
+
+def save_compressed(
+    tensors: Dict[str, Tensor],
+    save_path: Union[str, Path],
+    compression_format: Optional[CompressionFormat] = None,
+):
+    """
+    Save compressed tensors to disk.
+    If tensors are not compressed, save them as is.
+
+    :param tensors: dictionary of tensors to compress
+    :param save_path: path to save compressed tensors
+    :param compression_format: compression format used for the tensors;
+        defaults to `dense` (no compression) when not provided
+    """
+    if tensors is None or len(tensors) == 0:
+        raise ValueError("No tensors or empty tensors provided to compress")
+
+    # if no compression_format specified, default to `dense`
+    compression_format = compression_format or CompressionFormat.dense.value
+
+    if not (
+        compression_format in BaseCompressor.registered_names()
+        or compression_format in BaseCompressor.registered_aliases()
+    ):
+        raise ValueError(
+            f"Unknown compression format: {compression_format}. "
+            f"Must be one of {set(BaseCompressor.registered_names() + BaseCompressor.registered_aliases())}"  # noqa E501
+        )
+
+    # compress
+    compressor = BaseCompressor.load_from_registry(compression_format)
+    # save compressed tensors
+    compressed_tensors = compressor.compress(tensors)
+    save_file(compressed_tensors, save_path)
+
+
+def load_compressed(
+    compressed_tensors: Union[str, Path],
+    compression_config: SparsityCompressionConfig = None,
+    device: Optional[str] = "cpu",
+) -> Generator[Tuple[str, Tensor], None, None]:
+    """
+    Load compressed tensors from disk.
+    If tensors are not compressed, load them as is.
+
+    :param compressed_tensors: path to compressed tensors.
+        This can be a path to a file or a directory containing
+        one or multiple safetensor files (if multiple - in the format
+        assumed by huggingface)
+    :param compression_config: compression config to use for decompressing tensors
+    :param device: device to move tensors to; defaults to "cpu"
+    :return: a generator that yields the name and tensor of each decompressed tensor
+    """
+    if compressed_tensors is None or not Path(compressed_tensors).exists():
+        raise ValueError("No compressed tensors provided to load")
+
+    if (
+        compression_config is None
+        or compression_config.format == CompressionFormat.dense.value
+    ):
+        # if no compression_config specified, or `dense` format specified,
+        # assume tensors are not compressed on disk
+        weight_mappings = get_weight_mappings(compressed_tensors)
+        for weight_name, file_with_weight_name in weight_mappings.items():
+            with safe_open(file_with_weight_name, framework="pt", device=device) as f:
+                weight = f.get_tensor(weight_name)
+                yield weight_name, weight
+    else:
+        # decompress tensors
+        compression_format = compression_config.format
+        compressor = BaseCompressor.load_from_registry(
+            compression_format, config=compression_config
+        )
+        yield from compressor.decompress(compressed_tensors, device=device)
+
+
+def save_compressed_model(
+    model: torch.nn.Module,
+    filename: str,
+    compression_format: Optional[CompressionFormat] = None,
+    force_contiguous: bool = True,
+):
+    """
+    Wrapper around the safetensors `save_model` helper function, which allows
+    saving a compressed model to disk.
+
+    Note: the model is assumed to have a state_dict with unique entries
+
+    :param model: model to save on disk
+    :param filename: filename location to save the file
+    :param compression_format: compression format used for the model
+    :param force_contiguous: whether to force the state_dict to be saved as
+        contiguous tensors
+    """
+    state_dict = model.state_dict()
+    if force_contiguous:
+        state_dict = {k: v.contiguous() for k, v in state_dict.items()}
+    try:
+        save_compressed(state_dict, filename, compression_format=compression_format)
+    except ValueError as e:
+        msg = str(e)
+        msg += " Or use save_compressed_model(..., force_contiguous=True), read the docs for potential caveats."  # noqa E501
+        raise ValueError(msg)
diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d9cfa8525dbaa915d7aa820de387b1dcbbf534ed
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# flake8: noqa + + +from .model_compressor import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8ee4699c1f4ccbd3d4cc1221c90430f89d1606f Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/model_compressor.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/model_compressor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98bd7ad293ecb69c54c9224cc69ced6309f28ee4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/__pycache__/model_compressor.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/model_compressor.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/model_compressor.py new file mode 100644 index 0000000000000000000000000000000000000000..618b49ee07bbfe38652b5326513f99775a332d64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/model_compressors/model_compressor.py @@ -0,0 +1,580 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import logging +import operator +import os +import re +from contextlib import contextmanager +from copy import deepcopy +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, TypeVar, Union + +import compressed_tensors +import torch +import transformers +from compressed_tensors.base import ( + COMPRESSION_VERSION_NAME, + QUANTIZATION_CONFIG_NAME, + QUANTIZATION_METHOD_NAME, + SPARSITY_CONFIG_NAME, +) +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.config import CompressionFormat, SparsityCompressionConfig +from compressed_tensors.quantization import ( + DEFAULT_QUANTIZATION_METHOD, + QuantizationConfig, + QuantizationStatus, + apply_quantization_config, + load_pretrained_quantization, +) +from compressed_tensors.quantization.lifecycle import expand_target_names +from compressed_tensors.quantization.quant_args import QuantizationArgs +from compressed_tensors.quantization.utils import ( + is_module_quantized, + iter_named_leaf_modules, +) +from compressed_tensors.utils import ( + get_safetensors_folder, + merge_names, + update_parameter_data, +) +from compressed_tensors.utils.helpers import ( + fix_fsdp_module_name, + is_compressed_tensors_config, +) +from torch import Tensor +from torch.nn import Module +from tqdm import tqdm +from transformers import AutoConfig +from transformers.file_utils import CONFIG_NAME + + +__all__ = ["ModelCompressor", "map_modules_to_quant_args"] + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +if TYPE_CHECKING: + # dummy type if not available from transformers + CompressedTensorsConfig = TypeVar("CompressedTensorsConfig") + + +class ModelCompressor: + """ + Handles compression and decompression of a model with a sparsity config and/or + quantization config. 
+ + Compression LifeCycle + - compressor = ModelCompressor.from_pretrained_model(model) + - compressed_state_dict = compressor.compress(model, state_dict) + - compressor.quantization_compressor.compress(model, state_dict) + - compressor.sparsity_compressor.compress(model, state_dict) + - model.save_pretrained(output_dir, state_dict=compressed_state_dict) + - compressor.update_config(output_dir) + + Decompression LifeCycle + - compressor = ModelCompressor.from_pretrained(comp_model_path) + - model = AutoModel.from_pretrained(comp_model_path) + - compressor.decompress(comp_model_path, model) + - compressor.sparsity_compressor.decompress(comp_model_path, model) + - compressor.quantization_compressor.decompress(comp_model_path, model) + + :param sparsity_config: config specifying sparsity compression parameters + :param quantization_config: config specifying quantization compression parameters + """ + + @classmethod + def from_pretrained( + cls, + pretrained_model_name_or_path: str, + **kwargs, + ) -> Optional["ModelCompressor"]: + """ + Given a path to a model config, extract the sparsity and/or quantization + configs and load a ModelCompressor + + :param pretrained_model_name_or_path: path to model config on disk or HF hub + :return: compressor for the configs, or None if model is not compressed + """ + config = AutoConfig.from_pretrained(pretrained_model_name_or_path, **kwargs) + compression_config = getattr(config, QUANTIZATION_CONFIG_NAME, None) + return cls.from_compression_config(compression_config) + + @classmethod + def from_compression_config( + cls, + compression_config: Union[Dict[str, Any], "CompressedTensorsConfig"], + ): + """ + :param compression_config: + A compression or quantization config + + The type is one of the following: + 1. A Dict found under either "quantization_config" or "compression_config" + keys in the config.json + 2. 
A CompressedTensorsConfig found under key "quantization_config" in HF + model config + :return: compressor for the configs, or None if model is not compressed + """ + if compression_config is None: + return None + + sparsity_config = cls.parse_sparsity_config(compression_config) + quantization_config = cls.parse_quantization_config(compression_config) + if sparsity_config is None and quantization_config is None: + return None + + if sparsity_config is not None: + format = sparsity_config.get("format") + sparsity_config = SparsityCompressionConfig.load_from_registry( + format, **sparsity_config + ) + if quantization_config is not None: + quantization_config = QuantizationConfig.model_validate(quantization_config) + + return cls( + sparsity_config=sparsity_config, quantization_config=quantization_config + ) + + @classmethod + def from_pretrained_model( + cls, + model: Module, + sparsity_config: Union[SparsityCompressionConfig, str, None] = None, + quantization_format: Optional[str] = None, + ) -> Optional["ModelCompressor"]: + """ + Given a pytorch model and optional sparsity and/or quantization configs, + load the appropriate compressors + + :param model: pytorch model to target for compression + :param sparsity_config: a filled in sparsity config or string corresponding + to a sparsity compression algorithm + :param quantization_format: string corresponding to a quantization compression + algorithm + :return: compressor for the configs, or None if model is not compressed + """ + quantization_config = QuantizationConfig.from_pretrained( + model, format=quantization_format + ) + + if isinstance(sparsity_config, str): # we passed in a sparsity format + sparsity_config = SparsityCompressionConfig.load_from_registry( + sparsity_config + ) + + if sparsity_config is None and quantization_config is None: + return None + + return cls( + sparsity_config=sparsity_config, quantization_config=quantization_config + ) + + @staticmethod + def parse_sparsity_config( + compression_config: Union[Dict[str, Any], "CompressedTensorsConfig"] + ) -> Union[Dict[str, Any], None]: + """ + Parse sparsity config from quantization/compression config. Sparsity + config is nested inside q/c config + + :param compression_config: quantization/compression config + :return: sparsity config + """ + if compression_config is None: + return None + + if is_compressed_tensors_config(compression_config): + s_config = compression_config.sparsity_config + return s_config.model_dump() if s_config is not None else None + + return compression_config.get(SPARSITY_CONFIG_NAME, None) + + @staticmethod + def parse_quantization_config( + compression_config: Union[Dict[str, Any], "CompressedTensorsConfig"] + ) -> Union[Dict[str, Any], None]: + """ + Parse quantization config from quantization/compression config. 
The + quantization are all the fields that are not the sparsity config or + metadata fields + + :param compression_config: quantization/compression config + :return: quantization config without sparsity config or metadata fields + """ + if compression_config is None: + return None + + if is_compressed_tensors_config(compression_config): + q_config = compression_config.quantization_config + return q_config.model_dump() if q_config is not None else None + + quantization_config = deepcopy(compression_config) + quantization_config.pop(SPARSITY_CONFIG_NAME, None) + + # some fields are required, even if a qconfig is not present + # pop them off and if nothing remains, then there is no qconfig + quant_method = quantization_config.pop(QUANTIZATION_METHOD_NAME, None) + _ = quantization_config.pop(COMPRESSION_VERSION_NAME, None) + + if len(quantization_config) == 0: + return None + + # replace popped off values + # note that version is discarded for now + if quant_method is not None: + quantization_config[QUANTIZATION_METHOD_NAME] = quant_method + + return quantization_config + + def __init__( + self, + sparsity_config: Optional[SparsityCompressionConfig] = None, + quantization_config: Optional[QuantizationConfig] = None, + ): + self.sparsity_config = sparsity_config + self.quantization_config = quantization_config + self.sparsity_compressor = None + self.quantization_compressor = None + + if sparsity_config is not None: + self.sparsity_compressor = BaseCompressor.load_from_registry( + sparsity_config.format, config=sparsity_config + ) + if quantization_config is not None: + self.quantization_compressor = BaseCompressor.load_from_registry( + quantization_config.format, config=quantization_config + ) + + def get_missing_module_keys(self, model: Module) -> List[str]: + """ + Identifies the expected missing weight keys in the compressed state_dict. + + When a model undergoes sparsity or quantization compression, certain + weight tensors may be absent from the checkpoint by virtue of compression. + This function determines which weight keys are missing based on the + applied compression techniques. + + + :param model: The PyTorch model to check for missing keys. + :return: A list of missing keys expected in the compressed state_dict. + """ + missing_keys = set() + + # Determine missing keys due to sparsity compression + if ( + self.sparsity_compressor + and self.sparsity_config.format != CompressionFormat.dense.value + ): + sparse_targets = expand_target_names( + model=model, + targets=self.sparsity_config.targets, + ignore=self.sparsity_config.ignore, + ) + missing_keys.update( + merge_names(target, "weight") for target in sparse_targets + ) + + # Determine missing keys due to pack quantization + if ( + self.quantization_compressor + and self.quantization_config.format + == CompressionFormat.pack_quantized.value + ): + for scheme in self.quantization_config.config_groups.values(): + quant_targets = expand_target_names( + model=model, + targets=scheme.targets, + ignore=self.quantization_config.ignore, + ) + missing_keys.update( + merge_names(target, "weight") for target in quant_targets + ) + + return list(missing_keys) + + def get_unexpected_file_keys(self, model: Module) -> List[str]: + """ + Identifies extra keys introduced by the compression process in the + compressed state_dict that are not expected by the model graph. 
+ + During sparsity or quantization compression, additional metadata or + auxiliary parameters may be stored in the checkpoint, which do not + correspond to any parameter in the original model. These keys are + typically introduced to support the reconstruction of compressed weights. + + For example, Sparse24Bitmask compression may introduce keys such as + 'compressed', 'bitmask', and 'shape' in the checkpoint, which are + not part of the original model parameters. + + :param model: The PyTorch model to check for unexpected keys. + :return: A list of extra keys introduced by the compression process + that are not expected by the model. + """ + + unexpected_keys = set() + + # Identify unexpected keys from sparsity compression + if ( + self.sparsity_compressor + and self.sparsity_config.format != CompressionFormat.dense.value + ): + sparse_targets: Set[str] = expand_target_names( + model=model, + targets=self.sparsity_config.targets, + ignore=self.sparsity_config.ignore, + ) + unexpected_keys.update( + merge_names(target, param) + for target in sparse_targets + for param in self.sparsity_compressor.compression_param_names + ) + + # Identify unexpected keys from quantization compression + if self.quantization_compressor: + for scheme in self.quantization_config.config_groups.values(): + quant_targets: Set[str] = expand_target_names( + model=model, + targets=scheme.targets, + ignore=self.quantization_config.ignore, + ) + unexpected_keys.update( + merge_names(target, param) + for target in quant_targets + for param in self.quantization_compressor.compression_param_names + if param != "weight" + ) + + return list(unexpected_keys) + + def compress( + self, model: Module, state_dict: Optional[Dict[str, Tensor]] = None + ) -> Dict[str, Tensor]: + """ + Compresses a dense state dict or model with sparsity and/or quantization + + :param model: uncompressed model to compress + :param state_dict: optional uncompressed state_dict to insert into model + :return: compressed state dict + """ + if state_dict is None: + state_dict = model.state_dict() + + compressed_state_dict = state_dict + + quantized_modules_to_args: Dict[ + str, QuantizationArgs + ] = map_modules_to_quant_args(model) + + if self.quantization_compressor is not None: + compressed_state_dict = self.quantization_compressor.compress( + state_dict, names_to_scheme=quantized_modules_to_args + ) + if self.quantization_config.format != CompressionFormat.dense.value: + self.quantization_config.quantization_status = ( + QuantizationStatus.COMPRESSED + ) + + if self.sparsity_compressor is not None: + sparse_compression_targets: Set[str] = expand_target_names( + model=model, + targets=self.sparsity_config.targets, + ignore=self.sparsity_config.ignore, + ) + compressed_state_dict = self.sparsity_compressor.compress( + compressed_state_dict, + compression_targets=sparse_compression_targets, + ) + + # HACK: Override the dtype_byte_size function in transformers to + # support float8 types. 
Fix is posted upstream + # https://github.com/huggingface/transformers/pull/30488 + transformers.modeling_utils.dtype_byte_size = new_dtype_byte_size + + return compressed_state_dict + + def decompress(self, model_path: str, model: Module): + """ + Overwrites the weights in model with weights decompressed from model_path + + :param model_path: path to compressed weights + :param model: pytorch model to load decompressed weights into + """ + model_path = get_safetensors_folder(model_path) + sparse_decompressed = False + + if ( + self.sparsity_compressor is not None + and self.sparsity_config.format != CompressionFormat.dense.value + ): + # Sparse decompression is applied on the model_path + dense_gen = self.sparsity_compressor.decompress(model_path) + self._replace_weights(dense_gen, model) + setattr(model, SPARSITY_CONFIG_NAME, self.sparsity_compressor.config) + sparse_decompressed = True + + if self.quantization_compressor is not None: + # Temporarily set quantization status to FROZEN to prevent + # quantization during apply_quantization_config. This ensures + # that the dtypes of the weights are not unintentionally updated. + # The status is restored after quantization params are loaded. + with override_quantization_status( + self.quantization_config, QuantizationStatus.FROZEN + ): + names_to_scheme = apply_quantization_config( + model, self.quantization_config + ) + load_pretrained_quantization(model, model_path) + + model_path_or_state_dict = ( + model.state_dict() if sparse_decompressed else model_path + ) + + dense_gen = self.quantization_compressor.decompress( + model_path_or_state_dict, names_to_scheme=names_to_scheme + ) + self._replace_weights(dense_gen, model) + + def freeze_quantization_status(module): + module.quantization_status = QuantizationStatus.FROZEN + + model.apply(freeze_quantization_status) + setattr(model, QUANTIZATION_CONFIG_NAME, self.quantization_config) + + def update_config(self, save_directory: str): + """ + Update the model config located at save_directory with compression configs + for sparsity and/or quantization + + :param save_directory: path to a folder containing a HF model config + """ + if self.quantization_config is None and self.sparsity_config is None: + return + + config_file_path = os.path.join(save_directory, CONFIG_NAME) + if not os.path.exists(config_file_path): + _LOGGER.warning( + f"Could not find a valid model config file in " + f"{save_directory}. Compression config will not be saved." 
+ ) + return + + with open(config_file_path, "r") as config_file: + config_data = json.load(config_file) + + # required metadata whenever a quantization or sparsity config is present + # overwrite previous config and version if already existing + config_data[QUANTIZATION_CONFIG_NAME] = {} + config_data[QUANTIZATION_CONFIG_NAME][ + COMPRESSION_VERSION_NAME + ] = compressed_tensors.__version__ + if self.quantization_config is not None: + self.quantization_config.quant_method = DEFAULT_QUANTIZATION_METHOD + else: + config_data[QUANTIZATION_CONFIG_NAME][ + QUANTIZATION_METHOD_NAME + ] = DEFAULT_QUANTIZATION_METHOD + + # quantization and sparsity configs + if self.quantization_config is not None: + quant_config_data = self.quantization_config.model_dump() + config_data[QUANTIZATION_CONFIG_NAME] = quant_config_data + if self.sparsity_config is not None: + sparsity_config_data = self.sparsity_config.model_dump() + config_data[QUANTIZATION_CONFIG_NAME][ + SPARSITY_CONFIG_NAME + ] = sparsity_config_data + + with open(config_file_path, "w") as config_file: + json.dump(config_data, config_file, indent=2, sort_keys=True) + + def _replace_weights(self, dense_weight_generator, model: Module): + """ + Replace the weights of the model with the + provided dense weights. + + This method iterates over the dense_weight_generator and + updates the corresponding weights in the model. If a parameter + name does not exist in the model, it will be skipped. + + :param dense_weight_generator (generator): A generator that yields + tuples of (name, data), where 'name' is the parameter name and + 'data' is the updated param data + :param model: The model whose weights are to be updated. + """ + for name, data in tqdm(dense_weight_generator, desc="Decompressing model"): + split_name = name.split(".") + prefix, param_name = ".".join(split_name[:-1]), split_name[-1] + module = operator.attrgetter(prefix)(model) + if hasattr(module, param_name): + update_parameter_data(module, data, param_name) + + +def map_modules_to_quant_args( + model: Module, +) -> Dict[str, Union[QuantizationArgs, Tuple[QuantizationArgs, QuantizationArgs]]]: + """ + Given a pytorch model, map out the submodule name (usually linear layers) + to the weight QuantizationArgs. If running input activation quantization, will also + map to the input QuantizationArgs in a tuple. 
+ + :param model: pytorch model + """ + quantized_modules_to_args = {} + for name, submodule in iter_named_leaf_modules(model): + if is_module_quantized(submodule): + if submodule.quantization_scheme.weights is not None: + name = fix_fsdp_module_name(name) + quantized_modules_to_args[name] = submodule.quantization_scheme.weights + if submodule.quantization_scheme.input_activations is not None: + weight_args = quantized_modules_to_args.get(name) + quantized_modules_to_args[name] = ( + weight_args, + submodule.quantization_scheme.input_activations, + ) + + return quantized_modules_to_args + + +# HACK: Override the dtype_byte_size function in transformers to support float8 types +# Fix is posted upstream https://github.com/huggingface/transformers/pull/30488 +def new_dtype_byte_size(dtype): + if dtype == torch.bool: + return 1 / 8 + bit_search = re.search(r"[^\d](\d+)_?", str(dtype)) + if bit_search is None: + raise ValueError(f"`dtype` is not a valid dtype: {dtype}.") + bit_size = int(bit_search.groups()[0]) + return bit_size // 8 + + +@contextmanager +def override_quantization_status( + config: QuantizationConfig, status: QuantizationStatus +): + """ + Within this context, the quantization status will be set to the + supplied status. After the context exits, the original status + will be restored. + + :param config: the quantization config to override + :param status: the status to temporarily set + """ + original_status = config.quantization_status + config.quantization_status = status + try: + yield + finally: + config.quantization_status = original_status diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..51e8b8e2c6ba6f8630b20dea222b971ad69e6f10 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
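The `override_quantization_status` context manager defined above is what `ModelCompressor.decompress()` relies on to keep weight dtypes stable while quantization parameters are loaded. A minimal behavioural sketch, assuming a hypothetical, already-constructed `QuantizationConfig` named `config` whose status starts as `COMPRESSED`:

```python
from compressed_tensors.quantization import QuantizationStatus

# `config` is a hypothetical QuantizationConfig; override_quantization_status
# is the module-level context manager from model_compressor.py above.
with override_quantization_status(config, QuantizationStatus.FROZEN):
    # inside the context the supplied status is active ...
    assert config.quantization_status == QuantizationStatus.FROZEN
# ... and the try/finally restores the original status on exit,
# even if the body raised
assert config.quantization_status == QuantizationStatus.COMPRESSED
```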
+# flake8: noqa + +from .base import * +from .naive_quantized import * +from .pack_quantized import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b7a45a7c3c0bdacc512f4ccfadabd2b4534ed39 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a03dba5140dca38b0eab0bf1a1d9d61edd52234 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/naive_quantized.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/naive_quantized.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..12ae1118e26aada66cd7333ef4574d9ec1207f8b Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/naive_quantized.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/pack_quantized.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/pack_quantized.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c28b3f1c9927cc1cd998080a6cb13303a0c559a Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/__pycache__/pack_quantized.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/base.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..0827691590e145001d0e2da382892ed865e07413 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/base.py @@ -0,0 +1,205 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +from pathlib import Path +from typing import Any, Dict, Generator, Tuple, Union + +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.quantization import QuantizationArgs +from compressed_tensors.utils import ( + get_nested_mappings_from_state_dict, + get_nested_weight_mappings, + merge_names, +) +from safetensors import safe_open +from torch import Tensor +from tqdm import tqdm + + +_LOGGER: logging.Logger = logging.getLogger(__name__) + +__all__ = ["BaseQuantizationCompressor"] + + +class BaseQuantizationCompressor(BaseCompressor): + """ + Base class representing a quant compression algorithm. Each child class should + implement compression_param_info, compress_weight and decompress_weight. + + Compressors support compressing/decompressing a full module state dict or a single + quantized PyTorch leaf module. + + Model Load Lifecycle (run_compressed=False): + - ModelCompressor.decompress() + - apply_quantization_config() + - BaseQuantizationCompressor.decompress() + - BaseQuantizationCompressor.decompress_weight() + + Model Save Lifecycle: + - ModelCompressor.compress() + - BaseQuantizationCompressor.compress() + - BaseQuantizationCompressor.compress_weight() + + Module Lifecycle (run_compressed=True): + - apply_quantization_config() + - compressed_module = CompressedLinear(module) + - initialize_module_for_quantization() + - BaseQuantizationCompressor.compression_param_info() + - register_parameters() + - compressed_module.forward() + - compressed_module.decompress() + + + :param config: config specifying compression parameters + """ + + def compress( + self, + model_state: Dict[str, Tensor], + names_to_scheme: Dict[str, QuantizationArgs], + **kwargs, + ) -> Dict[str, Tensor]: + """ + Compresses a dense state dict + + :param model_state: state dict of uncompressed model + :param names_to_scheme: quantization args for each quantized weight, needed for + quantize function to calculate bit depth + :return: compressed state dict + """ + compressed_dict = {} + weight_suffix = ".weight" + input_zp_suffix = ".input_zero_point" + weight_zp_suffix = ".weight_zero_point" + _LOGGER.debug( + f"Compressing model with {len(model_state)} parameterized layers..." 
+ ) + + for name, value in tqdm(model_state.items(), desc="Quantized Compression"): + # check if the parameter we're compressing is the weight zp + # or the input zp + is_weight_zp = name.endswith(weight_zp_suffix) + is_input_zp = name.endswith(input_zp_suffix) + + # if we're saving the weight zp, fetch weight quant args + if is_weight_zp: + quant_args_zp = names_to_scheme.get(name[: -(len(weight_zp_suffix))]) + if isinstance(quant_args_zp, tuple): + # If tuple, first value is weight args, second is input args + quant_args_zp = quant_args_zp[0] + + # if we're saving the input zp, fetch input quant args + if is_input_zp: + input_args_zp = names_to_scheme.get(name[: -(len(input_zp_suffix))]) + if isinstance(input_args_zp, tuple): + # If tuple, first value is weight args, second is input args + input_args_zp = input_args_zp[-1] + + if name.endswith(weight_suffix): + prefix = name[: -(len(weight_suffix))] + scale = model_state.get(merge_names(prefix, "weight_scale"), None) + zp = model_state.get(merge_names(prefix, "weight_zero_point"), None) + g_idx = model_state.get(merge_names(prefix, "weight_g_idx"), None) + if scale is not None: + # weight is quantized, compress it + if isinstance(names_to_scheme[prefix], tuple): + quant_args = names_to_scheme[prefix][0] + else: + quant_args = names_to_scheme[prefix] + + compressed_data = self.compress_weight( + weight=value, + scale=scale, + zero_point=zp, + g_idx=g_idx, + quantization_args=quant_args, + device="cpu", + ) + for key, value in compressed_data.items(): + compressed_dict[merge_names(prefix, key)] = value + else: + compressed_dict[name] = value.to("cpu") + # only save if asym + elif is_weight_zp and quant_args_zp.symmetric: + continue + # only save if asym + elif is_input_zp and input_args_zp.symmetric: + continue + elif name.endswith("g_idx") and torch.any(value <= -1): + continue + else: + compressed_dict[name] = value.to("cpu") + + return compressed_dict + + def decompress( + self, + path_to_model_or_tensors: Union[str, Path, Dict[str, Any]], + names_to_scheme: Dict[str, QuantizationArgs], + device: str = "cpu", + ) -> Generator[Tuple[str, Tensor], None, None]: + """ + Reads a compressed state dict located at path_to_model_or_tensors + and returns a generator for sequentially decompressing back to a + dense state dict + :param path_to_model_or_tensors: path to compressed safetensors model (directory + with one or more safetensors files) or compressed tensors file + :param names_to_scheme: quantization args for each quantized weight + :param device: optional device to load intermediate weights into + :return: compressed state dict + """ + if isinstance(path_to_model_or_tensors, (str, Path)): + yield from self._decompress_from_path( + path_to_model_or_tensors, names_to_scheme, device + ) + + else: + yield from self._decompress_from_state_dict( + path_to_model_or_tensors, names_to_scheme + ) + + def _decompress_from_path(self, path_to_model, names_to_scheme, device): + weight_mappings = get_nested_weight_mappings( + path_to_model, self.compression_param_names + ) + for weight_name in weight_mappings.keys(): + weight_data = {} + for param_name, safe_path in weight_mappings[weight_name].items(): + full_name = merge_names(weight_name, param_name) + with safe_open(safe_path, framework="pt", device=device) as f: + weight_data[param_name] = f.get_tensor(full_name) + if "weight_scale" in weight_data: + quant_args = names_to_scheme[weight_name] + decompressed = self.decompress_weight( + compressed_data=weight_data, quantization_args=quant_args + ) + 
yield merge_names(weight_name, "weight"), decompressed
+
+ def _decompress_from_state_dict(self, state_dict, names_to_scheme):
+ weight_mappings = get_nested_mappings_from_state_dict(
+ state_dict, self.compression_param_names
+ )
+ for weight_name in weight_mappings.keys():
+ weight_data = {}
+ for param_name, param_value in weight_mappings[weight_name].items():
+ weight_data[param_name] = param_value
+
+ if "weight_scale" in weight_data:
+ quant_args = names_to_scheme[weight_name]
+ decompressed = self.decompress_weight(
+ compressed_data=weight_data, quantization_args=quant_args
+ )
+ yield merge_names(weight_name, "weight"), decompressed diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/naive_quantized.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/naive_quantized.py new file mode 100644 index 0000000000000000000000000000000000000000..e3c599ebdca6659fd52f4b0cc3b8ada62fe8538d --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/naive_quantized.py @@ -0,0 +1,148 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Optional, Tuple
+
+import torch
+from compressed_tensors.compressors.base import BaseCompressor
+from compressed_tensors.compressors.quantized_compressors.base import (
+ BaseQuantizationCompressor,
+)
+from compressed_tensors.config import CompressionFormat
+from compressed_tensors.quantization import QuantizationArgs
+from compressed_tensors.quantization.lifecycle.forward import dequantize, quantize
+from compressed_tensors.quantization.utils import can_quantize
+from torch import Tensor
+
+
+__all__ = [
+ "NaiveQuantizationCompressor",
+ "IntQuantizationCompressor",
+ "FloatQuantizationCompressor",
+]
+
+
+@BaseCompressor.register(name=CompressionFormat.naive_quantized.value)
+class NaiveQuantizationCompressor(BaseQuantizationCompressor):
+ """
+ Implements naive compression for quantized models. The weight of each
+ quantized layer is converted from its original float type to the closest
+ PyTorch dtype that can represent the type specified by the layer's
+ QuantizationArgs.
+ """ + + @property + def compression_param_names(self) -> Tuple[str]: + """ + Returns a tuple of compression parameter names introduced by + the compressor during compression + """ + return ( + "weight", + "weight_scale", + "weight_zero_point", + "weight_g_idx", + ) + + def compression_param_info( + self, + weight_shape: torch.Size, + quantization_args: Optional[QuantizationArgs] = None, + ) -> Dict[str, Tuple[torch.Size, torch.dtype]]: + """ + Creates a dictionary of expected shapes and dtypes for each compression + parameter used by the compressor + + :param weight_shape: uncompressed weight shape + :param quantization_args: quantization parameters for the weight + :return: dictionary mapping compressed parameter names to shape and dtype + """ + dtype = quantization_args.pytorch_dtype() + return {"weight": (weight_shape, dtype)} + + def compress_weight( + self, + weight: Tensor, + scale: Tensor, + quantization_args: QuantizationArgs, + zero_point: Optional[Tensor] = None, + g_idx: Optional[torch.Tensor] = None, + device: Optional[torch.device] = None, + ) -> Dict[str, torch.Tensor]: + """ + Compresses a single uncompressed weight + + :param weight: uncompressed weight tensor + :param scale: quantization scale for weight + :param quantization_args: quantization parameters for weight + :param zero_point: quantization zero point for weight + :param g_idx: optional mapping from column index to group index + :param device: optional device to move compressed output to + :return: dictionary of compressed weight data + """ + if can_quantize(weight, quantization_args): + quantized_weight = quantize( + x=weight, + scale=scale, + zero_point=zero_point, + g_idx=g_idx, + args=quantization_args, + dtype=quantization_args.pytorch_dtype(), + ) + else: + quantized_weight = weight + + if device is not None: + quantized_weight = quantized_weight.to(device) + + return {"weight": quantized_weight} + + def decompress_weight( + self, + compressed_data: Dict[str, Tensor], + quantization_args: Optional[QuantizationArgs] = None, + ) -> torch.Tensor: + """ + Decompresses a single compressed weight + + :param compressed_data: dictionary of data needed for decompression + :param quantization_args: quantization parameters for the weight + :return: tensor of the decompressed weight + """ + weight = compressed_data["weight"] + scale = compressed_data["weight_scale"] + zero_point = compressed_data.get("weight_zero_point", None) + g_idx = compressed_data.get("weight_g_idx", None) + decompressed_weight = dequantize( + x_q=weight, scale=scale, zero_point=zero_point, g_idx=g_idx + ) + + return decompressed_weight + + +@BaseCompressor.register(name=CompressionFormat.int_quantized.value) +class IntQuantizationCompressor(NaiveQuantizationCompressor): + """ + Alias for integer quantized models + """ + + pass + + +@BaseCompressor.register(name=CompressionFormat.float_quantized.value) +class FloatQuantizationCompressor(NaiveQuantizationCompressor): + """ + Alias for fp quantized models + """ + + pass diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/pack_quantized.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/pack_quantized.py new file mode 100644 index 0000000000000000000000000000000000000000..bba47d81d2337f40d31dcd57b79bc3e7a664d7bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/quantized_compressors/pack_quantized.py @@ -0,0 +1,236 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import math +from typing import Dict, Optional, Tuple + +import numpy as np +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.compressors.quantized_compressors.base import ( + BaseQuantizationCompressor, +) +from compressed_tensors.config import CompressionFormat +from compressed_tensors.quantization import QuantizationArgs +from compressed_tensors.quantization.lifecycle.forward import dequantize, quantize +from compressed_tensors.quantization.utils import can_quantize +from torch import Tensor + + +__all__ = ["PackedQuantizationCompressor", "pack_to_int32", "unpack_from_int32"] + + +@BaseCompressor.register(name=CompressionFormat.pack_quantized.value) +class PackedQuantizationCompressor(BaseQuantizationCompressor): + """ + Compresses a quantized model by packing every eight 4-bit weights into an int32 + """ + + @property + def compression_param_names(self) -> Tuple[str]: + """ + Returns a tuple of compression parameter names introduced by + the compressor during compression + """ + return ( + "weight_packed", + "weight_scale", + "weight_zero_point", + "weight_g_idx", + "weight_shape", + ) + + def compression_param_info( + self, + weight_shape: torch.Size, + quantization_args: Optional[QuantizationArgs] = None, + ) -> Dict[str, Tuple[torch.Size, torch.dtype]]: + """ + Creates a dictionary of expected shapes and dtypes for each compression + parameter used by the compressor + + :param weight_shape: uncompressed weight shape + :param quantization_args: quantization parameters for the weight + :return: dictionary mapping compressed parameter names to shape and dtype + """ + pack_factor = 32 // quantization_args.num_bits + packed_size = math.ceil(weight_shape[1] / pack_factor) + return { + "weight_packed": (torch.Size((weight_shape[0], packed_size)), torch.int32), + "weight_shape": (torch.Size((2,)), torch.int32), + } + + def compress_weight( + self, + weight: Tensor, + scale: Tensor, + quantization_args: QuantizationArgs, + zero_point: Optional[Tensor] = None, + g_idx: Optional[torch.Tensor] = None, + device: Optional[torch.device] = None, + ) -> Dict[str, torch.Tensor]: + """ + Compresses a single uncompressed weight + + :param weight: uncompressed weight tensor + :param scale: quantization scale for weight + :param quantization_args: quantization parameters for weight + :param zero_point: quantization zero point for weight + :param g_idx: optional mapping from column index to group index + :param device: optional device to move compressed output to + :return: dictionary of compressed weight data + """ + compressed_dict = {} + if can_quantize(weight, quantization_args): + quantized_weight = quantize( + x=weight, + scale=scale, + zero_point=zero_point, + g_idx=g_idx, + args=quantization_args, + dtype=torch.int8, + ) + else: + quantized_weight = weight + + packed_weight = pack_to_int32(quantized_weight, quantization_args.num_bits) + weight_shape = torch.tensor(weight.shape) + 
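# e.g. with num_bits=4, the pack_to_int32 call above fits 32 // 4 = 8
+ # quantized values into each int32 column; weight_shape records the
+ # original (unpadded) dimensions so decompression can strip the padding
+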
if device is not None:
+ packed_weight = packed_weight.to(device)
+ weight_shape = weight_shape.to(device)
+
+ compressed_dict["weight_shape"] = weight_shape
+ compressed_dict["weight_packed"] = packed_weight
+
+ return compressed_dict
+
+ def decompress_weight(
+ self,
+ compressed_data: Dict[str, Tensor],
+ quantization_args: Optional[QuantizationArgs] = None,
+ ) -> torch.Tensor:
+ """
+ Decompresses a single compressed weight
+
+ :param compressed_data: dictionary of data needed for decompression
+ :param quantization_args: quantization parameters for the weight
+ :return: tensor of the decompressed weight
+ """
+ weight = compressed_data["weight_packed"]
+ scale = compressed_data["weight_scale"]
+ zero_point = compressed_data.get("weight_zero_point", None)
+ g_idx = compressed_data.get("weight_g_idx", None)
+ original_shape = torch.Size(compressed_data["weight_shape"])
+ num_bits = quantization_args.num_bits
+ unpacked = unpack_from_int32(weight, num_bits, original_shape)
+ decompressed_weight = dequantize(
+ x_q=unpacked, scale=scale, zero_point=zero_point, g_idx=g_idx
+ )
+
+ return decompressed_weight
+
+
+def pack_to_int32(value: torch.Tensor, num_bits: int) -> torch.Tensor:
+ """
+ Packs a tensor of quantized weights stored in int8 into int32s with padding
+
+ Pseudocode:
+ 1. Shift wrt num_bits to convert to unsigned. num_bits=8
+ [1,2] -> [129, 130]
+ 2. Pad to fill in 32 bits
+ [129, 130] -> [129, 130, 0, 0]
+ 3. convert to binary and align in order
+ [129, 130, 0, 0] -> 00000000 00000000 10000010 10000001
+ 4. convert aligned binary to number
+ 00000000000000001000001010000001 -> 33409
+ 5. convert back to uint32
+ 33409 -> 33409
+
+ :param value: tensor to pack
+ :param num_bits: number of bits used to store underlying data, must be at least 1
+ :returns: packed int32 tensor
+ """
+ if value.dtype is not torch.int8:
+ raise ValueError("Tensor must be quantized to torch.int8 before packing")
+
+ if num_bits > 8:
+ raise ValueError("Packing is only supported for at most 8 bits")
+
+ if num_bits < 1:
+ raise ValueError(f"num_bits must be at least 1, got {num_bits}")
+
+ # convert to unsigned for packing
+ offset = 1 << (num_bits - 1)
+ value = (value + offset).to(torch.uint8)
+ value = value.cpu().numpy().astype(np.uint32)
+ pack_factor = 32 // num_bits
+
+ # pad input tensor and initialize packed output
+ packed_size = math.ceil(value.shape[1] / pack_factor)
+ padding = packed_size * pack_factor - value.shape[1]
+ value = np.pad(value, pad_width=[(0, 0), (0, padding)], constant_values=0)
+
+ # pack values
+ packed = np.zeros((value.shape[0], packed_size), dtype=np.uint32)
+ for i in range(pack_factor):
+ packed |= value[:, i::pack_factor] << num_bits * i
+
+ # convert back to signed and torch
+ packed = np.ascontiguousarray(packed).view(np.int32)
+ return torch.from_numpy(packed)
+
+
+def unpack_from_int32(
+ value: torch.Tensor, num_bits: int, shape: torch.Size
+) -> torch.Tensor:
+ """
+ Unpacks a tensor of packed int32 weights into individual int8s, maintaining the
+ original bit range.
+
+ Returns the unpacked tensor in int8
+
+ :param value: tensor to unpack
+ :param num_bits: number of bits to unpack each data point into
+ :param shape: shape to unpack into, used to remove padding
+ :returns: unpacked int8 tensor
+ """
+ if value.dtype is not torch.int32:
+ raise ValueError(
+ f"Expected {torch.int32} but got {value.dtype}, aborting unpack."
+ )
+
+ if num_bits > 8:
+ raise ValueError("Unpacking is only supported for at most 8 bits")
+
+ pack_factor = 32 // num_bits
+
+ # unpack
+ mask = (1 << num_bits) - 1
+ unpacked = torch.zeros(
+ (value.shape[0], value.shape[1] * pack_factor),
+ device=value.device,
+ dtype=torch.int32,
+ )
+ for i in range(pack_factor):
+ unpacked[:, i::pack_factor] = (value >> (num_bits * i)) & mask
+
+ # remove padding
+ original_row_size = int(shape[1])
+ unpacked = unpacked[:, :original_row_size]
+
+ # bits are packed in unsigned format, reformat to signed
+ # update the value range from unsigned to signed
+ offset = pow(2, num_bits) // 2
+ unpacked = (unpacked - offset).to(torch.int8)
+
+ return unpacked diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..871079ac9abcfbbe5852f441738ba2ff1808c846 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__init__.py @@ -0,0 +1,19 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
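For in-range values, `pack_to_int32` and `unpack_from_int32` above are exact inverses. A small self-contained roundtrip sketch (both helpers are exported from `compressed_tensors.compressors.quantized_compressors.pack_quantized` per the `__all__` above):

```python
import torch
from compressed_tensors.compressors.quantized_compressors.pack_quantized import (
    pack_to_int32,
    unpack_from_int32,
)

# Eight 4-bit values (range [-8, 7]) fit exactly into one int32 column.
w = torch.tensor([[-8, -1, 0, 7, 3, -3, 1, 2]], dtype=torch.int8)
packed = pack_to_int32(w, num_bits=4)                    # shape (1, 1), int32
restored = unpack_from_int32(packed, num_bits=4, shape=w.shape)
assert torch.equal(restored, w)
```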
+# flake8: noqa + +from .base import * +from .dense import * +from .sparse_24_bitmask import * +from .sparse_bitmask import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41d3f225e5f955323976d07e892973ba6182ec74 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc40b337b8e1a20a297e7cf3fa6a3345cb207f2c Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/dense.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/dense.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50e062c389ed4a664b464ab09908c41f67efc789 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/dense.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_24_bitmask.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_24_bitmask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d887750a7aee4cc5c7050eb30cf7344e7bc2bb1d Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_24_bitmask.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_bitmask.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_bitmask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..060bbac20c898ba3e97434b9777c9e4029f25120 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/__pycache__/sparse_bitmask.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/base.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..5e81c64d014ad40b99dba1ad843020e31b84f805 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/base.py @@ -0,0 +1,147 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+from typing import Dict, Generator, Optional, Set, Tuple
+
+from compressed_tensors.compressors.base import BaseCompressor
+from compressed_tensors.utils import get_nested_weight_mappings, merge_names
+from safetensors import safe_open
+from torch import Tensor
+from tqdm import tqdm
+
+
+__all__ = ["BaseSparseCompressor"]
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+class BaseSparseCompressor(BaseCompressor):
+ """
+ Base class representing a sparse compression algorithm. Each child class should
+ implement compression_param_names, compress_weight and decompress_weight.
+
+ Compressors support compressing/decompressing a full module state dict or a single
+ quantized PyTorch leaf module.
+
+ Model Load Lifecycle (run_compressed=False):
+ - ModelCompressor.decompress()
+ - apply_quantization_config()
+ - BaseSparseCompressor.decompress()
+ - BaseSparseCompressor.decompress_weight()
+
+ Model Save Lifecycle:
+ - ModelCompressor.compress()
+ - BaseSparseCompressor.compress()
+ - BaseSparseCompressor.compress_weight()
+
+ Module Lifecycle (run_compressed=True):
+ - apply_quantization_config()
+ - compressed_module = CompressedLinear(module)
+ - initialize_module_for_quantization()
+ - BaseSparseCompressor.compression_param_info()
+ - register_parameters()
+ - compressed_module.forward()
+ - compressed_module.decompress()
+
+
+ :param config: config specifying compression parameters
+ """
+
+ def compress(
+ self,
+ model_state: Dict[str, Tensor],
+ compression_targets: Optional[Set[str]] = None,
+ ) -> Dict[str, Tensor]:
+ """
+ Compresses a dense state dict using bitmask compression
+
+ :param model_state: state dict of uncompressed model
+ :param compression_targets: optional set of layer prefixes to compress,
+ otherwise compress all layers (for backwards compatibility)
+ :return: compressed state dict
+ """
+ compressed_dict = {}
+ _LOGGER.debug(
+ f"Compressing model with {len(model_state)} parameterized layers..."
+ )
+ for name, value in tqdm(model_state.items(), desc="Compressing model"):
+ if not self.should_compress(name, compression_targets):
+ compressed_dict[name] = value
+ continue
+ prefix = name
+ if prefix.endswith(".weight"):
+ prefix = prefix[: -(len(".weight"))]
+
+ compression_data = self.compress_weight(prefix, value)
+ for key in compression_data.keys():
+ if key in compressed_dict:
+ _LOGGER.warning(
+ f"Expected all compressed state_dict keys to be unique, but "
+ f"found an existing entry for {key}. The existing entry will "
+ "be replaced."
+ ) + + compressed_dict.update(compression_data) + + return compressed_dict + + def decompress( + self, path_to_model_or_tensors: str, device: str = "cpu", **kwargs + ) -> Generator[Tuple[str, Tensor], None, None]: + """ + Reads a bitmask compressed state dict located + at path_to_model_or_tensors and returns a generator + for sequentially decompressing back to a dense state dict + + :param model_path: path to compressed safetensors model (directory with + one or more safetensors files) or compressed tensors file + :param device: device to load decompressed weights onto + :return: iterator for generating decompressed weights + """ + weight_mappings, ignored_params = get_nested_weight_mappings( + path_to_model_or_tensors, + self.compression_param_names, + return_unmatched_params=True, + ) + for weight_name in weight_mappings.keys(): + weight_data = {} + for param_name, safe_path in weight_mappings[weight_name].items(): + full_name = merge_names(weight_name, param_name) + with safe_open(safe_path, framework="pt", device=device) as f: + weight_data[param_name] = f.get_tensor(full_name) + decompressed = self.decompress_weight(weight_data) + yield merge_names(weight_name, "weight"), decompressed + + for ignored_param_name, safe_path in ignored_params.items(): + with safe_open(safe_path, framework="pt", device=device) as f: + value = f.get_tensor(ignored_param_name) + yield ignored_param_name, value + + @staticmethod + def should_compress(name: str, expanded_targets: Optional[Set[str]] = None) -> bool: + """ + Check if a parameter should be compressed. + Currently, this only returns True for weight parameters. + + :param name: name of the parameter + :param expanded_targets: set of layer prefixes to compress + :return: whether or not the parameter should be compressed + """ + if expanded_targets is None: + return name.endswith(".weight") + + return ( + name.endswith(".weight") and name[: -(len(".weight"))] in expanded_targets + ) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/dense.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/dense.py new file mode 100644 index 0000000000000000000000000000000000000000..2550d6164ab52dd36211ac23b1019974f04ebed1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/dense.py @@ -0,0 +1,42 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
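The `should_compress` predicate above gates which state-dict entries get bitmask-compressed; everything else passes through untouched. A behavioural sketch (the module names are made up for illustration):

```python
from compressed_tensors.compressors.sparse_compressors.base import (
    BaseSparseCompressor,
)

# Only ".weight" parameters are candidates ...
assert BaseSparseCompressor.should_compress("model.fc1.weight")
assert not BaseSparseCompressor.should_compress("model.fc1.bias")

# ... optionally restricted to an expanded set of target prefixes.
targets = {"model.fc1"}
assert BaseSparseCompressor.should_compress("model.fc1.weight", targets)
assert not BaseSparseCompressor.should_compress("model.fc2.weight", targets)
```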
+ +from typing import Dict, Generator, Tuple + +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.config import CompressionFormat +from torch import Tensor + + +@BaseCompressor.register(name=CompressionFormat.dense.value) +class DenseCompressor(BaseCompressor): + """ + Identity compressor for dense models, returns the original state_dict + """ + + @property + def compression_param_names(self) -> Tuple[str]: + """ + Returns a tuple of compression parameter names introduced by + the compressor during compression + """ + return () + + def compress(self, model_state: Dict[str, Tensor], **kwargs) -> Dict[str, Tensor]: + return model_state + + def decompress( + self, path_to_model_or_tensors: str, device: str = "cpu", **kwargs + ) -> Generator[Tuple[str, Tensor], None, None]: + return iter([]) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_24_bitmask.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_24_bitmask.py new file mode 100644 index 0000000000000000000000000000000000000000..b21fb7fda3aecd8213715b38f4a4dbaea60c7cc3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_24_bitmask.py @@ -0,0 +1,246 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from typing import Dict, List, Tuple, Union + +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.compressors.sparse_compressors.base import BaseSparseCompressor +from compressed_tensors.config import CompressionFormat, SparsityStructure +from compressed_tensors.quantization import FP8_DTYPE +from compressed_tensors.utils import merge_names, pack_bitmasks, unpack_bitmasks +from torch import Tensor + + +__all__ = [ + "Sparse24BitMaskCompressor", + "Sparse24BitMaskTensor", + "sparse24_bitmask_compress", + "sparse24_bitmask_decompress", + "get_24_bytemasks", +] + + +@BaseCompressor.register(name=CompressionFormat.sparse_24_bitmask.value) +class Sparse24BitMaskCompressor(BaseSparseCompressor): + """ + Compression for sparse models using bitmasks. 
Non-zero weights are stored in a 2d
+ values tensor, with their locations stored in a 2d bitmask
+ """
+
+ @property
+ def compression_param_names(self) -> Tuple[str]:
+ """
+ Returns a tuple of compression parameter names introduced by
+ the compressor during compression
+ """
+ return (
+ "shape",
+ "compressed",
+ "bitmask",
+ )
+
+ def compress_weight(self, name, value):
+ bitmask_tensor = Sparse24BitMaskTensor.from_dense(
+ value, self.config.sparsity_structure
+ )
+ bitmask_dict = bitmask_tensor.dict(name_prefix=name, device="cpu")
+ return bitmask_dict
+
+ def decompress_weight(self, weight_data):
+ data = Sparse24BitMaskTensor.from_compressed_data(**weight_data)
+ decompressed = data.decompress()
+ return decompressed
+
+
+@dataclass
+class Sparse24BitMaskTensor:
+ """
+ Owns compression and decompression for a single 2:4 sparse
+ bitmask compressed tensor.
+
+ :param shape: shape of dense tensor
+ :param compressed: 2d tensor of non-zero values
+ :param bitmask: 2d bitmask of non-zero values
+ """
+
+ shape: List[int]
+ compressed: Tensor
+ bitmask: Tensor
+
+ @staticmethod
+ def from_dense(
+ tensor: Tensor,
+ sparsity_structure: Union[SparsityStructure, str] = SparsityStructure.TWO_FOUR,
+ ) -> "Sparse24BitMaskTensor":
+ """
+ :param tensor: dense tensor to compress
+ :return: instantiated compressed tensor
+ """
+ shape = list(tensor.shape)
+ compressed, bitmask = sparse24_bitmask_compress(
+ tensor.cpu(), sparsity_structure=sparsity_structure
+ )
+ return Sparse24BitMaskTensor(
+ shape=shape,
+ compressed=compressed,
+ bitmask=bitmask,
+ )
+
+ @staticmethod
+ def from_compressed_data(
+ shape: Union[List[int], Tensor], compressed: Tensor, bitmask: Tensor
+ ) -> "Sparse24BitMaskTensor":
+ """
+ :param shape: shape of the dense tensor (can be a list or a tensor)
+ :param compressed: 2d tensor of non-zero values
+ :param bitmask: 2d bitmask of non-zero values
+ :return: instantiated Sparse24BitMaskTensor
+ """
+ if isinstance(shape, list):
+ shape = torch.tensor(shape)
+ if isinstance(shape, torch.Tensor):
+ shape = shape.flatten().tolist()
+ return Sparse24BitMaskTensor(
+ shape=shape, compressed=compressed, bitmask=bitmask
+ )
+
+ def decompress(self) -> Tensor:
+ """
+ :return: reconstructed dense tensor
+ """
+ return sparse24_bitmask_decompress(self.compressed, self.bitmask, self.shape)
+
+ def curr_memory_size_bytes(self) -> int:
+ """
+ :return: size in bytes required to store compressed tensor on disk
+ """
+
+ def sizeof_tensor(a: Tensor) -> int:
+ return a.element_size() * a.nelement()
+
+ return sizeof_tensor(self.compressed) + sizeof_tensor(self.bitmask)
+
+ def dict(self, name_prefix: str, device: str = "cpu") -> Dict[str, Tensor]:
+ """
+ :param name_prefix: name of original tensor to store compressed weight as
+ :return: dict of compressed data for the stored weight
+ """
+ if name_prefix.endswith(".weight"):
+ name_prefix = name_prefix[: -len(".weight")]
+ return {
+ merge_names(name_prefix, "shape"): torch.tensor(
+ self.shape, device=device
+ ).reshape(-1, 1),
+ merge_names(name_prefix, "compressed"): self.compressed.to(device),
+ merge_names(name_prefix, "bitmask"): self.bitmask.to(device),
+ }
+
+ def __repr__(self) -> str:
+ return f"Sparse24BitMaskTensor(shape={self.shape}, compressed=True)"
+
+
+def sparse24_bitmask_compress(
+ tensor: Tensor,
+ sparsity_structure: Union[SparsityStructure, str] = SparsityStructure.TWO_FOUR,
+) -> Tuple[Tensor, Tensor]:
+ """
+ Compresses a dense tensor using bitmask compression
+
+ :param tensor: dense 2D tensor to compress
+ :param sparsity_structure: structure of sparsity in the tensor, defaults
+ to 2:4; currently only `2:4` sparsity is supported
+ :return: tuple of compressed data representing tensor
+ """
+ assert len(tensor.shape) == 2, "Only 2D tensors are supported"
+ assert (
+ SparsityStructure(sparsity_structure) == SparsityStructure.TWO_FOUR
+ ), "Only 2:4 sparsity is supported"
+
+ bytemasks = get_24_bytemasks(tensor=tensor)
+
+ if tensor.dtype == FP8_DTYPE:
+ # access raw bytes of the tensor
+ tensor_view = tensor.view(torch.int8)
+ values = tensor_view[bytemasks]
+ values = values.view(FP8_DTYPE)
+ else:
+ values = tensor[bytemasks]
+
+ num_rows, num_cols = tensor.shape
+ compressed_values = values.reshape(num_rows, num_cols // 2)
+ bitmasks_packed = pack_bitmasks(bytemasks)
+ return compressed_values, bitmasks_packed
+
+
+def sparse24_bitmask_decompress(
+ values: Tensor, bitmasks: Tensor, original_shape: torch.Size
+) -> Tensor:
+ """
+ Reconstructs a dense tensor from a compressed one
+
+ :param values: 1d tensor of non-zero values
+ :param bitmasks: 2d int8 tensor flagging locations of non-zero values in the
+ tensor's original shape
+ :param original_shape: shape of the dense tensor
+ :return: decompressed dense tensor
+ """
+ bytemasks_unpacked = unpack_bitmasks(bitmasks, original_shape)
+
+ decompressed_tensor = torch.zeros(original_shape, dtype=values.dtype)
+ decompressed_tensor = decompressed_tensor.to(values.device)
+ values = values.flatten()
+ if decompressed_tensor.dtype == FP8_DTYPE:
+ decompressed_tensor[bytemasks_unpacked] = values
+ decompressed_tensor = decompressed_tensor.cuda()
+ else:
+ decompressed_tensor[bytemasks_unpacked] = values
+ return decompressed_tensor
+
+
+def get_24_bytemasks(tensor):
+ """
+ Generate a 2:4 sparsity mask for the given tensor.
+
+ This function creates a mask where exactly 2 out of every 4 elements are
+ preserved based on their magnitudes. The preserved elements are the ones
+ with the highest absolute values in each group of 4 elements.
+
+ :param tensor: The input tensor for which the 2:4 sparsity mask is to be created.
+ The tensor can be of any shape but its total number of elements
+ must be a multiple of 4.
+ :return: A boolean tensor of the same shape as the input tensor, where `True`
+ indicates the preserved elements and `False` indicates the pruned elements.
+ :raises ValueError: If the total number of elements in the tensor is not a
+ multiple of 4.
+ """ + original_dtype = tensor.dtype + if tensor.dtype == FP8_DTYPE: + tensor = tensor.view(torch.int8) + original_shape = tensor.shape + num_elements = tensor.numel() + + if num_elements % 4 != 0: + raise ValueError("Tensor size must be a multiple of 4 for TWO_FOUR sparsity") + + reshaped_tensor = tensor.view(-1, 4) + abs_tensor = reshaped_tensor.abs() + topk_indices = abs_tensor.topk(2, dim=1).indices + mask = torch.zeros_like(reshaped_tensor, dtype=torch.bool) + mask.scatter_(1, topk_indices, True) + mask = mask.view(original_shape) + tensor = tensor.view(original_dtype) + + return mask diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_bitmask.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_bitmask.py new file mode 100644 index 0000000000000000000000000000000000000000..0e08be031d74d569fc629502122dd488a34bc2fa --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_compressors/sparse_bitmask.py @@ -0,0 +1,169 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, List, Tuple, Union + +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.compressors.sparse_compressors.base import BaseSparseCompressor +from compressed_tensors.config import CompressionFormat +from compressed_tensors.quantization import FP8_DTYPE +from compressed_tensors.utils import merge_names, pack_bitmasks, unpack_bitmasks +from torch import Tensor + + +__all__ = [ + "BitmaskCompressor", + "BitmaskTensor", + "bitmask_compress", + "bitmask_decompress", +] + + +@BaseCompressor.register(name=CompressionFormat.sparse_bitmask.value) +class BitmaskCompressor(BaseSparseCompressor): + """ + Compression for sparse models using bitmasks. Non-zero weights are stored in a 1d + values tensor, with their locations stored in a 2d bitmask + """ + + @property + def compression_param_names(self) -> Tuple[str]: + """ + Returns a tuple of compression parameter names introduced by + the compressor during compression + """ + return ("shape", "compressed", "bitmask", "row_offsets") + + def compress_weight(self, name, value): + bitmask_tensor = BitmaskTensor.from_dense(value) + bitmask_dict = bitmask_tensor.dict(name_prefix=name, device="cpu") + return bitmask_dict + + def decompress_weight(self, weight_data): + data = BitmaskTensor(**weight_data) + decompressed = data.decompress() + return decompressed + + +class BitmaskTensor: + """ + Owns compressions and decompression for a single bitmask compressed tensor. 
+ Adapted from: https://github.com/mgoin/torch_bitmask/tree/main + + :param shape: shape of dense tensor + :compressed: flat tensor of non-zero values + :bitmask: 2d bitmask of non-zero values + :row_offsets: flat tensor indicating what index in values each dense row starts at + """ + + def __init__( + self, + shape: Union[torch.Size, List], + compressed: Tensor, + bitmask: Tensor, + row_offsets: Tensor, + ): + self.shape = list(shape) + self.compressed = compressed + self.bitmask = bitmask + self.row_offsets = row_offsets + + @staticmethod + def from_dense(tensor: Tensor) -> "BitmaskTensor": + """ + :param tensor: dense tensor to compress + :return: instantiated compressed tensor + """ + shape = tensor.shape + compressed, bitmask, row_offsets = bitmask_compress(tensor.cpu()) + return BitmaskTensor( + shape=shape, compressed=compressed, bitmask=bitmask, row_offsets=row_offsets + ) + + def decompress(self) -> Tensor: + """ + :return: reconstructed dense tensor + """ + return bitmask_decompress(self.compressed, self.bitmask, self.shape) + + def curr_memory_size_bytes(self): + """ + :return: size in bytes required to store compressed tensor on disk + """ + + def sizeof_tensor(a): + return a.element_size() * a.nelement() + + return ( + sizeof_tensor(self.compressed) + + sizeof_tensor(self.bitmask) + + sizeof_tensor(self.row_offsets) + ) + + def dict(self, name_prefix: str, device: str = "cpu") -> Dict[str, Tensor]: + """ + :name_prefix: name of original tensor to store compressed weight as + :return: dict of compressed data for the stored weight + """ + return { + merge_names(name_prefix, "shape"): torch.tensor(self.shape, device=device), + merge_names(name_prefix, "compressed"): self.compressed.to(device), + merge_names(name_prefix, "bitmask"): self.bitmask.to(device), + merge_names(name_prefix, "row_offsets"): self.row_offsets.to(device), + } + + def __repr__(self): + return f"BitmaskTensor(shape={self.shape}, compressed=True)" + + +def bitmask_compress(tensor: Tensor) -> Tuple[Tensor, Tensor, Tensor]: + """ + Compresses a dense tensor using bitmask compression + + :param tensor: dense tensor to compress + :return: tuple of compressed data representing tensor + """ + bytemasks = tensor != 0 + row_counts = bytemasks.sum(dim=-1) + row_offsets = torch.cumsum(row_counts, 0) - row_counts + if tensor.dtype == FP8_DTYPE: + # acces raw bytes of the tensor + tensor_view = tensor.view(torch.int8) + values = tensor_view[bytemasks] + values = values.view(FP8_DTYPE) + else: + values = tensor[bytemasks] + bitmasks_packed = pack_bitmasks(bytemasks) + return values, bitmasks_packed, row_offsets + + +def bitmask_decompress( + values: Tensor, bitmasks: Tensor, original_shape: torch.Size +) -> Tensor: + """ + Reconstructs a dense tensor from a compressed one + + :param values: 1d tensor of non-zero values + :param bitmasks: 2d int8 tensor flagging locations of non-zero values in the + tensors original shape + :param original_shape: shape of the dense tensor + :return: decompressed dense tensor + """ + bytemasks_unpacked = unpack_bitmasks(bitmasks, original_shape) + + decompressed_tensor = torch.zeros(original_shape, dtype=values.dtype) + decompressed_tensor[bytemasks_unpacked] = values + + return decompressed_tensor diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..c3615f062c958821e70c0cde951352540b2f5055 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# flake8: noqa + +from .marlin_24 import Marlin24Compressor diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..883d7cb6c18c0ad8b9b898bc6d375029798d4798 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/marlin_24.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/marlin_24.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f8d6d7e2a3cb8e8491ca26d9e8705e34019c415 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/__pycache__/marlin_24.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/marlin_24.py b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/marlin_24.py new file mode 100644 index 0000000000000000000000000000000000000000..24f9cbf027b74c171838432c74b680c924e89333 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/compressors/sparse_quantized_compressors/marlin_24.py @@ -0,0 +1,257 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
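The unstructured `bitmask_compress`/`bitmask_decompress` pair in sparse_bitmask.py above is lossless for any sparsity pattern. A quick roundtrip sketch (both functions are listed in that module's `__all__`):

```python
import torch
from compressed_tensors.compressors.sparse_compressors.sparse_bitmask import (
    bitmask_compress,
    bitmask_decompress,
)

dense = torch.tensor([[0.0, 1.5, 0.0, -2.0],
                      [3.0, 0.0, 0.0, 0.0]])
values, bitmask, row_offsets = bitmask_compress(dense)
assert values.tolist() == [1.5, -2.0, 3.0]   # non-zeros in row-major order
assert row_offsets.tolist() == [0, 2]        # where each row starts in values
restored = bitmask_decompress(values, bitmask, dense.shape)
assert torch.equal(restored, dense)
```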
+ +import logging +from typing import Dict, Generator, Tuple + +import numpy as np +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.config import CompressionFormat +from compressed_tensors.quantization import QuantizationArgs, QuantizationStrategy +from compressed_tensors.quantization.lifecycle.forward import quantize +from compressed_tensors.utils import ( + get_permutations_24, + is_quantization_param, + merge_names, + sparse_semi_structured_from_dense_cutlass, + tensor_follows_mask_structure, +) +from torch import Tensor +from tqdm import tqdm + + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +@BaseCompressor.register(name=CompressionFormat.marlin_24.value) +class Marlin24Compressor(BaseCompressor): + """ + Compresses a quantized model with 2:4 sparsity structure for inference with the + Marlin24 kernel. Decompression is not implemented for this compressor. + """ + + @staticmethod + def validate_quant_compatability( + model_quant_args: Dict[str, QuantizationArgs] + ) -> bool: + """ + Checks if every quantized module in the model is compatible with Marlin24 + compression. Quantization must be channel or group strategy with group_size + of 128. Only symmetric quantization is supported. + + :param model_quant_args: dictionary mapping module names to their + quantization configuration + :return: True if all modules are compatible with Marlin24 compression, raises + a ValueError otherwise + """ + for name, quant_args in model_quant_args.items(): + strategy = quant_args.strategy + group_size = quant_args.group_size + symmetric = quant_args.symmetric + if ( + strategy is not QuantizationStrategy.GROUP.value + and strategy is not QuantizationStrategy.CHANNEL.value + ): + raise ValueError( + f"Marlin24 Compressor is only valid for group and channel " + f"quantization strategies, got {strategy} in {name}" + ) + + if group_size is not None and group_size != 128: + raise ValueError( + f"Marlin24 Compressor is only valid for group size 128, " + f"got {group_size} in {name}" + ) + + if not symmetric: + raise ValueError( + f"Marlin24 Compressor is only valid for symmetric quantization, " + f"got symmetric={symmetric} in {name}" + ) + + return True + + @staticmethod + def validate_sparsity_structure(name: str, weight: Tensor) -> bool: + """ + Checks if a tensor fits the required 2:4 sparsity structure + + :param name: name of the tensor to check + :param weight: tensor to check for sparsity structure + :return: True if all rows match the 2:4 sparsity structure, raises + ValueError otherwise + """ + + if not tensor_follows_mask_structure(weight): + raise ValueError( + "Marlin24 Compressor is only compatible with weights that have " + f"a 2:4 sparsity structure. Found segments in {name} " + "that do not match the expected structure."
+ ) + + return True + + @property + def compression_param_names(self) -> Tuple[str]: + """ + Returns a tuple of compression parameter names introduced by + the compressor during compression + """ + return ("weight_packed", "scale_packed", "meta") + + def compress( + self, + model_state: Dict[str, Tensor], + names_to_scheme: Dict[str, QuantizationArgs], + **kwargs, + ) -> Dict[str, Tensor]: + """ + Compresses a quantized state_dict with 2:4 sparsity structure for inference + with the Marlin24 kernel + + :param model_state: state dict of uncompressed model + :param names_to_scheme: quantization args for each quantized weight, needed for + quantize function to calculate bit depth + :return: compressed state dict + """ + self.validate_quant_compatability(names_to_scheme) + + compressed_dict = {} + weight_suffix = ".weight" + _LOGGER.debug( + f"Compressing model with {len(model_state)} parameterized layers..." + ) + + for name, value in tqdm(model_state.items(), desc="Compressing model"): + if name.endswith(weight_suffix): + prefix = name[: -(len(weight_suffix))] + scale = model_state.get(merge_names(prefix, "weight_scale"), None) + zp = model_state.get(merge_names(prefix, "weight_zero_point"), None) + if scale is not None: # weight is quantized, compress it + + # Marlin24 kernel requires float16 inputs + scale = scale.to(torch.float16) + value = value.to(torch.float16) + + # quantize weight, keeping it as a float16 for now + quant_args = names_to_scheme[prefix] + value = quantize( + x=value, scale=scale, zero_point=zp, args=quant_args + ) + + # compress based on sparsity structure + self.validate_sparsity_structure(prefix, value) + value, meta = compress_weight_24(value) + meta = meta.cpu() + + # Marlin24 kernel expects input dim first + value = value.t().contiguous().cpu() + scale = scale.t().contiguous().cpu() + og_weight_shape = value.shape + + # Marlin24 kernel expects unsigned values, shift zero-point + value += (1 << quant_args.num_bits) // 2 + + # pack quantized weight and scale + value = pack_weight_24(value, quant_args) + packed_scale = pack_scales_24(scale, quant_args, og_weight_shape) + meta = meta.resize_(meta.shape[1] // 2, meta.shape[0] * 2) + + # save compressed values + compressed_dict[merge_names(prefix, "scale_packed")] = packed_scale + compressed_dict[merge_names(prefix, "weight_packed")] = value + compressed_dict[merge_names(prefix, "meta")] = meta + continue + + if not is_quantization_param(name): + # export unquantized parameters without modifying + compressed_dict[name] = value.to("cpu") + + return compressed_dict + + def decompress( + self, path_to_model_or_tensors: str, device: str = "cpu", **kwargs + ) -> Generator[Tuple[str, Tensor], None, None]: + raise NotImplementedError( + "Decompression is not implemented for the Marlin24 Compressor." 
+ ) + + +def compress_weight_24(weight: Tensor): + weight = weight.contiguous() + w_comp, meta = sparse_semi_structured_from_dense_cutlass(weight) + w_comp = w_comp.contiguous() + return w_comp, meta + + +def marlin_permute_weights(q_w, size_k, size_n, perm, tile): + assert q_w.shape == (size_k, size_n) + assert size_k % tile == 0, f"size_k = {size_k}, tile = {tile}" + assert size_n % tile == 0, f"size_n = {size_n}, tile = {tile}" + + # Permute weights to 16x64 marlin tiles + q_w = q_w.reshape((size_k // tile, tile, size_n // tile, tile)) + q_w = q_w.permute((0, 2, 1, 3)) + q_w = q_w.reshape((size_k // tile, size_n * tile)) + + q_w = q_w.reshape((-1, perm.numel()))[:, perm].reshape(q_w.shape) + + return q_w + + +def pack_weight_24( + weight: Tensor, + quantization_args: QuantizationArgs, + tile: int = 16, +): + size_k = weight.shape[0] + size_n = weight.shape[1] + num_bits = quantization_args.num_bits + pack_factor = 32 // num_bits + + # Reshuffle to marlin_24 format + perm, _, _ = get_permutations_24(num_bits) + q_w = marlin_permute_weights(weight, size_k, size_n, perm, tile) + + q_w = q_w.cpu().numpy().astype(np.uint32) + + q_packed = np.zeros((q_w.shape[0], q_w.shape[1] // pack_factor), dtype=np.uint32) + for i in range(pack_factor): + q_packed |= q_w[:, i::pack_factor] << num_bits * i + + q_packed = torch.from_numpy(q_packed.astype(np.int32)) + + return q_packed + + +def pack_scales_24(scales, quantization_args, w_shape): + size_k = w_shape[0] + size_n = w_shape[1] + num_bits = quantization_args.num_bits + + _, scale_perm_2_4, scale_perm_single_2_4 = get_permutations_24(num_bits) + + if ( + quantization_args.strategy == QuantizationStrategy.GROUP + and quantization_args.group_size < size_k + ): + scales = scales.reshape((-1, len(scale_perm_2_4)))[:, scale_perm_2_4] + else: # channelwise + scales = scales.reshape((-1, len(scale_perm_single_2_4)))[ + :, scale_perm_single_2_4 + ] + scales = scales.reshape((-1, size_n)).contiguous() + + return scales diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/config/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..582b8a9e1a47b7d6a9e2feb780a7f955bfd03f20 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/config/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +# flake8: noqa +from .base import * +from .dense import * +from .sparse_24_bitmask import * +from .sparse_bitmask import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6959107bd61f4824d12cee4a92e92e1906bd920 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4795426e357e8c20a10c1fabc6f2a37084899224 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/dense.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/dense.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f481269f110aaddedb15da52a4b0254330228bad Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/dense.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_24_bitmask.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_24_bitmask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ca3fa125ca860fe072092a96a142059443fc226f Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_24_bitmask.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_bitmask.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_bitmask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ab9ea5a045ecec3e8629e0827fffb1f93336fac Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/config/__pycache__/sparse_bitmask.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/base.py b/venv/lib/python3.10/site-packages/compressed_tensors/config/base.py new file mode 100644 index 0000000000000000000000000000000000000000..9ca6f2cf2209cee3d97878f59f83349338c0c530 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/config/base.py @@ -0,0 +1,111 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import Enum, unique +from typing import List, Optional + +from compressed_tensors.registry import RegistryMixin +from pydantic import BaseModel + + +__all__ = ["SparsityCompressionConfig", "CompressionFormat", "SparsityStructure"] + + +@unique +class CompressionFormat(Enum): + dense = "dense" + sparse_bitmask = "sparse-bitmask" + sparse_24_bitmask = "sparse-24-bitmask" + int_quantized = "int-quantized" + float_quantized = "float-quantized" + naive_quantized = "naive-quantized" + pack_quantized = "pack-quantized" + marlin_24 = "marlin-24" + + +@unique +class SparsityStructure(Enum): + """ + An enumeration to represent different sparsity structures. + + Attributes + ---------- + TWO_FOUR : str + Represents a 2:4 sparsity structure. + ZERO_ZERO : str + Represents a 0:0 sparsity structure. + UNSTRUCTURED : str + Represents an unstructured sparsity structure. + + Examples + -------- + >>> SparsityStructure('2:4') + <SparsityStructure.TWO_FOUR: '2:4'> + + >>> SparsityStructure('unstructured') + <SparsityStructure.UNSTRUCTURED: 'unstructured'> + + >>> SparsityStructure('2:4') == SparsityStructure.TWO_FOUR + True + + >>> SparsityStructure('UNSTRUCTURED') == SparsityStructure.UNSTRUCTURED + True + + >>> SparsityStructure(None) == SparsityStructure.UNSTRUCTURED + True + + >>> SparsityStructure('invalid') + Traceback (most recent call last): + ... + ValueError: invalid is not a valid SparsityStructure + """ + + TWO_FOUR = "2:4" + UNSTRUCTURED = "unstructured" + ZERO_ZERO = "0:0" + + def __new__(cls, value): + obj = object.__new__(cls) + obj._value_ = value.lower() if value is not None else value + return obj + + @classmethod + def _missing_(cls, value): + # Handle None and case-insensitive values + if value is None: + return cls.UNSTRUCTURED + for member in cls: + if member.value == value.lower(): + return member + raise ValueError(f"{value} is not a valid {cls.__name__}") + + +class SparsityCompressionConfig(RegistryMixin, BaseModel): + """ + Base data class for storing sparsity compression parameters + + :param format: name of compression format + :param targets: List of layer names or layer types to target for compression. + By default, assume all layers are targeted + :param ignore: List of layer names (unique) to ignore from targets. Defaults to None + :param global_sparsity: average sparsity of the entire model + :param sparsity_structure: structure of the sparsity, such as + "unstructured", "2:4", "8:16" etc + """ + + format: str + targets: Optional[List[str]] = None + ignore: Optional[List[str]] = None + global_sparsity: Optional[float] = 0.0 + sparsity_structure: Optional[str] = "unstructured" diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/dense.py b/venv/lib/python3.10/site-packages/compressed_tensors/config/dense.py new file mode 100644 index 0000000000000000000000000000000000000000..8e7e3b7a4b69c164f6497e4860ee18951e44189b --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/config/dense.py @@ -0,0 +1,36 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from compressed_tensors.config import CompressionFormat, SparsityCompressionConfig + + +__all__ = ["DenseSparsityConfig"] + + +@SparsityCompressionConfig.register(name=CompressionFormat.dense.value) +class DenseSparsityConfig(SparsityCompressionConfig): + """ + Identity configuration for storing a sparse model in + an uncompressed dense format + + :param global_sparsity: average sparsity of the entire model + :param sparsity_structure: structure of the sparsity, such as + "unstructured", "2:4", "8:16" etc + """ + + format: str = CompressionFormat.dense.value + global_sparsity: Optional[float] = 0.0 + sparsity_structure: Optional[str] = "unstructured" diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_24_bitmask.py b/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_24_bitmask.py new file mode 100644 index 0000000000000000000000000000000000000000..7aae2dbe595e7c365ae9996664209598af4adf0b --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_24_bitmask.py @@ -0,0 +1,40 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from compressed_tensors.config import ( + CompressionFormat, + SparsityCompressionConfig, + SparsityStructure, +) + + +__all__ = ["Sparse24BitMaskConfig"] + + +@SparsityCompressionConfig.register(name=CompressionFormat.sparse_24_bitmask.value) +class Sparse24BitMaskConfig(SparsityCompressionConfig): + """ + Configuration for storing a 2:4 sparse model using + bitmask compression + + :param global_sparsity: average sparsity of the entire model + :param sparsity_structure: structure of the sparsity, should always be + "2:4" for this compression format + """ + + format: str = CompressionFormat.sparse_24_bitmask.value + global_sparsity: Optional[float] = 0.0 + sparsity_structure: Optional[str] = SparsityStructure.TWO_FOUR.value diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_bitmask.py b/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_bitmask.py new file mode 100644 index 0000000000000000000000000000000000000000..c14d9f7cde01a4dc2ac2e8fe50fcd60931a1dc01 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/config/sparse_bitmask.py @@ -0,0 +1,36 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from compressed_tensors.config import CompressionFormat, SparsityCompressionConfig + + +__all__ = ["BitmaskConfig"] + + +@SparsityCompressionConfig.register(name=CompressionFormat.sparse_bitmask.value) +class BitmaskConfig(SparsityCompressionConfig): + """ + Configuration for storing a sparse model using + bitmask compression + + :param global_sparsity: average sparsity of the entire model + :param sparsity_structure: structure of the sparsity, such as + "unstructured", "2:4", "8:16" etc + """ + + format: str = CompressionFormat.sparse_bitmask.value + global_sparsity: Optional[float] = 0.0 + sparsity_structure: Optional[str] = "unstructured" diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/linear/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0c44f887a47844c08db4f0f3f2d3452ed7f5aedd --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6edb309ad4154665d54429378946f4e40eee7a2f Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/compressed_linear.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/compressed_linear.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2aca4b4fd48668d07cfb1b4f553a671d45744919 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/linear/__pycache__/compressed_linear.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/linear/compressed_linear.py b/venv/lib/python3.10/site-packages/compressed_tensors/linear/compressed_linear.py new file mode 100644 index 0000000000000000000000000000000000000000..65f992b67266e988e1f9f5300a300bc6780eedd2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/linear/compressed_linear.py @@ -0,0 +1,108 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import warnings +from typing import Dict, Tuple + +import torch +from compressed_tensors.compressors.base import BaseCompressor +from compressed_tensors.quantization import ( + QuantizationScheme, + QuantizationStatus, + initialize_module_for_quantization, +) +from compressed_tensors.utils import register_offload_parameter +from torch import Tensor +from torch.nn import Parameter +from torch.nn.functional import linear +from torch.nn.modules import Linear + + +class CompressedLinear(Linear): + """ + Wrapper module for running a compressed forward pass of a quantized Linear module. + The wrapped layer is decompressed on the first forward call and the + decompressed weight is reused on subsequent calls. + """ + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + warnings.warn( + "CompressedLinear should not be initialized directly. " + "Use the from_linear method instead.", + UserWarning, + ) + + @classmethod + @torch.no_grad() + def from_linear( + cls, + module: Linear, + quantization_scheme: QuantizationScheme, + quantization_format: str, + ): + """ + :param module: dense linear module to replace + :param quantization_scheme: quantization config for the module to wrap + :param quantization_format: compression format module is stored as + :return: CompressedLinear module wrapping the input module + """ + module.__class__ = CompressedLinear + module.compressor = BaseCompressor.load_from_registry(quantization_format) + device = next(module.parameters()).device + + # this will initialize all the scales and zero points + initialize_module_for_quantization( + module, quantization_scheme, force_zero_point=False + ) + + # get the shape and dtype of compressed parameters + compression_params: Dict[str, Tuple] = module.compressor.compression_param_info( + module.weight.shape, quantization_scheme.weights + ) + + # no need for this once quantization is initialized, will be replaced + # with the compressed parameter + delattr(module, "weight") + + # populate compressed weights and quantization parameters + for name, (shape, dtype) in compression_params.items(): + param = Parameter( + torch.empty(shape, device=device, dtype=dtype), requires_grad=False + ) + register_offload_parameter(module, name, param) + + # mark module as compressed + module.quantization_status = QuantizationStatus.COMPRESSED + + # handles case where forward is wrapped in new_forward by accelerate hooks + if hasattr(module, "_old_forward"): + module._old_forward = CompressedLinear.forward.__get__( + module, CompressedLinear + ) + + return module + + def forward(self, input: Tensor) -> Tensor: + """ + Decompresses the weight, then runs the wrapped forward pass + """ + if self.quantization_status == QuantizationStatus.COMPRESSED: + weight_data = self.compressor.decompress_module(self) + param = Parameter(weight_data, requires_grad=False) + register_offload_parameter(self, "weight", param) + + self.quantization_status = QuantizationStatus.FROZEN + + return linear(input, self.weight, self.bias) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__init__.py
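A usage sketch for the wrapper above; the scheme here is illustrative (4-bit symmetric group quantization stored in the pack-quantized format), not a prescribed configuration:

```python
import torch
from torch.nn import Linear
from compressed_tensors.linear.compressed_linear import CompressedLinear
from compressed_tensors.quantization import QuantizationArgs, QuantizationScheme

# illustrative scheme: 4-bit symmetric weights, groups of 128 columns
scheme = QuantizationScheme(
    targets=["Linear"],
    weights=QuantizationArgs(num_bits=4, symmetric=True, strategy="group", group_size=128),
)
layer = Linear(256, 512)
compressed = CompressedLinear.from_linear(
    layer, quantization_scheme=scheme, quantization_format="pack-quantized"
)
out = compressed(torch.randn(2, 256))  # weight is decompressed on this first call
```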
b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9fde69a351939461d536f6eeea980520563cc7c0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa +# isort: skip_file + +from .quant_args import * +from .quant_config import * +from .quant_scheme import * +from .lifecycle import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a004d9c535290962dd838cf2bc725cd9257ee17 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_args.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_args.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb6db39e2a46335fc8d009c7b4a1a4f5d45383f6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_args.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_config.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3116f386ca70279bc83329b91145e96af17aa14 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_scheme.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_scheme.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1f42afea74d78645cefe350dca667b8ef0a910d Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/__pycache__/quant_scheme.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6acab2551f1a03dac27d80ac9f72f5d36728cb36 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__init__.py @@ -0,0 +1,22 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# flake8: noqa +# isort: skip_file + +from .forward import * +from .initialize import * +from .compressed import * +from .apply import * +from .helpers import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7999c2dbefac1617847f999e18e68f8a8770a90f Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/apply.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/apply.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7ff4259a1f4904a3a5716b311d0ae88ee2d50dca Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/apply.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/compressed.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/compressed.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..145e1bdf0e317b232a42dbbacee0a130d0e95940 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/compressed.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/forward.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/forward.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f26bff88de187dd8e44682cc115605d96e98969b Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/forward.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..118109aaf6f439b45f3861080cf5708b490ed90f Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/initialize.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/initialize.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..090ef4e55b59ab2c4868c1c19d5c7336131aabac Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/__pycache__/initialize.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/apply.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/apply.py new file mode 100644 index 0000000000000000000000000000000000000000..ca8fa68a98b02e50d4cd6c604607a353050c156f --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/apply.py @@ -0,0 +1,441 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import re +from collections import OrderedDict, defaultdict +from copy import deepcopy +from typing import Dict, Iterable, List, Optional +from typing import OrderedDict as OrderedDictType +from typing import Set, Union + +import torch +from compressed_tensors.config import CompressionFormat +from compressed_tensors.quantization.lifecycle.compressed import ( + compress_quantized_weights, +) +from compressed_tensors.quantization.lifecycle.initialize import ( + initialize_module_for_quantization, +) +from compressed_tensors.quantization.quant_args import QuantizationArgs +from compressed_tensors.quantization.quant_config import ( + QuantizationConfig, + QuantizationStatus, +) +from compressed_tensors.quantization.quant_scheme import QuantizationScheme +from compressed_tensors.quantization.utils import ( + KV_CACHE_TARGETS, + infer_quantization_status, + is_kv_cache_quant_scheme, + iter_named_leaf_modules, + iter_named_quantizable_modules, +) +from compressed_tensors.utils.helpers import fix_fsdp_module_name, replace_module +from compressed_tensors.utils.offload import update_parameter_data +from compressed_tensors.utils.safetensors_load import get_safetensors_folder +from torch.nn import Module + + +__all__ = [ + "load_pretrained_quantization", + "apply_quantization_config", + "apply_quantization_status", + "find_name_or_class_matches", + "expand_target_names", + "is_target", +] + +from compressed_tensors.quantization.utils.helpers import is_module_quantized +from compressed_tensors.utils.safetensors_load import get_quantization_state_dict + + +_LOGGER = logging.getLogger(__name__) + + +def load_pretrained_quantization(model: Module, model_name_or_path: str): + """ + Loads the quantization parameters (scale and zero point) from model_name_or_path to + a model that has already been initialized with a quantization config + + :param model: model to load pretrained quantization parameters to + :param model_name_or_path: Hugging Face stub or local folder containing a quantized + model, which is used to load quantization parameters + """ + model_path = get_safetensors_folder(model_name_or_path) + state_dict = get_quantization_state_dict(model_path) + + for name, submodule in iter_named_leaf_modules(model): + if not 
is_module_quantized(submodule): + continue + if submodule.quantization_scheme.weights is not None: + base_name = "weight" + _load_quant_args_from_state_dict( + base_name=base_name, + module_name=name, + module=submodule, + state_dict=state_dict, + ) + if submodule.quantization_scheme.input_activations is not None: + base_name = "input" + _load_quant_args_from_state_dict( + base_name=base_name, + module_name=name, + module=submodule, + state_dict=state_dict, + ) + if submodule.quantization_scheme.output_activations is not None: + base_name = "output" + _load_quant_args_from_state_dict( + base_name=base_name, + module_name=name, + module=submodule, + state_dict=state_dict, + ) + + +def apply_quantization_config( + model: Module, config: Union[QuantizationConfig, None], run_compressed: bool = False +) -> OrderedDict: + """ + Initializes the model for quantization in-place based on the given config. + Optionally converts quantizable modules to compressed_linear modules + + :param model: model to apply quantization config to + :param config: quantization config + :param run_compressed: Whether the model will be run in compressed mode or + decompressed fully on load + :return: ordered dict mapping quantized layer names to their weight + quantization args + """ + # Workaround for when HF Quantizer passes None, see PR #180 + if config is None: + return OrderedDict() + + # remove reference to the original `config` + # argument. This function can mutate it, and we'd + # like to keep the original `config` as it is. + config = deepcopy(config) + # build mapping of targets to schemes for easier matching + # use ordered dict to preserve target ordering in config + target_to_scheme = OrderedDict() + config = process_quantization_config(config) + names_to_scheme = OrderedDict() + for scheme in config.config_groups.values(): + for target in scheme.targets: + target_to_scheme[target] = scheme + + if run_compressed: + from compressed_tensors.linear.compressed_linear import CompressedLinear + + # list of submodules to ignore + ignored_submodules = defaultdict(list) + # mark appropriate layers for quantization by setting their quantization schemes + for name, submodule in iter_named_quantizable_modules( + model, + include_children=True, + include_attn=True, + ): # child modules and attention modules + # potentially fix module name to remove FSDP wrapper prefix + name = fix_fsdp_module_name(name) + if matches := find_name_or_class_matches(name, submodule, config.ignore): + for match in matches: + ignored_submodules[match].append(name) + continue # layer matches ignore list, continue + + targets = find_name_or_class_matches(name, submodule, target_to_scheme) + + if targets: + # mark modules to be quantized by adding + # quant scheme to the matching layers + scheme = _scheme_from_targets(target_to_scheme, targets, name) + if run_compressed: + format = config.format + if format != CompressionFormat.dense.value: + if isinstance(submodule, torch.nn.Linear): + # TODO: expand to more module types + compressed_linear = CompressedLinear.from_linear( + submodule, + quantization_scheme=scheme, + quantization_format=format, + ) + replace_module(model, name, compressed_linear) + + # target matched - add layer and scheme to target list + submodule.quantization_scheme = _scheme_from_targets( + target_to_scheme, targets, name + ) + + names_to_scheme[name] = submodule.quantization_scheme.weights + + if config.ignore is not None and ignored_submodules is not None: + if set(config.ignore) - set(ignored_submodules): + _LOGGER.warning( + "Some layers that were to be ignored were " + "not found in the model: " +
f"{set(config.ignore) - set(ignored_submodules)}" + ) + + # apply current quantization status across all targeted layers + apply_quantization_status(model, config.quantization_status) + return names_to_scheme + + +def process_quantization_config(config: QuantizationConfig) -> QuantizationConfig: + """ + Preprocess the raw QuantizationConfig + + :param config: the raw QuantizationConfig + :return: the processed QuantizationConfig + """ + if config.kv_cache_scheme is not None: + config = process_kv_cache_config(config) + + return config + + +def process_kv_cache_config( + config: QuantizationConfig, targets: Union[List[str], str] = KV_CACHE_TARGETS +) -> QuantizationConfig: + """ + Reformulate the `config.kv_cache` as a `config_group` + and add it to the set of existing `config.groups` + + :param config: the QuantizationConfig + :return: the QuantizationConfig with additional "kv_cache" group + """ + if targets == KV_CACHE_TARGETS: + _LOGGER.info(f"KV cache targets set to default value of: {KV_CACHE_TARGETS}") + + kv_cache_dict = config.kv_cache_scheme.model_dump() + kv_cache_scheme = QuantizationScheme( + output_activations=QuantizationArgs(**kv_cache_dict), + targets=targets, + ) + kv_cache_group = dict(kv_cache=kv_cache_scheme) + config.config_groups.update(kv_cache_group) + return config + + +def apply_quantization_status(model: Module, status: QuantizationStatus): + """ + Applies in place the quantization lifecycle up to the given status + + :param model: model to apply quantization to + :param status: status to update the module to + """ + + current_status = infer_quantization_status(model) + + if status >= QuantizationStatus.INITIALIZED > current_status: + force_zero_point_init = status != QuantizationStatus.COMPRESSED + model.apply( + lambda module: initialize_module_for_quantization( + module, force_zero_point=force_zero_point_init + ) + ) + + if current_status < status >= QuantizationStatus.COMPRESSED > current_status: + model.apply(compress_quantized_weights) + + +def expand_target_names( + model: Module, + targets: Optional[Iterable[str]] = None, + ignore: Optional[Iterable[str]] = None, +) -> Set[str]: + """ + Finds all unique module names in the model that match the given + targets and ignore lists. + + Note: Targets must be regexes, layer types, or full layer names. + + :param model: model to search for targets in + :param targets: Iterable of targets to search for + :param ignore: Iterable of targets to ignore + :return: set of all targets that match the given targets and should + not be ignored + """ + return { + name + for name, module in iter_named_leaf_modules(model) + if is_target(name, module, targets, ignore) + } + + +def is_target( + name: str, + module: Module, + targets: Optional[Iterable[str]] = None, + ignore: Optional[Iterable[str]] = None, +) -> bool: + """ + Determines if a module should be included in the targets based on the + targets and ignore lists. + + Note: Targets must be regexes, layer types, or full layer names. 
+ + :param name: name of the module + :param module: the module itself + :param targets: Iterable of targets to search for + :param ignore: Iterable of targets to ignore + :return: True if the module is a target and not ignored, False otherwise + """ + return bool( + find_name_or_class_matches(name, module, targets or []) + and not find_name_or_class_matches(name, module, ignore or []) + ) + + +def find_name_or_class_matches( + name: str, module: Module, targets: Iterable[str], check_contains: bool = False +) -> List[str]: + """ + Returns all targets that match the given name or the class name. + Returns empty list otherwise. + The order of the output `matches` list matters. + The entries are sorted in the following order: + 1. matches on exact strings + 2. matches on regex patterns + 3. matches on module names + """ + targets = sorted(targets, key=lambda x: ("re:" in x, x)) + if isinstance(targets, Iterable): + matches = _find_matches(name, targets) + _find_matches( + module.__class__.__name__, targets, check_contains + ) + matches = [match for match in matches if match is not None] + return matches + + +def _find_matches( + value: str, targets: Iterable[str], check_contains: bool = False +) -> List[str]: + # returns all the targets that match value either + # exactly or as a regex after 're:'. if check_contains is set to True, + # additionally checks if the target string is contained within value. + matches = [] + for target in targets: + if target.startswith("re:"): + pattern = target[3:] + if re.match(pattern, value): + matches.append(target) + elif check_contains: + if target.lower() in value.lower(): + matches.append(target) + elif target == value: + matches.append(target) + return matches + + +def _infer_status(model: Module) -> Optional[QuantizationStatus]: + for module in model.modules(): + status = getattr(module, "quantization_status", None) + if status is not None: + return status + return None + + +def _load_quant_args_from_state_dict( + base_name: str, module_name: str, module: Module, state_dict: Dict +): + """ + Loads scale and zero point from a state_dict into the specified module + + :param base_name: quantization target, one of: "weight", "input" or + "output" + :param module_name: pytorch module name to look up in state_dict + :param module: pytorch module associated with module_name + :param state_dict: state_dict to search for matching quantization parameters + """ + scale_name = f"{base_name}_scale" + zp_name = f"{base_name}_zero_point" + g_idx_name = f"{base_name}_g_idx" + + state_dict_scale = state_dict.get(f"{module_name}.{scale_name}", None) + state_dict_zp = state_dict.get(f"{module_name}.{zp_name}", None) + state_dict_g_idx = state_dict.get(f"{module_name}.{g_idx_name}", None) + + if state_dict_scale is not None: + # module is quantized + update_parameter_data(module, state_dict_scale, scale_name) + if state_dict_zp is None: + # fill in zero point for symmetric quantization + state_dict_zp = torch.zeros_like(state_dict_scale, device="cpu") + update_parameter_data(module, state_dict_zp, zp_name) + + if state_dict_g_idx is not None: + update_parameter_data(module, state_dict_g_idx, g_idx_name) + + +def _scheme_from_targets( + target_to_scheme: OrderedDictType[str, QuantizationScheme], + targets: List[str], + name: str, +) -> QuantizationScheme: + if len(targets) == 1: + # if `targets` iterable contains a single element + # use it as the key + return target_to_scheme[targets[0]] + + # otherwise, we need to merge QuantizationSchemes corresponding + # to
multiple targets. This is most likely because `name` module + # is being targeted both as an ordinary quantization target, as well + # as kv cache quantization target + schemes_to_merge = [target_to_scheme[target] for target in targets] + return _merge_schemes(schemes_to_merge, name) + + +def _merge_schemes( + schemes_to_merge: List[QuantizationScheme], name: str +) -> QuantizationScheme: + + kv_cache_quantization_scheme = [ + scheme for scheme in schemes_to_merge if is_kv_cache_quant_scheme(scheme) + ] + if not kv_cache_quantization_scheme: + # if the schemes_to_merge do not contain any + # kv cache QuantizationScheme + # return the first scheme (the prioritized one, + # since the order of schemes_to_merge matters) + return schemes_to_merge[0] + else: + # fetch the kv cache QuantizationScheme and the highest + # priority non-kv cache QuantizationScheme and merge them + kv_cache_quantization_scheme = kv_cache_quantization_scheme[0] + quantization_scheme = [ + scheme + for scheme in schemes_to_merge + if not is_kv_cache_quant_scheme(scheme) + ][0] + schemes_to_merge = [kv_cache_quantization_scheme, quantization_scheme] + merged_scheme = {} + for scheme in schemes_to_merge: + scheme_dict = { + k: v for k, v in scheme.model_dump().items() if v is not None + } + # when merging multiple schemes, the final target will be + # the `name` argument - hence erase the original targets + del scheme_dict["targets"] + # make sure that schemes do not "clash" with each other + overlapping_keys = set(merged_scheme.keys()) & set(scheme_dict.keys()) + if overlapping_keys: + raise ValueError( + f"The module: {name} is being modified by two clashing " + f"quantization schemes that jointly try to override " + f"properties: {overlapping_keys}. Fix the quantization config " + "so that it is not ambiguous." + ) + merged_scheme.update(scheme_dict) + + merged_scheme.update(targets=[name]) + + return QuantizationScheme(**merged_scheme) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/compressed.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/compressed.py new file mode 100644 index 0000000000000000000000000000000000000000..00f707920d5c01f4cb60f5286f3fb499b26304f4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/compressed.py @@ -0,0 +1,71 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
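An end-to-end sketch of apply_quantization_config on a toy model; the config values are illustrative rather than recommended settings:

```python
import torch
from compressed_tensors.quantization import (
    QuantizationArgs,
    QuantizationConfig,
    QuantizationScheme,
    apply_quantization_config,
)

model = torch.nn.Sequential(torch.nn.Linear(64, 64), torch.nn.Linear(64, 16))
config = QuantizationConfig(
    config_groups={
        "group_0": QuantizationScheme(
            targets=["Linear"],  # match every Linear by class name
            weights=QuantizationArgs(num_bits=8, symmetric=True, strategy="channel"),
        )
    },
    quantization_status="initialized",
)
names_to_scheme = apply_quantization_config(model, config)
# every targeted Linear now owns weight_scale / weight_zero_point parameters
```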
+ + +import logging + +import torch +from compressed_tensors.quantization.lifecycle.forward import quantize +from compressed_tensors.quantization.quant_config import QuantizationStatus +from torch.nn import Module + + +__all__ = [ + "compress_quantized_weights", +] + + +_LOGGER = logging.getLogger(__name__) + + +def compress_quantized_weights(module: Module): + """ + Quantizes the module weight representation to use fewer bits in memory + + apply to full model with `model.apply(compress_quantized_weights)` + + :param module: module to compress to quantized representation + """ + scheme = getattr(module, "quantization_scheme", None) + if not scheme or not scheme.weights: + # no quantization scheme or weights not quantized, nothing to do + return + + if getattr(module, "quantization_status", None) is QuantizationStatus.COMPRESSED: + # module is already compressed, nothing to do + return + + weight = getattr(module, "weight", None) + scale = getattr(module, "weight_scale", None) + zero_point = getattr(module, "weight_zero_point", None) + g_idx = getattr(module, "weight_g_idx", None) + + if weight is None or scale is None: + # no weight or scale, nothing to do + + # mark as compressed here to maintain consistent status throughout the model + module.quantization_status = QuantizationStatus.COMPRESSED + return + + module.weight.requires_grad = False # cannot use auto grad after compression + module.weight.data = quantize( + x=weight, + scale=scale, + zero_point=zero_point, + g_idx=g_idx, + args=scheme.weights, + dtype=torch.int8, + ) + + module.quantization_status = QuantizationStatus.COMPRESSED diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/forward.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/forward.py new file mode 100644 index 0000000000000000000000000000000000000000..f4f93f278ea5789c222d85a02455ada8b9af9d10 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/forward.py @@ -0,0 +1,394 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
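Continuing the sketch above: once scales have been calibrated, the weights can be folded into their integer representation in place, as the compress_quantized_weights docstring suggests:

```python
from compressed_tensors.quantization import compress_quantized_weights

model.apply(compress_quantized_weights)
# modules with a weight scheme now store int8 weights and report
# quantization_status == QuantizationStatus.COMPRESSED
```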
+ +from functools import wraps +from math import ceil +from typing import Optional + +import torch +from compressed_tensors.quantization.quant_args import ( + QuantizationArgs, + QuantizationStrategy, + round_to_quantized_type, +) +from compressed_tensors.quantization.quant_config import QuantizationStatus +from compressed_tensors.quantization.quant_scheme import QuantizationScheme +from compressed_tensors.quantization.utils import ( + calculate_range, + compute_dynamic_scales_and_zp, +) +from compressed_tensors.utils import safe_permute +from torch.nn import Module + + +__all__ = [ + "quantize", + "dequantize", + "fake_quantize", + "wrap_module_forward_quantized", + "forward_quantize", +] + + +@torch.no_grad() +def quantize( + x: torch.Tensor, + scale: torch.Tensor, + zero_point: torch.Tensor, + args: QuantizationArgs, + dtype: Optional[torch.dtype] = None, + g_idx: Optional[torch.Tensor] = None, +) -> torch.Tensor: + """ + Quantize the input tensor x using the QuantizationStrategy specified in args. + Quantization can be done per tensor, channel, token or group. For group + quantization, the column size must be divisible by the group_size. The input scale + and zero_points are reshaped to support vectorization (Assumes 1 is the + channel dimension) + + :param x: Input tensor + :param scale: scale tensor + :param zero_point: zero point tensor + :param args: quantization args dictating how to quantize x + :param dtype: optional dtype to cast the quantized output to + :param g_idx: optional mapping from column index to group index + :return: fake quantized tensor + """ + + return _process_quantization( + x=x, + scale=scale, + zero_point=zero_point, + args=args, + dtype=dtype, + do_quantize=True, + do_dequantize=False, + g_idx=g_idx, + ) + + +@torch.no_grad() +def dequantize( + x_q: torch.Tensor, + scale: torch.Tensor, + zero_point: Optional[torch.Tensor] = None, + args: Optional[QuantizationArgs] = None, + dtype: Optional[torch.dtype] = None, + g_idx: Optional[torch.Tensor] = None, +) -> torch.Tensor: + """ + Dequantize a quantized input tensor x_q based on the strategy specified in args. If + args is not provided, the strategy will be inferred. + + :param x_q: quantized input tensor + :param scale: scale tensor + :param zero_point: zero point tensor + :param args: quantization args used to quantize x_q + :param dtype: optional dtype to cast the dequantized output to + :param g_idx: optional mapping from column index to group index + :return: dequantized float tensor + """ + if args is None: + if scale.ndim == 0 or scale.ndim == 1: + args = QuantizationArgs(strategy=QuantizationStrategy.TENSOR) + elif scale.ndim == 2: + if scale.shape[1] == 1: + args = QuantizationArgs(strategy=QuantizationStrategy.CHANNEL) + else: + group_size = int(x_q.shape[1] / scale.shape[1]) + args = QuantizationArgs( + strategy=QuantizationStrategy.GROUP, group_size=group_size + ) + else: + raise ValueError( + f"Could not infer a quantization strategy from scale with {scale.ndim} " + "dimensions. Expected 0, 1, or 2 dimensions."
+ ) + + if dtype is None: + dtype = scale.dtype + + return _process_quantization( + x=x_q, + scale=scale, + zero_point=zero_point, + args=args, + do_quantize=False, + do_dequantize=True, + dtype=dtype, + g_idx=g_idx, + ) + + +@torch.no_grad() +def fake_quantize( + x: torch.Tensor, + scale: torch.Tensor, + zero_point: torch.Tensor, + args: QuantizationArgs, + g_idx: Optional[torch.Tensor] = None, +) -> torch.Tensor: + """ + Fake quantize the input tensor x by quantizing then dequantizing with + the QuantizationStrategy specified in args. Quantization can be done per tensor, + channel, token or group. For group quantization, the column size must be divisible + by the group_size. The input scale and zero_points are reshaped to support + vectorization (Assumes 1 is the channel dimension) + + :param x: Input tensor + :param scale: scale tensor + :param zero_point: zero point tensor + :param args: quantization args dictating how to quantize x + :param g_idx: optional mapping from column index to group index + :return: fake quantized tensor + """ + return _process_quantization( + x=x, + scale=scale, + zero_point=zero_point, + args=args, + do_quantize=True, + do_dequantize=True, + g_idx=g_idx, + ) + + +@torch.no_grad() +def _process_quantization( + x: torch.Tensor, + scale: torch.Tensor, + zero_point: torch.Tensor, + args: QuantizationArgs, + g_idx: Optional[torch.Tensor] = None, + dtype: Optional[torch.dtype] = None, + do_quantize: bool = True, + do_dequantize: bool = True, +) -> torch.Tensor: + q_min, q_max = calculate_range(args, x.device) + group_size = args.group_size + + if args.strategy == QuantizationStrategy.GROUP: + output_dtype = dtype if dtype is not None else x.dtype + output = torch.zeros_like(x).to(output_dtype) + columns = output.shape[1] + + # TODO: make validation step for inputs + + while scale.ndim < 2: + # pad scale and zero point dims for slicing + scale = scale.unsqueeze(1) + zero_point = zero_point.unsqueeze(1) if zero_point is not None else None + + if columns >= group_size: + if columns % group_size != 0: + raise ValueError( + "tensor column shape must be divisible " + f"by the given group_size {group_size}" + ) + + # support column-order (default) quantization as well as other orderings + # such as activation ordering.
Below checks if g_idx has been initialized + is_column_order = g_idx is None or -1 in g_idx + if is_column_order: + num_groups = int(ceil(columns / group_size)) + group_sizes = torch.full((num_groups,), group_size, dtype=torch.int) + + else: + group_indices, group_sizes = torch.unique(g_idx, return_counts=True) + group_sizes = group_sizes[torch.argsort(group_indices)] + + perm = torch.argsort(g_idx) + x = safe_permute(x, perm, dim=1) + + # TODO: experiment with vectorizing for loop for performance + end = 0 + for index, group_count in enumerate(group_sizes): + sc = scale[:, index].view(-1, 1) + zp = zero_point[:, index].view(-1, 1) if zero_point is not None else None + + start = end + end = start + group_count + if do_quantize: + output[:, start:end] = _quantize( + x[:, start:end], + sc, + zp, + q_min, + q_max, + args, + dtype=dtype, + ) + + if do_dequantize: + input = output[:, start:end] if do_quantize else x[:, start:end] + output[:, start:end] = _dequantize(input, sc, zp) + + if not is_column_order: + output = safe_permute(output, torch.argsort(perm), dim=1) + + else: # covers channel, token and tensor strategies + if do_quantize: + output = _quantize( + x, + scale, + zero_point, + q_min, + q_max, + args, + dtype=dtype, + ) + if do_dequantize: + output = _dequantize(output if do_quantize else x, scale, zero_point) + + return output + + +def wrap_module_forward_quantized(module: Module, scheme: QuantizationScheme): + # expects a module already initialized and injected with the parameters in + # initialize_module_for_quantization + if hasattr(module.forward, "__func__"): + forward_func_orig = module.forward.__func__ + else: + forward_func_orig = module.forward.func + + @wraps(forward_func_orig) # ensures docstring, names, etc are propagated + def wrapped_forward(self, *args, **kwargs): + if not getattr(module, "quantization_enabled", True): + # quantization is disabled on forward passes, return baseline + # forward call + return forward_func_orig.__get__(module, module.__class__)(*args, **kwargs) + + input_ = args[0] + + compressed = module.quantization_status == QuantizationStatus.COMPRESSED + + if scheme.input_activations is not None: + # prehook should calibrate activations before forward call + input_ = forward_quantize(module, input_, "input", scheme.input_activations) + + if scheme.weights is not None and not compressed: + # calibrate and (fake) quantize weights when applicable + unquantized_weight = self.weight.data.clone() + self.weight.data = forward_quantize( + module, self.weight, "weight", scheme.weights + ) + + # perform wrapped forward call + output = forward_func_orig.__get__(module, module.__class__)( + input_, *args[1:], **kwargs + ) + + # restore back to unquantized_value + if scheme.weights is not None and not compressed: + self.weight.data = unquantized_weight + + if scheme.output_activations is not None: + # forward-hook should calibrate/forward_quantize + if ( + module.quantization_status == QuantizationStatus.CALIBRATION + and not scheme.output_activations.dynamic + ): + return output + + output = forward_quantize( + module, output, "output", scheme.output_activations + ) + return output + + # bind wrapped forward to module class so reference to `self` is correct + bound_wrapped_forward = wrapped_forward.__get__(module, module.__class__) + # set forward to wrapped forward + setattr(module, "forward", bound_wrapped_forward) + + +def forward_quantize( + module: Module, value: torch.Tensor, base_name: str, args: "QuantizationArgs" +) -> torch.Tensor: + + # in 
compressed mode, the weight is already compressed and quantized so we don't + # need to run fake quantization + if ( + module.quantization_status == QuantizationStatus.COMPRESSED + and base_name == "weight" + ): + return value + + if value.numel() == 0: + # if the tensor is empty, + # skip quantization + return value + + g_idx = getattr(module, "weight_g_idx", None) + + if args.dynamic: + # dynamic quantization - determine the scale/zp on the fly + scale, zero_point = compute_dynamic_scales_and_zp(value=value, args=args) + else: + # static quantization - get scale and zero point from layer + scale = getattr(module, f"{base_name}_scale") + zero_point = getattr(module, f"{base_name}_zero_point", None) + + return fake_quantize( + x=value, + scale=scale, + zero_point=zero_point, + args=args, + g_idx=g_idx, + ) + + +@torch.no_grad() +def _quantize( + x: torch.Tensor, + scale: torch.Tensor, + zero_point: torch.Tensor, + q_min: torch.Tensor, + q_max: torch.Tensor, + args: QuantizationArgs, + dtype: Optional[torch.dtype] = None, +) -> torch.Tensor: + + scaled = x / scale + if zero_point is not None: + scaled += zero_point.to(x.dtype) + # clamp first because cast isn't guaranteed to be saturated (ie for fp8) + clamped_value = torch.clamp( + scaled, + q_min, + q_max, + ) + quantized_value = round_to_quantized_type(clamped_value, args) + if dtype is not None: + quantized_value = quantized_value.to(dtype) + + return quantized_value + + +@torch.no_grad() +def _dequantize( + x_q: torch.Tensor, + scale: torch.Tensor, + zero_point: torch.Tensor = None, + dtype: Optional[torch.dtype] = None, +) -> torch.Tensor: + dequant_value = x_q.to(scale.dtype) + + if zero_point is not None: + dequant_value = dequant_value - zero_point.to(scale.dtype) + dequant_value = dequant_value * scale + + if dtype is not None: + dequant_value = dequant_value.to(dtype) + + return dequant_value diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/helpers.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..9d7553284da7ee9cce616062d78aa195895cd6af --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/helpers.py @@ -0,0 +1,33 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
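Taken together, `quantize`, `dequantize`, and `fake_quantize` above are thin wrappers over `_process_quantization`. A minimal round-trip sketch, assuming a per-tensor symmetric int8 scheme (the toy scale below is illustrative, not the library's calibration method):

```python
import torch

from compressed_tensors.quantization.lifecycle.forward import (
    dequantize,
    fake_quantize,
    quantize,
)
from compressed_tensors.quantization.quant_args import (
    QuantizationArgs,
    QuantizationStrategy,
)

args = QuantizationArgs(num_bits=8, symmetric=True, strategy=QuantizationStrategy.TENSOR)
x = torch.randn(4, 64)
scale = x.abs().max() / 127.0            # toy per-tensor scale, not a calibrated value
zero_point = torch.zeros(1, dtype=torch.int8)

x_q = quantize(x, scale, zero_point, args, dtype=torch.int8)  # integer representation
x_dq = dequantize(x_q, scale, zero_point, args)               # back to float
# fake_quantize fuses the two steps and should agree with the explicit round trip
assert torch.equal(x_dq, fake_quantize(x, scale, zero_point, args))
```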
+ +""" +Miscelaneous helpers for the quantization lifecycle +""" + +from torch.nn import Module + + +__all__ = [ + "enable_quantization", + "disable_quantization", +] + + +def enable_quantization(module: Module): + module.quantization_enabled = True + + +def disable_quantization(module: Module): + module.quantization_enabled = False diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/initialize.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/initialize.py new file mode 100644 index 0000000000000000000000000000000000000000..6886423a0bc4a2c3d719df013c3914f605a7a848 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/lifecycle/initialize.py @@ -0,0 +1,212 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import logging +from enum import Enum +from typing import Optional + +import torch +from compressed_tensors.quantization.lifecycle.forward import ( + wrap_module_forward_quantized, +) +from compressed_tensors.quantization.quant_args import ( + ActivationOrdering, + QuantizationArgs, + QuantizationStrategy, +) +from compressed_tensors.quantization.quant_config import QuantizationStatus +from compressed_tensors.quantization.quant_scheme import QuantizationScheme +from compressed_tensors.quantization.utils import is_kv_cache_quant_scheme +from compressed_tensors.utils import ( + disable_hf_hook, + has_offloaded_params, + register_offload_parameter, +) +from torch.nn import Module, Parameter + + +__all__ = [ + "initialize_module_for_quantization", + "is_attention_module", + "KVCacheScaleType", +] + + +_LOGGER = logging.getLogger(__name__) + + +class KVCacheScaleType(Enum): + KEY = "k_scale" + VALUE = "v_scale" + + +def initialize_module_for_quantization( + module: Module, + scheme: Optional[QuantizationScheme] = None, + force_zero_point: bool = True, +): + """ + attaches appropriate scales, zero points, and observers to a layer + given its target quantization scheme + + apply to full model with `model.apply(initialize_module_for_quantization)` + + :param module: module to set for calibration + :param scheme: scheme to use for quantization. 
if None is provided, + will attempt to use scheme stored in the module under `quantization_scheme`, + if not provided, the layer will be skipped + :param force_zero_point: whether to force initialization of a zero point for + symmetric quantization + """ + scheme = scheme or getattr(module, "quantization_scheme", None) + if scheme is None: + # no scheme passed and layer not targeted for quantization - skip + return + + if is_attention_module(module): + # quantized actions based on calltime status + _initialize_attn_scales(module) + + else: + + if scheme.input_activations is not None: + _initialize_scale_zero_point( + module, + "input", + scheme.input_activations, + force_zero_point=force_zero_point, + ) + if scheme.weights is not None: + if hasattr(module, "weight"): + weight_shape = None + if isinstance(module, torch.nn.Linear): + weight_shape = module.weight.shape + _initialize_scale_zero_point( + module, + "weight", + scheme.weights, + weight_shape=weight_shape, + force_zero_point=force_zero_point, + ) + else: + _LOGGER.warning( + f"module type {type(module)} targeted for weight quantization but " + "has no attribute weight, skipping weight quantization " + f"for {type(module)}" + ) + + if scheme.output_activations is not None: + if not is_kv_cache_quant_scheme(scheme): + _initialize_scale_zero_point( + module, "output", scheme.output_activations + ) + + module.quantization_scheme = scheme + module.quantization_status = QuantizationStatus.INITIALIZED + + with disable_hf_hook(module): + # wrap forward call of module to perform + # quantized actions based on calltime status + wrap_module_forward_quantized(module, scheme) + + +def is_attention_module(module: Module): + return "attention" in module.__class__.__name__.lower() and ( + hasattr(module, "k_proj") + or hasattr(module, "v_proj") + or hasattr(module, "qkv_proj") + ) + + +def _initialize_scale_zero_point( + module: Module, + base_name: str, + quantization_args: QuantizationArgs, + weight_shape: Optional[torch.Size] = None, + force_zero_point: bool = True, +): + if quantization_args.dynamic: + return + + # begin on the same device as other parameters or cpu if offloaded. 
+ # in the offloaded case, there's no point moving tensors to the execution device + # if they're going to be immediately offloaded by `register_offload_parameter` + params_device = next(module.parameters()).device + device = "cpu" if has_offloaded_params(module) else params_device + + # infer expected scale/zero point shape + if quantization_args.strategy == QuantizationStrategy.TOKEN: + expected_shape = (1, 1) + else: + expected_shape = 1 + + if base_name == "weight" and weight_shape is not None: + if quantization_args.strategy == QuantizationStrategy.CHANNEL: + # (output_channels, 1) + expected_shape = (weight_shape[0], 1) + elif quantization_args.strategy == QuantizationStrategy.GROUP: + num_groups = weight_shape[1] // quantization_args.group_size + expected_shape = (weight_shape[0], max(num_groups, 1)) + + scale_dtype = module.weight.dtype + if scale_dtype not in [torch.float16, torch.bfloat16, torch.float32]: + scale_dtype = torch.float16 + + # initializes empty scale, zero point, and g_idx parameters for the module + init_scale = Parameter( + torch.empty(expected_shape, dtype=scale_dtype, device=device), + requires_grad=False, + ) + register_offload_parameter(module, f"{base_name}_scale", init_scale) + + if force_zero_point or not quantization_args.symmetric: + zp_dtype = quantization_args.pytorch_dtype() + init_zero_point = Parameter( + torch.zeros(expected_shape, device=device, dtype=zp_dtype), + requires_grad=False, + ) + register_offload_parameter(module, f"{base_name}_zero_point", init_zero_point) + + # only grouped activation ordering has g_idx + if quantization_args.actorder == ActivationOrdering.GROUP: + g_idx_shape = (weight_shape[1],) + g_idx_dtype = torch.int + init_g_idx = Parameter( + torch.full(g_idx_shape, -1, device=device, dtype=g_idx_dtype), + requires_grad=False, + ) + register_offload_parameter(module, f"{base_name}_g_idx", init_g_idx) + + +def _initialize_attn_scales(module: Module) -> None: + """Initialize k_scale and v_scale for self_attn""" + + expected_shape = 1  # per tensor + + param = next(module.parameters()) + scale_dtype = param.dtype + device = param.device + + init_scale = Parameter( + torch.empty(expected_shape, dtype=scale_dtype, device=device), + requires_grad=False, + ) + register_offload_parameter(module, KVCacheScaleType.KEY.value, init_scale) + + init_scale = Parameter( + torch.empty(expected_shape, dtype=scale_dtype, device=device), + requires_grad=False, + ) + register_offload_parameter(module, KVCacheScaleType.VALUE.value, init_scale) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_args.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_args.py new file mode 100644 index 0000000000000000000000000000000000000000..69c289d2c7ca3120a7eac47d096a4972fc55ac01 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_args.py @@ -0,0 +1,272 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
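A sketch of how `initialize_module_for_quantization` above might be applied to a single layer, assuming a weight-only channel-wise int8 scheme (layer sizes illustrative):

```python
import torch

from compressed_tensors.quantization.lifecycle.initialize import (
    initialize_module_for_quantization,
)
from compressed_tensors.quantization.quant_args import (
    QuantizationArgs,
    QuantizationStrategy,
)
from compressed_tensors.quantization.quant_scheme import QuantizationScheme

layer = torch.nn.Linear(64, 32)
scheme = QuantizationScheme(
    targets=["Linear"],
    weights=QuantizationArgs(num_bits=8, strategy=QuantizationStrategy.CHANNEL),
)
initialize_module_for_quantization(layer, scheme)

print(layer.weight_scale.shape)   # torch.Size([32, 1]) for the channel strategy
print(layer.quantization_status)  # QuantizationStatus.INITIALIZED
```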
+ +import warnings +from enum import Enum +from typing import Any, Dict, Optional, Union + +import torch +from compressed_tensors.utils import Aliasable +from compressed_tensors.utils.helpers import deprecated +from pydantic import BaseModel, Field, field_validator, model_validator + + +__all__ = [ + "FP8_DTYPE", + "QuantizationType", + "QuantizationStrategy", + "QuantizationArgs", + "round_to_quantized_type", + "ActivationOrdering", +] + +FP8_DTYPE = torch.float8_e4m3fn + + +class QuantizationType(str, Enum): + """ + Enum storing quantization type options + """ + + INT = "int" + FLOAT = "float" + + +class QuantizationStrategy(str, Enum): + """ + Enum storing quantization strategy options + """ + + TENSOR = "tensor" + CHANNEL = "channel" + GROUP = "group" + BLOCK = "block" + TOKEN = "token" + + +class ActivationOrdering(Aliasable, str, Enum): + """ + Enum storing strategies for activation ordering + + Group: reorder groups and weight\n + Weight: only reorder weight, not groups. Slightly lower accuracy but also lower + latency when compared to group actorder\n + Dynamic: alias for Group\n + Static: alias for Weight\n + """ + + GROUP = "group" + WEIGHT = "weight" + # aliases + DYNAMIC = "dynamic" + STATIC = "static" + + @staticmethod + def get_aliases() -> Dict[str, str]: + return { + "dynamic": "group", + "static": "weight", + } + + +class QuantizationArgs(BaseModel, use_enum_values=True): + """ + User facing arguments used to define a quantization config for weights or + activations + + :param num_bits: quantization bit depth + :param type: dtype to quantize to, either int or float + :param symmetric: whether or not quantization scale is symmetric about zero-point + :param strategy: string id determining the scope of scale/zero-point to apply + :param group_size: group length to use for the group strategy + :param block_structure: 2d block structure to use for the block strategy, must be + of the format "2x4", "8x16", etc. + :param dynamic: set True to perform dynamic quantization - values will not be + calibrated during calibration phase, instead during inference new quantization + ranges will be observed with every sample. Defaults to False for static + quantization. Note that enabling dynamic quantization will change the default + observer to a memoryless one + :param actorder: whether to apply group quantization in decreasing order of + activation. Defaults to None for arbitrary ordering + """ + + num_bits: int = 8 + type: QuantizationType = QuantizationType.INT + symmetric: bool = True + group_size: Optional[int] = None + strategy: Optional[QuantizationStrategy] = None + block_structure: Optional[str] = None + dynamic: bool = False + actorder: Union[ActivationOrdering, bool, None] = None + observer: Optional[str] = Field( + default=None, + description=( + "Determines the method of computing quantization parameters (scales and " + "zero-points). Defaults to min-max when not using dynamic quantization" + ), + ) + observer_kwargs: Dict[str, Any] = Field( + default_factory=dict, + description=( + "optional dict of kwargs to be passed directly to torch quantization " + "Observers constructor excluding quantization range or symmetry" + ), + ) + + @field_validator("type", mode="before") + def validate_type(cls, value) -> QuantizationType: + if isinstance(value, str): + return QuantizationType(value.lower()) + + return value + + @field_validator("group_size", mode="before") + def validate_group(cls, value) -> Union[int, None]: + if value is None: + return value + + if value < -1: + raise ValueError( + f"Invalid group size {value}. Use group_size > 0 for " + "strategy='group' and group_size = -1 for 'channel'" + ) + + return value + + @field_validator("strategy", mode="before") + def validate_strategy(cls, value) -> Union[QuantizationStrategy, None]: + if isinstance(value, str): + return QuantizationStrategy(value.lower()) + + return value + + @field_validator("actorder", mode="before") + def validate_actorder(cls, value) -> Optional[ActivationOrdering]: + if isinstance(value, bool): + return ActivationOrdering.GROUP if value else None + + if isinstance(value, str): + return ActivationOrdering(value.lower()) + + return value + + @model_validator(mode="after") + def validate_model_after(model: "QuantizationArgs") -> "QuantizationArgs": + # extract user-passed values from the model + strategy = model.strategy + group_size = model.group_size + actorder = model.actorder + dynamic = model.dynamic + observer = model.observer + + # infer strategy + if strategy is None: + if group_size is None: + strategy = QuantizationStrategy.TENSOR + elif group_size > 0: + strategy = QuantizationStrategy.GROUP + elif group_size == -1: + strategy = QuantizationStrategy.CHANNEL + else: + raise ValueError( + f"Invalid group size {group_size}. Use group_size > 0 for " + "strategy='group' and group_size = -1 for 'channel'" + ) + + # validate strategy and group + if strategy == QuantizationStrategy.GROUP: + if group_size is None or group_size <= 0: + raise ValueError( + f"strategy {strategy} requires group_size to be " + "set to a positive value" + ) + if ( + group_size is not None + and group_size > 0 + and strategy != QuantizationStrategy.GROUP + ): + raise ValueError("group_size requires strategy to be set to 'group'") + + # validate activation ordering and strategy + if actorder is not None and strategy != QuantizationStrategy.GROUP: + raise ValueError( + "Must use group quantization strategy in order to apply " + "activation ordering" + ) + + # infer observer w.r.t.
dynamic + if dynamic: + if strategy not in ( + QuantizationStrategy.TOKEN, + QuantizationStrategy.TENSOR, + ): + raise ValueError( + f"One of {QuantizationStrategy.TOKEN} or " + f"{QuantizationStrategy.TENSOR} must be used for dynamic " + "quantization" + ) + if observer is not None: + if observer != "memoryless":  # avoid annoying users with old configs + warnings.warn( + "No observer is used for dynamic quantization, setting to None" + ) + observer = None + + elif observer is None: + # default to minmax for non-dynamic cases + observer = "minmax" + + # write back modified values + model.strategy = strategy + model.observer = observer + return model + + def pytorch_dtype(self) -> torch.dtype: + if self.type == QuantizationType.FLOAT: + return FP8_DTYPE + elif self.type == QuantizationType.INT: + if self.num_bits <= 8: + return torch.int8 + elif self.num_bits <= 16: + return torch.int16 + else: + return torch.int32 + else: + raise ValueError(f"Invalid quantization type {self.type}") + + @deprecated("QuantizationArgs.observer") + def get_observer(self) -> str: + return self.observer + + +def round_to_quantized_type( + tensor: torch.Tensor, args: QuantizationArgs +) -> torch.Tensor: + """ + Rounds each element of the input tensor to the nearest quantized representation, + keeping the original dtype + + :param tensor: tensor to round + :param args: QuantizationArgs to pull appropriate dtype from + :return: rounded tensor + """ + original_dtype = tensor.dtype + if args.type == QuantizationType.FLOAT: + rounded = tensor.to(FP8_DTYPE) + elif args.type == QuantizationType.INT: + rounded = torch.round(tensor) + else: + raise ValueError(f"Invalid quantization type {args.type}") + + return rounded.to(original_dtype) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_config.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_config.py new file mode 100644 index 0000000000000000000000000000000000000000..3a80f0cb1cfcc2afc892356e5f5ee36901479060 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_config.py @@ -0,0 +1,264 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
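The `validate_model_after` hook above fills in unset fields; a short sketch of its inference rules (values illustrative):

```python
from compressed_tensors.quantization.quant_args import (
    QuantizationArgs,
    QuantizationStrategy,
)

# group_size > 0 with no explicit strategy is promoted to the group strategy
args = QuantizationArgs(num_bits=4, group_size=128)
assert args.strategy == QuantizationStrategy.GROUP

# group_size == -1 is shorthand for channel-wise quantization
assert QuantizationArgs(group_size=-1).strategy == QuantizationStrategy.CHANNEL

# static (non-dynamic) args default to the min-max observer
assert args.observer == "minmax"
```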
+ +from enum import Enum +from typing import Dict, List, Optional, Union + +from compressed_tensors.config import CompressionFormat +from compressed_tensors.quantization.quant_args import QuantizationArgs +from compressed_tensors.quantization.quant_scheme import ( + QuantizationScheme, + preset_name_to_scheme, +) +from compressed_tensors.quantization.utils import ( + calculate_compression_ratio, + is_module_quantized, + iter_named_quantizable_modules, + module_type, + parse_out_kv_cache_args, +) +from pydantic import BaseModel, Field +from torch.nn import Module + + +__all__ = [ + "QuantizationStatus", + "QuantizationConfig", + "LIFECYCLE_ORDER", + "DEFAULT_QUANTIZATION_METHOD", + "DEFAULT_QUANTIZATION_FORMAT", +] + + +class QuantizationStatus(str, Enum): + """ + Enum storing the different states a quantized layer can be in + + Initialized: scale, zero points and observers have been attached to the layer but + are set to dummy values (not yet calibrated) + Calibration: scale and zero points have been calibrated through OBCQ or similar + algorithm, observers are still attached + Frozen: scale and zero points are finalized, observers have been deleted, weights + are still in their original precision + Compressed: weights have been converted to their target type or compressed to + their closest approximation + """ + + INITIALIZED = "initialized" + CALIBRATION = "calibration" + FROZEN = "frozen" + COMPRESSED = "compressed" + + @classmethod + def lifecycle_order(cls) -> List["QuantizationStatus"]: + """ + :return: list of correct quantization lifecycle order + """ + return LIFECYCLE_ORDER + + def __ge__(self, other): + if other is None: + return True + if not isinstance(other, self.__class__): + raise NotImplementedError + return LIFECYCLE_ORDER.index(self) >= LIFECYCLE_ORDER.index(other) + + def __gt__(self, other): + if other is None: + return True + if not isinstance(other, self.__class__): + raise NotImplementedError + return LIFECYCLE_ORDER.index(self) > LIFECYCLE_ORDER.index(other) + + def __lt__(self, other): + if other is None: + return False + if not isinstance(other, self.__class__): + raise NotImplementedError + return LIFECYCLE_ORDER.index(self) < LIFECYCLE_ORDER.index(other) + + def __le__(self, other): + if other is None: + return False + if not isinstance(other, self.__class__): + raise NotImplementedError + return LIFECYCLE_ORDER.index(self) <= LIFECYCLE_ORDER.index(other) + + +LIFECYCLE_ORDER = [ + QuantizationStatus.INITIALIZED, + QuantizationStatus.CALIBRATION, + QuantizationStatus.FROZEN, + QuantizationStatus.COMPRESSED, +] + +DEFAULT_QUANTIZATION_METHOD = "compressed-tensors" +DEFAULT_QUANTIZATION_FORMAT = "fakequant" + + +class QuantizationConfig(BaseModel): + """ + Full configuration specifying how a model is quantized. Each quantized layer is + mapped to a QuantizationScheme in config_groups. + + :param config_groups: dict of QuantizationSchemes specifying the quantization + settings for each quantized layer. A group could also be a reference to + a predefined scheme name, mapped to a list of its target layers/classes + :param quant_method: a constant used to differentiate sparseML quantization from + other quantization configs + :param format: specifies how the quantized model is stored on disk + :param quantization_status: specifies the current status of all quantized layers. + It is assumed all layers are in the same state. + :param kv_cache_scheme: optional QuantizationArgs that specify the + quantization of the kv cache. If None, the kv cache is not quantized.
+ When applying kv cache quantization to transformer AutoModelForCausalLM, + the kv_cache_scheme gets converted into a QuantizationScheme that: + - targets the `k_proj` and `v_proj` modules of the model. The outputs + of those modules are the keys and values that might be cached + - quantizes the outputs of the aforementioned layers, so that + keys and values are compressed before storing them in the cache + There is an explicit assumption that the model contains modules with + `k_proj` and `v_proj` in their names. If this is not the case + and kv_cache_scheme is not None, the quantization of the kv cache will fail + :param global_compression_ratio: optional informational config to report the model + compression ratio achieved by the quantization config + :param ignore: optional list of layers to ignore from config_groups. Layers in + this list are not quantized even if they match up with a target in config_groups + """ + + config_groups: Dict[str, Union[QuantizationScheme, List[str]]] + quant_method: str = DEFAULT_QUANTIZATION_METHOD + kv_cache_scheme: Optional[QuantizationArgs] = None + format: str = DEFAULT_QUANTIZATION_FORMAT + quantization_status: QuantizationStatus = QuantizationStatus.INITIALIZED + global_compression_ratio: Optional[float] = None + ignore: Optional[List[str]] = Field(default_factory=list) + + def model_post_init(self, __context): + """ + updates any quantization schemes defined as presets to be fully loaded + schemes + """ + for group_name, targets_or_scheme in self.config_groups.items(): + if isinstance(targets_or_scheme, QuantizationScheme): + continue  # scheme already defined + self.config_groups[group_name] = preset_name_to_scheme( + name=group_name, + targets=targets_or_scheme, + ) + + def to_dict(self): + # for compatibility with HFQuantizer + return self.model_dump() + + @staticmethod + def from_pretrained( + model: Module, format: Optional[str] = None + ) -> Optional["QuantizationConfig"]: + """ + Converts a model into its associated QuantizationConfig based on the + QuantizationScheme attached to each quantized module + + :param model: model to calculate quantization scheme of + :return: filled out QuantizationConfig for the input model + """ + quant_scheme_to_layers = [] + quantization_status = None + ignore = {} + quantization_type_names = set() + for name, submodule in iter_named_quantizable_modules( + model, include_children=True, include_attn=True + ): + layer_type = module_type(submodule) + if not is_module_quantized(submodule): + if layer_type not in ignore: + ignore[layer_type] = [] + ignore[layer_type].append(name) + else: + quantization_status = submodule.quantization_status + scheme = submodule.quantization_scheme + quantization_type_names.add(layer_type) + + match_found = False + for existing_scheme in quant_scheme_to_layers: + if scheme == existing_scheme: + match_found = True + break + if not match_found: + quant_scheme_to_layers.append(scheme) + + if len(quant_scheme_to_layers) == 0:  # No quantized layers + return None + + # kv-cache only, no weight/activation quantization + if ( + len(quantization_type_names) == 1 + and "attention" in list(quantization_type_names)[0].lower() + ): + quantization_type_names.add("Linear") + + # clean up ignore list, we can leave out layer types if none of the + # instances are quantized + consolidated_ignore = [] + for layer_type, ignore_names in ignore.items(): + if layer_type in quantization_type_names: + # specific layers of a quantized type are ignored + consolidated_ignore += ignore_names + # else we leave it off the ignore list, doesn't fall under any of the + # existing quantization schemes so it won't be quantized + + kv_cache_args, quant_scheme_to_layers = parse_out_kv_cache_args( + quant_scheme_to_layers + ) + kv_cache_scheme = ( + kv_cache_args.model_dump() if kv_cache_args is not None else kv_cache_args + ) + + config_groups = {} + for idx, scheme in enumerate(quant_scheme_to_layers): + group_name = "group_" + str(idx) + config_groups[group_name] = scheme + + # TODO: this is incorrect in compressed mode, since we are overwriting the + # original weight we lose the uncompressed bit_depth info + compression_ratio = calculate_compression_ratio(model) + + if format is None: + if quantization_status == QuantizationStatus.COMPRESSED: + format = CompressionFormat.int_quantized.value + else: + format = CompressionFormat.dense.value + + return QuantizationConfig( + config_groups=config_groups, + quantization_status=quantization_status, + kv_cache_scheme=kv_cache_scheme, + global_compression_ratio=compression_ratio, + format=format, + ignore=consolidated_ignore, + ) + + def requires_calibration_data(self): + if self.kv_cache_scheme is not None: + return True + + for _, scheme in self.config_groups.items(): + if scheme.input_activations is not None: + if not scheme.input_activations.dynamic: + return True + if scheme.output_activations is not None: + if not scheme.output_activations.dynamic: + return True + + return False diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_scheme.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_scheme.py new file mode 100644 index 0000000000000000000000000000000000000000..36b886044565ad3b36adb47a8c7ab5a9c081adb1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/quant_scheme.py @@ -0,0 +1,215 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
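A sketch of building a config whose group entry is a preset reference rather than a full scheme, assuming the preset names defined in quant_scheme.py (targets illustrative):

```python
from compressed_tensors.quantization.quant_config import QuantizationConfig

config = QuantizationConfig(
    config_groups={"W4A16": ["Linear"]},  # preset name mapped to its target list
    ignore=["lm_head"],
)

# model_post_init expands the preset reference into a full QuantizationScheme
scheme = config.config_groups["W4A16"]
print(scheme.weights.num_bits)             # 4
print(config.requires_calibration_data())  # False: weights-only, no static activations
```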
+ +from copy import deepcopy +from typing import Any, Dict, List, Optional + +from compressed_tensors.quantization.quant_args import ( + QuantizationArgs, + QuantizationStrategy, + QuantizationType, +) +from pydantic import BaseModel, model_validator + + +__all__ = [ + "QuantizationScheme", + "preset_name_to_scheme", + "is_preset_scheme", +] + + +class QuantizationScheme(BaseModel): + """ + Set of QuantizationArgs defining how the weights, inputs and outputs of a target + list of modules should be quantized + + :param targets: list of modules to apply the QuantizationArgs to, can be layer + names, layer types or a regular expression, typically ["Linear"] + :param weights: quantization config for layer weights + :param input_activations: quantization config for layer inputs + :param output_activations: quantization config for layer outputs + """ + + targets: List[str] + weights: Optional[QuantizationArgs] = None + input_activations: Optional[QuantizationArgs] = None + output_activations: Optional[QuantizationArgs] = None + + @model_validator(mode="after") + def validate_model_after(model: "QuantizationScheme") -> "QuantizationScheme": + inputs = model.input_activations + outputs = model.output_activations + + if inputs is not None: + if inputs.actorder is not None: + raise ValueError("Cannot apply actorder to input activations") + + if outputs is not None: + if outputs.actorder is not None: + raise ValueError("Cannot apply actorder to output activations") + + return model + + +""" +Pre-Set Quantization Scheme Args +""" + + +def preset_name_to_scheme(name: str, targets: List[str]) -> QuantizationScheme: + """ + :param name: preset quantization settings name. must exist in upper case in + PRESET_SCHEMES + :param targets: list of quantization targets to be passed to the Scheme + :return: new QuantizationScheme for a given name with the given targets + """ + name = name.upper() + + if name not in PRESET_SCHEMES: + raise KeyError( + f"Unknown preset scheme name {name}, " + f"available names: {list(PRESET_SCHEMES.keys())}" + ) + + scheme_args = deepcopy(PRESET_SCHEMES[name])  # deepcopy to avoid args references + return QuantizationScheme( + targets=targets, + **scheme_args, + ) + + +def is_preset_scheme(name: str) -> bool: + """ + :param name: preset quantization settings name + :return: True if the name is a preset scheme name + """ + return name.upper() in PRESET_SCHEMES + + +UNQUANTIZED = dict() + +# 8 bit integer weights and 8 bit activations quantization +INT8_W8A8 = dict( + weights=QuantizationArgs( + num_bits=8, + type=QuantizationType.INT, + strategy=QuantizationStrategy.CHANNEL, + symmetric=True, + dynamic=False, + ), + input_activations=QuantizationArgs( + num_bits=8, + type=QuantizationType.INT, + strategy=QuantizationStrategy.TOKEN, + symmetric=True, + dynamic=True, + observer=None, + ), +) + +# 8 bit integer weights only quantization +W8A16 = dict( + weights=QuantizationArgs( + num_bits=8, + type=QuantizationType.INT, + strategy=QuantizationStrategy.CHANNEL, + symmetric=True, + dynamic=False, + ), +) + +# 4 bit integer weights only quantization +W4A16 = dict( + weights=QuantizationArgs( + num_bits=4, + type=QuantizationType.INT, + strategy=QuantizationStrategy.GROUP, + group_size=128, + symmetric=True, + dynamic=False, + ), +) + +# 4 bit integer weights and 8 bit activations quantization +INT8_W4A8 = dict( + weights=QuantizationArgs( + num_bits=4, + type=QuantizationType.INT, + group_size=128, + strategy=QuantizationStrategy.GROUP, + symmetric=True, + dynamic=False, + ), +
input_activations=QuantizationArgs( + num_bits=8, + type=QuantizationType.INT, + strategy=QuantizationStrategy.TOKEN, + symmetric=True, + dynamic=True, + observer=None, + ), +) + +# FP8 weights and FP8 activations quantization +FP8 = dict( + weights=QuantizationArgs( + num_bits=8, + type=QuantizationType.FLOAT, + strategy=QuantizationStrategy.TENSOR, + symmetric=True, + dynamic=False, + ), + input_activations=QuantizationArgs( + num_bits=8, + type=QuantizationType.FLOAT, + strategy=QuantizationStrategy.TENSOR, + symmetric=True, + dynamic=False, + ), +) + +# FP8 weights and FP8 dynamic activations quantization +FP8_DYNAMIC = dict( + weights=QuantizationArgs( + num_bits=8, + type=QuantizationType.FLOAT, + strategy=QuantizationStrategy.CHANNEL, + symmetric=True, + dynamic=False, + ), + input_activations=QuantizationArgs( + num_bits=8, + type=QuantizationType.FLOAT, + strategy=QuantizationStrategy.TOKEN, + symmetric=True, + dynamic=True, + observer=None, + ), +) + +PRESET_SCHEMES = { + # Unquantized (no-op) + "UNQUANTIZED": UNQUANTIZED, + # Integer weight only schemes + "W8A16": W8A16, + "W4A16": W4A16, + # Integer weight and activation schemes + "W8A8": INT8_W8A8, + "INT8": INT8_W8A8, # alias for W8A8 + "W4A8": INT8_W4A8, + # Float weight and activation schemes + "FP8": FP8, + "FP8_DYNAMIC": FP8_DYNAMIC, +} diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a91f9e5d2cd34e36262d83ed5772724fb4181781 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
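The preset table above can be queried directly; a brief sketch (targets illustrative):

```python
from compressed_tensors.quantization.quant_scheme import (
    is_preset_scheme,
    preset_name_to_scheme,
)

assert is_preset_scheme("fp8_dynamic")   # names are upper-cased before lookup

scheme = preset_name_to_scheme("FP8_DYNAMIC", targets=["Linear"])
print(scheme.weights.strategy)           # channel-wise FP8 weights
print(scheme.input_activations.dynamic)  # True: scales computed at runtime
```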
+ +# flake8: noqa +from .helpers import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b7eb76ec4677c03f452e20c5a53b80bcc2e5f13 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16e706954b198e7c2d7a37040afeea217f507da2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/helpers.py b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..9f65ee33030fca305a178724b7cb0744b5700520 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/quantization/utils/helpers.py @@ -0,0 +1,392 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
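As a preview of the helpers defined just below, a sketch of deriving quantization parameters on the fly for a dynamic per-token scheme (shapes illustrative):

```python
import torch

from compressed_tensors.quantization.quant_args import (
    QuantizationArgs,
    QuantizationStrategy,
)
from compressed_tensors.quantization.utils import compute_dynamic_scales_and_zp

args = QuantizationArgs(num_bits=8, dynamic=True, strategy=QuantizationStrategy.TOKEN)
activations = torch.randn(2, 16, 64)  # (batch, sequence, hidden)

scale, zero_point = compute_dynamic_scales_and_zp(activations, args)
# scale and zero_point keep reduced dimensions at size 1, so they
# broadcast against `activations` inside fake_quantize
```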
+ +import logging +from typing import Generator, List, Optional, Tuple + +import torch +from compressed_tensors.quantization.quant_args import ( + FP8_DTYPE, + QuantizationArgs, + QuantizationStrategy, + QuantizationType, +) +from compressed_tensors.quantization.quant_scheme import QuantizationScheme +from torch import FloatTensor, IntTensor, Tensor +from torch.nn import Module +from tqdm import tqdm + + +__all__ = [ + "infer_quantization_status", + "is_module_quantized", + "is_model_quantized", + "module_type", + "calculate_compression_ratio", + "get_torch_bit_depth", + "can_quantize", + "parse_out_kv_cache_args", + "KV_CACHE_TARGETS", + "is_kv_cache_quant_scheme", + "iter_named_leaf_modules", + "iter_named_quantizable_modules", + "compute_dynamic_scales_and_zp", + "calculate_range", + "calculate_qparams", +] + +# target the self_attn layer +# QuantizedKVParameterCache is responsible for obtaining the k_scale and v_scale +KV_CACHE_TARGETS = ["re:.*self_attn$"] + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +def calculate_qparams( + min_vals: Tensor, max_vals: Tensor, quantization_args: QuantizationArgs +) -> Tuple[FloatTensor, IntTensor]: + """ + :param min_vals: tensor of min value(s) to calculate scale(s) and zero point(s) + from + :param max_vals: tensor of max value(s) to calculate scale(s) and zero point(s) + from + :param quantization_args: quantization settings to use + :return: tuple of the calculated scale(s) and zero point(s) + """ + min_vals = torch.min(min_vals, torch.zeros_like(min_vals)) + max_vals = torch.max(max_vals, torch.zeros_like(max_vals)) + device = min_vals.device + + bit_min, bit_max = calculate_range(quantization_args, device) + bit_range = bit_max - bit_min + zp_dtype = quantization_args.pytorch_dtype() + + if quantization_args.symmetric: + max_val_pos = torch.max(torch.abs(min_vals), torch.abs(max_vals)) + scales = max_val_pos / (float(bit_range) / 2) + scales = torch.clamp(scales, min=torch.finfo(torch.float32).eps) + zero_points = torch.zeros(scales.shape, device=device, dtype=min_vals.dtype) + else: + scales = (max_vals - min_vals) / float(bit_range) + scales = torch.clamp(scales, min=torch.finfo(torch.float32).eps) + zero_points = bit_min - (min_vals / scales) + zero_points = torch.clamp(zero_points, bit_min, bit_max) + + # match zero-points to quantized type + zero_points = zero_points.to(zp_dtype) + + if scales.ndim == 0: + scales = scales.reshape(1) + zero_points = zero_points.reshape(1) + + return scales, zero_points + + +def compute_dynamic_scales_and_zp(value: Tensor, args: QuantizationArgs): + """ + Returns the computed scales and zero points for dynamic activation + quantization. + + :param value: tensor to calculate quantization parameters for + :param args: quantization args + :return: tuple of scale and zero point derived from the observed tensor; + both are kept at size 1 along the reduced dimensions + """ + if args.strategy == QuantizationStrategy.TOKEN: + dim = {1, 2} + reduce_dims = tuple(idx for idx in range(value.ndim) if idx not in dim) + elif args.strategy == QuantizationStrategy.TENSOR: + reduce_dims = None + else: + raise ValueError( + f"One of {QuantizationStrategy.TOKEN} or {QuantizationStrategy.TENSOR} " + "must be used for dynamic quantization" + ) + + if not reduce_dims: + min_val, max_val = torch.aminmax(value) + else: + min_val = torch.amin(value, dim=reduce_dims, keepdims=True) + max_val = torch.amax(value, dim=reduce_dims, keepdims=True) + + return calculate_qparams(min_val, max_val, args) + + +def calculate_range(quantization_args: QuantizationArgs, device: str) -> Tuple: + """ + Calculates the effective quantization range for the given QuantizationArgs + + :param quantization_args: quantization args to get range of + :param device: device to place the range tensors on + :return: tuple of endpoints for the given quantization range + """ + if quantization_args.type == QuantizationType.INT: + bit_range = 2**quantization_args.num_bits + q_max = torch.tensor(bit_range / 2 - 1, device=device) + q_min = torch.tensor(-bit_range / 2, device=device) + elif quantization_args.type == QuantizationType.FLOAT: + if quantization_args.num_bits != 8: + raise ValueError( + "Floating point quantization is only supported for 8 bits, " + f"got {quantization_args.num_bits}" + ) + fp_range_info = torch.finfo(FP8_DTYPE) + q_max = torch.tensor(fp_range_info.max, device=device) + q_min = torch.tensor(fp_range_info.min, device=device) + else: + raise ValueError(f"Invalid quantization type {quantization_args.type}") + + return q_min, q_max + + +def infer_quantization_status(model: Module) -> Optional["QuantizationStatus"]:  # noqa + """ + Checks the quantization status of a model. Assumes all modules in the model have + the same status, so only the first module with a quantization status is checked.
+ + :param model: model to check quantization status for + :return: quantization status if the model is quantized, otherwise None + """ + for module in model.modules(): + status = getattr(module, "quantization_status", None) + if status is not None: + return status + return None + + +def is_module_quantized(module: Module) -> bool: + """ + Check if a module is quantized, based on the existence of a non-empty quantization + scheme + + :param module: pytorch module to check + :return: True if module is quantized, False otherwise + """ + if not hasattr(module, "quantization_scheme"): + return False + + if module.quantization_scheme.weights is not None: + return True + + if module.quantization_scheme.input_activations is not None: + return True + + if module.quantization_scheme.output_activations is not None: + return True + + return False + + +def is_model_quantized(model: Module) -> bool: + """ + Check if any modules in a model are quantized, based on the existence of a non-empty + quantization scheme in at least one module + + :param model: pytorch model + :return: True if model is quantized, False otherwise + """ + + for _, submodule in iter_named_leaf_modules(model): + if is_module_quantized(submodule): + return True + + return False + + +def module_type(module: Module) -> str: + """ + Gets a string representation of a module type + + :param module: pytorch module to get type of + :return: module type as a string + """ + return type(module).__name__ + + +def iter_named_leaf_modules(model: Module) -> Generator[Tuple[str, Module], None, None]: + """ + Yields modules that do not have any submodules except observers. The observers + themselves are not yielded + :param model: model to get leaf modules of + :returns: generator tuple of (name, leaf_submodule) + """ + for name, submodule in model.named_modules(): + children = list(submodule.children()) + # TODO: verify if an observer would ever be attached in this case/remove check + if len(children) == 0 and "observer" not in name: + yield name, submodule + else: + if len(children) > 0: + named_children, children = zip(*list(submodule.named_children())) + has_non_observer_children = False + for i in range(len(children)): + child_name = named_children[i] + + if "observer" not in child_name: + has_non_observer_children = True + + if not has_non_observer_children: + yield name, submodule + + +def iter_named_quantizable_modules( + model: Module, include_children: bool = True, include_attn: bool = False +) -> Generator[Tuple[str, Module], None, None]: + """ + Yield name and submodule of + - leaf modules, set by include_children + - attention modules, set by include_attn + + :param model: model to get leaf modules of + :param include_children: flag to get the leaf modules + :param include_attn: flag to get the attention modules + :returns: generator tuple of (name, submodule) + """ + for name, submodule in model.named_modules(): + # TODO: verify if an observer would ever be attached in this case/remove check + if include_children: + children = list(submodule.children()) + if len(children) == 0 and "observer" not in name: + yield name, submodule + else: + if len(children) > 0: + named_children, children = zip(*list(submodule.named_children())) + has_non_observer_children = False + for i in range(len(children)): + child_name = named_children[i] + + if "observer" not in child_name: + has_non_observer_children = True + + if not has_non_observer_children: + yield name, submodule + if include_attn: + if name.endswith("self_attn"): + yield name, submodule + + +def get_torch_bit_depth(value: torch.Tensor) -> int: + """ + Determine the number of bits used to represent the dtype of a tensor + + :param value: tensor to check bit depth of + :return: bit depth of each element in the value tensor + """ + try: + bit_depth = torch.finfo(value.dtype).bits + except TypeError: + bit_depth = torch.iinfo(value.dtype).bits + + return bit_depth + + +def can_quantize(value: torch.Tensor, quant_args: "QuantizationArgs") -> bool:  # noqa + """ + Checks if value can be quantized by quant_args. + + :param value: tensor to check for quantization + :param quant_args: QuantizationArgs to use for quantization + :return: False if value is already quantized to quant_args or value is incompatible + with quant_args, True if value can be quantized with quant_args + """ + bit_depth = get_torch_bit_depth(value) + requested_depth = quant_args.num_bits + if bit_depth < quant_args.num_bits: + _LOGGER.warning( + f"Can't quantize tensor with bit depth {bit_depth} to {requested_depth}. " + "The QuantizationArgs provided are not compatible with the input tensor." + ) + + return bit_depth > quant_args.num_bits + + +def calculate_compression_ratio(model: Module) -> float: + """ + Calculates the quantization compression ratio of a pytorch model, based on the + number of bits needed to represent the total weights in compressed form. Does not + take into account activation quantizations. + + :param model: pytorch module to calculate compression ratio for + :return: compression ratio of the whole model + """ + total_compressed = 0.0 + total_uncompressed = 0.0 + for name, submodule in tqdm( + iter_named_leaf_modules(model), + desc="Calculating quantization compression ratio", + ): + for parameter in submodule.parameters(): + uncompressed_bits = get_torch_bit_depth(parameter) + compressed_bits = uncompressed_bits + if is_module_quantized(submodule) and submodule.quantization_scheme.weights: + compressed_bits = submodule.quantization_scheme.weights.num_bits + + num_weights = parameter.numel() + total_compressed += compressed_bits * num_weights + total_uncompressed += uncompressed_bits * num_weights + + return total_uncompressed / total_compressed + + +def is_kv_cache_quant_scheme(scheme: QuantizationScheme) -> bool: + """ + Check whether the QuantizationScheme targets the kv cache. + It does if all the following criteria are met: + - the scheme targets either exactly match KV_CACHE_TARGETS + or match the KV_CACHE_TARGETS regex pattern + - the scheme quantizes output_activations (we want to quantize the + outputs from the KV_CACHE_TARGETS, as they correspond to the + keys and values that are to be saved in the cache) + + :param scheme: The QuantizationScheme to investigate + :return: boolean flag + """ + for target in scheme.targets: + if target in KV_CACHE_TARGETS: + return True + + return False + + +def parse_out_kv_cache_args( + quant_scheme_to_layers: List[QuantizationScheme], +) -> Tuple[Optional[QuantizationArgs], List[QuantizationScheme]]: + """ + If possible, parse out the kv cache specific QuantizationArgs + from the list of the QuantizationSchemes.
If no kv cache + specific QuantizationArgs available, this function acts + as an identity function + + :param quant_scheme_to_layers: list of QuantizationSchemes + :return: kv_cache_args (optional) and the (remaining or original) + list of the QuantizationSchemes + """ + kv_cache_quant_scheme_to_layers = [ + scheme for scheme in quant_scheme_to_layers if is_kv_cache_quant_scheme(scheme) + ] + quant_scheme_to_layers = [ + scheme + for scheme in quant_scheme_to_layers + if not is_kv_cache_quant_scheme(scheme) + ] + + if kv_cache_quant_scheme_to_layers: + kv_cache_quant_scheme_to_layers = kv_cache_quant_scheme_to_layers[0] + kv_cache_args = kv_cache_quant_scheme_to_layers.output_activations + else: + kv_cache_args = None + + return kv_cache_args, quant_scheme_to_layers diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/registry/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..241d9d55ebac91d271c09ec0c003f1abedb3fb00 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__init__.py @@ -0,0 +1,17 @@ +# flake8: noqa + +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .registry import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51cbae32dcec4e8ddb4934cf36400581c2aa3215 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/registry.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/registry.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..752908a4f7d435980afb0dc6f5836563ef148df6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/registry/__pycache__/registry.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/registry/registry.py b/venv/lib/python3.10/site-packages/compressed_tensors/registry/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..76026313aea054dbb32f3d9044aab190bc153053 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/registry/registry.py @@ -0,0 +1,360 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Universal registry to support registration and loading of child classes and plugins +of neuralmagic utilities +""" + +import importlib +from collections import defaultdict +from typing import Any, Dict, List, Optional, Type, Union + + +__all__ = [ + "RegistryMixin", + "register", + "get_from_registry", + "registered_names", + "registered_aliases", + "standardize_lookup_name", +] + + +_ALIAS_REGISTRY: Dict[Type, Dict[str, str]] = defaultdict(dict) +_REGISTRY: Dict[Type, Dict[str, Any]] = defaultdict(dict) + + +def standardize_lookup_name(name: str) -> str: + """ + Standardize the given name for lookup in the registry. + This will replace all underscores and spaces with hyphens and + convert the name to lowercase. + + example: + ``` + standardize_lookup_name("Foo_bar baz") == "foo-bar-baz" + ``` + + :param name: name to standardize + :return: standardized name + """ + return name.replace("_", "-").replace(" ", "-").lower() + + +def standardize_alias_name( + name: Union[None, str, List[str]] +) -> Union[None, str, List[str]]: + if name is None: + return None + elif isinstance(name, str): + return standardize_lookup_name(name) + else: # isinstance(name, list) + return [standardize_lookup_name(n) for n in name] + + +class RegistryMixin: + """ + Universal registry to support registration and loading of child classes and plugins + of neuralmagic utilities. + + Classes that require a registry or plugins may add the `RegistryMixin` and use + `register` and `load` as the main entrypoints for adding new implementations and + loading requested values from its registry. + + If a class should only have its child classes in its registry, the class should + set the static attribute `registry_requires_subclass` to True + + example + ```python + class Dataset(RegistryMixin): + pass + + + # register with default name + @Dataset.register() + class ImageNetDataset(Dataset): + pass + + # load as "ImageNetDataset" + imagenet = Dataset.load("ImageNetDataset") + + # register with custom name + @Dataset.register(name="cifar-dataset") + class Cifar(Dataset): + pass + + Note: the name will be standardized for lookup in the registry. + For example, if a class is registered as "cifar_dataset" or + "cifar dataset", it will be stored as "cifar-dataset". The user + will be able to load the class with any of the three name variants. 
+ + # register with multiple aliases + @Dataset.register(alias=["cifar-10-dataset", "cifar_100_dataset"]) + class Cifar(Dataset): + pass + + # load as "cifar-dataset" + cifar = Dataset.load_from_registry("cifar-dataset") + + # load from custom file that implements a dataset + mnist = Dataset.load_from_registry("/path/to/mnist_dataset.py:MnistDataset") + ``` + """ + + # set to True in child class to add check that registered/retrieved values + # implement the class it is registered to + registry_requires_subclass: bool = False + + @classmethod + def register( + cls, name: Optional[str] = None, alias: Union[List[str], str, None] = None + ): + """ + Decorator for registering a value (i.e. a class or function) wrapped by this + decorator to the base class (class that .register is called from) + + :param name: name to register the wrapped value as, + defaults to value.__name__ + :param alias: alias or list of aliases to register the wrapped value as, + defaults to None + :return: register decorator + """ + + def decorator(value: Any): + cls.register_value(value, name=name, alias=alias) + return value + + return decorator + + @classmethod + def register_value( + cls, value: Any, name: Optional[str] = None, alias: Union[str, List[str], None] = None + ): + """ + Registers the given value to the class `.register_value` is called from + + :param value: value to register + :param name: name to register the wrapped value as, + defaults to value.__name__ + :param alias: alias or list of aliases to register the wrapped value as, + defaults to None + """ + register( + parent_class=cls, + value=value, + name=name, + alias=alias, + require_subclass=cls.registry_requires_subclass, + ) + + @classmethod + def load_from_registry(cls, name: str, **constructor_kwargs) -> object: + """ + :param name: name of registered class to load + :param constructor_kwargs: arguments to pass to the constructor retrieved + from the registry + :return: loaded object registered to this class under the given name, + constructed with the given kwargs.
Raises error if the name is + not found in the registry + """ + constructor = cls.get_value_from_registry(name=name) + return constructor(**constructor_kwargs) + + @classmethod + def get_value_from_registry(cls, name: str): + """ + :param name: name to retrieve from the registry + :return: value retrieved from the registry for the given name, raises + error if not found + """ + return get_from_registry( + parent_class=cls, + name=name, + require_subclass=cls.registry_requires_subclass, + ) + + @classmethod + def registered_names(cls) -> List[str]: + """ + :return: list of all names registered to this class + """ + return registered_names(cls) + + @classmethod + def registered_aliases(cls) -> List[str]: + """ + :return: list of all aliases registered to this class + """ + return registered_aliases(cls) + + +def register( + parent_class: Type, + value: Any, + name: Optional[str] = None, + alias: Union[List[str], str, None] = None, + require_subclass: bool = False, +): + """ + :param parent_class: class to register the name under + :param value: the value to register + :param name: name to register the wrapped value as, defaults to value.__name__ + :param alias: alias or list of aliases to register the wrapped value as, + defaults to None + :param require_subclass: require that value is a subclass of the class this + method is called from + """ + if name is None: + # default name + name = value.__name__ + + name = standardize_lookup_name(name) + alias = standardize_alias_name(alias) + register_alias(name=name, alias=alias, parent_class=parent_class) + + if require_subclass: + _validate_subclass(parent_class, value) + + if name in _REGISTRY[parent_class]: + # name already exists - raise error if two different values are attempting + # to share the same name + registered_value = _REGISTRY[parent_class][name] + if registered_value is not value: + raise RuntimeError( + f"Attempting to register name {name} as {value} " + f"however {name} has already been registered as {registered_value}" + ) + else: + _REGISTRY[parent_class][name] = value + + +def get_from_registry( + parent_class: Type, name: str, require_subclass: bool = False +) -> Any: + """ + :param parent_class: class that the name is registered under + :param name: name to retrieve from the registry of the class + :param require_subclass: require that value is a subclass of the class this + method is called from + :return: value retrieved from the registry for the given name, raises + error if not found + """ + name = standardize_lookup_name(name) + + if ":" in name: + # user specifying specific module to load and value to import + module_path, value_name = name.split(":") + retrieved_value = _import_and_get_value_from_module(module_path, value_name) + else: + # look up name in alias registry + name = _ALIAS_REGISTRY[parent_class].get(name, name) + # look up name in registry + retrieved_value = _REGISTRY[parent_class].get(name) + if retrieved_value is None: + raise KeyError( + f"Unable to find {name} registered under type {parent_class}.\n" + f"Registered values for {parent_class}: " + f"{registered_names(parent_class)}\n" + f"Registered aliases for {parent_class}: " + f"{registered_aliases(parent_class)}" + ) + + if require_subclass: + _validate_subclass(parent_class, retrieved_value) + + return retrieved_value + + +def registered_names(parent_class: Type) -> List[str]: + """ + :param parent_class: class to look up the registry of + :return: all names registered to the given class + """ + return list(_REGISTRY[parent_class].keys()) + + 
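A minimal usage sketch of the registration flow defined above; the `TextDataset` and `WikiCorpus` names are hypothetical, and running it assumes `compressed_tensors` is importable:

```python
from compressed_tensors.registry import RegistryMixin


class TextDataset(RegistryMixin):
    """Hypothetical base class; mixing in RegistryMixin gives it a registry."""


# stored under the standardized name "wiki-corpus", plus one alias
@TextDataset.register(name="wiki_corpus", alias=["wiki"])
class WikiCorpus(TextDataset):
    def __init__(self, split: str = "train"):
        self.split = split


# lookups are standardized too, so case/underscore/space variants all resolve
dataset = TextDataset.load_from_registry("Wiki Corpus", split="validation")
assert isinstance(dataset, WikiCorpus) and dataset.split == "validation"

# aliases resolve through _ALIAS_REGISTRY to the canonical name
assert isinstance(TextDataset.load_from_registry("wiki"), WikiCorpus)

assert TextDataset.registered_names() == ["wiki-corpus"]
assert TextDataset.registered_aliases() == ["wiki"]
```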
+def registered_aliases(parent_class: Type) -> List[str]: + """ + :param parent_class: class to look up the registry of + :return: all aliases registered to the given class + """ + registered_aliases_plus_names = list(_ALIAS_REGISTRY[parent_class].keys()) + registered_aliases = list( + set(registered_aliases_plus_names) - set(registered_names(parent_class)) + ) + return registered_aliases + + +def register_alias( + name: str, parent_class: Type, alias: Union[str, List[str], None] = None +): + """ + Updates the mapping from the alias(es) to the given name. + If the alias is None, the name is used as the alias. + + :param name: name that the alias refers to + :param parent_class: class that the name is registered under + :param alias: single alias or list of aliases that + refer to the name, defaults to None + """ + if alias is not None: + alias = alias if isinstance(alias, list) else [alias] + else: + alias = [] + + if name in alias: + raise KeyError( + f"Attempting to register alias {name}, " + f"which is identical to the standardized name: {name}." + ) + alias.append(name) + + for alias_name in alias: + if alias_name in _ALIAS_REGISTRY[parent_class]: + raise KeyError( + f"Attempting to register alias {alias_name} as {name} " + f"however {alias_name} has already been registered as " + f"{_ALIAS_REGISTRY[parent_class][alias_name]}" + ) + _ALIAS_REGISTRY[parent_class][alias_name] = name + + +def _import_and_get_value_from_module(module_path: str, value_name: str) -> Any: + # import the given module path and try to get the value_name if it is included + # in the module + + # load module + spec = importlib.util.spec_from_file_location( + f"plugin_module_for_{value_name}", module_path + ) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + # get value from module + value = getattr(module, value_name, None) + + # explicit None check so falsy values (e.g. 0 or "") can still be retrieved + if value is None: + raise RuntimeError( + f"Unable to find attribute {value_name} in module {module_path}" + ) + return value + + +def _validate_subclass(parent_class: Type, child_class: Type): + if not issubclass(child_class, parent_class): + raise ValueError( + f"class {child_class} is not a subclass of the class it is " + f"registered for: {parent_class}." + ) diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__init__.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..976d55f789b453db0e60ead84acfcbf54c17ba54 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
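The `"/path/to/module.py:AttrName"` lookup form is backed by `_import_and_get_value_from_module`. A self-contained sketch of the same `importlib.util` pattern, written against a temporary file so it runs anywhere (module and class names invented):

```python
import importlib.util
import tempfile
from pathlib import Path

# write a tiny "plugin" module to disk
plugin_path = Path(tempfile.mkdtemp()) / "my_plugin.py"
plugin_path.write_text("class MyPlugin:\n    def greet(self):\n        return 'hi'\n")

# the same load sequence used by _import_and_get_value_from_module
spec = importlib.util.spec_from_file_location(
    "plugin_module_for_MyPlugin", str(plugin_path)
)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

plugin_cls = getattr(module, "MyPlugin", None)
assert plugin_cls is not None
print(plugin_cls().greet())  # hi
```

One caveat visible in `get_from_registry` above: the full lookup string is passed through `standardize_lookup_name` before the `":"` split, so the module path is lowercased and underscores become hyphens; all-lowercase, hyphenated paths are the safe form.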
+# flake8: noqa + +from .helpers import * +from .offload import * +from .permutations_24 import * +from .permute import * +from .safetensors_load import * +from .semi_structured_conversions import * diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f6b1a7a6ac8647ff74e69de67393c97f794236a Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/helpers.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cfed19448e793be7fe74b12d8c4a4d7e95696694 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/helpers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/offload.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/offload.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50308d8b8f49597ad61056370742d265257c3931 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/offload.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permutations_24.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permutations_24.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79521f05e107520c8e5d65f418955cae849f66ed Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permutations_24.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permute.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permute.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b160ebbc6bd34130e5964e85ff711b74b459cfe7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/permute.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/safetensors_load.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/safetensors_load.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10408c705ab41ae764c683d7571f3b6e7ef6976d Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/safetensors_load.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/semi_structured_conversions.cpython-310.pyc b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/semi_structured_conversions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a168d10b8a4bc1fde168f100e936bd041802b13 Binary files /dev/null and b/venv/lib/python3.10/site-packages/compressed_tensors/utils/__pycache__/semi_structured_conversions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/helpers.py 
b/venv/lib/python3.10/site-packages/compressed_tensors/utils/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..a842d00eafdccbd30bb84b9081b4fc0910505d17 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/helpers.py @@ -0,0 +1,330 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import warnings +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional + +import numpy +import torch +from transformers import AutoConfig + + +if TYPE_CHECKING: + from compressed_tensors.compressors import ModelCompressor + + +__all__ = [ + "infer_compressor_from_model_config", + "fix_fsdp_module_name", + "tensor_follows_mask_structure", + "replace_module", + "is_compressed_tensors_config", + "getattr_chain", + "deprecated", + "Aliasable", + "combine_shards", + "shard_tensor", + "pack_bitmasks", + "unpack_bitmasks", +] + +FSDP_WRAPPER_NAME = "_fsdp_wrapped_module" + + +def infer_compressor_from_model_config( + pretrained_model_name_or_path: str, +) -> Optional["ModelCompressor"]: # noqa: F821 + """ + Given a path to a model config, extract a sparsity config if it exists and return + the associated ModelCompressor + + :param pretrained_model_name_or_path: path to model config on disk or HF hub + :return: matching compressor if config contains a sparsity config + """ + from compressed_tensors.compressors import ModelCompressor + from compressed_tensors.config import CompressionConfig + + config = AutoConfig.from_pretrained(pretrained_model_name_or_path) + sparsity_config = ModelCompressor.parse_sparsity_config(config) + if sparsity_config is None: + return None + + format = sparsity_config.get("format") + sparsity_config = CompressionConfig.load_from_registry(format, **sparsity_config) + compressor = ModelCompressor.load_from_registry(format, config=sparsity_config) + return compressor + + +# TODO: There is already the same function in +# SparseML, should be moved to a shared location +# in the future +def fix_fsdp_module_name(name: str) -> str: + """ + Remove FSDP wrapper prefixes from a module name. + Accounts for the scenario where FSDP_WRAPPER_NAME is + at the end of the name, as well as in the middle. + + :param name: name to strip + :return: stripped name + """ + return name.replace(FSDP_WRAPPER_NAME + ".", "").replace( + "." + FSDP_WRAPPER_NAME, "" + ) + + +def tensor_follows_mask_structure(tensor: torch.Tensor, mask: str = "2:4") -> bool: + """ + :param tensor: tensor to check + :param mask: mask structure to check for, in the format "n:m" + :return: True if the tensor follows the mask structure, + raises a ValueError otherwise.
+ Note, some weights can incidentally be zero, so we check for + at least n zeros in each chunk of size m + """ + + n, m = tuple(map(int, mask.split(":"))) + # Reshape the tensor into chunks of size m + tensor = tensor.view(-1, m) + + # Count the number of zeros in each chunk + zero_counts = (tensor == 0).sum(dim=1) + + # Check that the number of zeros in each chunk is at least n; >= is used + # because some weights can incidentally be zero + if not torch.all(zero_counts >= n).item(): + raise ValueError(f"Tensor does not follow the {mask} mask structure") + + return True + + +def replace_module(model: torch.nn.Module, name: str, new_module: torch.nn.Module): + if "." in name: + parent_name = name.rsplit(".", 1)[0] + child_name = name[len(parent_name) + 1 :] + parent = model.get_submodule(parent_name) + else: + parent_name = "" + parent = model + child_name = name + setattr(parent, child_name, new_module) + + +def is_compressed_tensors_config(compression_config: Any) -> bool: + """ + Returns True if CompressedTensorsConfig is available from transformers and + compression_config is an instance of CompressedTensorsConfig + + See: https://github.com/huggingface/transformers/pull/31704 + """ + try: + from transformers.utils.quantization_config import CompressedTensorsConfig + + return isinstance(compression_config, CompressedTensorsConfig) + except ImportError: + return False + + +def getattr_chain(obj: Any, chain_str: str, *args, **kwargs) -> Any: + """ + Chain multiple getattr calls, separated by `.` + + :param obj: base object whose attributes are being retrieved + :param chain_str: attribute names separated by `.` + :param default: optional default value to return if an attribute in the + chain is missing; if no default is given, an AttributeError is raised + """ + if len(args) >= 1: + has_default = True + default = args[0] + elif "default" in kwargs: + has_default = True + default = kwargs["default"] + else: + has_default = False + + attr_names = chain_str.split(".") + + res = obj + for attr_name in attr_names: + if not hasattr(res, attr_name): + if has_default: + return default + else: + raise AttributeError(f"{res} object has no attribute {attr_name}") + res = getattr(res, attr_name) + + return res + + +def deprecated(future_name: Optional[str] = None, message: Optional[str] = None): + """ + Decorator to mark functions as deprecated + + :param future_name: name of the function to use in place of the deprecated one + :param message: Deprecation message, replaces default deprecation message + """ + + def decorator(func: Callable[[Any], Any]): + nonlocal message + + if message is None: + message = ( + f"{func.__name__} is deprecated and will be removed in a future release" + ) + if future_name is not None: + message += f". Please use {future_name} instead." + + @wraps(func) + def wrapped(*args, **kwargs): + warnings.warn(message, DeprecationWarning, stacklevel=2) + return func(*args, **kwargs) + + return wrapped + + return decorator + + +class Aliasable: + """ + A mixin for enums to allow aliasing of enum members + + Example: + >>> class MyClass(Aliasable, int, Enum): + >>> ...
+ + """ + + @staticmethod + def get_aliases() -> Dict[str, str]: + raise NotImplementedError() + + def __eq__(self, other): + if isinstance(other, self.__class__): + aliases = self.get_aliases() + return self.value == other.value or ( + aliases.get(self.value, self.value) + == aliases.get(other.value, other.value) + ) + else: + aliases = self.get_aliases() + self_value = aliases.get(self.value, self.value) + other_value = aliases.get(other, other) + return self_value == other_value + + def __hash__(self): + # resolve through get_aliases(); an `aliases` attribute does not exist + canonical_value = self.get_aliases().get(self.value, self.value) + return hash(canonical_value) + + +def shard_tensor( + tensor: torch.Tensor, shard_sizes: List[int], dim: int = 0 +) -> List[torch.Tensor]: + """ + Shards a tensor into a list of tensors along a given dimension. + + :raises ValueError: if the sum of shard_sizes does not match the + size of the tensor along the given dimension + + :param tensor: The input tensor to shard. + :param shard_sizes: List of sizes for each shard along the specified dimension. + :param dim: The dimension along which to shard the tensor. + :returns: A list of tensors sharded along the specified dimension. + """ + if sum(shard_sizes) != tensor.size(dim): + raise ValueError( + "Sum of shard_sizes must equal the size of the tensor " + "along the specified dimension." + ) + + shards = [] + start_idx = 0 + + for size in shard_sizes: + end_idx = start_idx + size + shard = tensor.narrow(dim, start_idx, size) + shards.append(shard) + start_idx = end_idx + + return shards + + +def combine_shards(shards, dim=0): + """ + Combine decompressed shards along a given dimension using `narrow`. + + :param shards: List of decompressed shard tensors. + :param dim: Dimension to combine along (default: 0). + :return: Combined decompressed tensor. + """ + if not shards: + raise ValueError("The list of shards is empty.") + + # Check that all shards have the same dtype + shard_dtypes = {shard.dtype for shard in shards} + if len(shard_dtypes) > 1: + raise ValueError("All shards must have the same dtype.") + + # Determine the total shape of the combined tensor + total_shape = list(shards[0].shape) + total_shape[dim] = sum(shard.shape[dim] for shard in shards) + + # Create the combined tensor + combined = torch.zeros(total_shape, dtype=shards[0].dtype, device=shards[0].device) + + # Fill the combined tensor using narrow + shard_offset = 0 + for shard in shards: + shard_size = shard.shape[dim] + combined.narrow(dim, shard_offset, shard_size).copy_(shard) + shard_offset += shard_size + + return combined + + +def pack_bitmasks(bytemasks: torch.Tensor) -> torch.Tensor: + """ + Converts a bytemask tensor to a bitmask tensor to reduce memory.
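`shard_tensor` and `combine_shards` are inverses, and `getattr_chain` walks dotted attribute paths; a quick sanity-check sketch (shapes chosen arbitrarily, assumes `compressed_tensors` and `torch` are installed):

```python
import torch

from compressed_tensors.utils import combine_shards, getattr_chain, shard_tensor

weight = torch.arange(24, dtype=torch.float32).reshape(6, 4)

# split 6 rows into [2, 3, 1]; sizes must sum to the size along dim
shards = shard_tensor(weight, shard_sizes=[2, 3, 1], dim=0)
assert [s.shape[0] for s in shards] == [2, 3, 1]

# narrow() returns views, so the first shard shares storage with the source
assert shards[0].data_ptr() == weight.data_ptr()

# combining along the same dim reproduces the original tensor
assert torch.equal(combine_shards(shards, dim=0), weight)

# getattr_chain walks dotted attribute paths, with an optional default
linear = torch.nn.Linear(2, 2)
assert getattr_chain(linear, "weight.dtype") == torch.float32
assert getattr_chain(linear, "quant.scale", None) is None  # missing -> default
```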
Shape RxC will be + compressed to R x ceil(C/8) + + :param bytemasks: mask tensor where each byte corresponds to a weight + :return: mask tensor where each bit corresponds to a weight + """ + # move to CPU first; torch.Tensor.numpy() raises on CUDA tensors + packed_bits_numpy = numpy.packbits( + bytemasks.cpu().numpy(), axis=-1, bitorder="little" + ) + packed_bits_torch = torch.from_numpy(packed_bits_numpy) + + return packed_bits_torch + + +def unpack_bitmasks( + packed_bitmasks: torch.Tensor, original_shape: List[int] +) -> torch.Tensor: + """ + Converts a bitmask tensor back to a bytemask tensor for use during decompression + + :param packed_bitmasks: mask tensor where each bit corresponds to a weight + :param original_shape: dense shape to decompress to + :return: boolean mask of weights in the original dense shape + """ + # Unpack the bits + unpacked_bits = numpy.unpackbits( + packed_bitmasks.cpu().numpy(), + axis=-1, + count=original_shape[-1], + bitorder="little", + ) + + # Reshape to match the original shape + unpacked_bitmasks_torch = torch.from_numpy( + unpacked_bits.reshape(original_shape).astype(bool) + ) + + return unpacked_bitmasks_torch diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/offload.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/offload.py new file mode 100644 index 0000000000000000000000000000000000000000..a62b854046ac58f7806fd7a630ea84452b81e3b2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/offload.py @@ -0,0 +1,410 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Utilities associated with offloading functionality provided by `accelerate`.
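`pack_bitmasks` and `unpack_bitmasks` round-trip exactly (the packed form pads out to ceil(C/8) bytes per row); a minimal sketch:

```python
import torch

from compressed_tensors.utils import pack_bitmasks, unpack_bitmasks

mask = torch.rand(4, 20) > 0.5  # boolean bytemask, one byte per weight

packed = pack_bitmasks(mask)
assert packed.shape == (4, 3)       # ceil(20 / 8) == 3 packed columns
assert packed.dtype == torch.uint8  # one bit per weight, plus padding bits

restored = unpack_bitmasks(packed, original_shape=[4, 20])
assert restored.dtype == torch.bool
assert torch.equal(restored, mask)  # exact round trip
```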
+ +| ----------------------------------------------------------------------------------------------------- | # noqa: E501 +| Operation | Without offloading support | With offloading support | # noqa: E501 +| --------- | -------------------------------------- | ------------------------------------------------ | # noqa: E501 +| Add | module.register_parameter(name, param) | register_offload_parameter(module, name, param) | # noqa: E501 +| Check | N/A | has_offloaded_params(module) | # noqa: E501 +| Onload | N/A | with align_module_device(module) | # noqa: E501 +| Update | module.name.data.copy_(new_data) | update_offload_parameter(module, name, new_data) | # noqa: E501 +| Delete | del module.name | delete_offload_parameter(module, name) | # noqa: E501 +| ----------------------------------------------------------------------------------------------------- | # noqa: E501 +""" + +import contextlib +import warnings +from functools import wraps +from typing import Any, Callable, Dict, Literal, Optional, Union + +import torch + + +try: + from accelerate.hooks import ( + AlignDevicesHook, + add_hook_to_module, + remove_hook_from_module, + ) + from accelerate.utils import ( + OffloadedWeightsLoader, + PrefixedDataset, + set_module_tensor_to_device, + ) + + _has_accelerate = True +except ImportError: + _has_accelerate = False + AlignDevicesHook = None + add_hook_to_module = None + remove_hook_from_module = None + OffloadedWeightsLoader = None + PrefixedDataset = None + set_module_tensor_to_device = None + + +__all__ = [ + "is_module_offloaded", + "get_execution_device", + "get_offloaded_device", + "update_prefix_dict", + "update_parameter_data", + "register_offload_parameter", + "update_offload_parameter", + "delete_offload_parameter", + "has_offloaded_params", + "disable_hf_hook", + "align_module_device", +] + + +def check_accelerate(fallback: Any): + def decorator(func: Callable[[Any], Any]): + if not _has_accelerate: + + @wraps(func) + def fallback_fn(*args, **kwargs): + return fallback + + return fallback_fn + + return func + + return decorator + + +""" Candidates for Deprecation """ + + +@check_accelerate(fallback=False) +def is_module_offloaded(module: torch.nn.Module) -> bool: + return has_offloaded_params(module) + + +def get_execution_device(module: torch.nn.Module) -> torch.device: + """ + :param module: module to check + :return: device module is loaded onto during forward pass + """ + if has_offloaded_params(module): + return module._hf_hook.execution_device + device = next(module.parameters()).device + + # offload only gets set for leaf modules, fallback to checking for device type + if device.type == "meta": + return module._hf_hook.execution_device + + return device + + +def get_offloaded_device(module: torch.nn.Module) -> torch.device: + """ + :param module: module to check + :return: device module is offloaded to after forward pass + """ + if has_offloaded_params(module): + first_key = list(module._hf_hook.weights_map.keys())[0] + prefix_dataset = module._hf_hook.weights_map.dataset + return prefix_dataset[first_key].device + return next(module.parameters()).device + + +@check_accelerate(fallback=None) +def update_prefix_dict(module: torch.nn.Module, key: str, data: torch.Tensor): + """ + Updates the offloaded state dict for a given module. Parameter named key is replaced + by data. This is necessary because parameter updates for offloaded modules do not + persist automatically between loads.
This function only affects the offloaded + state dict and not the current state of the loaded module. + + :param module: module containing the parameter to update + :param key: name of parameter to update + :param data: tensor to update parameter with in the offloaded state dict + """ + if not has_offloaded_params(module): + raise ValueError("Prefix dict is only applicable to offloaded modules") + + weights_map = module._hf_hook.weights_map + offload_to_weights_map(weights_map, key, data) + + +def update_parameter_data( + module: torch.nn.Module, new_param_data: torch.Tensor, param_name: str +): + """ + Update the data of an existing parameter and its offload dict. Supports both + parameters of offloaded modules and non-offloaded modules + + :param module: module containing the parameter to update + :param new_param_data: tensor to update parameter with + :param param_name: name of module parameter to update + """ + update_offload_parameter(module, param_name, new_param_data) + + +""" Candidates for Upstreaming """ + + +def register_offload_parameter( + module: torch.nn.Module, + name: str, + parameter: torch.nn.Parameter, + offload_device: Optional[Union[torch.device, Literal["disk"]]] = None, +): + """ + Register a parameter to the given module which may be offloaded + + :param module: maybe offloaded module + :param name: name of newly registered parameter + :param parameter: parameter being registered + :param offload_device: device on which weight will be offloaded to. If None is + provided, then infer device from parameters on module + """ + has_onload = any(p.device != torch.device("meta") for p in module.parameters()) + module.register_parameter(name, parameter) + + if has_offloaded_params(module): + weights_map = module._hf_hook.weights_map + offload_to_weights_map(weights_map, name, parameter.data, offload_device) + if not has_onload: + set_module_tensor_to_device(module, name, "meta") + + +def update_offload_parameter( + module: torch.nn.Module, + name: str, + data: Optional[torch.Tensor], + offload_device: Optional[Union[torch.device, Literal["disk"]]] = None, +): + """ + Update the data of an existing parameter and its offload dict. Supports both + parameters of offloaded modules and non-offloaded modules + + :param module: module containing the parameter to update + :param name: name of module parameter to update + :param data: tensor to update parameter with + :param offload_device: device on which weight will be offloaded to. 
If None is + provided, then infer device from parameters on module + """ + param = getattr(module, name) + data = data.to(param.dtype) + if param.data.shape != data.shape: + warnings.warn( + f"Shape of parameter being updated {param.data.shape} does not match shape " + f"of update data {data.shape}" + ) + + # copy data into onloaded parameter if applicable + if param.device != torch.device("meta"): + param.data.copy_(data) + + # update offload dict + if has_offloaded_params(module): + weights_map = module._hf_hook.weights_map + offload_to_weights_map(weights_map, name, data, offload_device) + + +def delete_offload_parameter(module: torch.nn.Module, name: str): + """ + Delete a parameter from a module which may be offloaded + + :param module: maybe offloaded module + :param name: name of parameter being deleted + """ + delattr(module, name) + + if has_offloaded_params(module): + weights_map = module._hf_hook.weights_map + delete_from_weights_map(weights_map, name) + + +@check_accelerate(fallback=contextlib.nullcontext()) +@contextlib.contextmanager +def disable_hf_hook(module: torch.nn.Module): + hooks = {} + + def collect_hooks(module): + nonlocal hooks + if hasattr(module, "_hf_hook"): + hooks[module] = module._hf_hook + remove_hook_from_module(module) + + module.apply(collect_hooks) + + yield + + for submodule, hook in hooks.items(): + add_hook_to_module(submodule, hook) + + +@check_accelerate(fallback=None) +def offload_to_weights_map( + weights_map: Union[PrefixedDataset, Dict, OffloadedWeightsLoader], + key: str, + value: torch.Tensor, + offload_device: Optional[Union[torch.device, Literal["disk"]]] = None, +): + """ + Helper function which implements offloaded item assignment for PrefixedDataset, + OffloadedWeightsLoader, and Dict types. + + :param weights_map: weight map to be updated with offload information + :param key: key used to identify weight location + :param value: weight being offloaded + :param offload_device: device on which weight will be offloaded to. 
If None is + provided, then infer device from parameters in weights_map + """ + if isinstance(weights_map, PrefixedDataset): + if offload_device == "disk": + raise ValueError(f"Cannot offload to disk with type {type(weights_map)}") + + dataset = weights_map.dataset + key = f"{weights_map.prefix}{key}" + offload_to_weights_map(dataset, key, value, offload_device) + + elif isinstance(weights_map, OffloadedWeightsLoader): + if key not in weights_map.all_keys: + weights_map.all_keys.append(key) + + if len(weights_map.index) <= 0 and offload_device != "disk": + offload_to_weights_map(weights_map.state_dict, key, value, offload_device) + + else: + raise NotImplementedError( + "Updating weights_map with disk offloading is not implemented yet" + ) + + elif isinstance(weights_map, dict): + if offload_device == "disk": + raise ValueError(f"Cannot offload to disk with type {type(weights_map)}") + + # infer offload device + if offload_device is None: + if key in weights_map: + offload_device = weights_map[key].device + else: + tens = next(iter(weights_map.values()), None) + if tens is None: + raise ValueError( + "Cannot infer offload device from empty weights_map" + ) + offload_device = tens.device + + weights_map[key] = value.to(device=offload_device) + + else: + raise NotImplementedError( + "Updating offload data not implemented for weights_map of type " + f"{type(weights_map)}" + ) + + +@check_accelerate(fallback=None) +def delete_from_weights_map( + weights_map: Union[PrefixedDataset, Dict, OffloadedWeightsLoader], + key: str, +): + if isinstance(weights_map, PrefixedDataset): + dataset = weights_map.dataset + key = f"{weights_map.prefix}{key}" + delete_from_weights_map(dataset, key) + + elif isinstance(weights_map, OffloadedWeightsLoader): + if len(weights_map.index) <= 0: + delete_from_weights_map(weights_map.state_dict, key) + + else: + raise NotImplementedError( + "Delete from weights_map with disk offloading is not implemented yet" + ) + + elif isinstance(weights_map, dict): + del weights_map[key] + + else: + raise NotImplementedError( + "Updating offload data not implemented for weights_map of type " + f"{type(weights_map)}" + ) + + +""" Upstreamed Functions """ + + +# introduced in accelerate v1.1.0 +@check_accelerate(fallback=False) +def has_offloaded_params(module: torch.nn.Module) -> bool: + """ + Checks if a module has offloaded parameters by checking if the given module has a + AlignDevicesHook attached with offloading enabled + + Args: + module (`torch.nn.Module`): The module to check for an offload hook. + + Returns: + bool: `True` if the module has an offload hook and offloading is enabled, + `False` otherwise. + """ + return ( + hasattr(module, "_hf_hook") + and isinstance(module._hf_hook, AlignDevicesHook) + and module._hf_hook.offload + ) + + +# introduced in accelerate v1.1.0 +@check_accelerate(fallback=contextlib.nullcontext()) +@contextlib.contextmanager +def align_module_device( + module: torch.nn.Module, execution_device: Optional[torch.device] = None +): + """ + Context manager that moves a module's parameters to the specified execution device. + + Args: + module (`torch.nn.Module`): + Module with parameters to align. + execution_device (`torch.device`, *optional*): + If provided, overrides the module's execution device within the context. 
+ Otherwise, use hook execution device or pass + """ + if has_offloaded_params(module): + if execution_device is not None: + original_device = module._hf_hook.execution_device + module._hf_hook.execution_device = execution_device + + try: + module._hf_hook.pre_forward(module) + yield + finally: + module._hf_hook.post_forward(module, None) + if execution_device is not None: + module._hf_hook.execution_device = original_device + + elif execution_device is not None: + devices = { + name: param.device for name, param in module.named_parameters(recurse=False) + } + try: + for name in devices: + set_module_tensor_to_device(module, name, execution_device) + yield + finally: + for name, device in devices.items(): + set_module_tensor_to_device(module, name, device) + + else: + yield diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/permutations_24.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/permutations_24.py new file mode 100644 index 0000000000000000000000000000000000000000..5b078e270e982f3a3448391eab1cf8ce1cbe4def --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/permutations_24.py @@ -0,0 +1,65 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import numpy +import torch + + +__all__ = ["get_permutations_24"] + + +# Precompute permutations for Marlin24 weight and scale shuffling +# Originally implemented in nm-vllm/vllm/model_executor/layers/quantization/utils/marlin_24_perms.py # noqa: E501 +# +# Marlin works on [16*2,64] tiles. 
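A sketch of how the offload helpers above are meant to be called. The same calls work whether or not a module is dispatched by `accelerate`; this example uses a plain CPU module with no hooks attached, and assumes `accelerate` is installed (without it, `align_module_device` falls back to a no-op context):

```python
import torch

from compressed_tensors.utils import (
    align_module_device,
    has_offloaded_params,
    update_offload_parameter,
)

linear = torch.nn.Linear(4, 4)
assert not has_offloaded_params(linear)  # no accelerate hook attached here

# same call works for offloaded and non-offloaded modules; here it simply
# copies into the onloaded parameter
update_offload_parameter(linear, "weight", torch.zeros(4, 4))
assert torch.all(linear.weight == 0)

# parameters are moved to the requested device inside the context and
# restored on exit
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
with align_module_device(linear, execution_device=device):
    assert linear.weight.device == device
```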
The goal of the permutations is to reorder the weight +# data so that it is compatible with the tensor-core format that is described here: +# https://docs.nvidia.com/cuda/parallel-thread-execution/index.html#matrix-fragments-for-mma-m16n8k16-with-floating-point-type # noqa: E501 +# +# As a result of this reordering, the vector loads inside the kernel will get the data +# as it is needed for tensor-core (without the need to use ldmatrix instructions) +def get_permutations_24(num_bits): + perm_list = [] + for i in range(32): + perm1 = [] + col = i // 4 + col_o = col // 2 + for block in [0, 1]: + for row in [ + 2 * (i % 4), + 2 * (i % 4) + 1, + 2 * (i % 4 + 4), + 2 * (i % 4 + 4) + 1, + ]: + perm1.append(16 * row + col_o * 256 + 8 * (col % 2) + 4 * block) + for j in range(4): + perm_list.extend([p + 1 * j for p in perm1]) + perm = numpy.array(perm_list) + + if num_bits == 4: + interleave = numpy.array([0, 2, 4, 6, 1, 3, 5, 7]) + elif num_bits == 8: + interleave = numpy.array([0, 2, 1, 3]) + else: + raise ValueError("num_bits must be 4 or 8, got {}".format(num_bits)) + + perm = perm.reshape((-1, len(interleave)))[:, interleave].ravel() + perm = torch.from_numpy(perm) + scale_perm = [] + for i in range(8): + scale_perm.extend([i * 8 + j for j in [0, 4, 1, 5, 2, 6, 3, 7]]) + scale_perm_single = [] + for i in range(8): + scale_perm_single.extend([8 * i + j for j in [0, 1, 2, 3, 4, 5, 6, 7]]) + return perm, scale_perm, scale_perm_single diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/permute.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/permute.py new file mode 100644 index 0000000000000000000000000000000000000000..e31d4862b69818a6de37cf93883101df2da0b1a9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/permute.py @@ -0,0 +1,70 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
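The permutation tables built above are deterministic, so their structure can be checked directly; a small sketch:

```python
from compressed_tensors.utils import get_permutations_24

perm, scale_perm, scale_perm_single = get_permutations_24(num_bits=4)

# 32 threads x 8 tile positions x 4 replications = 1024 entries
assert perm.shape == (1024,)
assert perm.min() == 0 and perm.max() == 1023

# the scale shuffles are plain 64-entry python lists
assert len(scale_perm) == 64 and len(scale_perm_single) == 64
```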
+ +from typing import Set, Tuple + +import torch + + +__all__ = ["safe_permute"] + + +# these datatypes are missing implementations required for standard permutation +_EXPERIMENTAL_DTYPES: Set[Tuple[torch.dtype, torch.device]] = set() + + +def safe_permute(value: torch.Tensor, perm: torch.Tensor, dim: int = 0) -> torch.Tensor: + """ + Perform out-of-place permutation without using torch.Tensor.index_put_, + whose implementation is missing for datatypes such as `torch.float8_e4m3fn` + + :param value: tensor to permute + :param perm: permutation map + :param dim: dimension along which to apply permutation + :return: permuted value + """ + dtype_tuple = (value.dtype, value.device) + + if dtype_tuple in _EXPERIMENTAL_DTYPES: + return _fallback_permute(value, perm, dim) + + try: + return value[tuple([slice(None)] * dim + [perm])] + except RuntimeError: + # Mark dtype as experimental if advanced indexing fails + _EXPERIMENTAL_DTYPES.add(dtype_tuple) + return _fallback_permute(value, perm, dim) + + +def _fallback_permute( + value: torch.Tensor, perm: torch.Tensor, dim: int +) -> torch.Tensor: + """ + Fallback permutation method for experimental dtypes. + + :param value: tensor to permute + :param perm: permutation map + :param dim: dimension along which to apply permutation + :return: permuted value + """ + value_ret = value.clone() # cannot use zeros_like b/c of missing impl. + orig_slices = [slice(None)] * (dim + 1) + perm_slices = [slice(None)] * (dim + 1) + + for index, perm_index in enumerate(perm): + orig_slices[dim] = index + perm_slices[dim] = perm_index + value_ret[tuple(orig_slices)] = value[tuple(perm_slices)] + + return value_ret diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/safetensors_load.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/safetensors_load.py new file mode 100644 index 0000000000000000000000000000000000000000..0adbe07623017f9590df2739bf3dbc7267e0bc63 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/safetensors_load.py @@ -0,0 +1,308 @@ +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
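For dtypes with full indexing support, `safe_permute` matches plain advanced indexing, and it only takes the element-wise fallback for dtypes such as `torch.float8_e4m3fn`; a quick sketch:

```python
import torch

from compressed_tensors.utils import safe_permute

x = torch.tensor([[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]])

# permute rows (dim=0): equivalent to x[perm] for well-supported dtypes
row_perm = torch.tensor([2, 0, 1])
assert torch.equal(safe_permute(x, row_perm, dim=0), x[row_perm])

# permute columns (dim=1): equivalent to x[:, perm]
col_perm = torch.tensor([1, 0])
assert torch.equal(safe_permute(x, col_perm, dim=1), x[:, col_perm])
```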
+ +import json +import os +import re +import struct +from typing import Dict, Iterable, Optional, Tuple, Union + +from safetensors import safe_open +from torch import Tensor +from transformers.utils import SAFE_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, cached_file + + +__all__ = [ + "get_safetensors_folder", + "get_safetensors_header", + "match_param_name", + "merge_names", + "get_weight_mappings", + "get_nested_weight_mappings", + "get_nested_mappings_from_state_dict", + "get_quantization_state_dict", + "is_quantization_param", +] + +WeightMappingType = Dict[str, str] +NestedWeightMappingType = Dict[str, WeightMappingType] + + +def get_safetensors_folder( + pretrained_model_name_or_path: str, cache_dir: Optional[str] = None +) -> str: + """ + Given a Hugging Face stub or a local path, return the folder containing the + safetensors weight files + + :param pretrained_model_name_or_path: local path to model or HF stub + :param cache_dir: optional cache dir to search through, if none is specified the + model will be searched for in the default TRANSFORMERS_CACHE + :return: local folder containing model data + """ + if os.path.exists(pretrained_model_name_or_path): + # argument is a path to a local folder + return os.path.abspath(pretrained_model_name_or_path) + + safetensors_path = cached_file( + pretrained_model_name_or_path, + SAFE_WEIGHTS_NAME, + cache_dir=cache_dir, + _raise_exceptions_for_missing_entries=False, + ) + index_path = cached_file( + pretrained_model_name_or_path, + SAFE_WEIGHTS_INDEX_NAME, + cache_dir=cache_dir, + _raise_exceptions_for_missing_entries=False, + ) + if safetensors_path is not None: + # found a single cached safetensors file + return os.path.split(safetensors_path)[0] + if index_path is not None: + # found a cached safetensors weight index file + return os.path.split(index_path)[0] + + # model weights could not be found locally or cached from HF Hub + raise ValueError( + "Could not locate safetensors weight or index file from " + f"{pretrained_model_name_or_path}." + ) + + +def get_safetensors_header(safetensors_path: str) -> Dict[str, str]: + """ + Extracts the metadata from a safetensors file as JSON + + :param safetensors_path: path to a safetensors file + :return: dictionary of metadata extracted from the safetensors file + """ + with open(safetensors_path, "rb") as f: + # the first 8 bytes are a little-endian uint64 giving the header length + length_of_header = struct.unpack("<Q", f.read(8))[0] + header_data = f.read(length_of_header) + header = json.loads(header_data) + + return header + + +def match_param_name(full_name: str, param_name: str) -> Optional[str]: + """ + Helper function extracting the uncompressed parameterized layer name from a + compressed name. Assumes the compressed name was merged using merge_names. + + :param full_name: full name of parameter in compressed model + :param param_name: compression parameter name + :return: uncompressed name of the uncompressed parameterized layer + """ + pattern = r"^(.*)\." + param_name + r"$" + regex = re.findall(pattern, full_name) + if len(regex) == 0: + return None + return regex[0] + + +def merge_names(parent_name: str, child_name: str) -> str: + """ + Helper function for merging an uncompressed parameterized layer name with a + compression parameter. Names merged with this function can then be parsed by + match_param_name. + + :param parent_name: uncompressed parameterized layer name + :param child_name: compression parameter name + :return: merged compressed name + """ + return parent_name + "." + child_name + + +def get_weight_mappings(path_to_model_or_tensors: str) -> Dict[str, str]: + """ + Takes a path to a state dict saved in safetensors format and returns a mapping + from parameterized layer name to file location.
+ + { + layer.weight.bitmask: file_location, + layer.weight.row_offsets: file_location, + layer.weight.shape: file_location, + layer.weight.compressed: file_location + } + + This generalizes to cases where the model is split into multiple safetensors files + + :param path_to_model_or_tensors: path to directory that contains + safetensors (must contain either a single file or multiple files with an index), + or a path to a single safetensors file + :return: mapping of parameterized layer name to file location + """ + + if os.path.isfile(path_to_model_or_tensors): + # we have a single safetensors file to read + header = get_safetensors_header(path_to_model_or_tensors) + for key in header.keys(): + header[key] = path_to_model_or_tensors + header.pop("__metadata__", None) + else: + # we have a directory with multiple safetensors files + safetensors_path = os.path.join(path_to_model_or_tensors, SAFE_WEIGHTS_NAME) + index_path = os.path.join(path_to_model_or_tensors, SAFE_WEIGHTS_INDEX_NAME) + if os.path.exists(safetensors_path): + # we have a single safetensors file to read + header = get_safetensors_header(safetensors_path) + for key in header.keys(): + header[key] = SAFE_WEIGHTS_NAME + header.pop("__metadata__", None) + elif os.path.exists(index_path): + # we have multiple safetensors file, read from index + with open(index_path, "r", encoding="utf-8") as f: + index = json.load(f) + header = index["weight_map"] + else: + raise ValueError( + "Could not find a safetensors weight " + f"or index file at {path_to_model_or_tensors}" + ) + + # convert weight locations to full paths + for key, value in header.items(): + header[key] = os.path.join(path_to_model_or_tensors, value) + + return header + + +def get_nested_weight_mappings( + model_path: str, + params_to_nest: Iterable[str], + return_unmatched_params: bool = False, +) -> Union[NestedWeightMappingType, Tuple[NestedWeightMappingType, WeightMappingType]]: + """ + Takes a path to a state dict saved in safetensors format and returns a nested + mapping from uncompressed parameterized layer names to the file locations of + each layer's compression parameters. + + Example of the nested mapping: + layer: { + bitmask: file_location, + row_offsets: file_location, + shape: file_location, + compressed: file_location + } + + If other parameters are found that do not match the nested parameters, they will + be returned in a separate dictionary only if return_unmatched_params is True. + This dictionary may be needed for cases where compressors are stacked (e.g., + quantization compression followed by sparse compression). + + Example of the unmatched params mapping: + { + layer.weight_scale: file_location, + layer.input_scale: file_location + } + + This generalizes to cases where the model is split into multiple safetensors + files. + + :param model_path: Path to the safetensors state dict, must contain either a + single safetensors file or multiple files with an index. + :param params_to_nest: Iterable of parameter names to nest. + :param return_unmatched_params: If True, return a second dictionary containing + the remaining parameters that were not matched to the params_to_nest. + :return: + - If return_unmatched_params is False: + NestedWeightMappingType: A nested mapping of parameterized layer names to + file locations of each layer's compression parameters. 
+ - If return_unmatched_params is True: + Tuple[NestedWeightMappingType, WeightMappingType]: A tuple containing: + - NestedWeightMappingType: A nested mapping of parameterized layer + names to file locations of each layer's compression parameters. + - WeightMappingType: A mapping of the remaining parameter names to + their file locations that were not matched to the params_to_nest. + """ + weight_mappings = get_weight_mappings(model_path) + nested_weight_mappings = {} + unmatched_params = {} + + for key, file_location in weight_mappings.items(): + matched = False + for param_name in params_to_nest: + dense_param = match_param_name(key, param_name) + if dense_param: + if dense_param not in nested_weight_mappings: + nested_weight_mappings[dense_param] = {} + nested_weight_mappings[dense_param][param_name] = file_location + matched = True + if return_unmatched_params and not matched: + unmatched_params[key] = file_location + + if return_unmatched_params: + return nested_weight_mappings, unmatched_params + return nested_weight_mappings + + +def get_nested_mappings_from_state_dict( + state_dict, params_to_nest: Iterable[str] +) -> NestedWeightMappingType: + """ + Takes a state dict and returns a nested mapping from uncompressed + parameterized layer names to the value of + each layer's compression parameters. + + Example of the nested mapping: + layer: { + weight_scale: ..., + weight: ..., + zero_point: ..., + } + + :param state_dict: state dict of the model + :param params_to_nest: Iterable of parameter names to nest. + :return: Nested mapping of parameterized layer names to the value of + each layer's compression parameters. + """ + nested_weight_mappings = {} + for key in state_dict.keys(): + for param_name in params_to_nest: + dense_param = match_param_name(key, param_name) + if dense_param: + if dense_param not in nested_weight_mappings: + nested_weight_mappings[dense_param] = {} + nested_weight_mappings[dense_param][param_name] = state_dict[key] + return nested_weight_mappings + + +def get_quantization_state_dict(model_path: str) -> Dict[str, Tensor]: + weight_mappings = get_weight_mappings(model_path) + state_dict = {} + for weight_name, safe_path in weight_mappings.items(): + if not is_quantization_param(weight_name): + continue + with safe_open(safe_path, framework="pt", device="cpu") as f: + state_dict[weight_name] = f.get_tensor(weight_name) + + return state_dict + + +def is_quantization_param(name: str) -> bool: + """ + Checks if a parameter name is associated with a quantization parameter + + :param name: parameter name to check + :return: True if parameter name is a quantization parameter, else False + """ + if name.endswith("_scale"): + return True + if name.endswith("zero_point"): + return True + if name.endswith("g_idx"): + return True + + return False diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/utils/semi_structured_conversions.py b/venv/lib/python3.10/site-packages/compressed_tensors/utils/semi_structured_conversions.py new file mode 100644 index 0000000000000000000000000000000000000000..ef318a48869e7d945030694460e438a0a7ae50e7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/compressed_tensors/utils/semi_structured_conversions.py @@ -0,0 +1,342 @@ +# +# Modified by Roberto Lopez Castro (roberto.lopez.castro@udc.es). +# Pulled from nm-vllm/vllm/model_executor/layers/quantization/utils/format_24.py +# +# flake8: noqa +# isort: skip_file + +# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
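The safetensors helpers above compose nicely: `merge_names` and `match_param_name` are inverses, and `get_safetensors_header` can be checked against a file written with `safetensors.torch.save_file`. A runnable sketch (file name and tensor names invented):

```python
import os
import tempfile

import torch
from safetensors.torch import save_file

from compressed_tensors.utils import (
    get_safetensors_header,
    is_quantization_param,
    match_param_name,
    merge_names,
)

# merge_names and match_param_name are inverses
full_name = merge_names("decoder.layers.0.weight", "bitmask")
assert full_name == "decoder.layers.0.weight.bitmask"
assert match_param_name(full_name, "bitmask") == "decoder.layers.0.weight"

# write a tiny safetensors file and peek at its JSON header
path = os.path.join(tempfile.mkdtemp(), "tiny.safetensors")
save_file(
    {"layer.weight": torch.zeros(2, 2), "layer.weight_scale": torch.ones(1)}, path
)
assert set(get_safetensors_header(path)) >= {"layer.weight", "layer.weight_scale"}

# quantization parameters are recognized purely by suffix
assert is_quantization_param("layer.weight_scale")
assert not is_quantization_param("layer.weight")
```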
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch + + +__all__ = [ + "sparse_semi_structured_from_dense_cutlass", + "sparse_semi_structured_to_dense_cutlass", + "mask_creator", +] + + +# This is PyTorch implementation of main part of reorder_meta() +# function, from tools/util/include/cutlass/util/host_reorder.h file +# of CUTLASS source tree. Furthermore, CUTLASS template for sparse +# GEMM decides upon layout of this matrix, and at the moment for the +# sparse GEMM executed on tensor cores, this is layout described by +# ColumnMajorInterleaved<2> data structure, in +# include/cutlass/layout/matrix.h of CUTLASS source tree. The +# reordering of meta matrix into meta_reordered matrix calculated +# according to these segments of CUTLASS code is re-implemented here. +# Note that this calculation produces offsets for scattering metadata +# matrix elements into reordered metadata matrix elements (or, +# equivalently, for gathering reordered metadata matrix element back +# into metadata matrix elements). +def _calculate_meta_reordering_scatter_offsets(m, meta_ncols, meta_dtype, device): + dst_rows = torch.arange(0, m, device=device)[:, None].repeat(1, meta_ncols) + dst_cols = torch.arange(0, meta_ncols, device=device).repeat(m, 1) + + # Reorder the rows, then swizzle the 2x2 blocks. + group_x = 64 + group_y = 32 if meta_dtype.itemsize == 2 else 16 + + dst_rows = ( + dst_rows // group_x * group_x + + (dst_rows % 2) * 2 + + (dst_rows % 8) // 4 + + ((dst_rows % group_y) % 4) // 2 * 32 + + ((dst_rows % group_x) // 8) * 4 + ) + + topright = ((dst_rows % 2 == 0) & (dst_cols % 2 == 1)).to(torch.int8) + bottomleft = ((dst_rows % 2 == 1) & (dst_cols % 2 == 0)).to(torch.int8) + dst_rows += topright - bottomleft + dst_cols -= topright - bottomleft + + # Assumed that meta tensor is to be stored in CUTLASS + # InterleavedColumnMajor layout, and reverse engineered + # corresponding code to store values into this tensor. + interleave = 2 + cols_maj = dst_cols // interleave + cols_min = dst_cols % interleave + return (cols_maj * m * interleave + dst_rows * interleave + cols_min).view(-1) + + +# This function converts dense matrix into sparse semi-structured +# representation, producing "compressed" matrix, in the layout used by +# CUTLASS backend, and corresponding metadata matrix. 
+def sparse_semi_structured_from_dense_cutlass(dense): + if dense.dim() != 2: + raise RuntimeError( + f"Expected 2-dimensional dense tensor, got {dense.dim()}-dimensional tensor" # noqa: E501 + ) + + m, k = dense.shape + device = dense.device + + meta_dtype = torch.int8 + if dense.dtype == torch.int8: + meta_dtype = torch.int32 + elif dense.dtype in [torch.half, torch.bfloat16, torch.float, torch.int32]: + meta_dtype = torch.int16 + else: + raise RuntimeError(f"Invalid datatype {dense.dtype} of dense matrix") + quadbits_per_meta_elem = meta_dtype.itemsize * 8 // 4 + if quadbits_per_meta_elem not in (4, 8): + raise RuntimeError("Invalid number of elements per meta element calculated") + + if meta_dtype == torch.int32: + if m % 16 != 0: + raise RuntimeError( + f"Number of rows of dense matrix {m} must be divisible by 16" + ) + else: + if m % 32 != 0: + raise RuntimeError( + f"Number of rows of dense matrix {m} must be divisible by 32" + ) + if k % (4 * quadbits_per_meta_elem) != 0: + raise RuntimeError( + f"Number of columns of dense matrix {k} must be divisible by {4 * quadbits_per_meta_elem}" # noqa: E501 + ) + + if dense.dtype != torch.float: + ksparse = 4 + dense_4 = dense.view(-1, k // ksparse, ksparse) + m0, m1, m2, m3 = (dense_4 != 0).unbind(-1) + else: + ksparse = 2 + dense_2 = dense.view(-1, k // ksparse, ksparse) + m0, m2 = m1, m3 = (dense_2 != 0).unbind(-1) + meta_ncols = k // (ksparse * quadbits_per_meta_elem) + + # Encoding quadruples of True/False values as follows: + # [True, True, False, False] -> 0b0100 + # [True, False, True, False] -> 0b1000 + # [False, True, True, False] -> 0b1001 + # [True, False, False, True ] -> 0b1100 + # [False, True, False, True ] -> 0b1101 + # [False, False, True, True ] -> 0b1110 + # Thus, lower two bits in the encoding are index of the True value + # at the lowest index in the quadruple, and the higher two bits in + # the encoding are index of the other True value in the quadruple. + # In case there are less than two True values, than False value or + # values at some index or indices are considered True for the + # encoding. In case there are more than two True values, then the + # excess True value(s) at some indices are considered False for + # the encoding. The exact encodings used for these cases are as + # follows: + # [False, False, False, False] -> 0b1110 + # [False, False, False, True ] -> 0b1110 + # [False, False, True, False] -> 0b1110 + # [False, True, False, False] -> 0b1001 + # [False, True, True, True ] -> 0b1101 + # [True, False, False, False] -> 0b1000 + # [True, False, True, True ] -> 0b1100 + # [True, True, False, True ] -> 0b0100 + # [True, True, True, False] -> 0b0100 + # [True, True, True, True ] -> 0b0100 + # These particular encodings are chosen, with the help of Espresso + # logic minimizer software, for the purpose of minimization of + # corresponding Boolean functions, that translate non-zero flags + # into encoding bits. Note also possible choices for the first + # and last of these encodings were limited only to (0b0100, + # 0b1110), in order to produce valid encodings for 1:2 sparsity + # case. 
+
+    expr0 = m0 & m1
+    expr1 = ~m0 & m1
+    expr2 = ~m0 & ~m1
+    bit0 = expr1
+    bit1 = expr2
+    bit2 = expr0 | expr2 | m3
+    bit3 = expr1 | ~m1
+    idxs0 = bit0 | (bit1.to(torch.int64) << 1)
+    idxs1 = bit2 | (bit3.to(torch.int64) << 1)
+
+    if dense.dtype != torch.float:
+        sparse0 = dense_4.gather(
+            -1, idxs0.unsqueeze(-1)
+        )  # type: ignore[possibly-undefined]
+        sparse1 = dense_4.gather(-1, idxs1.unsqueeze(-1))
+        sparse = torch.stack((sparse0, sparse1), dim=-1).view(m, k // 2)
+    else:
+        sparse = dense_2.gather(-1, idxs0.unsqueeze(-1) // 2).view(
+            m, k // 2
+        )  # type: ignore[possibly-undefined]
+
+    meta_4 = idxs0 | (idxs1 << 2)
+    meta_n = meta_4.view((-1, meta_ncols, quadbits_per_meta_elem)).to(meta_dtype)
+
+    if quadbits_per_meta_elem == 4:
+        meta = (
+            meta_n[:, :, 0]
+            | (meta_n[:, :, 1] << 4)
+            | (meta_n[:, :, 2] << 8)
+            | (meta_n[:, :, 3] << 12)
+        )
+    elif quadbits_per_meta_elem == 8:
+        meta = (
+            meta_n[:, :, 0]
+            | (meta_n[:, :, 1] << 4)
+            | (meta_n[:, :, 2] << 8)
+            | (meta_n[:, :, 3] << 12)
+            | (meta_n[:, :, 4] << 16)
+            | (meta_n[:, :, 5] << 20)
+            | (meta_n[:, :, 6] << 24)
+            | (meta_n[:, :, 7] << 28)
+        )
+
+    # Reorder meta tensor elements.
+    meta_reordered = meta.new_empty(
+        (m * meta_ncols,)
+    )  # type: ignore[possibly-undefined]
+    meta_offsets = _calculate_meta_reordering_scatter_offsets(
+        m, meta_ncols, meta_dtype, device
+    )
+    meta_reordered.scatter_(0, meta_offsets, meta.view(-1))
+
+    return (sparse, meta_reordered.view(m, meta_ncols))
+
+
+# This function performs the reverse of the function above - it
+# reconstructs the dense matrix from the pair of a "compressed" matrix,
+# given in the layout used by the CUTLASS backend, and the accompanying
+# metadata matrix.
+def sparse_semi_structured_to_dense_cutlass(sparse, meta_reordered):
+    if sparse.dim() != 2:
+        raise RuntimeError(
+            f"Expected 2-dimensional sparse tensor, got {sparse.dim()}-dimensional tensor"  # noqa: E501
+        )
+
+    m, k = sparse.shape
+    device = sparse.device
+
+    if meta_reordered.dim() != 2:
+        raise RuntimeError(
+            f"Expected 2-dimensional meta tensor, got {meta_reordered.dim()}-dimensional tensor"  # noqa: E501
+        )
+    if meta_reordered.device != device:
+        raise RuntimeError(
+            f"Expected meta matrix to be on {device} device, got matrix on {meta_reordered.device} device"  # noqa: E501
+        )
+
+    meta_dtype = meta_reordered.dtype
+    if meta_dtype not in (torch.int16, torch.int32):
+        raise RuntimeError(f"Invalid datatype {meta_dtype} of meta matrix")
+    quadbits_per_meta_elem = meta_dtype.itemsize * 8 // 4
+
+    ksparse = 4 if sparse.dtype != torch.float else 2
+
+    meta_nrows, meta_ncols = meta_reordered.shape
+    if meta_nrows != m:
+        raise RuntimeError(
+            f"Number of rows of meta matrix {meta_nrows} must be equal to number of rows of sparse matrix {m}"  # noqa: E501
+        )
+    if meta_ncols * ksparse * quadbits_per_meta_elem != 2 * k:
+        raise RuntimeError(
+            f"Number of columns of sparse matrix {k} differs from the {meta_ncols * ksparse * quadbits_per_meta_elem // 2} "  # noqa: E501
+            "expected according to the number of columns of meta matrix"
+        )
+
+    # Undo meta tensor elements reordering.
+    meta_offsets = _calculate_meta_reordering_scatter_offsets(
+        m, meta_ncols, meta_dtype, device
+    )
+    meta = torch.gather(meta_reordered.view(-1), 0, meta_offsets).view(m, meta_ncols)
+
+    # Unpack sparse tensor back to original dense tensor, using
+    # information provided by meta tensor.
+    # Note that torch.float datatype is handled pretty much the same as
+    # torch.half/torch.bfloat16, as metadata for a pair of torch.float
+    # values is encoded as if the underlying 8 bytes contained four
+    # torch.half/torch.bfloat16 values, where either the first two or
+    # the last two are zeros.
+    meta_2 = torch.empty(
+        (m, meta_ncols, 2 * quadbits_per_meta_elem),
+        dtype=meta_dtype,
+        device=device,
+    )
+    if quadbits_per_meta_elem == 4:
+        meta_2[:, :, 0] = meta & 0b11
+        meta_2[:, :, 1] = (meta >> 2) & 0b11
+        meta_2[:, :, 2] = (meta >> 4) & 0b11
+        meta_2[:, :, 3] = (meta >> 6) & 0b11
+        meta_2[:, :, 4] = (meta >> 8) & 0b11
+        meta_2[:, :, 5] = (meta >> 10) & 0b11
+        meta_2[:, :, 6] = (meta >> 12) & 0b11
+        meta_2[:, :, 7] = (meta >> 14) & 0b11
+    elif quadbits_per_meta_elem == 8:
+        meta_2[:, :, 0] = meta & 0b11
+        meta_2[:, :, 1] = (meta >> 2) & 0b11
+        meta_2[:, :, 2] = (meta >> 4) & 0b11
+        meta_2[:, :, 3] = (meta >> 6) & 0b11
+        meta_2[:, :, 4] = (meta >> 8) & 0b11
+        meta_2[:, :, 5] = (meta >> 10) & 0b11
+        meta_2[:, :, 6] = (meta >> 12) & 0b11
+        meta_2[:, :, 7] = (meta >> 14) & 0b11
+        meta_2[:, :, 8] = (meta >> 16) & 0b11
+        meta_2[:, :, 9] = (meta >> 18) & 0b11
+        meta_2[:, :, 10] = (meta >> 20) & 0b11
+        meta_2[:, :, 11] = (meta >> 22) & 0b11
+        meta_2[:, :, 12] = (meta >> 24) & 0b11
+        meta_2[:, :, 13] = (meta >> 26) & 0b11
+        meta_2[:, :, 14] = (meta >> 28) & 0b11
+        meta_2[:, :, 15] = (meta >> 30) & 0b11
+
+    dense_offsets = meta_2.view(-1) + (
+        torch.arange(0, 2 * m * k // ksparse, device=device) * 4
+    ).view(-1, 1).repeat(1, 2).view(-1)
+
+    dense = torch.zeros((m * 2 * k,), dtype=sparse.dtype, device=device)
+    if sparse.dtype != torch.float:
+        # reshape() rather than view(), since sparse may be non-contiguous
+        dense.scatter_(0, dense_offsets, sparse.reshape(-1))
+    else:
+        dense.view(torch.half).scatter_(
+            0, dense_offsets, sparse.view(torch.half).view(-1)
+        )
+
+    return dense.view(m, 2 * k)
+
+
+def mask_creator(tensor):
+    """
+    Create an N:M sparsity mask for the given tensor.
+
+    The mask is created using the N:M ratio, where for every block of
+    M weights, the M - N weights with the lowest magnitude are pruned
+    (set to 0 in the mask) and the remaining N are kept. The ratio is
+    currently fixed at N = 2, M = 4. The mask has the same shape as the
+    given tensor.
+
+    :param tensor: the weight tensor for which to create the mask
+    :return: a tensor of ones and zeros with the same shape as tensor
+    """
+    N = 2
+    M = 4
+
+    if tensor.numel() % M != 0:
+        raise ValueError(
+            f"Tensor of size {tensor.shape} can't be evenly divided into "
+            f"groups of {M}"
+        )
+
+    num_groups = tensor.numel() // M
+
+    # N:M sparsity for linear layers
+    tensor_temp = tensor.detach().abs().reshape(num_groups, M)
+    index = torch.argsort(tensor_temp, dim=1)[:, : int(M - N)]
+
+    w_b = torch.ones(tensor_temp.shape, device=tensor_temp.device)
+    mask = w_b.scatter_(dim=1, index=index, value=0).reshape(tensor.shape)
+
+    return mask
diff --git a/venv/lib/python3.10/site-packages/compressed_tensors/version.py b/venv/lib/python3.10/site-packages/compressed_tensors/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..2124576ef38502a6efd86ac371c8e9b6c4e82e3e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/compressed_tensors/version.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Functionality for storing and setting the version info for compressed-tensors
+"""
+
+
+version_base = "0.9.3"
+is_release = True  # set to False to generate a dated pre-release version instead
+
+
+def _generate_version(
+    is_release: bool,
+    version_base: str,
+):
+    from datetime import date
+
+    if is_release:
+        return version_base
+    else:
+        return f"{version_base}.{date.today().strftime('%Y%m%d')}"
+
+
+__all__ = [
+    "__version__",
+    "version_base",
+    "is_release",
+    "version",
+    "version_major",
+    "version_minor",
+    "version_patch",
+    "version_build",
+    "version_major_minor",
+]
+__version__ = _generate_version(is_release, version_base)
+
+version = __version__
+version_major, version_minor, version_patch, version_build = version.split(".") + (
+    [None] if len(version.split(".")) < 4 else []
+)  # handle conditional for version being 3 parts or 4 (4 containing build date)
+version_major_minor = f"{version_major}.{version_minor}"
diff --git a/venv/lib/python3.10/site-packages/dtlib/__init__.py b/venv/lib/python3.10/site-packages/dtlib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6afc9f01c72c977fe8d1fc9177828a44a6003bfc
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/__init__.py
@@ -0,0 +1,4 @@
+from . import trees
+from . import utils
+
+__all__ = ['utils', 'trees']
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0c7823f3f0a55e994ee87c23f2c775afef64ec1f
Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/__pycache__/__init__.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/dtlib/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..31d0cdc0c11df075f83664a86931bd77b7cb2700
Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/__pycache__/utils.cpython-310.pyc differ
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/BinarySearchTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/BinarySearchTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..aad34eeef92b61e1b3b73fe5d33056d6e153591a
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/BinarySearchTree.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Aug 25 18:57:59 2022
+
+@author: jeffr
+"""
+
+import abc
+import dtlib.trees.BinaryTree as BT
+import dtlib.trees._ArrayBinarySearchTree as ABST
+import dtlib.trees._LinkedBinarySearchTree as LBST
+from dtlib.trees.Tree import Tree, TreeMeta
+import dtlib.trees._Node as _Node
+
+from dtlib.trees._constants import BT_BALANCED, TRAVERSE_INORDER, \
+    LINKED_STORAGE, ARRAY_STORAGE, LIST_NODE, DEFAULT_SEARCH_ORDER, \
+    SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER, SEARCH_FIRST_LEVELORDER, \
+    DIR_LEFT, DIR_RIGHT
+
+## Public API/ABC for Binary Search Trees
+
+## Really all this does is provide a selector/factory for Binary Search Tree implementations
+## so that the public API is implementation independent
+
+############################## Module globals ################################
+
+## Creation/Types
+
+DEFAULT_STORAGE = LINKED_STORAGE
+DEFAULT_LINKED_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None})
+DEFAULT_ARRAY_NODE_FACTORY = _Node.Node_factory(LIST_NODE)
+
+############################ Module Initialization ############################
+
+# Think of this as an interface for BinarySearchTree
+class BinarySearchTree(BT.BinaryTree, metaclass=TreeMeta):
+    def __init__(self):
+        self.name = "This is a Binary Search Tree"
+
+    def __new__(cls, *args, storage=DEFAULT_STORAGE, **kwargs):
+        if storage == LINKED_STORAGE:
+            inst = Tree.__new__(LinkedBinarySearchTree, *args, **kwargs)
+        elif storage == ARRAY_STORAGE:
+            inst = Tree.__new__(ArrayBinarySearchTree, *args, **kwargs)
+        else:
+            raise ValueError(f"storage mechanism {storage} for BinarySearchTree creation not understood")
+        return inst
+
+    # should inherit all requirements of BinaryTree plus it is searchable
+    @abc.abstractmethod
+    def search(self):
+        pass
+
+    # eventually add select and rank though the implementations will be very slow compared to an OrderStatisticTree
+
+class LinkedBinarySearchTree(BT.LinkedBinaryTree, BinarySearchTree):
+    def __init__(self, contents=None, Nmin=0, /, *, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_LINKED_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # BinarySearchTrees initialize very differently from BinaryTrees, this
+        # should not call BT.LinkedBinaryTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = None
+        else:
+            self.tree = LBST.LBST_create(contents, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "This is a Linked Binary Search Tree"
+
+    def search(self, value, order=DEFAULT_SEARCH_ORDER):
+        return LBST.LBST_search(self.tree, value, key=self.key, order=order)
+
+    def minimum(self):
+        return LBST.LBST_min(self.tree)
+
+    def maximum(self):
+        return LBST.LBST_max(self.tree)
+
+    # highly suggest also implementing a more configurable version self.contains
+    def __contains__(self, value, /):
+        return LBST.LBST_contains(self.tree, value, key=self.key)
+
+    def validate(self):
+        return LBST.LBST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /):
+        self.tree = LBST.LBST_add(self.tree, value, key=self.key, unique=self.unique, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LBST.LBST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LBST.LBST_discard(self.tree, value, key=self.key, order=order)
+
+class ArrayBinarySearchTree(BT.ArrayBinaryTree, BinarySearchTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_ARRAY_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # BinarySearchTrees initialize very differently from BinaryTrees, this
+        # should not call BT.ArrayBinaryTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = [None]*Nmin
+        else:
+            self.tree = ABST.ABST_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "Array Binary Search Tree" # TODO: remove once class hierarchy is stable
+
+    def search(self, value, order=DEFAULT_SEARCH_ORDER):
+        return ABST.ABST_search(self.tree, value, key=self.key, order=order)
+
+    def minimum(self):
+        return ABST.ABST_min(self.tree)
+
+    def maximum(self):
+        return ABST.ABST_max(self.tree)
+
+    # highly suggest also implementing a more configurable version self.contains
+    def __contains__(self, value, /):
+        return ABST.ABST_contains(self.tree, value, key=self.key)
+
+    def validate(self):
+        return ABST.ABST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /, update=False):
+        self.tree = ABST.ABST_add(self.tree, value, key=self.key, unique=self.unique, update=update, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = ABST.ABST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = ABST.ABST_discard(self.tree, value, key=self.key, order=order)
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/BinaryTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/BinaryTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5264bf7cae4f4be6037d80be016c095ef31061f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/BinaryTree.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Aug 25 18:56:19 2022
+
+@author: jeffr
+"""
+
+import abc
+from dtlib.trees._constants import BT_BALANCED, TRAVERSE_INORDER, \
+    LINKED_STORAGE, ARRAY_STORAGE, LIST_NODE, DIR_LEFT, DIR_RIGHT
+import dtlib.trees._ArrayBinaryTree as ABT
+import dtlib.trees._LinkedBinaryTree as LBT
+import dtlib.trees._Node as _Node
+from dtlib.trees.Tree import Tree, TreeMeta
+
+## Public API/ABC for Binary Trees
+
+## Really all this does is provide a selector/factory for Binary Tree implementations
+## so that the public API is implementation independent
+
+############################## Module globals ################################
+
+## Creation/Types
+
+DEFAULT_STORAGE = LINKED_STORAGE
+DEFAULT_LINKED_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None})
+DEFAULT_ARRAY_NODE_FACTORY = _Node.Node_factory(LIST_NODE)
+
+############################ Module Initialization ############################
+
+# Think of this as an interface for BinaryTree
+#TODO: determine which of the abstractmethods really need to be here or should be pushed to Tree
+class BinaryTree(Tree, metaclass=TreeMeta):
+    def __init__(self):
+        self.name = "This is a Binary Tree"
+
+    def __new__(cls, *args, storage=DEFAULT_STORAGE, **kwargs):
+        if storage == LINKED_STORAGE:
+            inst = Tree.__new__(LinkedBinaryTree, *args, **kwargs)
+        elif storage == ARRAY_STORAGE:
+            inst = Tree.__new__(ArrayBinaryTree, *args, **kwargs)
+        else:
+            raise ValueError(f"storage mechanism {storage} for BinaryTree creation not understood")
+        return inst
+
+    @abc.abstractmethod
+    def traverse(self):
+        pass
+
+    @abc.abstractmethod
+    def iterator(self):
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def __iter__(self):
+        pass
+
+    @abc.abstractmethod
+    def count(self):
+        pass
+
+    """
+    @abc.abstractmethod
+    def find(self):
+        pass
+    """
+
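+    # A hedged usage sketch (values illustrative): instantiation dispatches on
+    # the storage keyword via __new__ above, so
+    #     BinaryTree([2, 1, 3], storage=ARRAY_STORAGE)
+    # yields an ArrayBinaryTree, while the default LINKED_STORAGE yields a
+    # LinkedBinaryTree; the concrete classes below fill in these abstract
+    # methods.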
+    # highly suggest also implementing a more configurable version self.contains
+    @abc.abstractmethod
+    def __contains__(self):
+        pass
+
+    @abc.abstractmethod
+    def __eq__(self):
+        pass
+
+    def __str__(self):
+        return self.__format__('')
+
+    def __format__(self, format_spec):
+        return self.name
+
+class LinkedBinaryTree(BinaryTree):
+    def __init__(self, contents=None, /, *, binary_tree_type=BT_BALANCED, default_traverse=TRAVERSE_INORDER, key=None, node_factory=DEFAULT_LINKED_NODE_FACTORY):
+        self.node_factory = node_factory
+        self.tree = LBT.LBT_create(contents, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.name = "Linked Binary Tree"
+
+    def size(self):
+        return LBT.LBT_size(self.tree)
+
+    def traverse(self, func, *args, traversal=None, reverse=None, **kwargs):
+        if traversal is None:
+            traversal = self.default_traverse
+        if reverse is None:
+            reverse = self._reversed
+        elif reverse:
+            reverse = not self._reversed
+        LBT.LBT_traverse(self.tree, func, *args, traversal=traversal, reverse=reverse, **kwargs)
+
+    def reverse(self):
+        self._reversed = not self._reversed
+
+    def iterator(self, order=None, /):
+        if order is None:
+            return self.__iter__()
+
+        #TODO: see notes at top
+        raise NotImplementedError(f"iterator by {order} is not yet implemented")
+
+    def __reversed__(self):
+        raise NotImplementedError("reverse iterator not yet implemented")
+
+    def __iter__(self):
+        #TODO: see notes at top
+        raise NotImplementedError("iterator is not yet implemented")
+
+    """ for a future release where something other than self.key can be used
+    def count(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        return LBT.LBT_count(self.tree, value, key=key)
+    """
+    def count(self, value, /):
+        return LBT.LBT_count(self.tree, value, key=self.key)
+
+    """ # WARNING: don't use this. Need to review the signature; specifically the output
+    def find(self, value, number=-1, /, *, key=None):
+        if key is None:
+            key = self.default_key
+        return LBT._LBT_find(self.tree, value, number=number, key=key)
+    """
+
+    # highly suggest also implementing a more configurable version self.contains
+    """ for a future release where something other than self.key can be used
+    def __contains__(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        return LBT.LBT_contains(self.tree, value, key=key)
+    """
+    def __contains__(self, value, /):
+        return LBT.LBT_contains(self.tree, value, key=self.key)
+
+    def add(self, value, /):
+        self.tree = LBT.LBT_add(self.tree, value)
+
+    """ for a future release where something other than self.key can be used
+    def remove(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        self.tree = LBT.LBT_remove(self.tree, value, key=key)
+    """
+    def remove(self, value, /):
+        self.tree = LBT.LBT_remove(self.tree, value, key=self.key)
+
+    """ for a future release where something other than self.key can be used
+    def discard(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        self.tree = LBT.LBT_discard(self.tree, value, key=key)
+    """
+    def discard(self, value, /):
+        self.tree = LBT.LBT_discard(self.tree, value, key=self.key)
+
+    def __eq__(self, other, /):
+        return LBT.LBT_equals(self.tree, other)
+
+class ArrayBinaryTree(BinaryTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, node_factory=DEFAULT_ARRAY_NODE_FACTORY):
+        self.tree = ABT.ABT_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=node_factory)
+        self.key = key
+        self._reversed = False
+        self.node_factory = node_factory
+        self.default_traverse = default_traverse
+        self.name = "Array Binary Tree" # TODO: remove once class hierarchy is stable
+
+    def size(self):
+        return ABT.ABT_size(self.tree)
+
+    def traverse(self, func, *args, traversal=None, reverse=None, **kwargs):
+        if traversal is None:
+            traversal = self.default_traverse
+        if reverse is None:
+            reverse = self._reversed
+        elif reverse:
+            reverse = not self._reversed
+        ABT.ABT_traverse(self.tree, func, *args, traversal=traversal, reverse=reverse, **kwargs)
+
+    def reverse(self):
+        self._reversed = not self._reversed
+
+    def iterator(self, order=None, /):
+        if order is None:
+            return self.__iter__()
+        #TODO: see notes at top
+        raise NotImplementedError(f"iterator by {order} is not yet implemented")
+
+    def __reversed__(self):
+        raise NotImplementedError("reverse iterator not yet implemented")
+
+    def __iter__(self):
+        #TODO: see notes at top
+        raise NotImplementedError("iterator is not yet implemented")
+
+    """ for a future release where something other than self.key can be used
+    def count(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        return ABT.ABT_count(self.tree, value, key=key)
+    """
+    def count(self, value, /):
+        return ABT.ABT_count(self.tree, value, key=self.key)
+
+    """ # WARNING: don't use this. Need to review the signature; specifically the output
+    def find(self, value, number=-1, /, *, key=None):
+        if key is None:
+            key = self.key
+        return ABT.ABT_find(self.tree, value, number=number, key=key)
+    """
+
+    # highly suggest also implementing a more configurable version self.contains
+    """ for a future release where something other than self.key can be used
+    def __contains__(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        return ABT.ABT_contains(self.tree, value, key=key)
+    """
+    def __contains__(self, value, /):
+        return ABT.ABT_contains(self.tree, value, key=self.key)
+
+    def add(self, value, /):
+        self.tree = ABT.ABT_add(self.tree, value, node_factory=self.node_factory)
+
+    """ for a future release where something other than self.key can be used
+    def remove(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        self.tree = ABT.ABT_remove(self.tree, value, key=key)
+    """
+    def remove(self, value, /):
+        self.tree = ABT.ABT_remove(self.tree, value, key=self.key)
+
+    """ for a future release where something other than self.key can be used
+    def discard(self, value, /, *, key=None):
+        if key is None:
+            key = self.key
+        self.tree = ABT.ABT_discard(self.tree, value, key=key)
+    """
+    def discard(self, value, /):
+        self.tree = ABT.ABT_discard(self.tree, value, key=self.key)
+
+    def __eq__(self, other, /):
+        return ABT.ABT_equals(self.tree, other)
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/Heap.py b/venv/lib/python3.10/site-packages/dtlib/trees/Heap.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5c6a2f162b9f2ccc7187bba7694a8e829e51288
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/Heap.py
@@ -0,0 +1,124 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Sep 6 20:22:53 2022
+
+@author: jeffr
+"""
+
+import abc
+import dtlib.trees.BinaryTree as BT
+import dtlib.trees._ArrayHeap as AH
+import dtlib.trees._LinkedHeap as LH
+# NOTE: the concrete classes below currently delegate to the binary search
+# tree helpers rather than to _ArrayHeap/_LinkedHeap, so those modules are
+# imported here as well
+import dtlib.trees._ArrayBinarySearchTree as ABST
+import dtlib.trees._LinkedBinarySearchTree as LBST
+from dtlib.trees.Tree import Tree, TreeMeta
+import dtlib.trees._Node as _Node
+
+from dtlib.trees._constants import BT_BALANCED, TRAVERSE_INORDER, \
+    LINKED_STORAGE, ARRAY_STORAGE, LIST_NODE, DEFAULT_SEARCH_ORDER, \
+    SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER, SEARCH_FIRST_LEVELORDER, \
+    DIR_LEFT, DIR_RIGHT
+
+## Public API/ABC for Heaps
+
+## Really all this does is provide a selector/factory for Heap implementations
+## so that the public API is implementation independent
+
+############################## Module globals ################################
+
+## Creation/Types
+
+DEFAULT_STORAGE = ARRAY_STORAGE
+DEFAULT_LINKED_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None})
+DEFAULT_ARRAY_NODE_FACTORY = _Node.Node_factory(LIST_NODE)
+
+############################ Module Initialization ############################
+
+# Think of this as an interface for Heap
+class Heap(BT.BinaryTree, metaclass=TreeMeta):
+    def __init__(self):
+        self.name = "This is a Heap"
+
+    def __new__(cls, *args, storage=DEFAULT_STORAGE, **kwargs):
+        if storage == LINKED_STORAGE:
+            inst = Tree.__new__(LinkedHeap, *args, **kwargs)
+        elif storage == ARRAY_STORAGE:
+            inst = Tree.__new__(ArrayHeap, *args, **kwargs)
+        else:
+            raise ValueError(f"storage mechanism {storage} for Heap creation not understood")
+        return inst
+
+    # should inherit all requirements of BinaryTree plus minimum/maximum queries
+    @abc.abstractmethod
+    def minimum(self):
+        pass
+
+    @abc.abstractmethod
+    def maximum(self):
+        pass
+
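+    # A hedged usage sketch (values illustrative): DEFAULT_STORAGE here is
+    # ARRAY_STORAGE, so Heap([4, 1, 7]) constructs an ArrayHeap, whose
+    # minimum()/maximum() answer the ordering queries declared above.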
+    # eventually add select and rank though the implementations will be very slow compared to an OrderStatisticTree
+
+class LinkedHeap(BT.LinkedBinaryTree, Heap):
+    def __init__(self, contents=None, Nmin=0, /, *, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_LINKED_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # Heaps initialize very differently from BinaryTrees, this
+        # should not call BT.LinkedBinaryTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = None
+        else:
+            self.tree = LBST.LBST_create(contents, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "This is a Linked Heap"
+
+    def minimum(self):
+        return LBST.LBST_min(self.tree)
+
+    def maximum(self):
+        return LBST.LBST_max(self.tree)
+
+    def validate(self):
+        return LBST.LBST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /):
+        self.tree = LBST.LBST_add(self.tree, value, key=self.key, unique=self.unique, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LBST.LBST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LBST.LBST_discard(self.tree, value, key=self.key, order=order)
+
+class ArrayHeap(BT.ArrayBinaryTree, Heap):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_ARRAY_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # Heaps initialize very differently from BinaryTrees, this
+        # should not call BT.ArrayBinaryTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = [None]*Nmin
+        else:
+            self.tree = ABST.ABST_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "This is an Array Heap" # TODO: remove once class hierarchy is stable
+
+    def minimum(self):
+        return ABST.ABST_min(self.tree)
+
+    def maximum(self):
+        return ABST.ABST_max(self.tree)
+
+    def validate(self):
+        return ABST.ABST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /, update=False):
+        self.tree = ABST.ABST_add(self.tree, value, key=self.key, unique=self.unique, update=update, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = ABST.ABST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = ABST.ABST_discard(self.tree, value, key=self.key, order=order)
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/OrderStatisticTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/OrderStatisticTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..253672f226f32cb09d5461ffac044969bc71ab6e
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/OrderStatisticTree.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Sep 6 19:02:32 2022
+
+@author: jeffr
+"""
+
+import abc
+import dtlib.trees.BinarySearchTree as BST
+import dtlib.trees._ArrayOrderStatisticTree as AOST
+import dtlib.trees._LinkedOrderStatisticTree as LOST
+from dtlib.trees.Tree import Tree, TreeMeta
+import dtlib.trees._Node as _Node
+
+from dtlib.trees._constants import BT_BALANCED, TRAVERSE_INORDER, \
+    LINKED_STORAGE, ARRAY_STORAGE, LIST_NODE, DEFAULT_SEARCH_ORDER, \
+    SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER, SEARCH_FIRST_LEVELORDER, \
+    DIR_LEFT, DIR_RIGHT
+
+## Public API/ABC for Order Statistic Trees
+
+## Really all this does is provide a selector/factory for Order Statistic Tree implementations
+## so that the public API is implementation independent
+
+############################## Module globals ################################
+
+## Creation/Types
+
+DEFAULT_STORAGE = LINKED_STORAGE
+DEFAULT_LINKED_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None, LOST.SIZE_KEY: 1})
+DEFAULT_ARRAY_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {AOST.SIZE_KEY: 1})
+
+############################ Module Initialization ############################
+
+# Think of this as an interface for OrderStatisticTree
+class OrderStatisticTree(BST.BinarySearchTree, metaclass=TreeMeta):
+    def __init__(self):
+        self.name = "This is an Order Statistic Tree"
+
+    def __new__(cls, *args, storage=DEFAULT_STORAGE, **kwargs):
+        if storage == LINKED_STORAGE:
+            inst = Tree.__new__(LinkedOrderStatisticTree, *args, **kwargs)
+        elif storage == ARRAY_STORAGE:
+            inst = Tree.__new__(ArrayOrderStatisticTree, *args, **kwargs)
+        else:
+            raise ValueError(f"storage mechanism {storage} for OrderStatisticTree creation not understood")
+        return inst
+
+    # should inherit all requirements of BinarySearchTree plus select and rank
+    @abc.abstractmethod
+    def select(self):
+        pass
+
+    @abc.abstractmethod
+    def rank(self):
+        pass
+
+class LinkedOrderStatisticTree(BST.LinkedBinarySearchTree, OrderStatisticTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_LINKED_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # OrderStatisticTrees initialize very differently from BinaryTrees, this
+        # should not call BST.LinkedBinarySearchTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = None
+        else:
+            self.tree = LOST.LOST_create(contents, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "This is a Linked Order Statistic Tree"
+
+    def select(self, k):
+        return LOST.LOST_select(self.tree, k)
+
+    def rank(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        return LOST.LOST_rank(self.tree, value, key=self.key, order=order)
+
+    def validate(self):
+        return LOST.LOST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /):
+        self.tree = LOST.LOST_add(self.tree, value, key=self.key, unique=self.unique, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LOST.LOST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LOST.LOST_discard(self.tree, value, key=self.key, order=order)
+
+class ArrayOrderStatisticTree(BST.ArrayBinarySearchTree, OrderStatisticTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_ARRAY_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # OrderStatisticTrees initialize very differently from BinaryTrees, this
+        # should not call BST.ArrayBinarySearchTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = [None]*Nmin
+        else:
+            self.tree = AOST.AOST_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "Array Order Statistic Tree" # TODO: remove once class hierarchy is stable
+
+    def select(self, k):
+        return AOST.AOST_select(self.tree, k)
+
+    def rank(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        return AOST.AOST_rank(self.tree, value, key=self.key, order=order)
+
+    def validate(self):
+        return AOST.AOST_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /, update=False):
+        self.tree = AOST.AOST_add(self.tree, value, key=self.key, unique=self.unique, update=update, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = AOST.AOST_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = AOST.AOST_discard(self.tree, value, key=self.key, order=order)
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/Tree.py b/venv/lib/python3.10/site-packages/dtlib/trees/Tree.py
new file mode 100644
index 0000000000000000000000000000000000000000..81f2eb7689fecabfbb809559a01c6a1b804af635
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/Tree.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Sep 2 16:41:36 2022
+
+@author: jeffr
+"""
+
+import abc
+
+# allow arguments to pass to __new__ but not __init__, to help select the class to instantiate
+class TreeMeta(abc.ABCMeta):
+    def __call__(cls, *args, **kwargs):
+        obj = cls.__new__(cls, *args, **kwargs)
+        if "storage" in kwargs:
+            del kwargs["storage"]
+        if isinstance(obj, cls):
+            obj.__init__(*args, **kwargs)
+        return obj
+
+"""
+This is just a reference class that does not provide any functionality...yet
+its main purpose is to provide a reference superclass from which each
+"interface" subtype of tree can actually create concrete subclasses, e.g.
+BinaryTree or BinarySearchTree, although in the inheritance hierarchy these
+interfaces are never directly instantiated
+"""
+#TODO: move a bunch of the abstractmethods from BinaryTree to Tree, but only
+# if they apply to ALL trees and not just binary trees, e.g. size, height,
+# depth
+# if Tree ends up inheriting from some other object, should remove __new__
+class Tree(metaclass=TreeMeta):
+    def __new__(cls, *args, **kwargs):
+        return super().__new__(cls) # clear up whatever args or keywords are passed.
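+    # A hedged sketch of the TreeMeta protocol above (values illustrative):
+    # TreeMeta.__call__ passes the storage keyword through to __new__ for
+    # dispatch, deletes it, and then calls __init__ without it, so
+    #     BinaryTree([2, 1, 3], storage=ARRAY_STORAGE)
+    # resolves to an ArrayBinaryTree initialized with [2, 1, 3] and no
+    # storage argument leaking into __init__.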
+
+    @abc.abstractmethod
+    def size(self):
+        pass
+
+    def __len__(self):
+        return self.size()
+
+    @abc.abstractmethod
+    def add(self):
+        pass
+
+    @abc.abstractmethod
+    def remove(self):
+        pass
+
+    @abc.abstractmethod
+    def discard(self):
+        pass
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/WeightBalancedTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/WeightBalancedTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a53250f070cfce27ec9cee3362d5fa48156167f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/WeightBalancedTree.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Sep 6 20:04:34 2022
+
+@author: jeffr
+"""
+
+import dtlib.trees.OrderStatisticTree as OST
+import dtlib.trees._ArrayWeightBalancedTree as AWBT
+import dtlib.trees._LinkedWeightBalancedTree as LWBT
+from dtlib.trees.Tree import Tree, TreeMeta
+import dtlib.trees._Node as _Node
+
+from dtlib.trees._constants import BT_BALANCED, TRAVERSE_INORDER, \
+    LINKED_STORAGE, ARRAY_STORAGE, LIST_NODE, DEFAULT_SEARCH_ORDER, \
+    SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER, SEARCH_FIRST_LEVELORDER, \
+    DIR_LEFT, DIR_RIGHT
+
+## Public API/ABC for Weight-Balanced Trees
+
+## Really all this does is provide a selector/factory for Weight-Balanced Tree implementations
+## so that the public API is implementation independent
+
+############################## Module globals ################################
+
+## Creation/Types
+
+DEFAULT_STORAGE = ARRAY_STORAGE
+DEFAULT_LINKED_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None, AWBT.SIZE_KEY: 1})
+DEFAULT_ARRAY_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {AWBT.SIZE_KEY: 1})
+
+############################ Module Initialization ############################
+
+# Think of this as an interface for WeightBalancedTree
+class WeightBalancedTree(OST.OrderStatisticTree, metaclass=TreeMeta):
+    def __init__(self):
+        self.name = "This is a Weight-Balanced Tree"
+
+    def __new__(cls, *args, storage=DEFAULT_STORAGE, **kwargs):
+        if storage == LINKED_STORAGE:
+            inst = Tree.__new__(LinkedWeightBalancedTree, *args, **kwargs)
+        elif storage == ARRAY_STORAGE:
+            inst = Tree.__new__(ArrayWeightBalancedTree, *args, **kwargs)
+        else:
+            raise ValueError(f"storage mechanism {storage} for WeightBalancedTree creation not understood")
+        return inst
+
+class LinkedWeightBalancedTree(OST.LinkedOrderStatisticTree, WeightBalancedTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_LINKED_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # WeightBalancedTrees initialize very differently from BinaryTrees, this
+        # should not call OST.LinkedOrderStatisticTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = None
+        else:
+            self.tree = LWBT.LWBT_create(contents, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "This is a Linked Weight-Balanced Tree"
+
+    def validate(self):
+        return LWBT.LWBT_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /):
+        self.tree = LWBT.LWBT_add(self.tree, value, key=self.key, unique=self.unique, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LWBT.LWBT_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = LWBT.LWBT_discard(self.tree, value, key=self.key, order=order)
+
+class ArrayWeightBalancedTree(OST.ArrayOrderStatisticTree, WeightBalancedTree):
+    def __init__(self, contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, key=None, default_traverse=TRAVERSE_INORDER, unique=False, node_factory=DEFAULT_ARRAY_NODE_FACTORY):
+        # since initialization can be expensive for computation and memory and
+        # WeightBalancedTrees initialize very differently from BinaryTrees, this
+        # should not call OST.ArrayOrderStatisticTree.__init__
+        self.node_factory = node_factory
+        if contents is None:
+            self.tree = [None]*Nmin
+        else:
+            self.tree = AWBT.AWBT_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=self.node_factory)
+        self.key = key
+        self._reversed = False
+        self.default_traverse = default_traverse
+        self.unique = unique
+        self.name = "Array Weight-Balanced Tree" # TODO: remove once class hierarchy is stable
+
+    def validate(self):
+        return AWBT.AWBT_validate(self.tree, key=self.key, unique=self.unique)
+
+    def add(self, value, /, update=False):
+        self.tree = AWBT.AWBT_add(self.tree, value, key=self.key, unique=self.unique, update=update, node_factory=self.node_factory)
+
+    def remove(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = AWBT.AWBT_remove(self.tree, value, key=self.key, order=order)
+
+    def discard(self, value, /, *, order=DEFAULT_SEARCH_ORDER):
+        self.tree = AWBT.AWBT_discard(self.tree, value, key=self.key, order=order)
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinarySearchTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinarySearchTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f6dda41dfd01cef29b27456a15f99757229a8d0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinarySearchTree.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Aug 22 13:32:25 2022
+
+@author: jeffr
+"""
+
+from collections.abc import Iterable
+import dtlib.trees._ArrayBinaryTree as ABT
+import dtlib.trees._Node as _Node
+from dtlib.trees._constants import VALUE_KEY, DIR_LEFT, DIR_RIGHT, \
+    TRAVERSE_GO, TRAVERSE_STOP, BT_BALANCED,\
+    TRAVERSE_LEVELORDER, SEARCH_FIRST_INORDER, \
+    SEARCH_LAST_INORDER, DEFAULT_SEARCH_ORDER, LIST_NODE
+from operator import gt, lt
+
+# TODO: create iterator classes for each type of iteration
+# TODO: Iterable = ABST_iter(tree, /, *, traversal='inorder')
+
+############################## Module globals ################################
+
+## utilizing BLib.Trees._constants
+
+DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE)
+
+################################ UTILITIES ####################################
+
+## comment out aliasing into namespace until they are actually needed. Left
+## here for documentation purposes
+
+_ABST_is_leaf = ABT._ABT_is_leaf
+
+_ABST_move_index = ABT._ABT_move_index
+_move = _ABST_move_index
+
+_ABST_size = ABT.ABT_size
+
+_ABST_diameter = ABT._ABT_diameter
+
+_ABST_depth = ABT._ABT_depth
+
+_ABST_extend = ABT._ABT_extend
+
+_ABST_swap = ABT._ABT_swap
+
+_ABST_move_subtree = ABT._ABT_move_subtree
+
+_ABST_leftmost = ABT._ABT_leftmost
+
+_ABST_rightmost = ABT._ABT_rightmost
+
+# specialization of _ABST_search. key(tree[index][VALUE_KEY]) == key(value) or tree[index][VALUE_KEY] == value must be satisfied
+def _ABST_search_most(tree, index, dir_, /, *, key=None, path=None):
+    if path is None:
+        path = []
+    if dir_ == DIR_LEFT:
+        _leader, _follower, cmp = DIR_LEFT, DIR_RIGHT, lt
+    else:
+        _leader, _follower, cmp = DIR_RIGHT, DIR_LEFT, gt
+    #most = index
+    N = len(tree)
+    if key is None:
+        value = tree[index][VALUE_KEY]
+        while index < N and tree[index] is not None:
+            kindex = tree[index][VALUE_KEY]
+            if kindex == value:
+                path.append(index)
+                #most = index
+                index = _move(index, _leader)
+            elif cmp(kindex, value): # was kindex < value for _ABST_search_leftmost
+                path.append(index)
+                index = _move(index, _follower)
+            else:
+                index = N # exit condition
+        #return most
+        while tree[path[-1]][VALUE_KEY] != value:
+            path.pop()
+    else:
+        value = key(tree[index][VALUE_KEY])
+        while index < N and tree[index] is not None:
+            kindex = key(tree[index][VALUE_KEY])
+            if kindex == value:
+                path.append(index)
+                #most = index
+                index = _move(index, _leader)
+            elif cmp(kindex, value): # was kindex < value for _ABST_search_leftmost
+                path.append(index)
+                index = _move(index, _follower)
+            else:
+                index = N # exit condition
+        #return most
+        while key(tree[path[-1]][VALUE_KEY]) != value:
+            path.pop()
+    #return most
+    return path[-1]
+
+# TODO: find a way to inject uniqueness into the search; otherwise the leftmost and rightmost are triggered and full O(height) is computed
+def _ABST_search(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None):
+    if path is None:
+        path = []
+    root = 0
+    N = len(tree)
+    if key is None:
+        while root < N and tree[root] is not None:
+            path.append(root)
+            cur = tree[root][VALUE_KEY]
+            if cur == value:
+                break # continue with root
+            elif cur < value:
+                root = _move(root, DIR_RIGHT)
+            else:
+                root = _move(root, DIR_LEFT)
+    else:
+        while root < N and tree[root] is not None:
+            path.append(root)
+            cur = key(tree[root][VALUE_KEY])
+            if cur == value:
+                break # continue with root
+            elif cur < value:
+                root = _move(root, DIR_RIGHT)
+            else:
+                root = _move(root, DIR_LEFT)
+
+    if root < N and tree[root] is not None:
+        if order == SEARCH_FIRST_INORDER:
+            path.pop()
+            root = _ABST_search_most(tree, root, DIR_LEFT, key=key, path=path)
+            #root = _ABST_search_leftmost(tree, root, key=key)
+        elif order == SEARCH_LAST_INORDER:
+            path.pop()
+            root = _ABST_search_most(tree, root, DIR_RIGHT, key=key, path=path)
+            #root = _ABST_search_rightmost(tree, root, key=key)
+        else: # order == SEARCH_FIRST_LEVELORDER
+            pass
+    return root
+
+################################# Traversals ##################################
+
+## using the public API from ABT as the implementation is identical
+
+################################# Public API ##################################
+
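+## A hedged usage sketch of the public functional API below (illustrative
+## only, and assuming ABT.ABT_create substitutes a default node factory when
+## node_factory is None):
+##
+##     tree = ABST_create([5, 2, 8, 1])   # sorted, then packed into an array
+##     tree = ABST_add(tree, 3)
+##     found = ABST_search(tree, 8)       # returns 8, or None when absent
+##     tree = ABST_discard(tree, 42)      # missing values are ignored
+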
For simple BSTs, do not use node_factory unless you correct the key parameters for the node structure +def ABST_create(contents=None, Nmin=0, /, *, key=None, inplace=False, binary_tree_type=BT_BALANCED, node_factory=None): + if contents is None: + return [None]*Nmin + + if isinstance(contents, Iterable): + if inplace: + if not isinstance(contents, list): + raise ValueError("cannot create an ABT in place with non-list contents") + else: + contents.sort(key=key) + else: + contents = sorted(contents, key=key) + else: + contents = [contents] + + return ABT.ABT_create(contents, Nmin, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=node_factory) + +## Tree properties/geometry + +ABST_size = ABT.ABT_size +ABST_height = ABT.ABT_height +ABST_depth = ABT.ABT_depth + +## Tree contents/queries/traversals + +ABST_traverse = ABT.ABT_traverse + +def ABST_search(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + index = _ABST_search(tree, value, key=key, order=order) + if index < len(tree) and tree[index] is not None: + return tree[index][VALUE_KEY] + return None + +def ABST_contains(tree, value, /, *, key=None): + if ABST_search(tree, value, key=key) is None: + return False + return True + +def ABST_min(tree, index=0, /): + index, depth = _ABST_leftmost(tree, index) + return tree[index][VALUE_KEY] + +def ABST_max(tree, index=0, /): + index, depth = _ABST_rightmost(tree, index) + return tree[index][VALUE_KEY] + +# TODO: ABST_rank + +# TODO: ABST_select(tree, k) # O(N), ABST_selectN(tree, k: Iterable) + +# CONSIDER: might be able to speed this up to O(k*logn) where k is the number of elements with the same value by utilizing _ABST_search +# WARNING: don't use this directly, subject to change signature; specifically the output +#ABST_find = ABT.ABT_find + +# currently O(N)...could be faster +ABST_count = ABT.ABT_count + +def ABST_validate(tree, /, *, key=None, unique=False): + result = [True] + if key is None: + def validate_node(tree, index, result): + # left child + child = _move(index, DIR_LEFT) + if tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] > tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + # right child + child = _move(index, DIR_RIGHT) + if tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] < tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + ABST_traverse(tree, validate_node, result, traversal=TRAVERSE_LEVELORDER) + else: + def validate_node(tree, index, result, key): + # left child + child = _move(index, DIR_LEFT) + if tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c > r: # not sorted + result[0] = False + return TRAVERSE_STOP + # right child + child = _move(index, DIR_RIGHT) + if tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c < r: # not sorted + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + ABST_traverse(tree, validate_node, result, key, traversal=TRAVERSE_LEVELORDER) + return result[0] + +# node_factory is only meant for specializations that actually 
+# node_factory is only meant for specializations that actually require nodes. For simple BSTs, do not use node_factory unless you correct the key parameters for the node structure
+def ABST_add(tree, value, /, *, key=None, unique=False, update=False, node_factory=None):
+    N = len(tree)
+    root = 0
+    if key is None:
+        kvalue = value
+        while root < N and tree[root] is not None:
+            kroot = tree[root][VALUE_KEY]
+            if kvalue < kroot:
+                root = _move(root, DIR_LEFT)
+            elif kvalue > kroot:
+                root = _move(root, DIR_RIGHT)
+            elif not unique and not update: # allowed to have duplicates
+                #root = _ABST_search_rightmost(tree, root, key=key)
+                root = _ABST_search_most(tree, root, DIR_RIGHT, key=key)
+
+                root = _move(root, DIR_RIGHT)
+                while root < N and tree[root] is not None:
+                    root = _move(root, DIR_LEFT)
+            else: # kvalue == kroot and unique = True
+                if update:
+                    tree[root][VALUE_KEY] = value
+                return tree
+    else:
+        kvalue = key(value)
+        while root < N and tree[root] is not None:
+            kroot = key(tree[root][VALUE_KEY])
+            if kvalue < kroot:
+                root = _move(root, DIR_LEFT)
+            elif kvalue > kroot:
+                root = _move(root, DIR_RIGHT)
+            elif not unique and not update: # allowed to have duplicates
+                #root = _ABST_search_rightmost(tree, root, key=key)
+                root = _ABST_search_most(tree, root, DIR_RIGHT, key=key)
+
+                root = _move(root, DIR_RIGHT)
+                while root < N and tree[root] is not None:
+                    root = _move(root, DIR_LEFT)
+            else: # kvalue == kroot and unique = True
+                if update:
+                    tree[root][VALUE_KEY] = value
+                return tree
+    if root >= N:
+        _ABST_extend(tree, root + 1)
+    if node_factory is None:
+        tree[root] = value
+    else:
+        tree[root] = node_factory(value)
+    return tree
+
+# TODO: ABST_update(tree_dest, *other_ABSTs, /, unique=False) # merge trees
+
+# path accepted here so ABST_discard can forward it to _ABST_search
+def ABST_remove(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None):
+    N = len(tree)
+    root = _ABST_search(tree, value, key=key, order=order, path=path)
+    if root >= N or tree[root] is None:
+        raise KeyError(f"ABST_remove: key {value} not found in tree")
+
+    if not _ABST_is_leaf(tree, root):
+        child = _move(root, DIR_RIGHT)
+        replacement = None
+
+        if child < N and tree[child] is not None:
+            replacement, replacement_depth = _ABST_leftmost(tree, child)
+            dir_ = DIR_RIGHT
+        child = _move(root, DIR_LEFT)
+        if child < N and tree[child] is not None:
+            leaf, depth = _ABST_rightmost(tree, child)
+            if replacement is None or depth > replacement_depth:
+                replacement = leaf
+                dir_ = DIR_LEFT
+
+        _ABST_swap(tree, root, replacement)
+        root = replacement
+        if not _ABST_is_leaf(tree, root):
+            tree[root] = None # have to set to None after since otherwise _ABST_is_leaf will return False
+            _ABST_move_subtree(tree, _move(root, dir_), root)
+        else:
+            tree[root] = None
+    else:
+        tree[root] = None
+    return tree
+
+def ABST_discard(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None):
+    if path is None:
+        path = []
+    try:
+        return ABST_remove(tree, value, key=key, order=order, path=path)
+    except KeyError:
+        return tree
+
+ABST_equals = ABT.ABT_equals
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinaryTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinaryTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..95716bebfa3f084599c11431127eea613b25923f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayBinaryTree.py
@@ -0,0 +1,775 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Sun Aug 21 18:39:40 2022
+
+@author: jeffr
+"""
+
+from collections import deque # for traversals
+from collections.abc import Iterable
+from dtlib.utils import _next_pow2,
_interval_root +import dtlib.trees._Node as _Node +from dtlib.trees._constants import VALUE_KEY, DIR_LEFT, DIR_RIGHT, \ + DIR_PARENT, BT_BALANCED, BT_COMPLETE, TRAVERSE_GO, \ + TRAVERSE_STOP, TRAVERSE_INORDER, TRAVERSE_PREORDER, \ + TRAVERSE_POSTORDER, TRAVERSE_LEVELORDER, LIST_NODE +#from BLib.Trees import BinaryTree as BT +import turtle # to be removed + +# TODO: create iterator classes for each type of iteration +# TODO: Iterable = ABT_iter(tree, /, *, traversal='inorder') +# TODO: Note that BinarySearchTree + +############################## Module globals ################################ + +## utilizing BLib.Trees._constants + +DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE) + +################################ UTILITIES #################################### + +## Node properties + +def _ABT_is_leaf(tree, index, /): + N = len(tree) + if index < N and tree[index] is not None: + child = _move(index, DIR_LEFT) + if child >= N or tree[child] is None: + child = _move(index, DIR_RIGHT) + if child >= N or tree[child] is None: + return True + return False + +## Navigation + +def _ABT_move_index(index, dir_, /): + if dir_ == DIR_PARENT: + return ((index-1) >> 1) + #return (index << 1) + 1 + dir_ # depends on DIR_LEFT = 0 + return (index << 1) + (1 if dir_==DIR_LEFT else 2) + +_move = _ABT_move_index + +## Tree properties/geometry + +# size_t = _ABT_size(tree, index=0, /) +def _ABT_size(tree, index=0, /): + if index >= len(tree) or tree[index] is None: + return 0 + return 1 + _ABT_size(tree, _move(index, DIR_LEFT)) + _ABT_size(tree, _move(index, DIR_RIGHT)) + +def _ABT_diameter_helper(tree, index, /): + if index >= len(tree) or tree[index] is None: + return 0, 0 + l_max_diameter, l_height = _ABT_diameter_helper(tree, _move(index, DIR_LEFT)) + r_max_diameter, r_height = _ABT_diameter_helper(tree, _move(index, DIR_RIGHT)) + return max(l_max_diameter, r_max_diameter, 1 + l_height + r_height), 1 + max(l_height, r_height) + +# size_t = _ABT_diameter(tree, index=0, /) +def _ABT_diameter(tree, index=0, /): + if not tree or index >= len(tree) or tree[index] is None: + return 0 + return _ABT_diameter_helper(tree, index)[0] + +# O(logN) +# size_t = _ABT_depth(tree, index, /) +def _ABT_depth(tree, index=0, /): + if not tree or index >= len(tree) or tree[index] is None: + return 0 + depth = 0 + while index >= 0: + index = _move(index, DIR_PARENT) + depth += 1 + return depth + +# size_t = _ABT_height(tree, index=0, /) +def _ABT_height(tree, index=0, /): + #print(tree) + if not tree or index >= len(tree) or tree[index] is None: + return 0 + return 1 + max(_ABT_height(tree, _move(index, DIR_LEFT)), _ABT_height(tree, _move(index, DIR_RIGHT))) + +## Tree manipulation + +# WARNING: swap does not check to ensure that the tree pre/post maintains structure invariants +def _ABT_swap(tree, index1, index2): + tree[index1], tree[index2] = tree[index2], tree[index1] + +def _ABT_value_swap(tree, index1, index2): + tree[index1][VALUE_KEY], tree[index2][VALUE_KEY] = tree[index2][VALUE_KEY], tree[index1][VALUE_KEY] + +def _ABT_extend(tree, Ntarget, /): + N = len(tree) + if Ntarget > N: + tree.extend([None]*(Ntarget-N)) + +# time is 2 * N where N is the size of the subtree located at root_index +def _ABT_move_subtree(tree, root, target, /): + N = len(tree) + if root >= N or tree[root] is None: + # not sure why I put this here...leaving it commented. 
If root >= N but target < N, that's equivalent to trying to delete by moving out of bounds...do not want that + #if target < N: + # tree[target] = None + return + + if target > root: # shift down or parallel right + Ncur = N + # perform a reverse postorder traversal of the subtree tree (reverse meaning right, left, then root as opposed to left, right, then root) + # as this is a nonstandard traversal and I need to modify the tree while traversing, it is custom and not reusing code I already have + # in principle this can be done with a standard postorder traversal, but this loop makes a lot more sense to me + st_visit = [] + st_traverse = [(root, target)] + + # find if target is in the left subtree or to the left of root + parent = target + left = _move(root, DIR_LEFT) + rightmost_left_level = _next_pow2(left+1)-2 + while parent > rightmost_left_level: + parent = _move(parent, DIR_PARENT) + if parent > left: # target is to the right of the root + while st_traverse: + # note that N cannot change while traversing! That's why we have Ncur + if st_traverse[-1][0] < N and tree[st_traverse[-1][0]] is not None: + st_visit.append(st_traverse.pop()) + + root = _move(st_visit[-1][0], DIR_LEFT) # move to left child of source + target = _move(st_visit[-1][-1], DIR_LEFT) # move to left child of destination + st_traverse.append((root, target)) # add left children to traversal stack + + root += DIR_RIGHT - DIR_LEFT # move to right child of source + target += DIR_RIGHT - DIR_LEFT # move to right child of destination + st_traverse.append((root, target)) # add right children to traversal stack + else: + st_traverse.pop() + while st_visit and (not st_traverse or st_visit[-1][0] > st_traverse[-1][0]): + if st_visit[-1][1] >= Ncur: + # this next line is why I cannot do a traditional traversal, which is stable only if the underlying tree is NOT modified + _ABT_extend(tree, st_visit[-1][1] + 1) # this could get very expensive if it has to be executed multiple times, but in a rotation, this should happen only once if the tree structures maintains a "complete" size and maybe 2x otherwise. + Ncur = len(tree) + _ABT_swap(tree, *st_visit.pop()) + else: + while st_traverse: + # note that N cannot change while traversing! That's why we have Ncur + if st_traverse[-1][0] < N and tree[st_traverse[-1][0]] is not None: + st_visit.append(st_traverse.pop()) + + root = _move(st_visit[-1][0], DIR_RIGHT) # move to right child of source + target = _move(st_visit[-1][-1], DIR_RIGHT) # move to right child of destination + st_traverse.append((root, target)) # add right children to traversal stack + + root -= DIR_RIGHT - DIR_LEFT # move to left child of source + target -= DIR_RIGHT - DIR_LEFT # move to left child of destination + st_traverse.append((root, target)) # add left children to traversal stack + else: + st_traverse.pop() + while st_visit and (not st_traverse or st_visit[-1][0] > st_traverse[-1][0]): + if st_visit[-1][1] >= Ncur: + # this next line is why I cannot do a traditional traversal, which is stable only if the underlying tree is NOT modified + _ABT_extend(tree, st_visit[-1][1] + 1) # this could get very expensive if it has to be executed multiple times, but in a rotation, this should happen only once if the tree structures maintains a "complete" size and maybe 2x otherwise. 
+ Ncur = len(tree) + _ABT_swap(tree, *st_visit.pop()) + + elif target < root: # shift up or parallel left + if target < 0: + raise ValueError(f"cannot move subtree to negative root index {target}") + + # perform a preorder traversal of the tree moving subtree elements at root_index to subtree rooted at target_index + deq_traverse = deque([(root, target)]) + while deq_traverse: + src, dest = deq_traverse.popleft() + if src < N and tree[src] is not None: + _ABT_swap(tree, src, dest) # swap source and destination + + root = _move(src, DIR_RIGHT) # move to right child of source + target = _move(dest, DIR_RIGHT) # move to right child of destination + deq_traverse.append((root, target)) # add right children to traversal deque + + root += DIR_LEFT - DIR_RIGHT # move to left child of source + target += DIR_LEFT - DIR_RIGHT # move to left child of destination + deq_traverse.append((root, target)) # add left children to traversal deque + return # else do nothing because it's not moving + +# tree = _ABT_rotate(tree, index, dir_, /) +# rotate the node represented by tree[index] in dir_ direction +# complexity: for a tree of size N and a subtree rooted at index of size M, this is O(M) in time and O(N) in space in the worst case +def _ABT_rotate(tree, index, dir_, /): + N = len(tree) + if index >= N or tree[index] is None: + return # do nothing because it cannot be rotated or rotating is meaningless + + left_child = _move(index, DIR_LEFT) + right_child = _move(index, DIR_RIGHT) + + if dir_ == DIR_LEFT: + if right_child >= N or tree[right_child] is None: # cannot rotate left + return + + # move index's left child subtree down to its left # down and left move + _ABT_move_subtree(tree, left_child, _move(left_child, DIR_LEFT)) + + #tree[left_child] = None # clear the position. should not be necessary + # swap index to its left child position + _ABT_swap(tree, index, left_child) + + # move index's right child's left subtree to index's left child's right subtree # left move + _ABT_move_subtree(tree, _move(right_child, DIR_LEFT), _move(left_child, DIR_RIGHT)) + + # swap index's right child to index + _ABT_swap(tree, index, right_child) + + # move index's right child's right subtree up to index's right subtree # up and left move + _ABT_move_subtree(tree, _move(right_child, DIR_RIGHT), right_child) + + elif dir_ == DIR_RIGHT: + if left_child >= N or tree[left_child] is None: # cannot rotate right + return + # if rotating right, the swaps require AT LEAST a tree of size right_child + 1 + if right_child >= N: + _ABT_extend(tree, right_child+1) + # move index's right child down to its right # down and right move + _ABT_move_subtree(tree, right_child, _move(right_child, DIR_RIGHT)) + + # swap index to its right child position + _ABT_swap(tree, index, right_child) + + # move index's left child's right subtree to index's right child's left subtree # right move + _ABT_move_subtree(tree, _move(left_child, DIR_RIGHT), _move(right_child, DIR_LEFT)) + + # swap index's left child to index + _ABT_swap(tree, index, left_child) + + # move index's left child's left subtree up to index's left subtree # up and right + _ABT_move_subtree(tree, _move(left_child, DIR_LEFT), left_child) + + else: + raise ValueError(f"rotation direction {dir_} in _ABT_rotate not understood") + + return tree + +# tree = _ABT_split_rotate(tree, index, dir_, /) # optimization on double rotations +# perform split rotation as defined in Nievergelt 1973.
This is obviously meant for weight-balanced trees, but is still generally applicable to binary trees +# this is a separate function because we are dealing with array representations and performing a "double-rotation" or two successive rotations is unnecessarily inefficient +# The way this is implemented, it's a bit of a misnomer to call it a rotation. A grandchild subtree is promoted to the root after pushing the root down + +# a double rotation would result in O(3*M/2) movements in data for a subtree of size M rooted at index, the split rotation below uses O(3*M/4) or half as much overhead +# there is a risk that we still trigger the O(N) increase in memory and time complexity if the subtree in dir_ is full height +def _ABT_split_rotate(tree, index, dir_, /): + N = len(tree) + if index >= N or tree[index] is None: + return # do nothing because it cannot be rotated or rotating is meaningless + + left_child = _move(index, DIR_LEFT) + right_child = _move(index, DIR_RIGHT) + + if dir_ == DIR_LEFT: + grandchild = _move(right_child, DIR_LEFT) + if right_child >= N or grandchild >= N or tree[right_child] is None or tree[grandchild] is None: # cannot rotate left + return + # move left_child to its left subtree # might incur O(N) memory increase and build time + _ABT_move_subtree(tree, left_child, _move(left_child, DIR_LEFT)) + + # swap index and left_child + _ABT_swap(tree, index, left_child) + + # swap grandchild and index + _ABT_swap(tree, index, grandchild) + + # move grandchild's left subtree to left_child's right subtree + _ABT_move_subtree(tree, _move(grandchild, DIR_LEFT), _move(left_child, DIR_RIGHT)) + + # move grandchild's right subtree to right_child's left subtree + _ABT_move_subtree(tree, _move(grandchild, DIR_RIGHT), _move(right_child, DIR_LEFT)) + + elif dir_ == DIR_RIGHT: + grandchild = _move(left_child, DIR_RIGHT) + if left_child >= N or grandchild >= N or tree[left_child] is None or tree[grandchild] is None: # cannot rotate right + return + if grandchild >= N: + _ABT_extend(tree, grandchild+1) + + # move right_child to its right subtree # might incur O(N) memory increase and build time + _ABT_move_subtree(tree, right_child, _move(right_child, DIR_RIGHT)) + # swap index and right_child + _ABT_swap(tree, index, right_child) + # swap grandchild and index + _ABT_swap(tree, index, grandchild) + # move grandchild's right subtree to right_child's left subtree + _ABT_move_subtree(tree, _move(grandchild, DIR_RIGHT), _move(right_child, DIR_LEFT)) + # move grandchild's left subtree to left_child's right subtree + _ABT_move_subtree(tree, _move(grandchild, DIR_LEFT), _move(left_child, DIR_RIGHT)) + + else: + raise ValueError(f"rotation direction {dir_} in _ABT_split_rotate not understood") + + return tree + +## Internal queries + +# index, size_t = _ABT_leftmost(tree, index=0, /) +def _ABT_leftmost(tree, index=0, /, *, path=None): + if path is None: + path = [] + N = len(tree) + while index < N and tree[index] is not None: + path.append(index) + index = _move(index, DIR_LEFT) + return path[-1], len(path)-1 + """ + depth = 0 + left = _move(index, DIR_LEFT) + while left < N and tree[left] is not None: + + depth += 1 + index, left = left, _move(left, DIR_LEFT) + return index, depth + """ + +# index, size_t = _ABT_rightmost(tree, index=0, /) +def _ABT_rightmost(tree, index=0, /, *, path=None): + if path is None: + path = [] + N = len(tree) + while index < N and tree[index] is not None: + path.append(index) + index = _move(index, DIR_RIGHT) + return path[-1], len(path)-1 + """ + depth = 0 + right = 
_move(index, DIR_RIGHT) + while right < N and tree[right] is not None: + depth += 1 + index, right = right, _move(right, DIR_RIGHT) + return index, depth + """ + +# index to leaf = _ABT_extremal_paths_to_leaves(tree, index, /) +# not sure this is useful +def _ABT_extremal_paths_to_leaves(tree, index=0, /): + if index >= len(tree) or tree[index] is None: + return None, None + + left_max, left_min = _ABT_extremal_paths_to_leaves(tree, _move(index, DIR_LEFT)) + right_max, right_min = _ABT_extremal_paths_to_leaves(tree, _move(index, DIR_RIGHT)) + if left_max is None and right_max is None: # node tree[index] is a leaf + return index, index + elif right_max is None: # left is not None + return left_max, left_min + elif left_max is None: # right is not None + return right_max, right_min + + return max(left_max, right_max), min(left_min, right_min) + +# [path of indices] = _ABT_path_to(tree, value, /, *, key=None) +def _ABT_path_to(tree, value, /, *, key=None): + st_out = [] # not really a stack; it's going to be the index for an occurrence of value + if key is None: + def _ABT_path_to_helper(tree, index, value, st_out): + if tree[index][VALUE_KEY] == value: + st_out.append(index) + return TRAVERSE_STOP + return TRAVERSE_GO + ABT_traverse(tree, _ABT_path_to_helper, value, st_out, traversal=TRAVERSE_POSTORDER) + else: + def _ABT_path_to_helper(tree, index, value, st_out, key): + if key(tree[index][VALUE_KEY]) == value: + st_out.append(index) + return TRAVERSE_STOP + return TRAVERSE_GO + ABT_traverse(tree, _ABT_path_to_helper, value, st_out, key, traversal=TRAVERSE_POSTORDER) + if st_out: + while st_out[-1] >= 0: + st_out.append(_move(st_out[-1], DIR_PARENT)) + st_out.pop() + st_out.reverse() + return st_out + +## Visualization + +# DEPRECATION WARNING: these draw commands will be removed + +# Draw Tree +def _ABT_draw_tree(tree): + def jumpto(x, y): + t.penup() + t.goto(x, y) + t.pendown() + def draw(tree, index, x, y, dx): + if index < len(tree) and tree[index] is not None: + t.goto(x, y) + jumpto(x, y-20) + t.write(tree[index][VALUE_KEY], align='center', font=('Arial', 12, 'normal')) + draw(tree, _move(index, DIR_LEFT), x-dx, y-60, dx/2) + jumpto(x, y-20) + draw(tree, _move(index, DIR_RIGHT), x+dx, y-60, dx/2) + # because turtle has this weird behavior that after it closes, you have to run it twice to get it back + try: + t = turtle.Turtle() + except Exception: + t = turtle.Turtle() + t.speed(0); turtle.delay(0) + h = _ABT_height(tree) + jumpto(0, 30*h) + draw(tree, 0, 0, 30*h, 40*h) + t.hideturtle() + turtle.mainloop() + +################################# Traversals ################################## +## func must be a function that returns TRAVERSE_GO to continue or TRAVERSE_STOP to return!
+## func must have signature: tree/stack trace of nodes, index/node +## Traversals themselves do not return anything + +# _ABT_inorder_traversal(tree, func, /, *args, **kwargs) +def _ABT_inorder_traversal(tree, func, /, *args, reverse=False, **kwargs): + if not reverse: + _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT + else: + _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT + st = [] + index = 0 + N = len(tree) + cont_cond = TRAVERSE_GO + while cont_cond and (st or (index < N and tree[index] is not None)): + # go down left side of sub-tree + while index < N and tree[index] is not None: + st.append(index) + index = _move(index, _DIR_LEADER) + + index = st.pop() + cont_cond = func(tree, index, *args, **kwargs) + index = _move(index, _DIR_FOLLOWER) + +# _ABT_preorder_traversal(tree, func, /, *args, **kwargs) +def _ABT_preorder_traversal(tree, func, /, *args, reverse=False, **kwargs): + if not reverse: + _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT + else: + _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT + st = [] + N = len(tree) + index = 0 + cont_cond = TRAVERSE_GO + st.append(index) + while cont_cond and st: + index = st.pop() + if index < N and tree[index] is not None: + cont_cond = func(tree, index, *args, **kwargs) + st.append(_move(index, _DIR_FOLLOWER)) + st.append(_move(index, _DIR_LEADER)) + +# _ABT_postorder_traversal(tree, func, /, *args, **kwargs) +# the trick to avoid O(N) memory is to actually traverse as preorder. as you +# are popping off the traversal stack, if the next value is the current node's +# right child, flip their order +def _ABT_postorder_traversal(tree, func, /, *args, reverse=False, **kwargs): + if not reverse: + _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT + else: + _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT + index = 0 + st = [index] + N = len(tree) + cont_cond = TRAVERSE_GO + while cont_cond: + index = st.pop() + # go down left side of sub-tree + while index < N and tree[index] is not None: + right = _move(index, _DIR_FOLLOWER) + if right < N and tree[right] is not None: + st.append(right) + st.append(index) + index = _move(index, _DIR_LEADER) + + index = st.pop() + right = _move(index, _DIR_FOLLOWER) + while st and st[-1] != right: + cont_cond = func(tree, index, *args, **kwargs) + index = st.pop() + right = _move(index, _DIR_FOLLOWER) + if st: + right = st.pop() + st.append(index) + st.append(right) + else: # if stack is empty at this point, run last index.
This can also go past the outer loop if additionally checked by cont_cond==TRAVERSE_GO + cont_cond = func(tree, index, *args, **kwargs) + cont_cond = TRAVERSE_STOP + +# _ABT_levelorder_traversal(tree, func, /, *args, **kwargs) +# this is a lot simpler if reverse=False +def _ABT_levelorder_traversal(tree, func, /, *args, reverse=False, **kwargs): + index = 0 + cont_cond = TRAVERSE_GO + N = len(tree) + level_size = 1 + level_start_index = 0 + if reverse: + delta = -1 + else: + delta = 1 + while cont_cond and index < N: + if tree[index] is not None: + cont_cond = func(tree, index, *args, **kwargs) + if reverse and index == level_start_index: + level_start_index += level_size + if level_start_index < N: + level_size <<= 1 + index = min(N, level_start_index + level_size) + else: + index = N+1 + index += delta + +## Traversal for parsing/creating + +# There is a copy of _inorder_to_level_order in ArrayBinaryTree.py +def _inorder_to_level_order(arr, /, *, inplace=False, binary_tree_type=BT_BALANCED): + N = len(arr) + out = [] + if not N: + return out + if binary_tree_type == BT_BALANCED: + deq = deque([(0, N)]) + while deq: + start, end = deq.popleft() + if start < N: + diff = end-start + if diff > 2: + root = _interval_root(start, end) + deq.append((start, root)) + deq.append((root+1, end)) + elif diff == 2: + root = start + 1 + deq.append((start, root)) + deq.append((N, N)) + elif diff == 1: + root = start + deq.append((N, N)) + deq.append((N, N)) + out.append(arr[root]) + else: + out.append(None) + + while out and out[-1] is None: + out.pop() + + if inplace: + for i in range(N): + arr[i] = out[i] + for i in range(N, len(out)): + arr.append(out[i]) + elif binary_tree_type == BT_COMPLETE: + N = len(arr) + if not N: + return [] + out = [None]*N + path = [] + i = 0 + node = 0 + while i < N and (node < N or path): + while node < N: + path.append(node) + node = _move(node, DIR_LEFT) + + node = path.pop() + out[node] = arr[i] + + i += 1 + node = _move(node, DIR_RIGHT) + + if inplace: + for i in range(N): + arr[i] = out[i] + else: + raise ValueError(f"binary tree to level order does not support creation to binary tree type {binary_tree_type}") + return out + +################################# Public API ################################## + +## Creation + +# tree = ABT_create(contents=None, Nmin=0, /, *, inplace=False, binary_tree_type='balanced' | 'complete', node_factory=None) +# does a shallow copy of contents if Iterable and not inplace +# node_factory is only meant for specializations that actually require nodes.
For simple BSTs, do not use node_factory unless you correct the key parameters for the node structure +def ABT_create(contents=None, Nmin=0, /, *, inplace=False, binary_tree_type=BT_BALANCED, node_factory=DEFAULT_NODE_FACTORY): + if contents is None: + return [None]*Nmin + + if isinstance(contents, Iterable): + if inplace: + if not isinstance(contents, list): + raise ValueError("cannot create an ABT in place with non-list contents") + else: + pass # contents is already a list that can be modified + else: + contents = list(contents) + else: + contents = [contents] + + if inplace: + _inorder_to_level_order(contents, inplace=True, binary_tree_type=binary_tree_type) + else: + contents = _inorder_to_level_order(contents, inplace=False, binary_tree_type=binary_tree_type) + + # extension only makes sense if already in level order + N = len(contents) + # CONSIDER: working in the application of node_factory into _inorder_to_level_order so that node creation happens at the same time as filling contents + # would cut down overhead by factor of 2, but then there would maybe be differences between the Linked version and the Array version + if node_factory is not None: + for i in range(N): + if contents[i] is not None: + contents[i] = node_factory(contents[i]) + if Nmin > N: + contents.extend([None]*(Nmin-N)) + return contents + +## Tree properties/geometry + +ABT_size = _ABT_size +ABT_height = _ABT_height +ABT_depth = _ABT_depth + +## Tree contents/queries/traversals + +# multiple dispatch might also work here, but we cannot do it by argument type +# ABT_traverse(tree, func, *args, traversal=TRAVERSE_INORDER, **kwargs) # for first release, if func=None, *args are ignored +def ABT_traverse(tree, func, *args, traversal=TRAVERSE_INORDER, reverse=False, **kwargs): + if traversal == TRAVERSE_INORDER: + _ABT_inorder_traversal(tree, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_PREORDER: + _ABT_preorder_traversal(tree, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_POSTORDER: + _ABT_postorder_traversal(tree, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_LEVELORDER: + _ABT_levelorder_traversal(tree, func, *args, reverse=reverse, **kwargs) + else: + raise ValueError(f"traversal option {traversal} not understood or not implemented for ABTs") + +# size_t = ABT_count(tree, value, /, *, key=None) +def ABT_count(tree, value, /, *, key=None): + result = [0] + if key is None: + def _ABT_count_helper(tree, index, value, result): + if tree[index][VALUE_KEY] == value: + result[0] += 1 + return TRAVERSE_GO + ABT_traverse(tree, _ABT_count_helper, value, result) + else: + def _ABT_count_helper(tree, index, value, key, result): + if key(tree[index][VALUE_KEY]) == value: + result[0] += 1 + return TRAVERSE_GO + ABT_traverse(tree, _ABT_count_helper, value, key, result) + return result[0] + +# [inorder indices] = _ABT_find(tree, value, number=-1, /, *, key=None) +# TODO: add reverse +# WARNING: don't use this, subject to change signature; specifically the output +def _ABT_find(tree, value, number=-1, /, *, key=None): + result = [] + counter = [-1] + if key is None: + def _ABT_find_helper(tree, index, value, number, result, counter): + counter[0] += 1 + if tree[index][VALUE_KEY] == value: + result.append(counter[0]) + if len(result) == number: + return TRAVERSE_STOP + return TRAVERSE_GO + ABT_traverse(tree, _ABT_find_helper, value, number, result, counter) + else: + def _ABT_find_helper(tree, index, value, key, number, result, counter): + counter[0] += 1 + if 
key(tree[index][VALUE_KEY]) == value: + result.append(counter[0]) + if len(result) == number: + return TRAVERSE_STOP + return TRAVERSE_GO + ABT_traverse(tree, _ABT_find_helper, value, key, number, result, counter) + return result + +# boolean = _ABT_contains(tree, value, /, *, key=None) +def ABT_contains(tree, value, /, *, key=None): + return len(_ABT_find(tree, value, 1, key=key)) > 0 + +## Tree mutations + +# tree = ABT_add(tree, value, /) +# node_factory is only meant for specializations that actually require nodes. For simple BSTs, do not use node_factory unless you correct the key parameters for the node structure +def ABT_add(tree, value, /, *, node_factory=DEFAULT_NODE_FACTORY): + result = [] + N = len(tree) + def _ABT_find_open_node(tree, index, end, result): + child = _move(index, DIR_LEFT) + if child >= end or tree[child] is None: + result.append(child) + return TRAVERSE_STOP + child = _move(index, DIR_RIGHT) + if child >= end or tree[child] is None: + result.append(child) + return TRAVERSE_STOP + return TRAVERSE_GO + ABT_traverse(tree, _ABT_find_open_node, N, result, traversal=TRAVERSE_LEVELORDER) + index = result.pop() + if index >= N: + _ABT_extend(tree, index+1) + if node_factory is None: + tree[index] = value + else: + tree[index] = node_factory(value) + return tree + +# tree = ABT_remove(tree, value, /, *, key=None) # raises KeyError if not found +# removes element by finding the element and then swapping it with the element +# furthest from its position and then deleting the node. This keeps sibling tree +# structures the same but brings the tree closer to balanced. Unfortunately, this method +# is meaningless for any of the specialized trees that would inherit from +# Binary Tree +def ABT_remove(tree, value, /, *, key=None): + path = _ABT_path_to(tree, value, key=key) + if not path: + raise KeyError(f"ABT_remove: key {value} not found in tree") + index = path.pop() + + if _ABT_is_leaf(tree, index): + if path: + tree[index] = None + return tree + else: # node is root and it is a leaf...destroy the root + tree.clear() + return tree + + left_max_path, left_min_path = _ABT_extremal_paths_to_leaves(tree, _move(index, DIR_LEFT)) + right_max_path, right_min_path = _ABT_extremal_paths_to_leaves(tree, _move(index, DIR_RIGHT)) + + if right_max_path is None or (left_max_path is not None and left_max_path > right_max_path): + # remove from left subtree + replacement = left_max_path + else: + # remove from right subtree + replacement = right_max_path + _ABT_swap(tree, index, replacement) # node to delete is now at replacement + tree[replacement] = None + return tree + +# tree = ABT_discard(tree, value, /, *, key=None) # wrapper for _ABT_remove that catches KeyError +def ABT_discard(tree, value, /, *, key=None): + try: + return ABT_remove(tree, value, key=key) + except KeyError: + return tree + +def ABT_equals(tree1, tree2): + N1, N2 = len(tree1), len(tree2) + if N1 < N2: + # tree1 should always be at least as big as tree2 + return ABT_equals(tree2, tree1) + + i = 0 + while i < N2: + if tree1[i] != tree2[i]: + return False + i += 1 + while i < N1: + if tree1[i] is not None: + return False + i += 1 + return True diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayHeap.py b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayHeap.py new file mode 100644 index 0000000000000000000000000000000000000000..0030b78779297410123e4379e0518da2acf507b6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayHeap.py @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Sep 6 20:14:49 2022 + +@author: jeffr +""" + diff --git
a/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayOrderStatisticTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayOrderStatisticTree.py new file mode 100644 index 0000000000000000000000000000000000000000..899a052a748610afcaaef7623c87150b0f3184d1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayOrderStatisticTree.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon Sep 5 10:17:37 2022 + +@author: jeffr +""" + +from collections.abc import Iterable +import dtlib.trees._ArrayBinarySearchTree as ABST +import dtlib.trees._ArrayBinaryTree as ABT +import dtlib.trees._Node as _Node +from dtlib.trees._constants import DIR_PARENT, DIR_LEFT, DIR_RIGHT, \ + VALUE_KEY, TRAVERSE_GO, TRAVERSE_STOP, BT_BALANCED, \ + TRAVERSE_LEVELORDER, DEFAULT_SEARCH_ORDER, LIST_NODE, \ + SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER + +# TODO: create iterator classes for each type of iteration +# TODO: Iterable = AOST_iter(tree, /, *, traversal='inorder') + +############################## Module globals ################################ + +## utilizing BLib.Trees._constants + +SIZE_KEY = 1 +DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {SIZE_KEY: 1}) + +################################ UTILITIES #################################### + +## comment out aliasing into namespace until they are actually needed. Left +## here for documentation purposes + +#_AOST_is_leaf = ABT._ABT_is_leaf +# faster method since we have access to size +def _AOST_is_leaf(tree, index): + return _AOST_size(tree, index) == 1 + +_AOST_move_index = ABT._ABT_move_index +_move = _AOST_move_index + +# O(1) instead of O(n) +def _AOST_size(tree, index): + if index >= len(tree) or tree[index] is None: + return 0 + return tree[index][SIZE_KEY] + +def _AOST_update_size(tree, index): + tree[index][SIZE_KEY] = 1 + _AOST_size(tree, _move(index, DIR_LEFT)) + _AOST_size(tree, _move(index, DIR_RIGHT)) + +_AOST_diameter = ABT._ABT_diameter + +_AOST_depth = ABT._ABT_depth + +_AOST_extend = ABT._ABT_extend + +_AOST_swap = ABT._ABT_swap + +_AOST_move_subtree = ABT._ABT_move_subtree + +_AOST_leftmost = ABT._ABT_leftmost + +_AOST_rightmost = ABT._ABT_rightmost + +_AOST_search_most = ABST._ABST_search_most + +_AOST_search = ABST._ABST_search + +# fairly certain this is still NlogM where M is the number of elements in the AOST and N is the number of k values, which is probably difficult to show; it is definitely true in the worst case +# this is still going to be faster than N calls to AOST_select as we do not have to restart at the root for the traversal +def _AOST_select_N(tree, k): + k = sorted(k) + M = len(k) + out = [None]*M + root = 0 + N = AOST_size(tree, root) + if not M or k[0] >= N: + return out + + i = 0 + _path = -1 + root_k = [AOST_size(tree, _move(root, DIR_LEFT))] + while i < M and k[i] < N: + while root_k[-1] != k[i]: + _path = root + if root_k[-1] < k[i]: + root = _move(root, DIR_RIGHT) + root_k.append(root_k[-1] + AOST_size(tree, _move(root, DIR_LEFT)) + 1) # add node's left subtree to k as well as parent node + else: + root = _move(root, DIR_LEFT) + root_k.append(root_k[-1] - AOST_size(tree, _move(root, DIR_RIGHT)) - 1) # remove right subtree and node from count + out[i] = tree[root][VALUE_KEY] + i += 1 + # reverse up the path until we find a node that would have resulted in a move to the right + # do not have to protect against _path >= 0 because it is only == -1 if len(root_k) == 1 + while i < M and len(root_k) > 1 and root_k[-2] <= k[i]: # meaning the current node's parent is smaller in k than target...move up to next parent + root = 
_path + _path = _move(_path, DIR_PARENT) + root_k.pop() + + return out + +################################# Traversals ################################## + +## using the public API from ABT as the implementation is identical + +################################# Public API ################################## + + +def AOST_create(contents=None, Nmin=0, /, *, key=None, inplace=False, binary_tree_type=BT_BALANCED, node_factory=DEFAULT_NODE_FACTORY): + return ABST.ABST_create(contents, Nmin, key=key, inplace=inplace, binary_tree_type=binary_tree_type, node_factory=node_factory) + +## Tree properties/geometry + +AOST_size = _AOST_size +AOST_height = ABT.ABT_height +AOST_depth = ABT.ABT_depth + +## Tree contents/queries/traversals + +AOST_traverse = ABT.ABT_traverse + +AOST_search = ABST.ABST_search + +def AOST_select(tree, k, /): + if isinstance(k, Iterable): + return _AOST_select_N(tree, sorted(k)) + N = len(tree) + if k >= AOST_size(tree, 0): + return None # probably should raise a ValueError instead + root = 0 + root_k = AOST_size(tree, _move(root, DIR_LEFT)) + while root < N and tree[root] is not None and root_k != k: + if root_k < k: + root = _move(root, DIR_RIGHT) # move root right + root_k += AOST_size(tree, _move(root, DIR_LEFT)) + 1 # add node's left subtree to k as well as parent node + else: + root = _move(root, DIR_LEFT) + root_k -= AOST_size(tree, _move(root, DIR_RIGHT)) + 1 # remove right subtree and node from count + if root >= N or tree[root] is None: # defensive; only reachable if size annotations are inconsistent + return None + return tree[root][VALUE_KEY] + +def AOST_rank(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + index = _AOST_search(tree, value, key=key, order=order) + if index >= len(tree) or tree[index] is None: + return None + # rank = size of the found node's left subtree, plus, for every ancestor we descended right past, that ancestor and its left subtree + rank = _AOST_size(tree, _move(index, DIR_LEFT)) + while index > 0: + parent = _move(index, DIR_PARENT) + if index == _move(parent, DIR_RIGHT): + rank += _AOST_size(tree, _move(parent, DIR_LEFT)) + 1 + index = parent + return rank + +AOST_contains = ABST.ABST_contains + +AOST_min = ABST.ABST_min + +AOST_max = ABST.ABST_max + +# CONSIDER: might be able to speed this up to O(k*logn) where k is the number of elements with the same value by utilizing _ABST_search +# WARNING: don't use this directly, subject to change signature; specifically the output +#ABST_find = ABT.ABT_find + +# currently O(N)...could be faster +#TODO: this is where it can certainly be faster by finding the rank of the last occurrence minus the rank of the first occurrence +#AOST_count = ABT.ABT_count +#TODO: really need to test the implementation.
This should be O(logN) +def AOST_count(tree, value, /, *, key=None): + last = AOST_rank(tree, value, key=key, order=SEARCH_LAST_INORDER) + if last is None: # value not present + return 0 + return last - AOST_rank(tree, value, key=key, order=SEARCH_FIRST_INORDER) + 1 + +def AOST_validate(tree, /, *, key=None, unique=False): + result = [True] + if key is None: + def validate_node(tree, index, result): + N = len(tree) + # left child + child = _move(index, DIR_LEFT) + left_size = 0 + if child < N and tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] > tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + left_size = tree[child][SIZE_KEY] + # right child + child = _move(index, DIR_RIGHT) + right_size = 0 + if child < N and tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] < tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + right_size = tree[child][SIZE_KEY] + if tree[index][SIZE_KEY] != 1 + left_size + right_size: # size annotation is wrong + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + AOST_traverse(tree, validate_node, result, traversal=TRAVERSE_LEVELORDER) + else: + def validate_node(tree, index, result, key): + N = len(tree) + # left child + child = _move(index, DIR_LEFT) + left_size = 0 + if child < N and tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c > r: # not sorted + result[0] = False + return TRAVERSE_STOP + left_size = tree[child][SIZE_KEY] + # right child + child = _move(index, DIR_RIGHT) + right_size = 0 + if child < N and tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c < r: # not sorted + result[0] = False + return TRAVERSE_STOP + right_size = tree[child][SIZE_KEY] + if tree[index][SIZE_KEY] != 1 + left_size + right_size: # size annotation is wrong + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + AOST_traverse(tree, validate_node, result, key, traversal=TRAVERSE_LEVELORDER) + return result[0] + +# path output will NOT include node added +def AOST_add(tree, value, /, *, key=None, unique=False, update=False, node_factory=DEFAULT_NODE_FACTORY, path=None): + if path is None: + path = [] + node = node_factory(value) + root = 0 + N = len(tree) + if key is None: + kvalue = value + while root < N and tree[root] is not None: + path.append(root) + kroot = tree[root][VALUE_KEY] + if kvalue < kroot: # move root left + root = _move(root, DIR_LEFT) + elif kvalue > kroot: # move root right + root = _move(root, DIR_RIGHT) + elif not unique and not update: # allowed to have duplicates + path.pop() + root = _AOST_search_most(tree, root, DIR_RIGHT, key=key, path=path) + root = _move(root, DIR_RIGHT) + if root < N and tree[root] is not None: + root, depth = _AOST_leftmost(tree, root, path=path) + root = _move(root, DIR_LEFT) + else: # the key is already found and unique is True; exit without making changes to tree.
+ #if update: # this really does nothing when key == None, but is needed if key != None + # tree[root][VALUE_KEY] = value + return tree # do nothing + else: + kvalue = key(value) + while root < N and tree[root] is not None: + path.append(root) + kroot = key(tree[root][VALUE_KEY]) + if kvalue < kroot: # move root left + root = _move(root, DIR_LEFT) + elif kvalue > kroot: # move root right + root = _move(root, DIR_RIGHT) + elif not unique and not update: # allowed to have duplicates + path.pop() + root = _AOST_search_most(tree, root, DIR_RIGHT, key=key, path=path) + root = _move(root, DIR_RIGHT) + if root < N and tree[root] is not None: + root, depth = _AOST_leftmost(tree, root, path=path) + root = _move(root, DIR_LEFT) + else: # the key is already found and unique is True; exit without making changes to tree. + if update: # this really does nothing when key == None, but is needed if key != None + tree[root][VALUE_KEY] = value + return tree # do nothing + + # at this point, root should be >= N or tree[root] is None, and the last element in path is the parent + # that is supposed to receive the new node + + # make sure tree is long enough + if root >= N: + _AOST_extend(tree, root+1) + + # insert the new node + tree[root] = node + for p in path: + tree[p][SIZE_KEY] += 1 + return tree + +# TODO: AOST_update(tree_dest, *other_AOSTs, /, unique=False) # merge trees + +# path output will NOT include node removed +def AOST_remove(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None): + if path is None: + path = [] + N = len(tree) + if not N: + raise ValueError(f"AOST_remove: empty tree does not contain {value}") + root = _AOST_search(tree, value, key=key, order=order, path=path) + # need to run through path and reduce size by 1 if element is actually removed + + if root >= N or tree[root] is None: # value not found + raise KeyError(f"AOST_remove: key {value} not found in tree") + + node_to_remove = tree[root] + if key is None: + assert node_to_remove[VALUE_KEY] == value + else: + assert key(node_to_remove[VALUE_KEY]) == value + + # if node to remove has children, need to replace the value, + if AOST_size(tree, root) > 1: # node is not a leaf, need to move up a descendant + #path_remove_index = len(_path) # reference to where the node in _path is that needs to change + + # traverse a different path depending on which subtree is larger.
+ # at the end, root holds the node to replace + if AOST_size(tree, _move(root, DIR_LEFT)) > AOST_size(tree, _move(root, DIR_RIGHT)): # left subtree is nonempty and bigger than right subtree so pull up the inorder predecessor + root, depth = _AOST_rightmost(tree, _move(root, DIR_LEFT), path=path) + replacement_node = tree[root] + tree[root] = None + _AOST_move_subtree(tree, _move(root, DIR_LEFT), root) + else: # right subtree is nonempty and at least as big as left subtree so pull up the inorder successor + root, depth = _AOST_leftmost(tree, _move(root, DIR_RIGHT), path=path) + replacement_node = tree[root] + tree[root] = None + _AOST_move_subtree(tree, _move(root, DIR_RIGHT), root) + # swap the value in root to node_to_remove; no need to move children + node_to_remove[VALUE_KEY], replacement_node[VALUE_KEY] = replacement_node[VALUE_KEY], node_to_remove[VALUE_KEY] + else: # node_to_remove is a leaf, just remove it from the parent + tree[root] = None + path.pop() # top before this is the "root" index that will be removed/set to null and all replacements and subtrees are unaffected by size changes + for p in path: + tree[p][SIZE_KEY] -= 1 + return tree + +#TODO need extensive modification...pull from ArrayWeightBalancedTree without _rebalance call +def AOST_discard(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None): + if path is None: + path = [] + try: + return AOST_remove(tree, value, key=key, order=order, path=path) + except KeyError: + return tree + diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayWeightBalancedTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayWeightBalancedTree.py new file mode 100644 index 0000000000000000000000000000000000000000..6a1828a9642dc63f6b4c6135fdde87e927024bd0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_ArrayWeightBalancedTree.py @@ -0,0 +1,251 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Sep 6 19:27:17 2022 + +@author: jeffr +""" + +import dtlib.trees._ArrayOrderStatisticTree as AOST +import dtlib.trees._ArrayBinarySearchTree as ABST +import dtlib.trees._ArrayBinaryTree as ABT +import dtlib.trees._Node as _Node +from dtlib.trees._constants import DIR_PARENT, DIR_LEFT, DIR_RIGHT, \ + VALUE_KEY, TRAVERSE_GO, TRAVERSE_STOP, BT_BALANCED, \ + TRAVERSE_LEVELORDER, DEFAULT_SEARCH_ORDER, LIST_NODE, \ + WBT_ALPHA, _WBT_DOUBLEROT_THRESH + +# TODO: create iterator classes for each type of iteration +# TODO: Iterable = AWBT_iter(tree, /, *, traversal='inorder') + +############################## Module globals ################################ + +## utilizing dtlib.trees._constants + +SIZE_KEY = 1 +DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {SIZE_KEY: 1}) + +################################ UTILITIES #################################### + +## comment out aliasing into namespace until they are actually needed.
Left +## here for documentation purposes + +_AWBT_is_leaf = AOST._AOST_is_leaf + +_AWBT_move_index = ABT._ABT_move_index +_move = _AWBT_move_index + +_AWBT_size = AOST._AOST_size + +_AWBT_update_size = AOST._AOST_update_size + +def _AWBT_balance(tree, index, /): + if index >= len(tree) or tree[index] is None: + return 0 + return (1 + _AWBT_size(tree, _move(index, DIR_LEFT))) / (1 + _AWBT_size(tree, index)) + +_AWBT_diameter = ABT._ABT_diameter + +_AWBT_depth = ABT._ABT_depth + +_AWBT_extend = ABT._ABT_extend + +_AWBT_swap = ABT._ABT_swap + +_AWBT_rotate = ABT._ABT_rotate + +_AWBT_split_rotate = ABT._ABT_split_rotate + +def _AWBT_rebalance(tree, _path, /): + while _path >= 0: + index = _path # now the last element, if present, is the parent of node + # establishes parent--> child relationship in linked nodes; should not be necessary + #if child is not None: + # node[dir_] = child + + bal_node = _AWBT_balance(tree, index) + if bal_node < WBT_ALPHA: # unbalanced subtree; rotate left or double rotation + # should be same as n->left->weight < alpha * n->weight in Brass p.64 + # but trying Blum's result...appears to work, but this is directly from Blum + # note: Brass's doesn't make sense; a little worried it is ad-hoc with its secondary value epsilon (delta in Blum's paper) + + # if right child's balance is leq a threshold, single rotation else double rotation + if _AWBT_balance(tree, _move(index, DIR_RIGHT)) <= _WBT_DOUBLEROT_THRESH: + _AWBT_rotate(tree, index, DIR_LEFT) + # establishes parent--> child relationship in linked nodes; should not be necessary + #if _path: # update subtree's parent-child relationship if it exists + # _path[-1][0][_path[-1][1]] = child + + # update from affected children on up + _AWBT_update_size(tree, _move(index, DIR_LEFT)) + _AWBT_update_size(tree, index) + else: + _AWBT_split_rotate(tree, index, DIR_LEFT) + + # update from affected children on up + _AWBT_update_size(tree, _move(index, DIR_RIGHT)) + _AWBT_update_size(tree, _move(index, DIR_LEFT)) + _AWBT_update_size(tree, index) + elif bal_node > (1-WBT_ALPHA): # unbalanced subtree; rotate right or double rotation + # should be same as n->right->weight < alpha * n->weight in Brass p.64 + # but trying Blum's result swapping right and left in definitions of child, grandchild + # note: Brass's doesn't make sense; a little worried it is ad-hoc with its secondary value epsilon (delta in Blum's paper) + + # I'm not sure this makes sense...need to go through Blum's paper with reverse geometry + # this makes sense in the sense that beta2 determines whether there are enough nodes in the grandchild subtree to warrant double rotation and moving the grandchild up + # this is what _AWBT_balance(child) for the rotate left condition does.
The corresponding measure in the rotate right condition is 1-_AWBT_balance(child) + # in any case, this passes a lot of randomized large tree building tests + # if left child's balance is leq a threshold, single rotation else double rotation + if 1 - _AWBT_balance(tree, _move(index, DIR_LEFT)) <= _WBT_DOUBLEROT_THRESH: + _AWBT_rotate(tree, index, DIR_RIGHT) + + # update affected subtree sizes + _AWBT_update_size(tree, _move(index, DIR_RIGHT)) + _AWBT_update_size(tree, index) # update current subtree root + else: + _AWBT_split_rotate(tree, index, DIR_RIGHT) + + # update affected subtree sizes + _AWBT_update_size(tree, _move(index, DIR_RIGHT)) + _AWBT_update_size(tree, _move(index, DIR_LEFT)) + _AWBT_update_size(tree, index) + else: # node is already balanced + pass + _path = _move(_path, DIR_PARENT) + return tree + +_AWBT_move_subtree = ABT._ABT_move_subtree + +_AWBT_leftmost = ABT._ABT_leftmost + +_AWBT_rightmost = ABT._ABT_rightmost + +_AWBT_search_most = ABST._ABST_search_most + +_AWBT_search = ABST._ABST_search + +_AWBT_select_N = AOST._AOST_select_N + +################################# Traversals ################################## + +## using the public API from ABT as the implementation is identical + +################################# Public API ################################## + +AWBT_create = AOST.AOST_create +## Tree properties/geometry + +AWBT_size = _AWBT_size +AWBT_height = ABT.ABT_height +AWBT_depth = ABT.ABT_depth + +## Tree contents/queries/traversals + +AWBT_traverse = ABT.ABT_traverse + +AWBT_search = ABST.ABST_search + +AWBT_select = AOST.AOST_select + +AWBT_rank = AOST.AOST_rank + +AWBT_contains = ABST.ABST_contains + +AWBT_min = ABST.ABST_min + +AWBT_max = ABST.ABST_max + +# CONSIDER: might be able to speed this up to O(k*logn) where k is the number of elements with the same value by utilizing _ABST_search +# WARNING: don't use this directly, subject to change signature; specifically the output +#ABST_find = ABT.ABT_find + +# currently O(N)...could be faster +#TODO: need to update AOST_count to be the faster algorithm +AWBT_count = AOST.AOST_count + +def AWBT_validate(tree, /, *, key=None, unique=False): + result = [True] + if key is None: + def validate_node(tree, index, result): + N = len(tree) + # left child + child = _move(index, DIR_LEFT) + left_size = 0 + if child < N and tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] > tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + left_size = tree[child][SIZE_KEY] + # right child + child = _move(index, DIR_RIGHT) + right_size = 0 + if child < N and tree[child] is not None: + if unique and tree[child][VALUE_KEY] == tree[index][VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif tree[child][VALUE_KEY] < tree[index][VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + right_size = tree[child][SIZE_KEY] + if tree[index][SIZE_KEY] != 1 + left_size + right_size: # size annotation is wrong + result[0] = False + return TRAVERSE_STOP + _balance = _AWBT_balance(tree, index) + if _balance < WBT_ALPHA or _balance > (1-WBT_ALPHA): + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + AWBT_traverse(tree, validate_node, result, traversal=TRAVERSE_LEVELORDER) + else: + def validate_node(tree, index, result, key): + N = len(tree) + # left child + child = _move(index, DIR_LEFT) + left_size = 0 + if child < N and tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c > r: # not sorted + result[0] = False + return TRAVERSE_STOP + left_size = tree[child][SIZE_KEY] + # right child + child = _move(index, DIR_RIGHT) + right_size = 0 + if child < N and tree[child] is not None: + c = key(tree[child][VALUE_KEY]) + r = key(tree[index][VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c < r: # not sorted + result[0] = False + return TRAVERSE_STOP + right_size = tree[child][SIZE_KEY] + if tree[index][SIZE_KEY] != 1 + left_size + right_size: # size annotation is wrong + result[0] = False + return TRAVERSE_STOP + _balance = _AWBT_balance(tree, index) + if _balance < WBT_ALPHA or _balance > (1-WBT_ALPHA): + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + AWBT_traverse(tree, validate_node, result, key, traversal=TRAVERSE_LEVELORDER) + return result[0] + +def AWBT_add(tree, value, /, *, key=None, unique=False, update=False, node_factory=DEFAULT_NODE_FACTORY): + path = [] + tree = AOST.AOST_add(tree, value, key=key, unique=unique, update=update, node_factory=node_factory, path=path) + if not path: # inserted at the root or no structural change; nothing to rebalance + return tree + return _AWBT_rebalance(tree, path[-1]) # does not consume path + +# TODO: AWBT_update(tree_dest, *other_AWBTs, /, unique=False) # merge trees + +def AWBT_remove(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + path = [] + root = AOST.AOST_remove(root, value, key=key, order=order, path=path) + if not path: # removed at the root; nothing to rebalance + return root + return _AWBT_rebalance(root, path[-1]) # consumes path + +#TODO need extensive modification...pull from ArrayWeightBalancedTree without _rebalance call +def AWBT_discard(tree, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + try: + return AWBT_remove(tree, value, key=key, order=order) + except KeyError: + return tree + diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinarySearchTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinarySearchTree.py new file mode 100644 index 0000000000000000000000000000000000000000..20b47f64a72b04022740c0c783a37b19f8901d9f --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinarySearchTree.py @@ -0,0 +1,456 @@ +# -*- coding: utf-8 -*- +""" +Created on Tue Aug 16 13:21:43 2022 + +@author: jeffr +""" + +from collections.abc import Iterable +import dtlib.trees._LinkedBinaryTree as LBT +import dtlib.trees._Node as _Node +from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, \ + DIR_PARENT, VALUE_KEY, TRAVERSE_GO, TRAVERSE_STOP, \ + TRAVERSE_LEVELORDER, SEARCH_FIRST_INORDER, \ + SEARCH_LAST_INORDER, DEFAULT_SEARCH_ORDER, BT_BALANCED, \ + BT_COMPLETE, LIST_NODE +from operator import lt, gt + +# TODO: create iterator classes for each type of iteration +# TODO: Iterable = LBST_iter(tree, /, *, traversal='inorder') + +############################## Module globals ################################ + +## utilizing BLib.Trees._constants + +DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None}) + +################################ UTILITIES #################################### + +## comment out aliasing into namespace until they are actually needed. Left +## here for documentation purposes + +_LBST_is_leaf = LBT._LBT_is_leaf + +_LBST_size = LBT.LBT_size + +_LBST_diameter = LBT._LBT_diameter + +_LBST_depth = LBT._LBT_depth + +_LBST_swap = LBT._LBT_swap + +_LBST_leftmost = LBT._LBT_leftmost + +_LBST_rightmost = LBT._LBT_rightmost + +# specialization of _LBST_search.
key(tree[index]) == key(value) or tree[index] == value must be satisfied +def _LBST_search_most(root, node=None, dir_=None, /, *, key=None, path=None): + if path is None: + path = [] + if node is not None: + root = node + if dir_ == DIR_LEFT: + _leader, _follower, cmp = DIR_LEFT, DIR_RIGHT, lt + else: + _leader, _follower, cmp = DIR_RIGHT, DIR_LEFT, gt + #most = root + if key is None: + value = root[VALUE_KEY] + while root is not None: + kroot = root[VALUE_KEY] + if kroot == value: + #most = root + path.append(root) + root = root[_leader] + elif cmp(kroot, value): # was kroot < value for _LBST_search_leftmost + path.append(root) + root = root[_follower] + else: + root = None # exit condition + #return most + while path[-1][VALUE_KEY] != value: + path.pop() + else: + value = key(root[VALUE_KEY]) + while root is not None: + kroot = key(root[VALUE_KEY]) + if kroot == value: + #most = root + path.append(root) + root = root[_leader] + elif cmp(kroot, value): # was kroot < value for _LBST_search_leftmost + path.append(root) + root = root[_follower] + else: + root = None # exit condition + #return most + while key(path[-1][VALUE_KEY]) != value: + path.pop() + #return most + return path[-1] + +# TODO: find a way to inject uniqueness into the search; otherwise the leftmost and rightmost are triggered and full O(height) is computed +def _LBST_search(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None): + if path is None: + path = [] + if key is None: + while root is not None: + path.append(root) + kroot = root[VALUE_KEY] + if kroot == value: + break # continue with root + elif kroot < value: + root = root[DIR_RIGHT] + else: + root = root[DIR_LEFT] + else: + while root is not None: + path.append(root) + kroot = key(root[VALUE_KEY]) + if kroot == value: + break # continue with root + elif kroot < value: + root = root[DIR_RIGHT] + else: + root = root[DIR_LEFT] + + if root is not None: + if order == SEARCH_FIRST_INORDER: + path.pop() + root = _LBST_search_most(root, None, DIR_LEFT, key=key, path=path) + elif order == SEARCH_LAST_INORDER: + path.pop() + root = _LBST_search_most(root, None, DIR_RIGHT, key=key, path=path) + else: + pass + return root + +################################# Traversals ################################## + +## using the public API from LBT as the implementation is identical + +################################# Public API ################################## + +def LBST_create(contents, /, *, key=None, binary_tree_type=BT_BALANCED, node_factory=DEFAULT_NODE_FACTORY): + if isinstance(contents, Iterable): + contents = sorted(contents, key=key) + else: + contents = [contents] + + return LBT.LBT_create(contents, binary_tree_type=binary_tree_type, node_factory=node_factory) + +## Tree properties/geometry + +LBST_size = _LBST_size +LBST_height = LBT.LBT_height +LBST_depth = _LBST_depth + +## Tree contents/queries/traversals + +LBST_traverse = LBT.LBT_traverse + +def LBST_search(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + node = _LBST_search(root, value, key=key, order=order) + if node is not None: + return node[VALUE_KEY] + return None + +def LBST_contains(root, value, /, *, key=None): + if LBST_search(root, value, key=key) is None: + return False + return True + +def LBST_min(root, node=None, /): + node, depth = _LBST_leftmost(root, node) + return node[VALUE_KEY] + +def LBST_max(root, node=None, /): + node, depth = _LBST_rightmost(root, node) + return node[VALUE_KEY] + +# TODO: LBST_rank + +# TODO: LBST_select(tree, k) # O(N), LBST_selectN(tree, k: Iterable) + 
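+# A quick usage sketch of the functional interface (illustrative only; assumes the DEFAULT_NODE_FACTORY above, i.e. LIST_NODE nodes whose node[VALUE_KEY] slot holds the payload): +# root = LBST_create([5, 2, 8, 6]) +# root = LBST_add(root, 4) +# LBST_contains(root, 4) # True +# LBST_min(root), LBST_max(root) # (2, 8) + 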
+# CONSIDER: might be able to speed this up to O(k*logn) where k is the number of elements with the same value by utilizing _ABST_search +# WARNING: don't use this directly, subject to change signature; specifically the output +#LBST_find = LBT.LBT_find + +# currently O(N)...could be faster +LBST_count = LBT.LBT_count + +def LBST_validate(root, /, *, key=None, unique=False): + result = [True] + if key is None: + def validate_node(st, node, result): + # left child + child = node[DIR_LEFT] + if child is not None: + if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif child[VALUE_KEY] > node[VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + # right child + child = node[DIR_RIGHT] + if child is not None: + if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique + result[0] = False + return TRAVERSE_STOP + elif child[VALUE_KEY] < node[VALUE_KEY]: # not sorted + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + LBST_traverse(root, validate_node, result, traversal=TRAVERSE_LEVELORDER) + else: + def validate_node(st, node, result, key): + # left child + child = node[DIR_LEFT] + if child is not None: + c = key(child[VALUE_KEY]) + r = key(node[VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c > r: # not sorted + result[0] = False + return TRAVERSE_STOP + # right child + child = node[DIR_RIGHT] + if child is not None: + c = key(child[VALUE_KEY]) + r = key(node[VALUE_KEY]) + if unique and c == r: # not unique + result[0] = False + return TRAVERSE_STOP + elif c < r: # not sorted + result[0] = False + return TRAVERSE_STOP + return TRAVERSE_GO + LBST_traverse(root, validate_node, result, key, traversal=TRAVERSE_LEVELORDER) + return result[0] + +def LBST_add(root, value, /, *, key=None, unique=False, update=False, node_factory=DEFAULT_NODE_FACTORY): + if root is None: + return node_factory(value) + node = root + parent = None + if key is None: + kvalue = value + while node is not None: + knode = node[VALUE_KEY] + if kvalue < knode: + parent = node + dir_ = DIR_LEFT + node = parent[dir_] + elif kvalue > knode: + parent = node + dir_ = DIR_RIGHT + node = parent[dir_] + elif not unique and not update: # allowed to have duplicates + parent = _LBST_search_most(root, node, DIR_RIGHT, key=key) + dir_ = DIR_RIGHT + node = parent[dir_] + if node is not None: + dir_ = DIR_LEFT + while node is not None: + parent = node + node = parent[dir_] + else: # kvalue == kroot and unique = True + if update: + node[VALUE_KEY] = value + return root + else: + kvalue = key(value) + while node is not None: + knode = key(node[VALUE_KEY]) + if kvalue < knode: + #print(f"moving left for {kvalue} < {knode}") + parent = node + dir_ = DIR_LEFT + node = parent[dir_] + elif kvalue > knode: + #print(f"moving right for {kvalue} < {knode}") + parent = node + dir_ = DIR_RIGHT + node = parent[dir_] + elif not unique and not update: # allowed to have duplicates + #print(f"not unique and not update...finding repeats for {kvalue} == {knode}") + parent = _LBST_search_most(root, node, DIR_RIGHT, key=key) + dir_ = DIR_RIGHT + node = parent[dir_] + if node is not None: + dir_ = DIR_LEFT + while node is not None: + parent = node + node = parent[dir_] + else: # kvalue == kroot and unique = True + #print(f"unique, possibly updating {kvalue} == {knode}") + if update: + node[VALUE_KEY] = value + return root + parent[dir_] = node_factory(value) + + return root + +# TODO: LBST_update(tree_dest, 
*other_LBSTs, /, unique=False) # merge trees + +def LBST_remove(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + path = [] + node = _LBST_search(root, value, key=key, path=path, order=order) + #print(node) + if len(path) > 1: + node_p = path[-2] + #parent = path[-2] + else: + node_p = None + #parent = None + if node is None: + raise KeyError(f"LBST_remove: key {value} not found in tree") + + if _LBST_is_leaf(node): + if node_p is not None: + if node_p[DIR_RIGHT] == node: + node_p[DIR_RIGHT] = None + else: + node_p[DIR_LEFT] = None + else: + root = None + elif node[DIR_RIGHT] is None: # node[DIR_LEFT] is not None + if node_p is None: + root = node[DIR_LEFT] + else: + if node_p[DIR_LEFT] == node: + node_p[DIR_LEFT] = node[DIR_LEFT] + else: + node_p[DIR_RIGHT] = node[DIR_LEFT] + node[DIR_LEFT] = None + elif node[DIR_LEFT] is None: # node[DIR_RIGHT] is not None + if node_p is None: + root = node[DIR_RIGHT] + else: + if node_p[DIR_LEFT] == node: + node_p[DIR_LEFT] = node[DIR_RIGHT] + else: + node_p[DIR_RIGHT] = node[DIR_RIGHT] + node[DIR_RIGHT] = None + else: + rchild = node[DIR_RIGHT] + rreppath = [] + rrep, rrep_depth = _LBST_leftmost(rchild, path=rreppath) + lchild = node[DIR_LEFT] + lreppath = [] + lrep, lrep_depth = _LBST_rightmost(lchild, path=lreppath) + + if rrep_depth >= lrep_depth: # replace with element from right subtree + if len(rreppath) == 1: # rrep is rchild; just remove node + rrep[DIR_LEFT] = lchild + node[DIR_LEFT] = None + node[DIR_RIGHT] = None + if node_p is None: # node is root + root = rrep + else: + if node_p[DIR_RIGHT] == node: + node_p[DIR_RIGHT] = rrep + else: + node_p[DIR_LEFT] = rrep + else: # rreppath[-2] is rrep's parent + _LBST_swap(node, rrep) # swap values... + node = rrep # node is now in rrep's original position + rreppath[-2][DIR_LEFT] = node[DIR_RIGHT] + node[DIR_RIGHT] = None + else: # replace with element from left subtree + if len(lreppath) == 1: # lrep is lchild; just remove node + lrep[DIR_RIGHT] = rchild + node[DIR_LEFT] = None + node[DIR_RIGHT] = None + if node_p is None: # node is root + root = lrep + else: + if node_p[DIR_RIGHT] == node: + node_p[DIR_RIGHT] = lrep + else: + node_p[DIR_LEFT] = lrep + else: # lreppath[-2] is lrep's parent + _LBST_swap(node, lrep) # swap values... 
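+ # after the value swap, lrep carries the value being removed, so lrep is the node that gets unlinked from its parent below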
+ node = lrep # node is now in lrep's original position + lreppath[-2][DIR_RIGHT] = node[DIR_LEFT] + node[DIR_LEFT] = None + + """ + if _LBST_is_leaf(node): + if parent is not None: + if parent[DIR_RIGHT] == node: + parent[DIR_RIGHT] = None + else: + parent[DIR_LEFT] = None + elif node[DIR_RIGHT] is None: # node[DIR_LEFT] is not None + if parent is None: + root = node[DIR_LEFT] + else: + if parent[DIR_LEFT] == node: + parent[DIR_LEFT] = node[DIR_LEFT] + else: + parent[DIR_RIGHT] = node[DIR_LEFT] + node[DIR_LEFT] = None + elif node[DIR_LEFT] is None: # node[DIR_RIGHT] is not None + if parent is None: + root = node[DIR_RIGHT] + else: + if parent[DIR_LEFT] == node: + parent[DIR_LEFT] = node[DIR_RIGHT] + else: + parent[DIR_RIGHT] = node[DIR_RIGHT] + node[DIR_RIGHT] = None + else: # node[DIR_RIGHT] and node[DIR_LEFT] are both not None + child = node[DIR_RIGHT] + path.clear() + path.append(parent) + path.append(node) + replacement, replacement_depth = _LBST_leftmost(child, path=path) + parent = path[-2] + #print("leftmost of right child", replacement, replacement_depth) + + child = node[DIR_LEFT] + path.clear() + path.append(parent) + path.append(node) + leaf, depth = _LBST_rightmost(child, path=path) + #print("rightmost of left child", leaf, depth) + if depth > replacement_depth: + replacement = leaf + parent = path[-2] + #print("taking rightmost of left child", replacement) + #else: + #print("taking leftmost of right child", replacement) + + if parent == node + + _LBST_swap(node, replacement) + node = replacement + # TODO: need to clean up this next bit of removing references to node + if not _LBST_is_leaf(node): + if parent[DIR_LEFT] == node: + parent[DIR_LEFT] = node[DIR_RIGHT] + node[DIR_RIGHT] = None + else: + parent[DIR_RIGHT] = node[DIR_LEFT] + node[DIR_LEFT] = None + else: + if parent[DIR_LEFT] == node: + parent[DIR_LEFT] = None + else: + parent[DIR_RIGHT] = None + + # destroy node + """ + return root + +def LBST_discard(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER): + try: + return LBST_remove(root, value, key=key, order=order) + except KeyError: + return root + +LBST_equals = LBT.LBT_equals \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinaryTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinaryTree.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c3399d18aa90db53d2a367aa41da7fa93b4bbc --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedBinaryTree.py @@ -0,0 +1,569 @@ +# -*- coding: utf-8 -*- +""" +Created on Fri Aug 19 21:30:30 2022 + +@author: jeffr +""" + +#import BLib +from collections import deque # for traversals +from collections.abc import Iterable +from dtlib.utils import _interval_root +from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, \ + DIR_PARENT, VALUE_KEY, BT_BALANCED, BT_COMPLETE, \ + TRAVERSE_GO, TRAVERSE_STOP, TRAVERSE_INORDER, TRAVERSE_PREORDER, \ + TRAVERSE_POSTORDER, TRAVERSE_LEVELORDER, LIST_NODE +#from BLib.Trees import BinaryTree as BT +#import BLib.Trees.BinaryTree as BT +import dtlib.trees._Node as _Node +import turtle # to be removed + +# TODO: all the classes +# TODO: create iterator classes for each type of iteration +# TODO: Iterable = ABT_iter(tree, /, *, traversal='inorder') + +############################## Module globals ################################ + +## most taken from dtlib.trees._constants + +DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None}) + +############################ Module 
+
+
+################################ UTILITIES ####################################
+
+## Node properties
+
+def _LBT_is_leaf(root, node=None, /):
+    if node is not None:
+        root = node
+    return root[DIR_LEFT] is None and root[DIR_RIGHT] is None
+
+## Tree creation
+
+# sequence_ is mutated!
+def _LBT_create_from_sequence(sequence_, *, binary_tree_type=BT_BALANCED, node_factory=_Node.Node_factory(LIST_NODE)): # key is not needed here since we are not sorting or using the value at all
+    _inorder_to_level_order(sequence_, inplace=True, binary_tree_type=binary_tree_type)
+    sequence_[0] = node_factory(sequence_[0])
+    parent = -1
+    for i in range(1, len(sequence_)):
+        if sequence_[i] is not None:
+            sequence_[i] = node_factory(sequence_[i])
+            # do not have to check if parent is None because BT_BALANCED and BT_COMPLETE binary trees in array form will always have non-None parents
+            if i & 1:
+                parent += 1
+                sequence_[parent][DIR_LEFT] = sequence_[i]
+            else:
+                sequence_[parent][DIR_RIGHT] = sequence_[i]
+
+    return sequence_[0]
+
+## Navigation
+
+def _LBT_move_index(index, dir_, /):
+    if dir_ == DIR_PARENT:
+        return ((index-1) >> 1)
+    #return (index << 1) + 1 + dir_ # depends on DIR_LEFT = 0
+    return (index << 1) + (1 if dir_ == DIR_LEFT else 2)
+
+_move = _LBT_move_index
+
+## Tree properties/geometry
+
+# size_t = _LBT_size(root, node=None, /) # if node is not None, root is set to the node
+def _LBT_size(root, node=None, /):
+    if node is not None:
+        root = node
+    if root is None:
+        return 0
+    return 1 + _LBT_size(root[DIR_LEFT]) + _LBT_size(root[DIR_RIGHT])
+
+def _LBT_diameter_helper(node, /):
+    if node is None:
+        return 0, 0
+    l_max_diameter, l_height = _LBT_diameter_helper(node[DIR_LEFT])
+    r_max_diameter, r_height = _LBT_diameter_helper(node[DIR_RIGHT])
+    return max(l_max_diameter, r_max_diameter, 1 + l_height + r_height), 1 + max(l_height, r_height)
+
+# size_t = _LBT_diameter(root, node=None, /)
+def _LBT_diameter(root, node=None, /):
+    if node is not None:
+        root = node
+    if root is None:
+        return 0
+    return _LBT_diameter_helper(root)[0]
+
+# O(n)
+# size_t = _LBT_depth(root, node=None, /) # depth of node below root; root itself has depth 1, 0 if node is not found
+def _LBT_depth(root, node=None, /):
+    # a height-difference formula (1 + height(root) - height(node)) only holds
+    # for perfectly height-balanced trees, so search for the node instead
+    if root is None:
+        return 0
+    if node is None or root is node:
+        return 1
+    d = _LBT_depth(root[DIR_LEFT], node)
+    if d:
+        return 1 + d
+    d = _LBT_depth(root[DIR_RIGHT], node)
+    return 1 + d if d else 0
+
+# size_t = _LBT_height(root, node=None, /) # if node is not None, root is set to node
+def _LBT_height(root, node=None, /):
+    if node is not None:
+        root = node
+    if root is None:
+        return 0
+    return 1 + max(_LBT_height(root[DIR_LEFT]), _LBT_height(root[DIR_RIGHT]))
+
+## Tree manipulation
+
+def _LBT_swap(node1, node2):
+    node1[VALUE_KEY], node2[VALUE_KEY] = node2[VALUE_KEY], node1[VALUE_KEY]
+
+# new_subtree_root = _LBT_rotate(node, dir_, /) # for double rotations, apply it repeatedly
+def _LBT_rotate(node, dir_, /):
+    if node is None:
+        return None
+
+    if dir_ == DIR_LEFT:
+        child = node[DIR_RIGHT]
+        if child is None: # cannot rotate node left because right child is None
+            return node
+        grandchild = child[DIR_LEFT]
+        child[DIR_LEFT] = node
+        node[DIR_RIGHT] = grandchild # this line really only makes sense for BinarySearchTrees
+    elif dir_ == DIR_RIGHT:
+        child = node[DIR_LEFT]
+        if child is None:
+            return node
+        grandchild = child[DIR_RIGHT]
+        child[DIR_RIGHT] = node
+        node[DIR_LEFT] = grandchild # this line really only makes sense for BinarySearchTrees
+    return child
+
+# TODO: _LBT_split_rotate to replace double rotations; this should be more efficient,
+# although not as much of a difference for Linked structures as opposed to Array structures
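+# Editor's note: an illustrative sketch (not in the original module) showing the
+# rotation primitive above on a three-node chain built with DEFAULT_NODE_FACTORY.
+def _LBT_rotate_example(): # hypothetical helper, for illustration only
+    a = DEFAULT_NODE_FACTORY(1)
+    b = DEFAULT_NODE_FACTORY(2)
+    c = DEFAULT_NODE_FACTORY(3)
+    a[DIR_RIGHT] = b
+    b[DIR_RIGHT] = c
+    # rotating `a` left promotes `b`: the chain 1 -> 2 -> 3 becomes 2 with children 1 and 3
+    new_root = _LBT_rotate(a, DIR_LEFT)
+    return new_root # `b`, with `a` as its left child and `c` as its right child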
+## Internal queries
+
+#TODO: change this to a directionless name and add a direction parameter
+# node, size_t = _LBT_leftmost(root, node=None, /)
+def _LBT_leftmost(root, node=None, /, *, path=None):
+    if path is None:
+        path = []
+    if node is not None:
+        root = node
+    if root is None:
+        return None, 0
+    while root is not None:
+        path.append(root)
+        root = root[DIR_LEFT]
+    return path[-1], len(path)-1
+
+# node, size_t = _LBT_rightmost(root, node=None, /)
+def _LBT_rightmost(root, node=None, /, *, path=None):
+    if path is None:
+        path = []
+    if node is not None:
+        root = node
+    if root is None:
+        return None, 0
+    while root is not None:
+        path.append(root)
+        root = root[DIR_RIGHT]
+    return path[-1], len(path)-1
+
+# [stack of nodes from leaf to root] = _LBT_extremal_paths_to_leaves(root, node=None, /)
+# not sure this is useful
+def _LBT_extremal_paths_to_leaves(root, node=None, /):
+    if node is not None:
+        root = node
+
+    if root is None:
+        return [], []
+
+    left_max, left_min = _LBT_extremal_paths_to_leaves(root[DIR_LEFT])
+    right_max, right_min = _LBT_extremal_paths_to_leaves(root[DIR_RIGHT])
+    if not left_max and not right_max: # root is a leaf
+        return [root], [root]
+    elif not right_max: # left is not empty
+        left_max.append(root)
+        left_min.append(root)
+        return left_max, left_min
+    elif not left_max: # right is not empty
+        right_max.append(root)
+        right_min.append(root)
+        return right_max, right_min
+
+    left_max.append(root)
+    left_min.append(root)
+    right_max.append(root)
+    right_min.append(root)
+    return left_max if len(left_max) > len(right_max) else right_max, left_min if len(left_min) <= len(right_min) else right_min
+
+# [path of nodes] = _LBT_path_to(root, value, /, *, key=None)
+# path is from the root to the node with value
+# WARNING: depends on the stack ordering of the iterative postorder traversal
+def _LBT_path_to(root, value, /, *, key=None):
+    st_out = []
+    if key is None:
+        def _LBT_path_to_helper(st, node, value, st_out):
+            if node[VALUE_KEY] == value:
+                st_out.extend(st)
+                st_out.append(node)
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_path_to_helper, value, st_out, traversal=TRAVERSE_POSTORDER)
+    else:
+        def _LBT_path_to_helper(st, node, value, st_out, key):
+            if key(node[VALUE_KEY]) == value:
+                st_out.extend(st)
+                st_out.append(node)
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_path_to_helper, value, st_out, key, traversal=TRAVERSE_POSTORDER)
+    # fix the stack, which holds the nodes on the path plus the not-yet-visited right children of each subtree root
+    if not st_out: # value not found
+        return st_out
+    path = [st_out.pop()]
+    while st_out:
+        # the top of the stack's child is the top of the path: for left children this is always true,
+        # for right children it is true once they have been visited but not traversed
+        if (st_out[-1][DIR_RIGHT] is path[-1]) or (st_out[-1][DIR_LEFT] is path[-1]):
+            path.append(st_out.pop())
+        else: # top of st_out is the right child of a subtree root that is not on the path...pop it
+            st_out.pop()
+    path.reverse()
+    return path
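+# Editor's note: an illustrative sketch (not in the original module) of the
+# root-to-node path query above, using the public LBT_create/LBT_traverse API
+# defined later in this file.
+def _LBT_path_to_example(): # hypothetical helper, for illustration only
+    root = LBT_create([0, 1, 2, 3, 4, 5, 6])
+    path = _LBT_path_to(root, 5)
+    return [n[VALUE_KEY] for n in path] # values from the root down to the node holding 5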
+## Visualization
+
+# DEPRECATION WARNING: these draw commands will be removed
+
+# Draw Tree
+def _LBT_draw_tree(root):
+    def height(root):
+        return 1 + max(height(root[DIR_LEFT]), height(root[DIR_RIGHT])) if root else 0
+    def jumpto(x, y):
+        t.penup()
+        t.goto(x, y)
+        t.pendown()
+    def draw(node, x, y, dx):
+        if node:
+            t.goto(x, y)
+            jumpto(x, y-20)
+            t.write(node[VALUE_KEY], align='center', font=('Arial', 12, 'normal'))
+            draw(node[DIR_LEFT], x-dx, y-60, dx/2)
+            jumpto(x, y-20)
+            draw(node[DIR_RIGHT], x+dx, y-60, dx/2)
+    # because turtle has this weird behavior that after it closes, you have to run it twice to get it back
+    try:
+        t = turtle.Turtle()
+    except Exception:
+        t = turtle.Turtle()
+    t.speed(0); turtle.delay(0)
+    h = height(root)
+    jumpto(0, 30*h)
+    draw(root, 0, 30*h, 40*h)
+    t.hideturtle()
+    turtle.mainloop()
+
+################################# Traversals ##################################
+## func must be a function that returns TRAVERSE_GO to continue or TRAVERSE_STOP to return!
+## func must have the signature: (tree/stack trace of nodes, index/node, *args, **kwargs)
+## Traversals themselves do not return anything
+
+# _LBT_inorder_traversal(root, func, /, *args, **kwargs)
+def _LBT_inorder_traversal(root, func, /, *args, reverse=False, **kwargs):
+    if not reverse:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT
+    else:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT
+    st = []
+    cont_cond = TRAVERSE_GO
+    node = root
+    while cont_cond and (st or node is not None):
+        # go down the leading side of the sub-tree
+        while node is not None:
+            st.append(node)
+            node = node[_DIR_LEADER]
+
+        node = st.pop()
+        cont_cond = func(st, node, *args, **kwargs)
+        node = node[_DIR_FOLLOWER]
+
+# _LBT_preorder_traversal(root, func, /, *args, **kwargs)
+def _LBT_preorder_traversal(root, func, /, *args, reverse=False, **kwargs):
+    if not reverse:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT
+    else:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT
+    st = []
+    cont_cond = TRAVERSE_GO
+    node = root
+    st.append(node)
+    while cont_cond and st:
+        node = st.pop()
+        if node is not None:
+            cont_cond = func(st, node, *args, **kwargs)
+            st.append(node[_DIR_FOLLOWER])
+            st.append(node[_DIR_LEADER])
+
+# _LBT_postorder_traversal(root, func, /, *args, **kwargs)
+def _LBT_postorder_traversal(root, func, /, *args, reverse=False, **kwargs):
+    if not reverse:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT
+    else:
+        _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT
+    cont_cond = TRAVERSE_GO
+    node = root
+    st = [node]
+    while cont_cond:
+        node = st.pop()
+        # go down the leading side of the sub-tree
+        while node is not None:
+            if node[_DIR_FOLLOWER] is not None:
+                st.append(node[_DIR_FOLLOWER])
+            st.append(node)
+            node = node[_DIR_LEADER]
+
+        node = st.pop()
+        while st and st[-1] != node[_DIR_FOLLOWER]:
+            cont_cond = func(st, node, *args, **kwargs)
+            node = st.pop()
+        if st:
+            st.pop()
+            st.append(node)
+            st.append(node[_DIR_FOLLOWER])
+        else: # if stack is empty at this point, run last index. 
This can also go past the outer loop if additionally checked by cont_cond==TRAVERSE_GO + cont_cond = func(st, node, *args, **kwargs) + cont_cond = TRAVERSE_STOP + +# _LBT_levelorder_traversal(root, func, /, *args, **kwargs) +def _LBT_levelorder_traversal(root, func, /, *args, reverse=False, **kwargs): + if not reverse: + _DIR_LEADER, _DIR_FOLLOWER = DIR_LEFT, DIR_RIGHT + else: + _DIR_LEADER, _DIR_FOLLOWER = DIR_RIGHT, DIR_LEFT + cont_cond = TRAVERSE_GO + node = root + deq = deque([node]) + while cont_cond and deq: + node = deq.popleft() + if node is not None: + deq.append(node[_DIR_LEADER]) + deq.append(node[_DIR_FOLLOWER]) + cont_cond = func(deq, node, *args, **kwargs) + +## Traversal for parsing/creating + +# There is a copy of _inorder_to_level_order in LinkedBinaryTree.py +def _inorder_to_level_order(arr, /, *, inplace=False, binary_tree_type=BT_BALANCED): + out = None + N = len(arr) + out = [] + if not N: + return out + if binary_tree_type == BT_BALANCED: + deq = deque([(0, N)]) + while deq: + start, end = deq.popleft() + if start < N: + diff = end-start + if diff > 2: + root = _interval_root(start, end) + deq.append((start, root)) + deq.append((root+1, end)) + elif diff == 2: + root = start + 1 + deq.append((start, root)) + deq.append((N, N)) + elif diff == 1: + root = start + deq.append((N, N)) + deq.append((N, N)) + out.append(arr[root]) + else: + out.append(None) + + while out and out[-1] is None: + out.pop() + + if inplace: + for i in range(N): + arr[i] = out[i] + for i in range(N, len(out)): + arr.append(out[i]) + elif binary_tree_type == BT_COMPLETE: + N = len(arr) + if not N: + return [] + out = [None]*N + path = [] + i = 0 + node = 0 + while i < N and (node < N or path): + while node < N: + path.append(node) + node = _move(node, DIR_LEFT) + + node = path.pop() + out[node] = arr[i] + + i += 1 + node = _move(node, DIR_RIGHT) + + if inplace: + for i in range(N): + arr[i] = out[i] + else: + raise ValueError(f"binary tree to level order does not support creation to binary tree type {binary_tree_type}") + return out + +################################# Public API ################################## + +## Creation + +# root = LBT_create(contents, /, *, inplace=False, binary_tree_type='balanced' : 'complete') # inplace is not used +def LBT_create(contents, /, *, binary_tree_type=BT_BALANCED, node_factory=DEFAULT_NODE_FACTORY): + if isinstance(contents, Iterable): + # function mutates list(contents) + return _LBT_create_from_sequence(list(contents), binary_tree_type=binary_tree_type, node_factory=node_factory) + else: + return node_factory(contents) + +## Tree properties/geometry + +LBT_size = _LBT_size +LBT_height = _LBT_height +LBT_depth = _LBT_depth + +## Tree contents/queries/traversals + +# multiple dispatch might also work here, but we cannot do it by argument type +# LBT_traverse(root, func, *args, traversal=TRAVERSE_INORDER, **kwargs) # for first release, if func=None, *args are ignored +def LBT_traverse(root, func, *args, traversal=TRAVERSE_INORDER, reverse=False, **kwargs): + if traversal == TRAVERSE_INORDER: + _LBT_inorder_traversal(root, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_PREORDER: + _LBT_preorder_traversal(root, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_POSTORDER: + _LBT_postorder_traversal(root, func, *args, reverse=reverse, **kwargs) + elif traversal == TRAVERSE_LEVELORDER: + _LBT_levelorder_traversal(root, func, *args, reverse=reverse, **kwargs) + else: + raise ValueError(f"traversal option {traversal} not 
understood or not implemented for LBTs")
+
+# size_t = LBT_count(root, value, /, *, key=None)
+def LBT_count(root, value, /, *, key=None):
+    result = [0]
+    if key is None:
+        def _LBT_count_helper(root, node, value, result):
+            if node[VALUE_KEY] == value:
+                result[0] += 1
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_count_helper, value, result)
+    else:
+        def _LBT_count_helper(root, node, value, key, result):
+            if key(node[VALUE_KEY]) == value:
+                result[0] += 1
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_count_helper, value, key, result)
+    return result[0]
+
+# [inorder indices] = _LBT_find(root, value, number=-1, /, *, key=None)
+# WARNING: don't use this, subject to change signature; specifically the output
+def _LBT_find(root, value, number=-1, /, *, key=None):
+    result = []
+    counter = [-1]
+    if key is None:
+        def _LBT_find_helper(root, node, value, number, result, counter):
+            counter[0] += 1
+            if node[VALUE_KEY] == value:
+                result.append(counter[0])
+                if len(result) == number:
+                    return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_find_helper, value, number, result, counter)
+    else:
+        def _LBT_find_helper(root, node, value, key, number, result, counter):
+            counter[0] += 1
+            if key(node[VALUE_KEY]) == value:
+                result.append(counter[0])
+                if len(result) == number:
+                    return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LBT_traverse(root, _LBT_find_helper, value, key, number, result, counter)
+    return result
+
+# boolean = LBT_contains(root, value, /, *, key=None)
+def LBT_contains(root, value, /, *, key=None):
+    return len(_LBT_find(root, value, 1, key=key)) > 0
+
+## Tree mutations
+
+# new_root = LBT_add(root, value, /)
+def LBT_add(root, value, /, node_factory=DEFAULT_NODE_FACTORY):
+    if root is None: # empty tree: the new node becomes the root
+        return node_factory(value)
+    result = []
+    def _LBT_find_open_node(root, node, result):
+        if node[DIR_LEFT] is None:
+            result.append((node, DIR_LEFT))
+            return TRAVERSE_STOP
+        if node[DIR_RIGHT] is None:
+            result.append((node, DIR_RIGHT))
+            return TRAVERSE_STOP
+        return TRAVERSE_GO
+    LBT_traverse(root, _LBT_find_open_node, result, traversal=TRAVERSE_LEVELORDER)
+    node, dir_ = result.pop()
+    child = node_factory(value)
+    node[dir_] = child
+    return root
+
+# new_root = LBT_remove(root, value, /, *, key=None) # raises KeyError if not found
+def LBT_remove(root, value, /, *, key=None):
+    path = _LBT_path_to(root, value, key=key)
+    if not path:
+        raise KeyError(f"LBT_remove: key {value} not found in tree")
+    node = path.pop()
+
+    if _LBT_is_leaf(node):
+        if path:
+            parent = path.pop()
+            dir_ = DIR_LEFT if node is parent[DIR_LEFT] else DIR_RIGHT
+            parent[dir_] = None
+            del node
+            return root
+        else: # node is root and it is a leaf...destroy the root
+            root = None
+            return root
+
+    # extremal paths are ordered [leaf, ..., subtree root]
+    left_max_path, left_min_path = _LBT_extremal_paths_to_leaves(node[DIR_LEFT])
+    right_max_path, right_min_path = _LBT_extremal_paths_to_leaves(node[DIR_RIGHT])
+
+    # replace with the deepest leaf so repeated removals do not skew the tree
+    max_path = left_max_path if len(left_max_path) > len(right_max_path) else right_max_path
+    replacement = max_path[0] # the deepest leaf, not the subtree root
+    parent = max_path[1] if len(max_path) > 1 else node
+    if parent[DIR_LEFT] is replacement:
+        parent[DIR_LEFT] = None
+    else:
+        parent[DIR_RIGHT] = None
+    replacement[VALUE_KEY], node[VALUE_KEY] = node[VALUE_KEY], replacement[VALUE_KEY]
+    del replacement
+    return root
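+# Editor's note: an illustrative sketch (not in the original module) driving the
+# mutation API above; it assumes the LBT_create function defined earlier in this file.
+def _LBT_mutation_example(): # hypothetical helper, for illustration only
+    root = LBT_create([0, 1, 2])
+    root = LBT_add(root, 3) # fills the first open child slot in level order
+    root = LBT_remove(root, 1) # raises KeyError if 1 is absent
+    root = LBT_discard(root, 42) # absent value: the tree is returned unchanged
+    return root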
+# new_root = LBT_discard(root, value, /, *, key=None) # wrapper for LBT_remove that catches KeyError
+def LBT_discard(root, value, /, *, key=None):
+    try:
+        return LBT_remove(root, value, key=key)
+    except KeyError:
+        return root
+
+# bool = LBT_equals(root1, root2, /)
+def LBT_equals(root1, root2):
+    if root1 is None or root2 is None:
+        return root1 is root2 # equal only if both are None
+    return root1[VALUE_KEY] == root2[VALUE_KEY] and \
+        LBT_equals(root1[DIR_LEFT], root2[DIR_LEFT]) and \
+        LBT_equals(root1[DIR_RIGHT], root2[DIR_RIGHT])
\ No newline at end of file
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedHeap.py b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedHeap.py
new file mode 100644
index 0000000000000000000000000000000000000000..89596858623a7ea61c1758c3b8830f571bd117a6
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedHeap.py
@@ -0,0 +1,7 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Sep 6 20:20:51 2022
+
+@author: jeffr
+"""
+
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedOrderStatisticTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedOrderStatisticTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..fabc4f7b7d8ef898ff42aa2d76e5a4c0997132d0
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedOrderStatisticTree.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Sep 5 23:31:58 2022
+
+@author: jeffr
+"""
+
+from collections.abc import Iterable
+import dtlib.trees._LinkedBinarySearchTree as LBST
+import dtlib.trees._LinkedBinaryTree as LBT
+import dtlib.trees._Node as _Node
+from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, \
+    VALUE_KEY, TRAVERSE_GO, TRAVERSE_STOP, BT_BALANCED, \
+    TRAVERSE_LEVELORDER, DEFAULT_SEARCH_ORDER, LIST_NODE, \
+    SEARCH_FIRST_INORDER, SEARCH_LAST_INORDER
+
+# TODO: create iterator classes for each type of iteration
+# TODO: Iterable = LOST_iter(tree, /, *, traversal='inorder')
+
+############################## Module globals ################################
+
+## utilizing dtlib.trees._constants
+
+SIZE_KEY = 3
+DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None, SIZE_KEY: 1})
+
+################################ UTILITIES ####################################
+
+## commented-out aliases into the namespace are left here for documentation
+## purposes until they are actually needed
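+# Editor's note: an illustrative sketch (not in the original module) of the
+# augmented node layout produced by DEFAULT_NODE_FACTORY above: a list node
+# [value, left, right, subtree_size], indexed by VALUE_KEY, DIR_LEFT, DIR_RIGHT
+# and SIZE_KEY.
+def _LOST_node_layout_example(): # hypothetical helper, for illustration only
+    node = DEFAULT_NODE_FACTORY(42)
+    assert node[VALUE_KEY] == 42
+    assert node[DIR_LEFT] is None and node[DIR_RIGHT] is None
+    assert node[SIZE_KEY] == 1 # a fresh node is a subtree of size 1
+    return node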
+
+#_LOST_is_leaf = LBT._LBT_is_leaf
+# faster method since we have access to size
+def _LOST_is_leaf(node):
+    return _LOST_size(node) == 1
+
+# O(1) instead of O(n)
+def _LOST_size(node):
+    if node is None:
+        return 0
+    return node[SIZE_KEY]
+
+def _LOST_update_size(root, node=None):
+    if node is not None:
+        root = node
+    if root is not None:
+        # children may be None, so take sizes through _LOST_size rather than indexing directly
+        root[SIZE_KEY] = 1 + _LOST_size(root[DIR_LEFT]) + _LOST_size(root[DIR_RIGHT])
+
+_LOST_diameter = LBT._LBT_diameter
+
+_LOST_depth = LBT._LBT_depth
+
+_LOST_swap = LBT._LBT_swap
+
+_LOST_leftmost = LBT._LBT_leftmost
+
+_LOST_rightmost = LBT._LBT_rightmost
+
+_LOST_search_most = LBST._LBST_search_most
+
+_LOST_search = LBST._LBST_search
+
+# fairly certain this is still O(N log M), where M is the number of elements in the tree and N is the number of k values; that is probably difficult to show, but it is definitely true in the worst case
+# this is still going to be faster than N calls to LOST_select as we do not have to restart at the root for the traversal
+def _LOST_select_N(root, k):
+    k = sorted(k)
+    M = len(k)
+    out = [None]*M
+    if root is None:
+        return out
+    N = root[SIZE_KEY]
+    i = 0
+    _path = []
+    root_k = [_LOST_size(root[DIR_LEFT])]
+    while i < M and k[i] < N:
+        while root_k[-1] != k[i]:
+            _path.append(root)
+            if root_k[-1] < k[i]:
+                root = root[DIR_RIGHT]
+                root_k.append(root_k[-1] + _LOST_size(root[DIR_LEFT]) + 1) # add the node's left subtree to k as well as the parent node
+            else:
+                root = root[DIR_LEFT]
+                root_k.append(root_k[-1] - _LOST_size(root[DIR_RIGHT]) - 1) # remove the right subtree and the node from the count
+        out[i] = root[VALUE_KEY]
+        i += 1
+        # reverse up the path until we find a node that would have resulted in a move to the right
+        while i < M and len(root_k) > 1 and root_k[-2] <= k[i]: # meaning the current node's parent is smaller in k than the target
+            root = _path.pop()
+            root_k.pop()
+
+    return out
+
+################################# Traversals ##################################
+
+## using the public API from LBT as the implementation is identical
+
+################################# Public API ##################################
+
+
+def LOST_create(contents=None, /, *, key=None, binary_tree_type=BT_BALANCED, node_factory=DEFAULT_NODE_FACTORY):
+    return LBST.LBST_create(contents, key=key, binary_tree_type=binary_tree_type, node_factory=node_factory)
+
+## Tree properties/geometry
+
+LOST_size = _LOST_size
+LOST_height = LBT.LBT_height
+LOST_depth = LBT.LBT_depth
+
+## Tree contents/queries/traversals
+
+LOST_traverse = LBT.LBT_traverse
+
+LOST_search = LBST.LBST_search
+
+def LOST_select(root, k):
+    if isinstance(k, Iterable):
+        return _LOST_select_N(root, k) # _LOST_select_N sorts its argument
+    if root is None:
+        return None # probably should raise a ValueError instead
+    root_k = LOST_size(root[DIR_LEFT])
+    while root is not None and root_k != k:
+        if root_k < k:
+            root = root[DIR_RIGHT] # move root right
+            root_k += LOST_size(root[DIR_LEFT]) + 1 # add the node's left subtree to k as well as the parent node
+        else:
+            root = root[DIR_LEFT]
+            root_k -= LOST_size(root[DIR_RIGHT]) + 1 # remove the right subtree and the node from the count
+    if root is None:
+        raise IndexError(f"LOST_select: rank {k} out of range")
+    return root[VALUE_KEY]
+
+def LOST_rank(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER):
+    path = []
+    node = _LOST_search(root, value, key=key, order=order, path=path)
+    if node is None:
+        return None
+    # rank = number of elements strictly before the node in inorder: the node's
+    # left subtree, plus each ancestor (and its left subtree) passed by a right turn
+    rank = _LOST_size(node[DIR_LEFT])
+    for parent, child in zip(path, path[1:]):
+        if parent[DIR_RIGHT] is child:
+            rank += _LOST_size(parent[DIR_LEFT]) + 1
+    return rank
+
+LOST_contains = LBST.LBST_contains
+
+LOST_min = LBST.LBST_min
+
+LOST_max = LBST.LBST_max
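+# Editor's note: an illustrative sketch (not in the original module) of the two
+# order-statistic queries above; on a duplicate-free tree, select is the inverse of rank.
+def _LOST_select_rank_example(): # hypothetical helper, for illustration only
+    root = LOST_create([10, 20, 30, 40, 50])
+    assert LOST_select(root, 0) == 10 # k is 0-based: the smallest element
+    assert LOST_select(root, 4) == 50
+    assert LOST_rank(root, 30) == 2 # two elements are smaller than 30
+    return root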
+
+# CONSIDER: might be able to speed this up to O(k*logn), where k is the number of elements with the same value, by utilizing _LOST_search
+# WARNING: don't use this directly, subject to change signature; specifically the output
+#LOST_find = LBT._LBT_find
+
+# currently O(N)...could be faster
+#TODO: this is where it can certainly be faster, by taking the rank of the last occurrence minus the rank of the first occurrence
+#LOST_count = LBT.LBT_count
+#TODO: really need to test the implementation. This should be O(logN)
+def LOST_count(tree, value, /, *, key=None):
+    first = LOST_rank(tree, value, key=key, order=SEARCH_FIRST_INORDER)
+    if first is None: # value not present
+        return 0
+    return LOST_rank(tree, value, key=key, order=SEARCH_LAST_INORDER) - first + 1
+
+def LOST_validate(root, /, *, key=None, unique=False):
+    result = [True]
+    if key is None:
+        def validate_node(st, node, result):
+            # left child
+            child = node[DIR_LEFT]
+            if child is not None:
+                if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif child[VALUE_KEY] > node[VALUE_KEY]: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # right child
+            child = node[DIR_RIGHT]
+            if child is not None:
+                if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif child[VALUE_KEY] < node[VALUE_KEY]: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # children may be None, so take sizes through _LOST_size
+            if node[SIZE_KEY] != 1 + _LOST_size(node[DIR_LEFT]) + _LOST_size(node[DIR_RIGHT]):
+                result[0] = False
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LOST_traverse(root, validate_node, result, traversal=TRAVERSE_LEVELORDER)
+    else:
+        def validate_node(st, node, result, key):
+            r = key(node[VALUE_KEY])
+            # left child
+            child = node[DIR_LEFT]
+            if child is not None:
+                c = key(child[VALUE_KEY])
+                if unique and c == r: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif c > r: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # right child
+            child = node[DIR_RIGHT]
+            if child is not None:
+                c = key(child[VALUE_KEY])
+                if unique and c == r: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif c < r: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # children may be None, so take sizes through _LOST_size
+            if node[SIZE_KEY] != 1 + _LOST_size(node[DIR_LEFT]) + _LOST_size(node[DIR_RIGHT]):
+                result[0] = False
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LOST_traverse(root, validate_node, result, key, traversal=TRAVERSE_LEVELORDER)
+    return result[0]
+
+#TODO: I am here in updating
+
+# path output will NOT include the node added
+def LOST_add(root, value, /, *, key=None, unique=False, update=False, node_factory=DEFAULT_NODE_FACTORY, path=None):
+    if path is None:
+        path = []
+    nvalue = node_factory(value)
+    if root is None:
+        return nvalue
+    node = root
+    dir_ = DIR_RIGHT
+    if key is None:
+        kvalue = value
+        while node is not None:
+            path.append(node)
+            kroot = node[VALUE_KEY]
+            if kvalue < kroot: # move node left
+                dir_ = DIR_LEFT
+                node = node[dir_]
+            elif kvalue > kroot: # move node right
+                dir_ = DIR_RIGHT
+                node = node[dir_]
+            elif not unique and not update: # allowed to have duplicates
+                path.pop()
+                node = _LOST_search_most(root, node, DIR_RIGHT, key=key, path=path)
+                dir_ = DIR_RIGHT
+                node = node[dir_]
+                if node is not None:
+                    node, depth = _LOST_leftmost(root, node, path=path)
+                    dir_ = DIR_LEFT
+                    node = node[dir_]
+            else: # the key is already found and unique is True; exit without making changes to tree.
+                #if update: # this really does nothing when key == None, but is needed if key != None
+                #    node[VALUE_KEY] = value
+                return root # do nothing
+    else:
+        kvalue = key(value)
+        while node is not None:
+            path.append(node)
+            kroot = key(node[VALUE_KEY])
+            if kvalue < kroot: # move node left
+                dir_ = DIR_LEFT
+                node = node[dir_]
+            elif kvalue > kroot: # move node right
+                dir_ = DIR_RIGHT
+                node = node[dir_]
+            elif not unique and not update: # allowed to have duplicates
+                path.pop()
+                node = _LOST_search_most(root, node, DIR_RIGHT, key=key, path=path)
+                dir_ = DIR_RIGHT
+                node = node[dir_]
+                if node is not None:
+                    node, depth = _LOST_leftmost(root, node, path=path)
+                    dir_ = DIR_LEFT
+                    node = node[dir_]
+            else: # the key is already found and unique is True; exit without making changes to the tree
+                if update: # this really does nothing when key == None, but is needed if key != None
+                    node[VALUE_KEY] = value
+                return root # do nothing
+
+    # at this point, node should be None and the last element in path is the parent
+    # that is supposed to receive the new node on the dir_ side
+
+    # insert the new node
+    path[-1][dir_] = nvalue
+    for p in path:
+        p[SIZE_KEY] += 1
+    return root
+
+# TODO: LOST_update(tree_dest, *other_LOSTs, /, unique=False) # merge trees
+
+# path output will NOT include the node removed
+def LOST_remove(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None):
+    if path is None:
+        path = []
+    if root is None:
+        raise KeyError(f"LOST_remove: empty tree does not contain {value}")
+    node = _LOST_search(root, value, key=key, order=order, path=path)
+    # need to run through path and reduce sizes by 1 if an element is actually removed
+
+    if node is None: # value not found
+        raise KeyError(f"LOST_remove: key {value} not found in tree")
+
+    node_to_remove = node
+    if key is None:
+        assert node_to_remove[VALUE_KEY] == value
+    else:
+        assert key(node_to_remove[VALUE_KEY]) == value
+
+    # if the node to remove has children, we need to replace its value
+    if LOST_size(node) > 1: # node is not a leaf, need to move up a descendant
+        # traverse a different path depending on which subtree is larger.
+        # at the end, `node` holds the replacement node
+        if LOST_size(node[DIR_LEFT]) > LOST_size(node[DIR_RIGHT]): # left subtree is nonempty and bigger than the right subtree, so pull up the inorder predecessor
+            node, depth = _LOST_rightmost(root, node[DIR_LEFT], path=path)
+
+            replacement_node = path.pop()
+            if path[-1] is node_to_remove: # the replacement's parent is the node to remove; skip over the replacement node
+                node_to_remove[DIR_LEFT] = replacement_node[DIR_LEFT]
+            else: # the rightmost node has parent path[-1], whose right child must be replaced with the replacement node's left child
+                path[-1][DIR_RIGHT] = replacement_node[DIR_LEFT]
+        else: # right subtree is nonempty and at least as big as the left subtree, so pull up the inorder successor
+            node, depth = _LOST_leftmost(root, node[DIR_RIGHT], path=path)
+            replacement_node = path.pop()
+            if path[-1] is node_to_remove: # the replacement's parent is the node to remove; skip over the replacement node
+                node_to_remove[DIR_RIGHT] = replacement_node[DIR_RIGHT]
+            else: # the leftmost node has parent path[-1], whose left child must be replaced with the replacement node's right child
+                path[-1][DIR_LEFT] = replacement_node[DIR_RIGHT]
+        # swap the value into node_to_remove; no need to move children
+        node_to_remove[VALUE_KEY] = replacement_node[VALUE_KEY]
+        replacement_node = None
+    else: # node_to_remove is a leaf, just remove it from the parent
+        path.pop()
+        if path:
+            if path[-1][DIR_RIGHT] is node_to_remove:
+                path[-1][DIR_RIGHT] = None
+            else:
+                path[-1][DIR_LEFT] = None
+            node_to_remove = None
+        else:
+            node_to_remove = None
+            return None
+
+    for p in path:
+        p[SIZE_KEY] -= 1
+    return root
+
+#TODO: needs extensive modification...pull from ArrayWeightBalancedTree without the _rebalance call
+def LOST_discard(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER, path=None):
+    if path is None:
+        path = []
+    try:
+        return LOST_remove(root, value, key=key, order=order, path=path)
+    except KeyError:
+        return root
+
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedWeightBalancedTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedWeightBalancedTree.py
new file mode 100644
index 0000000000000000000000000000000000000000..759ff9f55c1a45ceafcb8aba8d1e4f619dd90b20
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_LinkedWeightBalancedTree.py
@@ -0,0 +1,259 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Tue Sep 6 19:27:29 2022
+
+@author: jeffr
+"""
+
+import dtlib.trees._LinkedOrderStatisticTree as LOST
+import dtlib.trees._LinkedBinarySearchTree as LBST
+import dtlib.trees._LinkedBinaryTree as LBT
+import dtlib.trees._Node as _Node
+from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, \
+    VALUE_KEY, TRAVERSE_GO, TRAVERSE_STOP, BT_BALANCED, \
+    TRAVERSE_LEVELORDER, DEFAULT_SEARCH_ORDER, LIST_NODE, \
+    WBT_ALPHA, _WBT_DOUBLEROT_THRESH
+
+# TODO: create iterator classes for each type of iteration
+# TODO: Iterable = LWBT_iter(tree, /, *, traversal='inorder')
+
+############################## Module globals ################################
+
+## utilizing dtlib.trees._constants
+
+SIZE_KEY = 3
+DEFAULT_NODE_FACTORY = _Node.Node_factory(LIST_NODE, {DIR_LEFT: None, DIR_RIGHT: None, SIZE_KEY: 1})
+
+################################ UTILITIES ####################################
+
+## commented-out aliases into the namespace are left here for documentation
+## purposes until they are actually needed
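+# Editor's note: an illustrative sketch (not in the original module) of the
+# weight-balance invariant maintained below: for every node, the left-side
+# weight (1 + size(left))/(1 + size(node)) stays within [WBT_ALPHA, 1-WBT_ALPHA].
+def _LWBT_invariant_example(node): # hypothetical helper, for illustration only
+    balance = _LWBT_balance(node)
+    return WBT_ALPHA <= balance <= 1 - WBT_ALPHA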
+
+_LWBT_is_leaf = LOST._LOST_is_leaf
+
+_LWBT_size = LOST._LOST_size
+
+_LWBT_update_size = LOST._LOST_update_size
+
+def _LWBT_balance(node, /):
+    if node is None:
+        return 0
+    return (1 + _LWBT_size(node[DIR_LEFT]))/(1 + _LWBT_size(node))
+
+_LWBT_diameter = LBT._LBT_diameter
+
+_LWBT_depth = LBT._LBT_depth
+
+_LWBT_swap = LBT._LBT_swap
+
+_LWBT_rotate = LBT._LBT_rotate
+
+def _LWBT_rebalance(_path, /):
+    child = None # must be None for the initial loop iteration
+    dir_ = None
+    while _path:
+        node = _path.pop() # now the last element, if present, is the parent of node
+
+        # establishes the parent --> child relationship in linked nodes; rotated
+        # subtree roots are re-linked here on the next iteration
+        if child is not None:
+            node[dir_] = child
+        # this replaces the need to store an explicit direction in the path
+        dir_ = DIR_RIGHT if _path and _path[-1][DIR_RIGHT] is node else DIR_LEFT
+
+        bal_node = _LWBT_balance(node)
+        if bal_node < WBT_ALPHA: # unbalanced subtree; rotate left or double rotation
+            # should be the same as n->left->weight < alpha * n->weight in Brass p.64,
+            # but trying Blum's result...appears to work, and this is directly from Blum
+            # note: Brass's version doesn't make sense; a little worried it is ad hoc with its secondary value epsilon (delta in Blum's paper)
+
+            child = node[DIR_RIGHT]
+
+            # if the child's balance is <= a threshold, single rotation, else double rotation
+            if _LWBT_balance(child) <= _WBT_DOUBLEROT_THRESH:
+                child = _LWBT_rotate(node, DIR_LEFT)
+                _LWBT_update_size(child[DIR_LEFT]) # update the original node
+                _LWBT_update_size(child) # update the current subtree root
+            else:
+                child = _LWBT_rotate(child, DIR_RIGHT)
+                node[DIR_RIGHT] = child
+                child = _LWBT_rotate(node, DIR_LEFT)
+                # update affected subtree sizes
+                _LWBT_update_size(child[DIR_RIGHT])
+                _LWBT_update_size(child[DIR_LEFT])
+                _LWBT_update_size(child)
+        elif bal_node > (1-WBT_ALPHA): # unbalanced subtree; rotate right or double rotation
+            # should be the same as n->right->weight < alpha * n->weight in Brass p.64,
+            # but trying Blum's result with right and left swapped in the definitions of child and grandchild
+
+            child = node[DIR_LEFT]
+
+            # I'm not sure this makes sense...need to go through Blum's paper with the reverse geometry.
+            # It does make sense in that beta2 determines whether there are enough nodes in the grandchild
+            # subtree to warrant a double rotation that moves the grandchild up, which is what
+            # _LWBT_balance(child) measures for the rotate-left condition; the corresponding measure
+            # in the rotate-right condition is 1 - _LWBT_balance(child).
+            # in any case, this passes a lot of randomized large-tree building tests.
+            # if the child's balance is <= a threshold, single rotation, else double rotation
+            if 1 - _LWBT_balance(child) <= _WBT_DOUBLEROT_THRESH:
+                child = _LWBT_rotate(node, DIR_RIGHT)
+                # update affected subtree sizes
+                _LWBT_update_size(child[DIR_RIGHT]) # update the original node
+                _LWBT_update_size(child) # update the current subtree root
+            else:
+                child = _LWBT_rotate(child, DIR_LEFT)
+                node[DIR_LEFT] = child
+                child = _LWBT_rotate(node, DIR_RIGHT)
+                # update affected subtree sizes
+                _LWBT_update_size(child[DIR_LEFT])
+                _LWBT_update_size(child[DIR_RIGHT])
+                _LWBT_update_size(child) # child is the current subtree root
+        else: # node is already balanced
+            child = node # set child to the current subtree root
+
+    # when the path is empty, child is the last subtree root and therefore the new root
+    return child
+
+_LWBT_leftmost = LBT._LBT_leftmost
+
+_LWBT_rightmost = LBT._LBT_rightmost
+
+_LWBT_search_most = LBST._LBST_search_most
+
+_LWBT_search = LBST._LBST_search
+
+_LWBT_select_N = LOST._LOST_select_N
+
+################################# Traversals ##################################
+
+## using the public API from LBT as the implementation is identical
+
+################################# Public API ##################################
+
+LWBT_create = LOST.LOST_create
+
+## Tree properties/geometry
+
+LWBT_size = LOST._LOST_size
+LWBT_height = LBT.LBT_height
+LWBT_depth = LBT.LBT_depth
+
+## Tree contents/queries/traversals
+
+LWBT_traverse = LBT.LBT_traverse
+
+LWBT_search = LBST.LBST_search
+
+LWBT_select = LOST.LOST_select
+
+LWBT_rank = LOST.LOST_rank
+
+LWBT_contains = LBST.LBST_contains
+
+LWBT_min = LBST.LBST_min
+
+LWBT_max = LBST.LBST_max
+
+# currently O(N)...could be faster
+#TODO: need to update LOST_count to be the faster algorithm
+LWBT_count = LOST.LOST_count
+
+def LWBT_validate(root, /, *, key=None, unique=False):
+    result = [True]
+    if key is None:
+        def validate_node(st, node, result):
+            # left child
+            child = node[DIR_LEFT]
+            if child is not None:
+                if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif child[VALUE_KEY] > node[VALUE_KEY]: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # right child
+            child = node[DIR_RIGHT]
+            if child is not None:
+                if unique and child[VALUE_KEY] == node[VALUE_KEY]: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif child[VALUE_KEY] < node[VALUE_KEY]: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # children may be None, so take sizes through _LWBT_size
+            if node[SIZE_KEY] != 1 + _LWBT_size(node[DIR_LEFT]) + _LWBT_size(node[DIR_RIGHT]):
+                result[0] = False
+                return TRAVERSE_STOP
+            _balance = _LWBT_balance(node)
+            if _balance < WBT_ALPHA or _balance > (1-WBT_ALPHA):
+                result[0] = False
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LWBT_traverse(root, validate_node, result, traversal=TRAVERSE_LEVELORDER)
+    else:
+        def validate_node(st, node, result, key):
+            r = key(node[VALUE_KEY])
+            # left child
+            child = node[DIR_LEFT]
+            if child is not None:
+                c = key(child[VALUE_KEY])
+                if unique and c == r: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif c > r: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # right child
+            child = node[DIR_RIGHT]
+            if child is not None:
+                c = key(child[VALUE_KEY])
+                if unique and c == r: # not unique
+                    result[0] = False
+                    return TRAVERSE_STOP
+                elif c < r: # not sorted
+                    result[0] = False
+                    return TRAVERSE_STOP
+            # children may be None, so take sizes through _LWBT_size
+            if node[SIZE_KEY] != 1 + _LWBT_size(node[DIR_LEFT]) + _LWBT_size(node[DIR_RIGHT]):
+                result[0] = False
+                return TRAVERSE_STOP
+            _balance = _LWBT_balance(node)
+            if _balance < WBT_ALPHA or _balance > (1-WBT_ALPHA):
+                result[0] = False
+                return TRAVERSE_STOP
+            return TRAVERSE_GO
+        LWBT_traverse(root, validate_node, result, key, traversal=TRAVERSE_LEVELORDER)
+    return result[0]
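+# Editor's note: an illustrative sketch (not in the original module) of the
+# self-balancing wrappers below: every mutation returns a (possibly new) root
+# that should keep LWBT_validate satisfied.
+def _LWBT_usage_example(): # hypothetical helper, for illustration only
+    root = None
+    for v in (5, 3, 8, 1, 9, 7, 4): # inserting in arbitrary order
+        root = LWBT_add(root, v)
+    root = LWBT_remove(root, 8) # raises KeyError if 8 is absent
+    assert LWBT_validate(root) # order, sizes, and balance all hold
+    return root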
+
+#TODO: I am here in updating
+
+def LWBT_add(root, value, /, *, key=None, unique=False, update=False, node_factory=DEFAULT_NODE_FACTORY):
+    path = []
+    root = LOST.LOST_add(root, value, key=key, unique=unique, update=update, node_factory=node_factory, path=path)
+    if not path: # empty tree or no structural change: nothing to rebalance
+        return root
+    return _LWBT_rebalance(path) # consumes path
+
+# TODO: LWBT_update(tree_dest, *other_LWBTs, /, unique=False) # merge trees
+
+def LWBT_remove(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER):
+    path = []
+    root = LOST.LOST_remove(root, value, key=key, order=order, path=path)
+    if not path: # the tree is now empty
+        return root
+    return _LWBT_rebalance(path) # consumes path
+
+#TODO: needs extensive modification...pull from ArrayWeightBalancedTree without the _rebalance call
+def LWBT_discard(root, value, /, *, key=None, order=DEFAULT_SEARCH_ORDER):
+    try:
+        return LWBT_remove(root, value, key=key, order=order)
+    except KeyError:
+        return root
+
diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_Node.py b/venv/lib/python3.10/site-packages/dtlib/trees/_Node.py
new file mode 100644
index 0000000000000000000000000000000000000000..229596c30d052c8b7315443d6a3990d0c90eecba
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/dtlib/trees/_Node.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Sep 1 09:07:52 2022
+
+@author: jeffr
+"""
+
+from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, VALUE_KEY, \
+    LIST_NODE, DICT_NODE, CLASS_NODE, SLOTTED_CLASS_NODE
+
+# for when node_factory is compatible with ArrayBinary*Tree
+# there is maintenance and porting value in having cases where a node exists simply to store VALUE_KEY,
+# while only adding memory to the most basic of trees: BinaryTree when storage is Array: standard BSTs, Heaps, MinMaxHeaps, etc.
+# this also makes the translation to C/C++ more clear, as we can just swap None --> NULL (C) or --> nullptr (C++)
+def ListNode_factory(specs=None, /):
+    if specs is None:
+        specs = {}
+    _specs = [(VALUE_KEY, None)]
+    _specs.extend(specs.items())
+    def list_node_factory(*args):
+        Nargs = len(args)
+        return [args[i] if i < Nargs else _spec[1] for i, _spec in enumerate(_specs)]
+    return list_node_factory
+
+def DictNode_factory(specs=None, /):
+    if specs is None:
+        specs = {}
+    _specs = [(VALUE_KEY, None)]
+    _specs.extend(specs.items())
+    def dict_node_factory(*args):
+        Nargs = len(args)
+        return {_spec[0]: (args[i] if i < Nargs else _spec[1]) for i, _spec in enumerate(_specs)}
+    return dict_node_factory
+
+class ClassNode:
+    def __getitem__(self, key):
+        return self.__dict__[key]
+    def __setitem__(self, key, value):
+        self.__dict__[key] = value
+    def __str__(self):
+        return str(self[VALUE_KEY])
+
+def ClassNode_subclass_init(obj_self, *args):
+    Narg = len(args)
+    for i, _spec in enumerate(obj_self._specs):
+        obj_self.__dict__[_spec[0]] = args[i] if i < Narg else _spec[1]
+
+_next_class_node_subclass = 'ClassNode_0'
+ClassNode_subclasses = {}
+
+def _get_next_class_node_subclass():
+    global _next_class_node_subclass
+    out = _next_class_node_subclass
+    _next_class_node_subclass = 'ClassNode_' + str(int(_next_class_node_subclass.split('_')[-1]) + 1)
+    return out
+
+def ClassNode_factory(specs=None, name=None, /):
+    if specs is None:
+        specs = {}
+    _specs = [(VALUE_KEY, None),]
+    if name is None or name in ClassNode_subclasses:
+        if name in ClassNode_subclasses:
+            raise Warning(f"attempting to create a named subclass {name} of ClassNode that already exists")
+        name = _get_next_class_node_subclass()
+    _specs.extend(specs.items())
+    obj = type(name, (ClassNode, ), {'_specs': _specs, '__init__': ClassNode_subclass_init})
+    ClassNode_subclasses[name] = obj # register the subclass so duplicate names are detected
+    return obj
+
+class SlottedClassNode:
+    __slots__ = []
+    def __getitem__(self, key):
+        return getattr(self, self._slot_keys[key])
+    def __setitem__(self, key, value):
+        setattr(self, self._slot_keys[key], value)
+    def __str__(self):
+        return str(self[VALUE_KEY])
+
+def SlottedClassNode_subclass_init(obj_self, *args):
+    Narg = len(args)
+    for i, _spec in enumerate(obj_self._specs):
+        obj_self.__setitem__(_spec[0], args[i] if i < Narg else _spec[1])
+
+_next_slotted_class_node_subclass = 'SlottedClassNode_0'
+SlottedClassNode_subclasses = {}
+
+def _get_next_slotted_class_node_subclass():
+    global _next_slotted_class_node_subclass
+    out = _next_slotted_class_node_subclass
+    _next_slotted_class_node_subclass = 'SlottedClassNode_' + str(int(_next_slotted_class_node_subclass.split('_')[-1]) + 1)
+    return out
+
+def SlottedClassNode_factory(specs=None, name=None, /):
+    if specs is None:
+        specs = {}
+    _specs = [(VALUE_KEY, None)]
+    if name is None or name in SlottedClassNode_subclasses:
+        if name in SlottedClassNode_subclasses:
+            raise Warning(f"attempting to create a named subclass {name} of SlottedClassNode that already exists")
+        name = _get_next_slotted_class_node_subclass()
+    _specs.extend(specs.items())
+    _slots = ['_' + str(_spec[0]) for _spec in _specs]
+    _slot_map = {_specs[i][0]: _slots[i] for i in range(len(_specs))}
+    obj = type(name, (SlottedClassNode, ), {'_slot_keys': _slot_map, '_specs': _specs, '__slots__': _slots, '__init__': SlottedClassNode_subclass_init})
+    SlottedClassNode_subclasses[name] = obj # register the subclass so duplicate names are detected
+    return obj
+
+# returns the appropriate node factory for the requested storage mode
+# specs, if given, describes the extra node fields and their defaults
+def Node_factory(storage=LIST_NODE, specs=None, name=None, /):
+    if storage == LIST_NODE:
+        return 
ListNode_factory(specs) + elif storage == DICT_NODE: + return DictNode_factory(specs) + elif storage == CLASS_NODE: + return ClassNode_factory(specs, name) + elif storage == SLOTTED_CLASS_NODE: + return SlottedClassNode_factory(specs, name) + else: + raise ValueError(f"unknown storage mechanism {storage} for Binary Tree nodes") \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__init__.py b/venv/lib/python3.10/site-packages/dtlib/trees/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c0a02f4d27a195acd4c29201488b1d63586767fd --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/__init__.py @@ -0,0 +1,10 @@ +from . import _ArrayBinaryTree +from . import _LinkedBinaryTree +from . import BinaryTree +#from . import ArrayBinarySearchTree +#from . import LinkedBinarySearchTree +from . import BinarySearchTree +from . import _constants +from . import _config # TODO: remove dependencys in LinkedBinaryTree and remove + +#from BLib.Trees._config import * \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinarySearchTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinarySearchTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..beca1d576e66c8289530a33ca13caedf45b5f03a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinarySearchTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinaryTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinaryTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66bdec718d9eb1285962cb2934881d0bb37ef3fa Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/BinaryTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Heap.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Heap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73bf1f47ffdae3c7bcad3b94f6d6c3c824b77571 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Heap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/OrderStatisticTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/OrderStatisticTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fcbc7e0b46df041a110c67408984915103158e63 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/OrderStatisticTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Tree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Tree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7fe1f10123407c18f6efbfa89988a5afd9b534c Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/Tree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/WeightBalancedTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/WeightBalancedTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..febb01f2dbe54d4f4337a921c1cafba01d8e0071 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/WeightBalancedTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinarySearchTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinarySearchTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e96088be7fe9813078819d043292e98b3ff74d7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinarySearchTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinaryTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinaryTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ac5e1c5762e447ac6fec134000469b2f26bbdf5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayBinaryTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayHeap.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayHeap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b05129420201ff56ef3a300e048751d80d0af1a8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayHeap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayOrderStatisticTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayOrderStatisticTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e3baba493704206c738373dcbe00806d450326f Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayOrderStatisticTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayWeightBalancedTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayWeightBalancedTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7d0a2334974179bb2ccdc5a9d2fd0705baea8f1e Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_ArrayWeightBalancedTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinarySearchTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinarySearchTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae102fa7befe7214cc1caef23f4c210e5101179d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinarySearchTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinaryTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinaryTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30c400ad2ad1936e7d4d0a382a03697c33284132 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedBinaryTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedHeap.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedHeap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e14d33f94d17c0d4451216771e8ed2243062046 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedHeap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedOrderStatisticTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedOrderStatisticTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be400acccfb3ef6fd73a67a86b5d40943873e385 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedOrderStatisticTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedWeightBalancedTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedWeightBalancedTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a483ae6eb4685a5b9a091a4cc852be7f19490d43 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_LinkedWeightBalancedTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_Node.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_Node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3accce38982d163c5d39befc489c86a3c03272c9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_Node.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc50ecc8f7d4eb01448189d81642488d4c38d98d Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_config.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84ec822ab9362c9fc8e09cc9c518a96de0151c6a Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_constants.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_constants.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cd16e4047f189acf8284ec9e793908e7b0cf89ac Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/_constants.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8e9ea8fdc29b64dba5e7f5dbe647e266b8976b1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_config.py b/venv/lib/python3.10/site-packages/dtlib/trees/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..bad7ee2326f7b0d662c72973587fba9668ecbcd9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_config.py @@ -0,0 +1,11 @@ +LIST_NODE = 'l' +DICT_NODE = 'd' +CLASS_NODE = 'c' +SLOTTED_CLASS_NODE = 'cs' + +tree_params = {"TreeNode": LIST_NODE} + +def configure(**kwargs): + for k, v in 
kwargs.items(): + tree_params[k] = v + print(f"updated tree_params[{k}] = {v}") \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/_constants.py b/venv/lib/python3.10/site-packages/dtlib/trees/_constants.py new file mode 100644 index 0000000000000000000000000000000000000000..13cc6c276ad63b9c47c1d2b175d17bbdedf030eb --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/_constants.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +""" +Created on Wed Aug 31 21:25:36 2022 + +@author: jeffr +""" + +############################## Package globals ################################ + +## Creation/Construction + +LIST_NODE = 'l' +DICT_NODE = 'd' +CLASS_NODE = 'c' +SLOTTED_CLASS_NODE = 'cs' + +BT_BALANCED = 0 +BT_COMPLETE = 1 + +LINKED_STORAGE = 'Linked' +ARRAY_STORAGE = 'Array' + +## Navigation + +# Whereas most of the other globals are just identifiers for algorithm or +# structure selection, these are used as list indices and constants in +# algorithms. DO NOT CHANGE THESE +DIR_PARENT = -1 +VALUE_KEY = 0 +DIR_LEFT = 1 +DIR_RIGHT = 2 + +## Traversals + +TRAVERSE_STOP = False +TRAVERSE_GO = True + +TRAVERSE_INORDER = 0 +TRAVERSE_PREORDER = 1 +TRAVERSE_POSTORDER = 2 +TRAVERSE_LEVELORDER = 3 + +## BST + +SEARCH_FIRST_INORDER = -1 +SEARCH_FIRST_LEVELORDER = 0 +SEARCH_LAST_INORDER = 1 +DEFAULT_SEARCH_ORDER = SEARCH_FIRST_LEVELORDER + +## Weight-Balanced Tree + +WBT_ALPHA = 1-2**.5/2 # 2/11 < WBT_ALPHA < 1-sqrt(2)/2 - c(WBT_DELTA) # C(WBT_DELTA) defined in Blum, but we know c(0) = 0 + +# don't change this. Proof of upper bound in Nievergelt, Proof of lower bound in Blum +assert 2/11 <= WBT_ALPHA <= 1-2**.5/2 +WBT_DELTA = 0 # must be between 0 and 0.01; would prefer an analytic formula but have to follow Blum to identify, 0 corresponds to Nievergelt +_WBT_DOUBLEROT_THRESH = 1./(2-WBT_ALPHA) + WBT_DELTA/((1+(1+WBT_DELTA)*(1-WBT_ALPHA))*(2-WBT_ALPHA)) \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/tests/__init__.py b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10617d7b24d69286817971ae321141114275d573 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_ArrayBinaryTree.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_ArrayBinaryTree.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2694996c5710102fcb4a2fb7ff213bfb2e855d15 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_ArrayBinaryTree.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_Node.cpython-310.pyc b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_Node.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..42637385f63585fbbd09d202168ed1413e8e6632 Binary files /dev/null and b/venv/lib/python3.10/site-packages/dtlib/trees/tests/__pycache__/test_Node.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_ArrayBinaryTree.py b/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_ArrayBinaryTree.py new file mode 100644 index 0000000000000000000000000000000000000000..15d0174ed2f21d4a185d028f0692c6635db997af --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_ArrayBinaryTree.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon Aug 29 18:00:58 2022 + +@author: jeffr +""" + +import dtlib.trees._ArrayBinaryTree as BT +from unittest import TestCase as TC +import random +from dtlib.utils import _next_pow2 + +N = None + +class test_ABT(TC): + @classmethod + def setUpClass(cls): + cls.trees = [[(0,), (1,)], + [(0,), N, (2,)], + [(0,), (1,),(2,)], + [(0,), (1,),(2,), (3,),(4,),(5,),(6,)], + [(0,), (1,), N, (3,),(4,)], + [(0,), N, (2,), N, N,(5,),(6,)], + [(0,), (1,), (2,), (3,),N,(5,),(6,), N, N, N, N, (11,)], + [(0,), (1,), (2,), (3,),(4,),N,N, (7,),(8,),(9,),N,N,N,N,N, (15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N, (31,)]] + cls.leaves = [{(1,)}, + {(2,)}, + {(1,), (2,)}, + {(3,), (4,), (5,), (6,)}, + {(3,), (4,)}, + {(5,), (6,)}, + {(3,), (6,), (11,)}, + {(2,), (8,), (9,), (16,), (31,)}] + + def test_ABT_swap(self): + for i, tree in enumerate(self.trees): + size = len(tree) # do not shadow the module-level N (None), or the empty-slot checks below break + ind1 = random.randrange(0, size) + while tree[ind1] is None: + ind1 = random.randrange(0, size) + ind2 = random.randrange(0, size) + while tree[ind2] is None or ind2 == ind1: + ind2 = random.randrange(0, size) + old1, old2 = tree[ind1], tree[ind2] + BT._ABT_swap(tree, ind1, ind2) + self.assertEqual(old1, tree[ind2], f"{ind1} and {ind2} did not swap in tree[{i}]") + self.assertEqual(old2, tree[ind1], f"{ind1} and {ind2} did not swap in tree[{i}]") + + # reset to original tree + BT._ABT_swap(tree, ind1, ind2) + self.assertEqual(old1, tree[ind1], f"{ind2} and {ind1} did not swap in tree[{i}]") + self.assertEqual(old2, tree[ind2], f"{ind2} and {ind1} did not swap in tree[{i}]") + + def test_ABT_extend(self): + for i, tree in enumerate(self.trees): + size = len(tree) + Ntarget = _next_pow2(size)-1 + BT._ABT_extend(tree, Ntarget) + self.assertEqual(len(tree), Ntarget, f"tree[{i}] did not extend from {size} to {Ntarget}") + for j in range(size, Ntarget): + self.assertIsNone(tree[j], f"tree[{i}][{j}] was not initialized to None when extending") + while len(tree) > size: + tree.pop() + + def test_ABT_move_index(self): + cases = [(0, BT.DIR_PARENT, -1), + (2, BT.DIR_PARENT, 0), + (3, BT.DIR_PARENT, 1), + (5, BT.DIR_PARENT, 2), + (0, BT.DIR_LEFT, 1), + (0, BT.DIR_RIGHT, 2), + (1, BT.DIR_RIGHT, 4), + (2, BT.DIR_LEFT, 5)] + for case in cases: + self.assertEqual(BT._move(case[0], case[1]), case[2], f"moving {case[0]} to {'RIGHT' if case[1] == BT.DIR_RIGHT else 'LEFT' if case[1] == BT.DIR_LEFT else 'PARENT'} not equal to {case[2]}") + + def test_ABT_is_leaf(self): + for i, leaves in enumerate(self.leaves): + tree = self.trees[i] + candidates = leaves.copy() + for j in range(len(tree)): + if BT._ABT_is_leaf(tree, j): + try: + candidates.remove(tree[j]) # raises KeyError if this node is not an expected leaf + except KeyError: + self.fail(f"falsely detected trees[{i}] has leaf at {j}: {tree[j]}") + self.assertSetEqual(candidates, set(), f"failed to detect leaves {candidates} in trees[{i}]") + + def test_ABT_equals(self): + for i in range(len(self.trees)): + for j in range(len(self.trees)): + if i != j: + self.assertFalse(BT.ABT_equals(self.trees[i], self.trees[j]), f"ABT_equals failed to assert different trees at ({i},{j})") + else: + cpy = self.trees[j].copy() + self.assertTrue(BT.ABT_equals(self.trees[i], cpy), f"ABT_equals failed to 
assert same tree ({i})") + cpy.append(N) + self.assertTrue(BT.ABT_equals(self.trees[i], cpy), f"ABT_equals failed to assert same tree after appending N at ({i})") + + # TODO: Need to test moving a large subtree either here (where I cannot visualize the output since moving a subtree alone will most often result in an invalid binary tree) or rotate + def test_ABT_move_subtree(self): + cases = [(0, 1, 2, [(0,), N,(1,)]), + (1, 2, 1, [(0,), (2,),N]), + (2, 2, 3, [(0,), (1,),N, (2,)]), + (3, 6, 11, [(0,), (1,),(2,),(3,),(4,), (5,),N,N,N,N,N,(6,)]), + #(3, 1, 3, [(0,), N, (2,), (1,), N, (5,), (6, ), (3,), (4,)]), # this was screwing up my rotate + (4, 4, 2, [(0,), (1,),(4,), (3,),N]), + (5, 6, 1, [(0,), (6,),(2,), N,N,(5,),N]), + (5, 5, 1, [(0,), (5,),(2,), N,N,N,(6,)]), + (6, 11, 4, [(0,), (1,),(2,), (3,),(11,),(5,),(6,), N,N,N,N,N]), + (6, 11, 7, [(0,), (1,),(2,), (3,),N,(5,),(6,), (11,),N,N,N,N]), + (6, 11, 8, [(0,), (1,),(2,), (3,),N,(5,),(6,), N,(11,),N,N,N]), + (6, 2, 4, [(0,), (1,),N, (3,),(2,),N,N, N,N,(5,),(6,),N,N,N,N, N,N,N,N,(11,)]), + (7, 1, 3, [(0,), N,(2,), (1,),N,N,N, (3,),(4,),N,N,N,N,N,N, (7,),(8,),(9,),N,N,N,N,N,N,N,N,N,N,N,N,N, (15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N, (31,)]), + (7, 1, 4, [(0,), N,(2,), N,(1,),N,N, N,N,(3,),(4,),N,N,N,N, N,N,N,N,(7,),(8,),(9,),N,N,N,N,N,N,N,N,N, N,N,N,N,N,N,N,N,(15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N, N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,(31,)]), + (7, 1, 5, [(0,), N,(2,), N,N,(1,),N, N,N,N,N,(3,),(4,),N,N, N,N,N,N,N,N,N,N,(7,),(8,),(9,),N,N,N,N,N, N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,(15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N, N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,(31,)]), + (7, 1, 7, [(0,), N,(2,), N,N,N,N, (1,),N,N,N,N,N,N,N, (3,),(4,),N,N,N,N,N,N,N,N,N,N,N,N,N,N, (7,),(8,),(9,),N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N, (15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N, (31,)])] + for i, case in enumerate(cases): + index, src, dest, res = case + #print('\n'.join(str(c) for c in case) + '\n') + #BT._ABT_draw_tree(self.trees[index]) + cpy = self.trees[index].copy() + BT._ABT_move_subtree(cpy, src, dest) + self.assertTrue(BT.ABT_equals(cpy, res), f"_ABT_move_subtree failed for case {case[:3]}\n{res} and \n{cpy}") + # undo + BT._ABT_move_subtree(cpy, dest, src) + self.assertTrue(BT.ABT_equals(cpy, self.trees[index]), f"_ABT_move_subtree reverse failed for case {case[:3]}\n{self.trees[index]} --> {cpy}") + + def test_ABT_rotate(self): + cases = [(0, 0, BT.DIR_LEFT, [(0,), (1,)], N), # should not change as it is an illegal rotate + (0, 1, BT.DIR_LEFT, [(0,), (1,)], N), # should not change as it is an illegal rotate + (0, 2, BT.DIR_LEFT, [(0,), (1,)], N), # should not change as it is an illegal rotate + (0, 0, BT.DIR_RIGHT, [(1,), N,(0,)], BT.DIR_LEFT), + (1, 0, BT.DIR_LEFT, [(2,), (0,),N], BT.DIR_RIGHT), + (1, 0, BT.DIR_RIGHT, [(0,), N,(2,)], N), + (1, 1, BT.DIR_RIGHT, [(0,), N,(2,)], N), + (1, 2, BT.DIR_RIGHT, [(0,), N,(2,)], N), + (3, 0, BT.DIR_LEFT, [(2,), (0,),(6,), (1,),(5,),N,N, (3,),(4,)], BT.DIR_RIGHT), + (3, 0, BT.DIR_RIGHT, [(1,), (3,),(0,), N,N,(4,),(2,), N,N,N,N,N,N,(5,),(6,)], BT.DIR_LEFT), + (4, 0, BT.DIR_RIGHT, [(1,), (3,),(0,), N,N,(4,)], BT.DIR_LEFT), + (4, 1, BT.DIR_LEFT, [(0,), (4,),N, (1,),N,N,N, (3,)], BT.DIR_RIGHT), + (4, 1, BT.DIR_RIGHT, [(0,), (3,),N, N,(1,),N,N, N,N,N,(4,)], BT.DIR_LEFT), + (5, 0, BT.DIR_LEFT, [(2,), (0,),(6,), N,(5,)], BT.DIR_RIGHT), + (5, 2, 
BT.DIR_LEFT, [(0,), N,(6,), N,N,(2,),N, N,N,N,N,(5,)], BT.DIR_RIGHT), + (5, 2, BT.DIR_RIGHT, [(0,), N,(5,), N,N,N,(2,), N,N,N,N,N,N,N,(6,)], BT.DIR_LEFT), + (6, 5, BT.DIR_RIGHT, [(0,), (1,),(2,), (3,),N,(11,),(6,), N,N,N,N,N,(5,)], BT.DIR_LEFT), + (6, 2, BT.DIR_RIGHT, [(0,), (1,),(5,), (3,),N,(11,),(2,), N,N,N,N,N,N,N,(6,)], BT.DIR_LEFT), + (6, 2, BT.DIR_LEFT, [(0,), (1,),(6,), (3,),N,(2,),N, N,N,N,N,(5,),N,N,N, N,N,N,N,N,N,N,N,(11,)], BT.DIR_RIGHT), + (6, 0, BT.DIR_RIGHT, [(1,), (3,),(0,), N,N,N,(2,), N,N,N,N,N,N,(5,),(6,), N,N,N,N,N,N,N,N,N,N,N,N,(11,)], BT.DIR_LEFT), + (7, 0, BT.DIR_LEFT, [(2,), (0,),N, (1,),N,N,N, (3,),(4,),N,N,N,N,N,N, (7,),(8,),(9,),N,N,N,N,N,N,N,N,N,N,N,N,N, (15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N,N, (31,)], BT.DIR_RIGHT)] + + for i, case in enumerate(cases): + index, node, dir_, res, undo_dir_ = case + cpy = self.trees[index].copy() + BT._ABT_rotate(cpy, node, dir_) + self.assertTrue(BT.ABT_equals(cpy, res), f"_ABT_rotate failed to rotate tree[{index}] around node {node}: {cpy} != {res}") + if undo_dir_ is not N: + BT._ABT_rotate(cpy, node, undo_dir_) + self.assertTrue(BT.ABT_equals(cpy, self.trees[index]), f"_ABT_rotate reverse failed for case {case}\n{self.trees[index]} --> {cpy}") + + def test_ABT_split_rotate(self): + cases = [(3, 0, BT.DIR_LEFT, [(5,), (0,), (2,), (1,), N, N, (6,), (3,), (4,)]), + (3, 0, BT.DIR_RIGHT, [(4,), (1,), (0,), (3,), N, N, (2,), N, N, N, N, N, N, (5,), (6,)]), + (6, 0, BT.DIR_LEFT, [(5,), (0,), (2,), (1,), (11,), N, (6,), (3,)])] + for i, case in enumerate(cases): + index, node, dir_, res = case + cpy = self.trees[index].copy() + BT._ABT_split_rotate(cpy, node, dir_) + self.assertTrue(BT.ABT_equals(cpy, res), f"_ABT_split_rotate failed to rotate tree[{index}] around {node}: {cpy} != {res}") + # TODO: include the "un-split rotate" operations + + """ + @classmethod + def setUpClass(cls): + cls.trees = [[(0,), (1,)], + [(0,), N, (2,)], + [(0,), (1,),(2,)], + [(0,), (1,),(2,), (3,),(4,),(5,),(6,)], + [(0,), (1,), N, (3,),(4,)], + [(0,), N, (2,), N, N,(5,),(6,)], + [(0,), (1,), (2,), (3,),N,(5,),(6,), N, N, N, N, (11,)], + [(0,), (1,), (2,), (3,),(4,),N,N, (7,),(8,),(9,),N,N,N,N,N, (15,),(16,),N,N,N,N,N,N,N,N,N,N,N,N,N,N, (31,)]] + """ + def test_ABT_inorder_traversal(self): + results = [[(1,),(0,)], + [(0,),(2,)], + [(1,),(0,),(2,)], + [(3,),(1,),(4,),(0,),(5,),(2,),(6,)], + [(3,),(1,),(4,),(0,)], + [(0,),(5,),(2,),(6,)], + [(3,),(1,),(0,),(11,),(5,),(2,),(6,)], + [(31,),(15,),(7,),(16,),(3,),(8,),(1,),(9,),(4,),(0,),(2,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, tree in enumerate(self.trees): + result = results[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_INORDER) + self.assertListEqual(result, arr, f"inorder traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_preorder_traversal(self): + results = [[(0,),(1,)], + [(0,),(2,)], + [(0,),(1,),(2,)], + [(0,),(1,),(3,),(4,),(2,),(5,),(6,)], + [(0,),(1,),(3,),(4,)], + [(0,),(2,),(5,),(6,)], + [(0,),(1,),(3,),(2,),(5,),(11,),(6,)], + [(0,),(1,),(3,),(7,),(15,),(31,),(16,),(8,),(4,),(9,),(2,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_PREORDER) + self.assertListEqual(result, arr, f"preorder traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_postorder_traversal(self): + 
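# Postorder visits the left subtree, then the right subtree, then the node itself, so the root (0,) is always the last element of each expected list below. + 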
results = [[(1,),(0,)], + [(2,),(0,)], + [(1,),(2,),(0,)], + [(3,),(4,),(1,),(5,),(6,),(2,),(0,)], + [(3,),(4,),(1,),(0,)], + [(5,),(6,),(2,),(0,)], + [(3,),(1,),(11,),(5,),(6,),(2,),(0,)], + [(31,),(15,),(16,),(7,),(8,),(3,),(9,),(4,),(1,),(2,),(0,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_POSTORDER) + self.assertListEqual(result, arr, f"postorder traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_levelorder_traversal(self): + results = [[(0,),(1,)], + [(0,),(2,)], + [(0,),(1,),(2,)], + [(0,),(1,),(2,),(3,),(4,),(5,),(6,)], + [(0,),(1,),(3,),(4,)], + [(0,),(2,),(5,),(6,)], + [(0,),(1,),(2,),(3,),(5,),(6,),(11,)], + [(0,),(1,),(2,),(3,),(4,),(7,),(8,),(9,),(15,),(16,),(31,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_LEVELORDER) + self.assertListEqual(result, arr, f"levelorder traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_inorder_traversal_reverse(self): + results = [[(0,),(1,)], + [(2,),(0,)], + [(2,),(0,),(1,)], + [(6,),(2,),(5,),(0,),(4,),(1,),(3,)], + [(0,),(4,),(1,),(3,)], + [(6,),(2,),(5,),(0,)], + [(6,),(2,),(5,),(11,),(0,),(1,),(3,)], + [(2,),(0,),(4,),(9,),(1,),(8,),(3,),(16,),(7,),(15,),(31,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, tree in enumerate(self.trees): + result = results[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_INORDER, reverse=True) + self.assertListEqual(result, arr, f"inorder reverse traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_preorder_traversal_reverse(self): + results = [[(0,),(1,)], + [(0,),(2,)], + [(0,),(2,),(1,)], + [(0,),(2,),(6,),(5,),(1,),(4,),(3,)], + [(0,),(1,),(4,),(3,)], + [(0,),(2,),(6,),(5,)], + [(0,),(2,),(6,),(5,),(11,),(1,),(3,)], + [(0,),(2,),(1,),(4,),(9,),(3,),(8,),(7,),(16,),(15,),(31,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_PREORDER, reverse=True) + self.assertListEqual(result, arr, f"preorder reverse traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_postorder_traversal_reverse(self): + results = [[(1,),(0,)], + [(2,),(0,)], + [(2,),(1,),(0,)], + [(6,),(5,),(2,),(4,),(3,),(1,),(0,)], + [(4,),(3,),(1,),(0,)], + [(6,),(5,),(2,),(0,)], + [(6,),(11,),(5,),(2,),(3,),(1,),(0,)], + [(2,),(9,),(4,),(8,),(16,),(31,),(15,),(7,),(3,),(1,),(0,)]] + + def collect(tree, index, out): + out.append(tree[index]) + return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_POSTORDER, reverse=True) + self.assertListEqual(result, arr, f"postorder reverse traversal of tree[{i}] failed: {result} != {arr}") + + def test_ABT_levelorder_traversal_reverse(self): + results = [[(0,),(1,)], + [(0,),(2,)], + [(0,),(2,),(1,)], + [(0,),(2,),(1,),(6,),(5,),(4,),(3,)], + [(0,),(1,),(4,),(3,)], + [(0,),(2,),(6,),(5,)], + [(0,),(2,),(1,),(6,),(5,),(3,),(11,)], + [(0,),(2,),(1,),(4,),(3,),(9,),(8,),(7,),(16,),(15,),(31,)]] + + def collect(tree, index, out): + out.append(tree[index]) + 
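# Returning TRAVERSE_GO tells ABT_traverse to continue on to the next node. + 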
return BT.TRAVERSE_GO + + for i, result in enumerate(results): + tree = self.trees[i] + arr = [] + BT.ABT_traverse(tree, collect, arr, traversal=BT.TRAVERSE_LEVELORDER, reverse=True) + self.assertListEqual(result, arr, f"levelorder reverse traversal of tree[{i}] failed: {result} != {arr}") \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_Node.py b/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_Node.py new file mode 100644 index 0000000000000000000000000000000000000000..c38629cebd27407cbfac353c6e051790d8cdfbdf --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/tests/test_Node.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Sep 1 23:23:16 2022 + +@author: jeffr +""" + +import dtlib.trees._Node as _Node +from dtlib.trees._constants import DIR_LEFT, DIR_RIGHT, VALUE_KEY +from unittest import TestCase as TC + +class cd: + def __init__(self, a): + self.a = a + +class test_Node(TC): + @classmethod + def setUpClass(cls): + cls.node_data = [[0, 1, 2], + [(0,), (1,), (2,)], + [[0],[1], [2]], + [{'a':0}, {'a':1}, {'a': 2}], + [cd(0), cd(1), cd(2)]] + cls.node_type_consts = [_Node.LIST_NODE, _Node.DICT_NODE, _Node.CLASS_NODE, _Node.SLOTTED_CLASS_NODE] + cls.node_types = [list, dict, _Node.ClassNode, _Node.SlottedClassNode] + + def test_NodeObjects(self): + for i, node_type in enumerate(self.node_type_consts): + node_factory = _Node.Node_factory(node_type, {DIR_LEFT: None, DIR_RIGHT: None}) + for data in self.node_data: + root = node_factory(data[0], node_factory(data[1]), node_factory(data[2])) + self.assertIsInstance(root, self.node_types[i]) + self.assertIsNotNone(root[DIR_LEFT]) + self.assertIsNotNone(root[DIR_RIGHT]) + self.assertEqual(data[0], root[VALUE_KEY]) + self.assertIsInstance(root[DIR_LEFT], self.node_types[i]) + self.assertIsNone(root[DIR_LEFT][DIR_LEFT]) + self.assertIsNone(root[DIR_LEFT][DIR_RIGHT]) + self.assertEqual(data[1], root[DIR_LEFT][VALUE_KEY]) + self.assertIsInstance(root[DIR_RIGHT], self.node_types[i]) + self.assertIsNone(root[DIR_RIGHT][DIR_LEFT]) + self.assertIsNone(root[DIR_RIGHT][DIR_RIGHT]) + self.assertEqual(data[2], root[DIR_RIGHT][VALUE_KEY]) \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/trees/utils.py b/venv/lib/python3.10/site-packages/dtlib/trees/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..badbd64b0d4c024fef01ce7bbe411a5cc3b300b1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/trees/utils.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon Sep 5 09:17:08 2022 + +@author: jeffr +""" + +import math + +# level (depth) of the node at this array index; the root (index 0) is level 0 +def binary_tree_level(index): + return int(math.log2(index+1)) \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/dtlib/utils.py b/venv/lib/python3.10/site-packages/dtlib/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2f76743f83df7aebc090dd63975d3faf1e4ac657 --- /dev/null +++ b/venv/lib/python3.10/site-packages/dtlib/utils.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Aug 25 18:26:32 2022 + +@author: jeffr +""" + +# given a positive integer int_ > 0, calculate the next power of 2 strictly greater than int_ +def _next_pow2(int_): + return 1 << ((int_).bit_length()) + +# given an integer int_ >= 0, round up to the nearest multiple of 2 +def _next_mult2(int_): + return ((int_+1) >> 1) << 1 + +# for given integers 0 <= start <= end, calculates the index in the middle; useful for binary trees in array format and binary 
searches/bisect (hence name root) +def _interval_root(start, end): + return start + ((end-start) >> 1) \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..fe82f2cb304c21ba5d766fd4ebd4cda51cc332fe --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/METADATA @@ -0,0 +1,575 @@ +Metadata-Version: 2.1 +Name: fastapi +Version: 0.116.1 +Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production +Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?= +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development +Classifier: Typing :: Typed +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Framework :: AsyncIO +Classifier: Framework :: FastAPI +Classifier: Framework :: Pydantic +Classifier: Framework :: Pydantic :: 1 +Classifier: Framework :: Pydantic :: 2 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP +Project-URL: Homepage, https://github.com/fastapi/fastapi +Project-URL: Documentation, https://fastapi.tiangolo.com/ +Project-URL: Repository, https://github.com/fastapi/fastapi +Project-URL: Issues, https://github.com/fastapi/fastapi/issues +Project-URL: Changelog, https://fastapi.tiangolo.com/release-notes/ +Requires-Python: >=3.8 +Requires-Dist: starlette<0.48.0,>=0.40.0 +Requires-Dist: pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4 +Requires-Dist: typing-extensions>=4.8.0 +Provides-Extra: standard +Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "standard" +Requires-Dist: httpx>=0.23.0; extra == "standard" +Requires-Dist: jinja2>=3.1.5; extra == "standard" +Requires-Dist: python-multipart>=0.0.18; extra == "standard" +Requires-Dist: email-validator>=2.0.0; extra == "standard" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard" +Provides-Extra: standard-no-fastapi-cloud-cli +Requires-Dist: fastapi-cli[standard-no-fastapi-cloud-cli]>=0.0.8; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: httpx>=0.23.0; 
extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: jinja2>=3.1.5; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: python-multipart>=0.0.18; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: email-validator>=2.0.0; extra == "standard-no-fastapi-cloud-cli" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "standard-no-fastapi-cloud-cli" +Provides-Extra: all +Requires-Dist: fastapi-cli[standard]>=0.0.8; extra == "all" +Requires-Dist: httpx>=0.23.0; extra == "all" +Requires-Dist: jinja2>=3.1.5; extra == "all" +Requires-Dist: python-multipart>=0.0.18; extra == "all" +Requires-Dist: itsdangerous>=1.1.0; extra == "all" +Requires-Dist: pyyaml>=5.3.1; extra == "all" +Requires-Dist: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1; extra == "all" +Requires-Dist: orjson>=3.2.1; extra == "all" +Requires-Dist: email-validator>=2.0.0; extra == "all" +Requires-Dist: uvicorn[standard]>=0.12.0; extra == "all" +Requires-Dist: pydantic-settings>=2.0.0; extra == "all" +Requires-Dist: pydantic-extra-types>=2.0.0; extra == "all" +Description-Content-Type: text/markdown + +

+[FastAPI logo]
+
+FastAPI framework, high performance, easy to learn, fast to code, ready for production
+
+[Badges: Test | Coverage | Package version | Supported Python versions]
+ +--- + +**Documentation**: https://fastapi.tiangolo.com + +**Source Code**: https://github.com/fastapi/fastapi + +--- + +FastAPI is a modern, fast (high-performance), web framework for building APIs with Python based on standard Python type hints. + +The key features are: + +* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance). +* **Fast to code**: Increase the speed to develop features by about 200% to 300%. * +* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. * +* **Intuitive**: Great editor support. Completion everywhere. Less time debugging. +* **Easy**: Designed to be easy to use and learn. Less time reading docs. +* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs. +* **Robust**: Get production-ready code. With automatic interactive documentation. +* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema. + +* estimation based on tests on an internal development team, building production applications. + +## Sponsors + + + + + + + + + + + + + + + + + + + + + + +Other sponsors + +## Opinions + +"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._" + +
Kabir Khan - Microsoft (ref)
+ +--- + +"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" + +
Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
+ +--- + +"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" + +
Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
+ +--- + +"_I’m over the moon excited about **FastAPI**. It’s so fun!_" + +
Brian Okken - Python Bytes podcast host (ref)
+ +--- + +"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" + +
Timothy Crosley - Hug creator (ref)
+ +--- + +"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" + +"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" + +
Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
+ +--- + +"_If anyone is looking to build a production Python API, I would highly recommend **FastAPI**. It is **beautifully designed**, **simple to use** and **highly scalable**, it has become a **key component** in our API first development strategy and is driving many automations and services such as our Virtual TAC Engineer._" + +
Deon Pillsbury - Cisco (ref)
+ +--- + +## **Typer**, the FastAPI of CLIs + + + +If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. + +**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 + +## Requirements + +FastAPI stands on the shoulders of giants: + +* Starlette for the web parts. +* Pydantic for the data parts. + +## Installation + +Create and activate a virtual environment and then install FastAPI: + +
+ +```console +$ pip install "fastapi[standard]" + +---> 100% +``` + +
+ +**Note**: Make sure you put `"fastapi[standard]"` in quotes to ensure it works in all terminals. + +## Example + +### Create it + +Create a file `main.py` with: + +```Python +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +
+Or use async def... + +If your code uses `async` / `await`, use `async def`: + +```Python hl_lines="9 14" +from typing import Union + +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/") +async def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +async def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} +``` + +**Note**: + +If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. + +
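+You can also exercise the app in-process before starting a server. A minimal sketch using FastAPI's `TestClient` (it requires `httpx`, part of the `standard` extra, and assumes the `main.py` above):
+
+```Python
+from fastapi.testclient import TestClient
+
+from main import app
+
+client = TestClient(app)
+
+# The path parameter is converted to an int; the query parameter stays optional.
+response = client.get("/items/5", params={"q": "somequery"})
+assert response.status_code == 200
+assert response.json() == {"item_id": 5, "q": "somequery"}
+```
+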
+ +### Run it + +Run the server with: + +
+ +```console +$ fastapi dev main.py + + ╭────────── FastAPI CLI - Development mode ───────────╮ + │ │ + │ Serving at: http://127.0.0.1:8000 │ + │ │ + │ API docs: http://127.0.0.1:8000/docs │ + │ │ + │ Running in development mode, for production use: │ + │ │ + │ fastapi run │ + │ │ + ╰─────────────────────────────────────────────────────╯ + +INFO: Will watch for changes in these directories: ['/home/user/code/awesomeapp'] +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +INFO: Started reloader process [2248755] using WatchFiles +INFO: Started server process [2248757] +INFO: Waiting for application startup. +INFO: Application startup complete. +``` + +
+ +
+About the command fastapi dev main.py... + +The command `fastapi dev` reads your `main.py` file, detects the **FastAPI** app in it, and starts a server using Uvicorn. + +By default, `fastapi dev` will start with auto-reload enabled for local development. + +You can read more about it in the FastAPI CLI docs. + +
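+If you prefer to manage the server process yourself, this behavior can be approximated by calling Uvicorn directly. A sketch, assuming the same `main.py` (development settings only):
+
+```Python
+import uvicorn
+
+if __name__ == "__main__":
+    # Roughly what `fastapi dev main.py` sets up: serve the detected app
+    # with auto-reload enabled for local development.
+    uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)
+```
+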
+ +### Check it + +Open your browser at http://127.0.0.1:8000/items/5?q=somequery. + +You will see the JSON response as: + +```JSON +{"item_id": 5, "q": "somequery"} +``` + +You already created an API that: + +* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. +* Both _paths_ take `GET` operations (also known as HTTP _methods_). +* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. +* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. + +### Interactive API docs + +Now go to http://127.0.0.1:8000/docs. + +You will see the automatic interactive API documentation (provided by Swagger UI): + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) + +### Alternative API docs + +And now, go to http://127.0.0.1:8000/redoc. + +You will see the alternative automatic documentation (provided by ReDoc): + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) + +## Example upgrade + +Now modify the file `main.py` to receive a body from a `PUT` request. + +Declare the body using standard Python types, thanks to Pydantic. + +```Python hl_lines="4 9-12 25-27" +from typing import Union + +from fastapi import FastAPI +from pydantic import BaseModel + +app = FastAPI() + + +class Item(BaseModel): + name: str + price: float + is_offer: Union[bool, None] = None + + +@app.get("/") +def read_root(): + return {"Hello": "World"} + + +@app.get("/items/{item_id}") +def read_item(item_id: int, q: Union[str, None] = None): + return {"item_id": item_id, "q": q} + + +@app.put("/items/{item_id}") +def update_item(item_id: int, item: Item): + return {"item_name": item.name, "item_id": item_id} +``` + +The `fastapi dev` server should reload automatically. + +### Interactive API docs upgrade + +Now go to http://127.0.0.1:8000/docs. + +* The interactive API documentation will be automatically updated, including the new body: + +![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) + +* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) + +* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: + +![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) + +### Alternative API docs upgrade + +And now, go to http://127.0.0.1:8000/redoc. + +* The alternative documentation will also reflect the new query parameter and body: + +![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) + +### Recap + +In summary, you declare **once** the types of parameters, body, etc. as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python**. + +For example, for an `int`: + +```Python +item_id: int +``` + +or for a more complex `Item` model: + +```Python +item: Item +``` + +...and with that single declaration you get: + +* Editor support, including: + * Completion. + * Type checks. +* Validation of data: + * Automatic and clear errors when the data is invalid. + * Validation even for deeply nested JSON objects. +* Conversion of input data: coming from the network to Python data and types. Reading from: + * JSON. + * Path parameters. + * Query parameters. + * Cookies. 
+ * Headers. + * Forms. + * Files. +* Conversion of output data: converting from Python data and types to network data (as JSON): + * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc). + * `datetime` objects. + * `UUID` objects. + * Database models. + * ...and many more. +* Automatic interactive API documentation, including 2 alternative user interfaces: + * Swagger UI. + * ReDoc. + +--- + +Coming back to the previous code example, **FastAPI** will: + +* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. +* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. + * If it is not, the client will see a useful, clear error. +* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. + * As the `q` parameter is declared with `= None`, it is optional. + * Without the `None` it would be required (as is the body in the case with `PUT`). +* For `PUT` requests to `/items/{item_id}`, read the body as JSON: + * Check that it has a required attribute `name` that should be a `str`. + * Check that it has a required attribute `price` that has to be a `float`. + * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. + * All this would also work for deeply nested JSON objects. +* Convert from and to JSON automatically. +* Document everything with OpenAPI, that can be used by: + * Interactive documentation systems. + * Automatic client code generation systems, for many languages. +* Provide 2 interactive documentation web interfaces directly. + +--- + +We just scratched the surface, but you already get the idea of how it all works. + +Try changing the line with: + +```Python + return {"item_name": item.name, "item_id": item_id} +``` + +...from: + +```Python + ... "item_name": item.name ... +``` + +...to: + +```Python + ... "item_price": item.price ... +``` + +...and see how your editor will auto-complete the attributes and know their types: + +![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) + +For a more complete example including more features, see the Tutorial - User Guide. + +**Spoiler alert**: the tutorial - user guide includes: + +* Declaration of **parameters** from other different places as: **headers**, **cookies**, **form fields** and **files**. +* How to set **validation constraints** as `maximum_length` or `regex`. +* A very powerful and easy to use **Dependency Injection** system. +* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. +* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). +* **GraphQL** integration with Strawberry and other libraries. +* Many extra features (thanks to Starlette) as: + * **WebSockets** + * extremely easy tests based on HTTPX and `pytest` + * **CORS** + * **Cookie Sessions** + * ...and more. + +## Performance + +Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) + +To understand more about it, see the section Benchmarks. + +## Dependencies + +FastAPI depends on Pydantic and Starlette. 
+ +### `standard` Dependencies + +When you install FastAPI with `pip install "fastapi[standard]"` it comes with the `standard` group of optional dependencies: + +Used by Pydantic: + +* email-validator - for email validation. + +Used by Starlette: + +* httpx - Required if you want to use the `TestClient`. +* jinja2 - Required if you want to use the default template configuration. +* python-multipart - Required if you want to support form "parsing", with `request.form()`. + +Used by FastAPI: + +* uvicorn - for the server that loads and serves your application. This includes `uvicorn[standard]`, which includes some dependencies (e.g. `uvloop`) needed for high performance serving. +* `fastapi-cli[standard]` - to provide the `fastapi` command. + * This includes `fastapi-cloud-cli`, which allows you to deploy your FastAPI application to FastAPI Cloud. + +### Without `standard` Dependencies + +If you don't want to include the `standard` optional dependencies, you can install with `pip install fastapi` instead of `pip install "fastapi[standard]"`. + +### Without `fastapi-cloud-cli` + +If you want to install FastAPI with the standard dependencies but without the `fastapi-cloud-cli`, you can install with `pip install "fastapi[standard-no-fastapi-cloud-cli]"`. + +### Additional Optional Dependencies + +There are some additional dependencies you might want to install. + +Additional optional Pydantic dependencies: + +* pydantic-settings - for settings management. +* pydantic-extra-types - for extra types to be used with Pydantic. + +Additional optional FastAPI dependencies: + +* orjson - Required if you want to use `ORJSONResponse`. +* ujson - Required if you want to use `UJSONResponse`. + +## License + +This project is licensed under the terms of the MIT license. 
diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..a0f0116ba593e3c070e053ffe0d58fd2292c8526 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/RECORD @@ -0,0 +1,96 @@ +../../../bin/fastapi,sha256=Y5FcaevBAVHeU3WZGLIw5iqM7nqstdzLTjsM8HxGjcA,280 +fastapi-0.116.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +fastapi-0.116.1.dist-info/METADATA,sha256=pnu4s6rAsNuB66sYhB59UFxRuZv2ua6fbH_jUM6HM2k,28115 +fastapi-0.116.1.dist-info/RECORD,, +fastapi-0.116.1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90 +fastapi-0.116.1.dist-info/entry_points.txt,sha256=GCf-WbIZxyGT4MUmrPGj1cOHYZoGsNPHAvNkT6hnGeA,61 +fastapi-0.116.1.dist-info/licenses/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 +fastapi/__init__.py,sha256=-U8vW9K3Hy78v_3O0ECrEfMmPtSuHaA1yAql96bd8ts,1081 +fastapi/__main__.py,sha256=bKePXLdO4SsVSM6r9SVoLickJDcR2c0cTOxZRKq26YQ,37 +fastapi/__pycache__/__init__.cpython-310.pyc,, +fastapi/__pycache__/__main__.cpython-310.pyc,, +fastapi/__pycache__/_compat.cpython-310.pyc,, +fastapi/__pycache__/applications.cpython-310.pyc,, +fastapi/__pycache__/background.cpython-310.pyc,, +fastapi/__pycache__/cli.cpython-310.pyc,, +fastapi/__pycache__/concurrency.cpython-310.pyc,, +fastapi/__pycache__/datastructures.cpython-310.pyc,, +fastapi/__pycache__/encoders.cpython-310.pyc,, +fastapi/__pycache__/exception_handlers.cpython-310.pyc,, +fastapi/__pycache__/exceptions.cpython-310.pyc,, +fastapi/__pycache__/logger.cpython-310.pyc,, +fastapi/__pycache__/param_functions.cpython-310.pyc,, +fastapi/__pycache__/params.cpython-310.pyc,, +fastapi/__pycache__/requests.cpython-310.pyc,, +fastapi/__pycache__/responses.cpython-310.pyc,, +fastapi/__pycache__/routing.cpython-310.pyc,, +fastapi/__pycache__/staticfiles.cpython-310.pyc,, +fastapi/__pycache__/templating.cpython-310.pyc,, +fastapi/__pycache__/testclient.cpython-310.pyc,, +fastapi/__pycache__/types.cpython-310.pyc,, +fastapi/__pycache__/utils.cpython-310.pyc,, +fastapi/__pycache__/websockets.cpython-310.pyc,, +fastapi/_compat.py,sha256=PwGTZd6d-u2o6YF9M8pQahuBtD_3q3Kpj7vU5-ngChc,24228 +fastapi/applications.py,sha256=rZTr0Ix-vdMwh6MQGCI_NC-Ir9lpfIGHHBY-JnNWZ_E,176550 +fastapi/background.py,sha256=rouLirxUANrcYC824MSMypXL_Qb2HYg2YZqaiEqbEKI,1768 +fastapi/cli.py,sha256=OYhZb0NR_deuT5ofyPF2NoNBzZDNOP8Salef2nk-HqA,418 +fastapi/concurrency.py,sha256=MirfowoSpkMQZ8j_g0ZxaQKpV6eB3G-dB5TgcXCrgEA,1424 +fastapi/datastructures.py,sha256=b2PEz77XGq-u3Ur1Inwk0AGjOsQZO49yF9C7IPJ15cY,5766 +fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/dependencies/__pycache__/__init__.cpython-310.pyc,, +fastapi/dependencies/__pycache__/models.cpython-310.pyc,, +fastapi/dependencies/__pycache__/utils.cpython-310.pyc,, +fastapi/dependencies/models.py,sha256=Pjl6vx-4nZ5Tta9kJa3-RfQKkXtCpS09-FhMgs9eWNs,1507 +fastapi/dependencies/utils.py,sha256=wGN-BAb0NpG-89nA_OllS0F4wYwGfhHgb8IuT3MTqck,36619 +fastapi/encoders.py,sha256=LvwYmFeOz4tVwvgBoC5rvZnbr7hZr73KGrU8O7zSptU,11068 +fastapi/exception_handlers.py,sha256=MBrIOA-ugjJDivIi4rSsUJBdTsjuzN76q4yh0q1COKw,1332 +fastapi/exceptions.py,sha256=taNixuFEXb67lI1bnX1ubq8y8TseJ4yoPlWjyP0fTzk,4969 +fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 +fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 
+fastapi/middleware/__pycache__/__init__.cpython-310.pyc,, +fastapi/middleware/__pycache__/cors.cpython-310.pyc,, +fastapi/middleware/__pycache__/gzip.cpython-310.pyc,, +fastapi/middleware/__pycache__/httpsredirect.cpython-310.pyc,, +fastapi/middleware/__pycache__/trustedhost.cpython-310.pyc,, +fastapi/middleware/__pycache__/wsgi.cpython-310.pyc,, +fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 +fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 +fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 +fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 +fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 +fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/openapi/__pycache__/__init__.cpython-310.pyc,, +fastapi/openapi/__pycache__/constants.cpython-310.pyc,, +fastapi/openapi/__pycache__/docs.cpython-310.pyc,, +fastapi/openapi/__pycache__/models.cpython-310.pyc,, +fastapi/openapi/__pycache__/utils.cpython-310.pyc,, +fastapi/openapi/constants.py,sha256=adGzmis1L1HJRTE3kJ5fmHS_Noq6tIY6pWv_SFzoFDU,153 +fastapi/openapi/docs.py,sha256=zSDv4xY6XHcKsaG4zyk1HqSnrZtfZFBB0J7ZBk5YHPE,10345 +fastapi/openapi/models.py,sha256=PqkxQiqcEgjKuhfUIWPZPQcyTcubtUCB3vcObLsB7VE,15397 +fastapi/openapi/utils.py,sha256=e00G_p0IdpiffBUaq31BUyiloXbpld8RryKYnYKisdY,23964 +fastapi/param_functions.py,sha256=JHNPLIYvoAwdnZZavIVsxOat8x23fX_Kl33reh7HKl8,64019 +fastapi/params.py,sha256=g450axUBQgQJODdtM7WBxZbQj9Z64inFvadrgHikBbU,28237 +fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 +fastapi/responses.py,sha256=QNQQlwpKhQoIPZTTWkpc9d_QGeGZ_aVQPaDV3nQ8m7c,1761 +fastapi/routing.py,sha256=-SaOgqaseKw5mlTCk-FliS6Wx5la_CjdV5FqSPDmW9g,176337 +fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 +fastapi/security/__pycache__/__init__.cpython-310.pyc,, +fastapi/security/__pycache__/api_key.cpython-310.pyc,, +fastapi/security/__pycache__/base.cpython-310.pyc,, +fastapi/security/__pycache__/http.cpython-310.pyc,, +fastapi/security/__pycache__/oauth2.cpython-310.pyc,, +fastapi/security/__pycache__/open_id_connect_url.cpython-310.pyc,, +fastapi/security/__pycache__/utils.cpython-310.pyc,, +fastapi/security/api_key.py,sha256=cBI5Z4zWVjL1uJrsjTeLy7MafHPAO2HQPzTrpyoIYWA,9094 +fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 +fastapi/security/http.py,sha256=rWR2x-5CUsjWmRucYthwRig6MG1o-boyrr4Xo-PuuxU,13606 +fastapi/security/oauth2.py,sha256=M1AFIDT7G3oQChq83poI3eg8ZDeibcvnGmya2CTS7JY,22036 +fastapi/security/open_id_connect_url.py,sha256=8vizZ2tGqEp1ur8SwtVgyHJhGAJ5AqahgcvSpaIioDI,2722 +fastapi/security/utils.py,sha256=bd8T0YM7UQD5ATKucr1bNtAvz_Y3__dVNAv5UebiPvc,293 +fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 +fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 +fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 +fastapi/types.py,sha256=nFb36sK3DSoqoyo7Miwy3meKK5UdFBgkAgLSzQlUVyI,383 +fastapi/utils.py,sha256=y8Bj5ttMaI9tS4D60OUgXqKnktBr99NdYUnHHV9LgoY,7948 +fastapi/websockets.py,sha256=419uncYObEKZG0YcrXscfQQYLSWoE10jqxVMetGdR98,222 diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/WHEEL new file mode 100644 index 
0000000000000000000000000000000000000000..045c8acdea31cbca5be986e915f784c1aafc720f --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.5) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..b81849e1ef82b793f6f81bb970cdc3cb791f5776 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +fastapi = fastapi.cli:main + +[gui_scripts] + diff --git a/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..3e92463e6bd522a2a21e5f0a80d8089d6c4be20d --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi-0.116.1.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__init__.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1276d0254ffc2b5e4fcf48cd868123238d4ad06f --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__init__.py @@ -0,0 +1 @@ +__version__ = "0.1.5" diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__main__.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..4e28416e104515e90fca4b69cc60d0c61fd15d61 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b72f7d800acae675197e6e86a47bc5f8ecac207 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40753be94093946b927204d878993354c5c836b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/__main__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/cli.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/cli.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4ff4798ddfef7871aea48b8ad1f78ed3e7c2f88 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/cli.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/config.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e293b4f99b4b27419be6e92b990a875f2985c726 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/logging.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/logging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc9eef43e4c71a106458c010d667481897f15cff Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/__pycache__/logging.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/cli.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..4ae10f5ee611d9c27517ef3b8498cef6bde180d8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/cli.py @@ -0,0 +1,29 @@ +import typer + +from .commands.deploy import deploy +from .commands.env import env_app +from .commands.login import login +from .commands.logout import logout +from .commands.whoami import whoami +from .logging import setup_logging +from .utils.sentry import init_sentry + +setup_logging() + +app = typer.Typer(rich_markup_mode="rich") + 
+ +# TODO: use the app structure + +# Additional commands +app.command()(deploy) +app.command()(login) +app.command()(logout) +app.command()(whoami) + +app.add_typer(env_app, name="env") + + +def main() -> None: + init_sentry() + app() diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__init__.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cca78b51dfb8630e941e42ab2fcf8b47548bce81 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/deploy.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/deploy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1d375ad202760172a36f99b0fc57fdbbcacd93e8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/deploy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/env.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/env.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..589ac0fea12f37fd1be1facea928806289f93628 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/env.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/login.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/login.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f18af63f0d162efef11cc2b732ff4a2a15b94e9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/login.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/logout.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/logout.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fbad3a9cf7b76f57b381a2637bbec1accea677c8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/logout.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/whoami.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/whoami.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2c0850991b8ebd8c8ebf9232cacae72d6067036 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/__pycache__/whoami.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/deploy.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/deploy.py new file mode 100644 index 0000000000000000000000000000000000000000..123a4d2b06f2f0738afdc2eec21dfa23dcf79608 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/deploy.py 
@@ -0,0 +1,618 @@ +import contextlib +import json +import logging +import subprocess +import tarfile +import tempfile +import time +import uuid +from enum import Enum +from itertools import cycle +from pathlib import Path +from typing import Any, Dict, Generator, List, Optional, Union + +import rignore +import typer +from httpx import Client +from pydantic import BaseModel, EmailStr, TypeAdapter, ValidationError +from rich.text import Text +from rich_toolkit import RichToolkit +from rich_toolkit.menu import Option +from typing_extensions import Annotated + +from fastapi_cloud_cli.utils.api import APIClient +from fastapi_cloud_cli.utils.apps import AppConfig, get_app_config, write_app_config +from fastapi_cloud_cli.utils.auth import is_logged_in +from fastapi_cloud_cli.utils.cli import get_rich_toolkit, handle_http_errors +from fastapi_cloud_cli.utils.env import validate_environment_variable_name + +logger = logging.getLogger(__name__) + + +def _get_app_name(path: Path) -> str: + # TODO: use pyproject.toml to get the app name + return path.name + + +def _should_exclude_entry(path: Path) -> bool: + parts_to_exclude = [".venv", "__pycache__", ".mypy_cache", ".pytest_cache"] + + if any(part in path.parts for part in parts_to_exclude): + return True + + if path.suffix == ".pyc": + return True + + return False + + +def archive(path: Path) -> Path: + logger.debug("Starting archive creation for path: %s", path) + files = rignore.walk(path, should_exclude_entry=_should_exclude_entry) + + temp_dir = tempfile.mkdtemp() + logger.debug("Created temp directory: %s", temp_dir) + + name = f"fastapi-cloud-deploy-{uuid.uuid4()}" + tar_path = Path(temp_dir) / f"{name}.tar" + logger.debug("Archive will be created at: %s", tar_path) + + file_count = 0 + with tarfile.open(tar_path, "w") as tar: + for filename in files: + if filename.is_dir(): + continue + + tar.add(filename, arcname=filename.relative_to(path)) + file_count += 1 + + logger.debug("Archive created successfully with %s files", file_count) + return tar_path + + +class Team(BaseModel): + id: str + slug: str + name: str + + +def _get_teams() -> List[Team]: + with APIClient() as client: + response = client.get("/teams/") + response.raise_for_status() + + data = response.json()["data"] + + return [Team.model_validate(team) for team in data] + + +class AppResponse(BaseModel): + id: str + slug: str + + +def _create_app(team_id: str, app_name: str) -> AppResponse: + with APIClient() as client: + response = client.post( + "/apps/", + json={"name": app_name, "team_id": team_id}, + ) + + response.raise_for_status() + + return AppResponse.model_validate(response.json()) + + +class DeploymentStatus(str, Enum): + waiting_upload = "waiting_upload" + ready_for_build = "ready_for_build" + building = "building" + extracting = "extracting" + building_image = "building_image" + deploying = "deploying" + success = "success" + failed = "failed" + + @classmethod + def to_human_readable(cls, status: "DeploymentStatus") -> str: + return { + cls.waiting_upload: "Waiting for upload", + cls.ready_for_build: "Ready for build", + cls.building: "Building", + cls.extracting: "Extracting", + cls.building_image: "Building image", + cls.deploying: "Deploying", + cls.success: "Success", + cls.failed: "Failed", + }[status] + + +class CreateDeploymentResponse(BaseModel): + id: str + app_id: str + slug: str + status: DeploymentStatus + dashboard_url: str + url: str + + +def _create_deployment(app_id: str) -> CreateDeploymentResponse: + with APIClient() as client: + response = 
client.post(f"/apps/{app_id}/deployments/") + response.raise_for_status() + + return CreateDeploymentResponse.model_validate(response.json()) + + +class RequestUploadResponse(BaseModel): + url: str + fields: Dict[str, str] + + +def _upload_deployment(deployment_id: str, archive_path: Path) -> None: + logger.debug( + "Starting deployment upload for deployment: %s", + deployment_id, + ) + logger.debug( + "Archive path: %s, size: %s bytes", + archive_path, + archive_path.stat().st_size, + ) + + with APIClient() as fastapi_client, Client() as client: + # Get the upload URL + logger.debug("Requesting upload URL from API") + response = fastapi_client.post(f"/deployments/{deployment_id}/upload") + response.raise_for_status() + + upload_data = RequestUploadResponse.model_validate(response.json()) + logger.debug("Received upload URL: %s", upload_data.url) + + # Upload the archive + logger.debug("Starting file upload to S3") + upload_response = client.post( + upload_data.url, + data=upload_data.fields, + files={"file": archive_path.open("rb")}, + ) + + upload_response.raise_for_status() + logger.debug("File upload completed successfully") + + # Notify the server that the upload is complete + logger.debug("Notifying API that upload is complete") + notify_response = fastapi_client.post( + f"/deployments/{deployment_id}/upload-complete" + ) + + notify_response.raise_for_status() + logger.debug("Upload notification sent successfully") + + +def _get_app(app_slug: str) -> Optional[AppResponse]: + with APIClient() as client: + response = client.get(f"/apps/{app_slug}") + + if response.status_code == 404: + return None + + response.raise_for_status() + + data = response.json() + + return AppResponse.model_validate(data) + + +def _get_apps(team_id: str) -> List[AppResponse]: + with APIClient() as client: + response = client.get("/apps/", params={"team_id": team_id}) + response.raise_for_status() + + data = response.json()["data"] + + return [AppResponse.model_validate(app) for app in data] + + +def _create_environment_variables(app_id: str, env_vars: Dict[str, str]) -> None: + with APIClient() as client: + response = client.patch(f"/apps/{app_id}/environment-variables/", json=env_vars) + response.raise_for_status() + + +def _stream_build_logs(deployment_id: str) -> Generator[str, None, None]: + with APIClient() as client: + with client.stream( + "GET", f"/deployments/{deployment_id}/build-logs", timeout=60 + ) as response: + response.raise_for_status() + + yield from response.iter_lines() + + +WAITING_MESSAGES = [ + "🚀 Preparing for liftoff! Almost there...", + "👹 Sneaking past the dependency gremlins... Don't wake them up!", + "🤏 Squishing code into a tiny digital sandwich. Nom nom nom.", + "📉 Server space running low. Time to delete those cat videos?", + "🐢 Uploading at blazing speeds of 1 byte per hour. Patience, young padawan.", + "🔌 Connecting to server... Please stand by while we argue with the firewall.", + "💥 Oops! We've angered the Python God. Sacrificing a rubber duck to appease it.", + "🧙 Sprinkling magic deployment dust. Abracadabra!", + "👀 Hoping that @tiangolo doesn't find out about this deployment.", + "🍪 Cookie monster detected on server. Deploying anti-cookie shields.", +] + +LONG_WAIT_MESSAGES = [ + "😅 Well, that's embarrassing. We're still waiting for the deployment to finish...", + "🤔 Maybe we should have brought snacks for this wait...", + "🥱 Yawn... Still waiting...", + "🤯 Time is relative... 
Especially when you're waiting for a deployment...", +] + + +def _configure_app(toolkit: RichToolkit, path_to_deploy: Path) -> AppConfig: + if not toolkit.confirm(f"Setup and deploy [blue]{path_to_deploy}[/]?", tag="dir"): + raise typer.Exit(0) + + toolkit.print_line() + + with toolkit.progress("Fetching teams...") as progress: + with handle_http_errors( + progress, message="Error fetching teams. Please try again later." + ): + teams = _get_teams() + + toolkit.print_line() + + team = toolkit.ask( + "Select the team you want to deploy to:", + tag="team", + options=[Option({"name": team.name, "value": team}) for team in teams], + ) + + toolkit.print_line() + + create_new_app = toolkit.confirm( + "Do you want to create a new app?", tag="app", default=True + ) + + toolkit.print_line() + + if not create_new_app: + with toolkit.progress("Fetching apps...") as progress: + with handle_http_errors( + progress, message="Error fetching apps. Please try again later." + ): + apps = _get_apps(team.id) + + toolkit.print_line() + + if not apps: + toolkit.print( + "No apps found in this team. You can create a new app instead.", + ) + + raise typer.Exit(1) + + app = toolkit.ask( + "Select the app you want to deploy to:", + options=[Option({"name": app.slug, "value": app}) for app in apps], + ) + else: + app_name = toolkit.input( + title="What's your app name?", + default=_get_app_name(path_to_deploy), + ) + + toolkit.print_line() + + with toolkit.progress(title="Creating app...") as progress: + with handle_http_errors(progress): + app = _create_app(team.id, app_name) + + progress.log(f"App created successfully! App slug: {app.slug}") + + app_config = AppConfig(app_id=app.id, team_id=team.id) + + write_app_config(path_to_deploy, app_config) + + return app_config + + +def _wait_for_deployment( + toolkit: RichToolkit, app_id: str, deployment: CreateDeploymentResponse +) -> None: + messages = cycle(WAITING_MESSAGES) + + toolkit.print( + "Checking the status of your deployment 👀", + tag="cloud", + ) + toolkit.print_line() + + toolkit.print( + f"You can also check the status at [link={deployment.dashboard_url}]{deployment.dashboard_url}[/link]", + ) + toolkit.print_line() + + time_elapsed = 0.0 + + started_at = time.monotonic() + + last_message_changed_at = time.monotonic() + + with toolkit.progress( + next(messages), inline_logs=True, lines_to_show=20 + ) as progress: + with handle_http_errors(progress=progress): + for line in _stream_build_logs(deployment.id): + time_elapsed = time.monotonic() - started_at + + data = json.loads(line) + + if "message" in data: + progress.log(Text.from_ansi(data["message"].rstrip())) + + if data.get("type") == "complete": + progress.log("") + progress.log( + f"🐔 Ready the chicken! Your app is ready at [link={deployment.url}]{deployment.url}[/link]" + ) + + progress.log("") + + progress.log( + f"You can also check the app logs at [link={deployment.dashboard_url}]{deployment.dashboard_url}[/link]" + ) + + break + + if data.get("type") == "failed": + progress.log("") + progress.log( + f"😔 Oh no! Something went wrong. 
Check out the logs at [link={deployment.dashboard_url}]{deployment.dashboard_url}[/link]" + ) + raise typer.Exit(1) + + if time_elapsed > 30: + messages = cycle(LONG_WAIT_MESSAGES) # pragma: no cover + + if (time.monotonic() - last_message_changed_at) > 2: + progress.title = next(messages) # pragma: no cover + + last_message_changed_at = time.monotonic() # pragma: no cover + + +def _setup_environment_variables(toolkit: RichToolkit, app_id: str) -> None: + if not toolkit.confirm("Do you want to setup environment variables?", tag="env"): + return + + toolkit.print_line() + + env_vars = {} + + while True: + key = toolkit.input( + "Enter the environment variable name: [ENTER to skip]", required=False + ) + + if key.strip() == "": + break + + if not validate_environment_variable_name(key): + toolkit.print( + "[error]Invalid environment variable name.", + ) + + else: + value = toolkit.input( + "Enter the environment variable value:", password=True + ) + + env_vars[key] = value + + toolkit.print_line() + + toolkit.print_line() + + with toolkit.progress("Setting up environment variables...") as progress: + with handle_http_errors(progress): + _create_environment_variables(app_id, env_vars) + + progress.log("Environment variables set up successfully!") + + +class SignupToWaitingList(BaseModel): + email: EmailStr + name: Optional[str] = None + organization: Optional[str] = None + role: Optional[str] = None + team_size: Optional[str] = None + location: Optional[str] = None + use_case: Optional[str] = None + secret_code: Optional[str] = None + + +def _send_waitlist_form( + result: SignupToWaitingList, + toolkit: RichToolkit, +) -> None: + with toolkit.progress("Sending your request...") as progress: + with APIClient() as client: + with handle_http_errors(progress): + response = client.post( + "/users/waiting-list", json=result.model_dump(mode="json") + ) + + response.raise_for_status() + + progress.log("Let's go! Thanks for your interest in FastAPI Cloud! 🚀") + + +def _waitlist_form(toolkit: RichToolkit) -> None: + from rich_toolkit.form import Form + + toolkit.print( + "We're currently in private beta. If you want to be notified when we launch, please fill out the form below.", + tag="waitlist", + ) + + toolkit.print_line() + + email = toolkit.input( + "Enter your email:", + required=True, + validator=TypeAdapter(EmailStr), + ) + + toolkit.print_line() + + result = SignupToWaitingList(email=email) + + if toolkit.confirm( + "Do you want to get access faster by giving us more information?", + tag="waitlist", + ): + toolkit.print_line() + form = Form("Waitlist form", style=toolkit.style) + + form.add_input("name", label="Name", placeholder="John Doe") + form.add_input("organization", label="Organization", placeholder="Acme Inc.") + form.add_input("team", label="Team", placeholder="Team A") + form.add_input("role", label="Role", placeholder="Developer") + form.add_input("location", label="Location", placeholder="San Francisco") + form.add_input( + "use_case", + label="How do you plan to use FastAPI Cloud?", + placeholder="I'm building a web app", + ) + form.add_input("secret_code", label="Secret code", placeholder="123456") + + result = form.run() # type: ignore + + try: + result = SignupToWaitingList.model_validate( + { + "email": email, + **result, # type: ignore + } + ) + except ValidationError: + toolkit.print( + "[error]Invalid form data. 
Please try again.[/]", + ) + + return + + toolkit.print_line() + + if toolkit.confirm( + ( + "Do you agree to\n" + "- Terms of Service: [link=https://fastapicloud.com/legal/terms]https://fastapicloud.com/legal/terms[/link]\n" + "- Privacy Policy: [link=https://fastapicloud.com/legal/privacy-policy]https://fastapicloud.com/legal/privacy-policy[/link]\n" + ), + tag="terms", + ): + toolkit.print_line() + + _send_waitlist_form( + result, + toolkit, + ) + + with contextlib.suppress(Exception): + subprocess.run( + ["open", "raycast://confetti"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=False, + ) + + +def deploy( + path: Annotated[ + Union[Path, None], + typer.Argument( + help="A path to the folder containing the app you want to deploy" + ), + ] = None, + skip_wait: Annotated[ + bool, typer.Option("--no-wait", help="Skip waiting for deployment status") + ] = False, +) -> Any: + """ + Deploy a [bold]FastAPI[/bold] app to FastAPI Cloud. 🚀 + """ + logger.debug("Deploy command started") + logger.debug("Deploy path: %s, skip_wait: %s", path, skip_wait) + + with get_rich_toolkit() as toolkit: + if not is_logged_in(): + logger.debug("User not logged in, showing waitlist form") + _waitlist_form(toolkit) + + raise typer.Exit(1) + + toolkit.print_title("Starting deployment", tag="FastAPI") + toolkit.print_line() + + path_to_deploy = path or Path.cwd() + logger.debug("Deploying from path: %s", path_to_deploy) + + app_config = get_app_config(path_to_deploy) + + if not app_config: + logger.debug("No app config found, configuring new app") + app_config = _configure_app(toolkit, path_to_deploy=path_to_deploy) + toolkit.print_line() + + _setup_environment_variables(toolkit, app_config.app_id) + toolkit.print_line() + else: + logger.debug("Existing app config found, proceeding with deployment") + toolkit.print("Deploying app...") + toolkit.print_line() + + with toolkit.progress("Checking app...", transient=True) as progress: + with handle_http_errors(progress): + logger.debug("Checking app with ID: %s", app_config.app_id) + app = _get_app(app_config.app_id) + + if not app: + logger.debug("App not found in API") + progress.set_error( + "App not found. Make sure you're logged in the correct account." + ) + + raise typer.Exit(1) + + logger.debug("Creating archive for deployment") + archive_path = archive(path or Path.cwd()) # noqa: F841 + + with toolkit.progress(title="Creating deployment") as progress: + with handle_http_errors(progress): + logger.debug("Creating deployment for app: %s", app.id) + deployment = _create_deployment(app.id) + + progress.log( + f"Deployment created successfully! 
Deployment slug: {deployment.slug}" + ) + + progress.log("Uploading deployment...") + + _upload_deployment(deployment.id, archive_path) + + progress.log("Deployment uploaded successfully!") + + toolkit.print_line() + + if not skip_wait: + logger.debug("Waiting for deployment to complete") + _wait_for_deployment(toolkit, app.id, deployment=deployment) + else: + logger.debug("Skipping deployment wait as requested") + toolkit.print( + f"Check the status of your deployment at [link={deployment.dashboard_url}]{deployment.dashboard_url}[/link]" + ) diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/env.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/env.py new file mode 100644 index 0000000000000000000000000000000000000000..b66d2ea35c3462c9b2372a691b18062edc33ede4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/env.py @@ -0,0 +1,246 @@ +import logging +from pathlib import Path +from typing import Any, List, Union + +import typer +from pydantic import BaseModel +from typing_extensions import Annotated + +from fastapi_cloud_cli.utils.api import APIClient +from fastapi_cloud_cli.utils.apps import get_app_config +from fastapi_cloud_cli.utils.auth import is_logged_in +from fastapi_cloud_cli.utils.cli import get_rich_toolkit, handle_http_errors +from fastapi_cloud_cli.utils.env import validate_environment_variable_name + +logger = logging.getLogger(__name__) + + +class EnvironmentVariable(BaseModel): + name: str + value: str + + +class EnvironmentVariableResponse(BaseModel): + data: List[EnvironmentVariable] + + +def _get_environment_variables(app_id: str) -> EnvironmentVariableResponse: + with APIClient() as client: + response = client.get(f"/apps/{app_id}/environment-variables/") + response.raise_for_status() + + return EnvironmentVariableResponse.model_validate(response.json()) + + +def _delete_environment_variable(app_id: str, name: str) -> bool: + with APIClient() as client: + response = client.delete(f"/apps/{app_id}/environment-variables/{name}") + + if response.status_code == 404: + return False + + response.raise_for_status() + + return True + + +def _set_environment_variable(app_id: str, name: str, value: str) -> None: + with APIClient() as client: + response = client.patch( + f"/apps/{app_id}/environment-variables/", + json={name: value}, + ) + response.raise_for_status() + + +env_app = typer.Typer() + + +@env_app.command() +def list( + path: Annotated[ + Union[Path, None], + typer.Argument( + help="A path to the folder containing the app you want to deploy" + ), + ] = None, +) -> Any: + """ + List the environment variables for the app. + """ + + with get_rich_toolkit(minimal=True) as toolkit: + if not is_logged_in(): + toolkit.print( + "No credentials found. 
Use [blue]`fastapi login`[/] to login.", + tag="auth", + ) + + raise typer.Exit(1) + + app_path = path or Path.cwd() + + app_config = get_app_config(app_path) + + if not app_config: + toolkit.print( + f"No app found in the folder [bold]{app_path}[/].", + ) + raise typer.Exit(1) + + with toolkit.progress( + "Fetching environment variables...", transient=True + ) as progress: + with handle_http_errors(progress): + environment_variables = _get_environment_variables(app_config.app_id) + + if not environment_variables.data: + toolkit.print("No environment variables found.") + return + + toolkit.print("Environment variables:") + toolkit.print_line() + + for env_var in environment_variables.data: + toolkit.print(f"[bold]{env_var.name}[/]") + + +@env_app.command() +def delete( + name: Union[str, None] = typer.Argument( + None, + help="The name of the environment variable to delete", + ), + path: Annotated[ + Union[Path, None], + typer.Argument( + help="A path to the folder containing the app you want to deploy" + ), + ] = None, +) -> Any: + """ + Delete an environment variable from the app. + """ + + with get_rich_toolkit(minimal=True) as toolkit: + # TODO: maybe this logic can be extracted to a function + if not is_logged_in(): + toolkit.print( + "No credentials found. Use [blue]`fastapi login`[/] to login.", + tag="auth", + ) + + raise typer.Exit(1) + + path_to_deploy = path or Path.cwd() + + app_config = get_app_config(path_to_deploy) + + if not app_config: + toolkit.print( + f"No app found in the folder [bold]{path_to_deploy}[/].", + ) + raise typer.Exit(1) + + if not name: + with toolkit.progress( + "Fetching environment variables...", transient=True + ) as progress: + with handle_http_errors(progress): + environment_variables = _get_environment_variables( + app_config.app_id + ) + + if not environment_variables.data: + toolkit.print("No environment variables found.") + return + + name = toolkit.ask( + "Select the environment variable to delete:", + options=[ + {"name": env_var.name, "value": env_var.name} + for env_var in environment_variables.data + ], + ) + + assert name + else: + if not validate_environment_variable_name(name): + toolkit.print( + f"The environment variable name [bold]{name}[/] is invalid." + ) + raise typer.Exit(1) + + toolkit.print_line() + + with toolkit.progress( + "Deleting environment variable", transient=True + ) as progress: + with handle_http_errors(progress): + deleted = _delete_environment_variable(app_config.app_id, name) + + if not deleted: + toolkit.print("Environment variable not found.") + raise typer.Exit(1) + + toolkit.print(f"Environment variable [bold]{name}[/] deleted.") + + +@env_app.command() +def set( + name: Union[str, None] = typer.Argument( + None, + help="The name of the environment variable to set", + ), + value: Union[str, None] = typer.Argument( + None, + help="The value of the environment variable to set", + ), + path: Annotated[ + Union[Path, None], + typer.Argument( + help="A path to the folder containing the app you want to deploy" + ), + ] = None, +) -> Any: + """ + Set an environment variable for the app. + """ + + with get_rich_toolkit(minimal=True) as toolkit: + if not is_logged_in(): + toolkit.print( + "No credentials found. 
Use [blue]`fastapi login`[/] to login.", + tag="auth", + ) + + raise typer.Exit(1) + + path_to_deploy = path or Path.cwd() + + app_config = get_app_config(path_to_deploy) + + if not app_config: + toolkit.print( + f"No app found in the folder [bold]{path_to_deploy}[/].", + ) + raise typer.Exit(1) + + if not name: + name = toolkit.input("Enter the name of the environment variable to set:") + + if not value: + value = toolkit.input( + "Enter the value of the environment variable to set:", password=True + ) + + with toolkit.progress( + "Setting environment variable", transient=True + ) as progress: + assert name is not None + assert value is not None + + with handle_http_errors(progress): + _set_environment_variable(app_config.app_id, name, value) + + toolkit.print(f"Environment variable [bold]{name}[/] set.") diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/login.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/login.py new file mode 100644 index 0000000000000000000000000000000000000000..3f881ba6aa3a5fa93dc8e7c7ed63954ddf979716 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/login.py @@ -0,0 +1,104 @@ +import logging +import time +from typing import Any + +import httpx +import typer +from pydantic import BaseModel + +from fastapi_cloud_cli.config import Settings +from fastapi_cloud_cli.utils.api import APIClient +from fastapi_cloud_cli.utils.auth import AuthConfig, write_auth_config +from fastapi_cloud_cli.utils.cli import get_rich_toolkit, handle_http_errors + +logger = logging.getLogger(__name__) + + +class AuthorizationData(BaseModel): + user_code: str + device_code: str + verification_uri: str + verification_uri_complete: str + interval: int = 5 + + +class TokenResponse(BaseModel): + access_token: str + + +def _start_device_authorization( + client: httpx.Client, +) -> AuthorizationData: + settings = Settings.get() + + response = client.post( + "/login/device/authorization", data={"client_id": settings.client_id} + ) + + response.raise_for_status() + + return AuthorizationData.model_validate(response.json()) + + +def _fetch_access_token(client: httpx.Client, device_code: str, interval: int) -> str: + settings = Settings.get() + + while True: + response = client.post( + "/login/device/token", + data={ + "device_code": device_code, + "client_id": settings.client_id, + "grant_type": "urn:ietf:params:oauth:grant-type:device_code", + }, + ) + + if response.status_code not in (200, 400): + response.raise_for_status() + + if response.status_code == 400: + data = response.json() + + if data.get("error") != "authorization_pending": + response.raise_for_status() + + if response.status_code == 200: + break + + time.sleep(interval) + + response_data = TokenResponse.model_validate(response.json()) + + return response_data.access_token + + +def login() -> Any: + """ + Login to FastAPI Cloud. 
🚀 + """ + with get_rich_toolkit() as toolkit, APIClient() as client: + toolkit.print_title("Login to FastAPI Cloud", tag="FastAPI") + + toolkit.print_line() + + with toolkit.progress("Starting authorization") as progress: + with handle_http_errors(progress): + authorization_data = _start_device_authorization(client) + + url = authorization_data.verification_uri_complete + + progress.log(f"Opening {url}") + + toolkit.print_line() + + with toolkit.progress("Waiting for user to authorize...") as progress: + typer.launch(url) + + with handle_http_errors(progress): + access_token = _fetch_access_token( + client, authorization_data.device_code, authorization_data.interval + ) + + write_auth_config(AuthConfig(access_token=access_token)) + + progress.log("Now you are logged in! 🚀") diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/logout.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/logout.py new file mode 100644 index 0000000000000000000000000000000000000000..82473244b16be0a7c43f6a0fd4c23a5d492a4a80 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/logout.py @@ -0,0 +1,12 @@ +from fastapi_cloud_cli.utils.auth import delete_auth_config +from fastapi_cloud_cli.utils.cli import get_rich_toolkit + + +def logout() -> None: + """ + Logout from FastAPI Cloud. 🚀 + """ + with get_rich_toolkit(minimal=True) as toolkit: + delete_auth_config() + + toolkit.print("You are now logged out! 🚀") diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/whoami.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/whoami.py new file mode 100644 index 0000000000000000000000000000000000000000..55de3b9f43dbd53ba5702f7a8c54757b6e180027 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/commands/whoami.py @@ -0,0 +1,27 @@ +import logging +from typing import Any + +from rich import print +from rich_toolkit.progress import Progress + +from fastapi_cloud_cli.utils.api import APIClient +from fastapi_cloud_cli.utils.auth import is_logged_in +from fastapi_cloud_cli.utils.cli import handle_http_errors + +logger = logging.getLogger(__name__) + + +def whoami() -> Any: + if not is_logged_in(): + print("No credentials found. 
Use [blue]`fastapi login`[/] to login.") + return + + with APIClient() as client: + with Progress(title="⚡ Fetching profile", transient=True) as progress: + with handle_http_errors(progress, message=""): + response = client.get("/users/me") + response.raise_for_status() + + data = response.json() + + print(f"⚡ [bold]{data['email']}[/bold]") diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/config.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/config.py new file mode 100644 index 0000000000000000000000000000000000000000..1988b6f92ca59201f43c13577b51ccd136960766 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/config.py @@ -0,0 +1,26 @@ +import json +from pathlib import Path + +from pydantic import BaseModel + +from .utils.config import get_cli_config_path + + +class Settings(BaseModel): + base_api_url: str = "https://api.fastapicloud.com/api/v1" + client_id: str = "fastapi-cli" + + @classmethod + def from_user_settings(cls, config_path: Path) -> "Settings": + try: + content = config_path.read_bytes() if config_path.exists() else b"{}" + + user_settings = json.loads(content) + except json.JSONDecodeError: + user_settings = {} + + return cls(**user_settings) + + @classmethod + def get(cls) -> "Settings": + return cls.from_user_settings(get_cli_config_path()) diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/logging.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..53efe46fcd94b1e9f1721835aefa198a791cf237 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/logging.py @@ -0,0 +1,31 @@ +import logging +import os +from typing import Union + +from rich.console import Console +from rich.logging import RichHandler + + +def setup_logging( + terminal_width: Union[int, None] = None, level: Union[int, None] = None +) -> None: + if level is None: + level = ( + logging.DEBUG if os.getenv("FASTAPI_CLOUD_DEBUG") == "1" else logging.INFO + ) + + logger = logging.getLogger("fastapi_cloud_cli") + console = Console(width=terminal_width) if terminal_width else None + rich_handler = RichHandler( + show_time=False, + rich_tracebacks=True, + tracebacks_show_locals=True, + markup=True, + show_path=False, + console=console, + ) + rich_handler.setFormatter(logging.Formatter("{message}", style="{")) + logger.addHandler(rich_handler) + + logger.setLevel(level) + logger.propagate = False diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/py.typed b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__init__.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0a5d706b89571abe8903e1fae0645f8158cfddaa Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/api.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..640eeb7faf5f6e81f62c43edbb4ec71affc6e3e5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/api.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/apps.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/apps.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54c4e05c17407a21a9b30c8ab49d4cf13790eb17 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/apps.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/auth.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/auth.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c05061f194cec3f6722c50b3e4aab6b3cb35fb9e Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/auth.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/cli.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/cli.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..469c01f006343c6d9751d82f53dd137e1f1e1559 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/cli.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/config.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71e0216670a6c55ca26a38a1f59838999f0fbec9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/env.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/env.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b9d680e75fdbef8e78366a09768e6f27b39a6d8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/env.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/sentry.cpython-310.pyc b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/sentry.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3aab7d9de36941a957395b69265c3d6c3dd945f5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/__pycache__/sentry.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/api.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/api.py new file mode 100644 index 0000000000000000000000000000000000000000..fe115612f47c63397488b2c6fa63a632f4559a58 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/api.py @@ -0,0 +1,21 @@ +import httpx + +from fastapi_cloud_cli import __version__ +from fastapi_cloud_cli.config import Settings +from fastapi_cloud_cli.utils.auth import get_auth_token + + +class APIClient(httpx.Client): + def __init__(self) -> None: + settings = Settings.get() + 
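+        # Editor's annotation (not in the upstream source): get_auth_token()
+        # returns Optional[str], so with no stored credentials this client
+        # sends "Authorization: Bearer None"; the API then replies 401/403,
+        # which handle_http_errors() in utils/cli.py turns into a
+        # "use `fastapi login`" hint. Commands also guard with is_logged_in()
+        # before constructing this client.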
+ token = get_auth_token() + + super().__init__( + base_url=settings.base_api_url, + timeout=httpx.Timeout(20), + headers={ + "Authorization": f"Bearer {token}", + "User-Agent": f"fastapi-cloud-cli/{__version__}", + }, + ) diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/apps.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..5e68c4c43e11c9e90ffd107025acf4f84d61b867 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/apps.py @@ -0,0 +1,59 @@ +import logging +from pathlib import Path +from typing import Optional + +from pydantic import BaseModel + +logger = logging.getLogger("fastapi_cli") + + +class AppConfig(BaseModel): + app_id: str + team_id: str + + +def get_app_config(path_to_deploy: Path) -> Optional[AppConfig]: + config_path = path_to_deploy / ".fastapicloud/cloud.json" + logger.debug("Looking for app config at: %s", config_path) + + if not config_path.exists(): + logger.debug("App config file doesn't exist") + return None + + logger.debug("App config loaded successfully") + return AppConfig.model_validate_json(config_path.read_text(encoding="utf-8")) + + +README = """ +> Why do I have a folder named ".fastapicloud" in my project? 🤔 +The ".fastapicloud" folder is created when you link a directory to a FastAPI Cloud project. + +> What does the "cloud.json" file contain? +The "cloud.json" file contains: +- The ID of the FastAPI app that you linked ("app_id") +- The ID of the team your FastAPI Cloud project is owned by ("team_id") + +> Should I commit the ".fastapicloud" folder? +No, you should not commit the ".fastapicloud" folder to your version control system. +That's why there's a ".gitignore" file in this folder. 
+""" + + +def write_app_config(path_to_deploy: Path, app_config: AppConfig) -> None: + config_path = path_to_deploy / ".fastapicloud/cloud.json" + readme_path = path_to_deploy / ".fastapicloud/README.md" + gitignore_path = path_to_deploy / ".fastapicloud/.gitignore" + + logger.debug("Writing app config to: %s", config_path) + logger.debug("App config data: %s", app_config) + + config_path.parent.mkdir(parents=True, exist_ok=True) + + config_path.write_text( + app_config.model_dump_json(), + encoding="utf-8", + ) + readme_path.write_text(README, encoding="utf-8") + gitignore_path.write_text("*") + + logger.debug("App config files written successfully") diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/auth.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..246721bb0f6f975d4b0c2577142092954392207a --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/auth.py @@ -0,0 +1,61 @@ +import logging +from typing import Optional + +from pydantic import BaseModel + +from .config import get_auth_path + +logger = logging.getLogger("fastapi_cli") + + +class AuthConfig(BaseModel): + access_token: str + + +def write_auth_config(auth_data: AuthConfig) -> None: + auth_path = get_auth_path() + logger.debug("Writing auth config to: %s", auth_path) + + auth_path.write_text(auth_data.model_dump_json(), encoding="utf-8") + logger.debug("Auth config written successfully") + + +def delete_auth_config() -> None: + auth_path = get_auth_path() + logger.debug("Deleting auth config at: %s", auth_path) + + if auth_path.exists(): + auth_path.unlink() + logger.debug("Auth config deleted successfully") + else: + logger.debug("Auth config file doesn't exist, nothing to delete") + + +def read_auth_config() -> Optional[AuthConfig]: + auth_path = get_auth_path() + logger.debug("Reading auth config from: %s", auth_path) + + if not auth_path.exists(): + logger.debug("Auth config file doesn't exist") + return None + + logger.debug("Auth config loaded successfully") + return AuthConfig.model_validate_json(auth_path.read_text(encoding="utf-8")) + + +def get_auth_token() -> Optional[str]: + logger.debug("Getting auth token") + auth_data = read_auth_config() + + if auth_data is None: + logger.debug("No auth data found") + return None + + logger.debug("Auth token retrieved successfully") + return auth_data.access_token + + +def is_logged_in() -> bool: + result = get_auth_token() is not None + logger.debug("Login status: %s", result) + return result diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/cli.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..2e798b5761eb061ea7705385f2934d5c575c27ca --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/cli.py @@ -0,0 +1,101 @@ +import contextlib +import logging +from typing import Any, Dict, Generator, List, Optional, Tuple + +import typer +from httpx import HTTPError, HTTPStatusError, ReadTimeout +from rich.segment import Segment +from rich_toolkit import RichToolkit, RichToolkitTheme +from rich_toolkit.progress import Progress +from rich_toolkit.styles import MinimalStyle, TaggedStyle + +logger = logging.getLogger(__name__) + + +class FastAPIStyle(TaggedStyle): + def __init__(self, tag_width: int = 11): + super().__init__(tag_width=tag_width) + + def _get_tag_segments( + self, + metadata: Dict[str, Any], + is_animated: bool = 
False, + done: bool = False, + ) -> Tuple[List[Segment], int]: + if not is_animated: + return super()._get_tag_segments(metadata, is_animated, done) + + emojis = [ + "🥚", + "🐣", + "🐤", + "🐥", + "🐓", + "🐔", + ] + + tag = emojis[self.animation_counter % len(emojis)] + + if done: + tag = emojis[-1] + + left_padding = self.tag_width - 1 + left_padding = max(0, left_padding) + + return [Segment(tag)], left_padding + + +def get_rich_toolkit(minimal: bool = False) -> RichToolkit: + style = MinimalStyle() if minimal else FastAPIStyle(tag_width=11) + + theme = RichToolkitTheme( + style=style, + theme={ + "tag.title": "white on #009485", + "tag": "white on #007166", + "placeholder": "grey85", + "text": "white", + "selected": "#007166", + "result": "grey85", + "progress": "on #007166", + "error": "red", + }, + ) + + return RichToolkit(theme=theme) + + +@contextlib.contextmanager +def handle_http_errors( + progress: Progress, + message: Optional[str] = None, +) -> Generator[None, None, None]: + try: + yield + except ReadTimeout as e: + logger.debug(e) + + progress.set_error( + "The request to the FastAPI Cloud server timed out. Please try again later." + ) + + raise typer.Exit(1) from None + except HTTPError as e: + logger.debug(e) + + # Handle validation errors from Pydantic models, this should make it easier to debug :) + if isinstance(e, HTTPStatusError) and e.response.status_code == 422: + logger.debug(e.response.json()) # pragma: no cover + + if isinstance(e, HTTPStatusError) and e.response.status_code in (401, 403): + message = "The specified token is not valid. Use `fastapi login` to generate a new token." + + else: + message = ( + message + or f"Something went wrong while contacting the FastAPI Cloud server. Please try again later. \n\n{e}" + ) + + progress.set_error(message) + + raise typer.Exit(1) from None diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/config.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/config.py new file mode 100644 index 0000000000000000000000000000000000000000..3da45cf352f4e0b14566102af67c3c1f9af87d66 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/config.py @@ -0,0 +1,21 @@ +from pathlib import Path + +import typer + + +def get_config_folder() -> Path: + return Path(typer.get_app_dir("fastapi-cli")) + + +def get_auth_path() -> Path: + auth_path = get_config_folder() / "auth.json" + auth_path.parent.mkdir(parents=True, exist_ok=True) + + return auth_path + + +def get_cli_config_path() -> Path: + cli_config_path = get_config_folder() / "cli.json" + cli_config_path.parent.mkdir(parents=True, exist_ok=True) + + return cli_config_path diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/env.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/env.py new file mode 100644 index 0000000000000000000000000000000000000000..b11c6c877a2ce63d8883e199cedaae4e753b8413 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/env.py @@ -0,0 +1,5 @@ +def validate_environment_variable_name(name: str) -> bool: + if name.isidentifier(): + return True + + return False diff --git a/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/sentry.py b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/sentry.py new file mode 100644 index 0000000000000000000000000000000000000000..1703796bd6e2c7e6511d048e2b89be5fbb1bc0d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/fastapi_cloud_cli/utils/sentry.py @@ -0,0 +1,18 @@ +import sentry_sdk +from 
sentry_sdk.integrations.typer import TyperIntegration + +from .auth import is_logged_in + +SENTRY_DSN = "https://230250605ea4b58a0b69c768e9ec1168@o4506985151856640.ingest.us.sentry.io/4508449198899200" + + +def init_sentry() -> None: + """Initialize Sentry error tracking only if user is logged in.""" + if not is_logged_in(): + return + + sentry_sdk.init( + dsn=SENTRY_DSN, + integrations=[TyperIntegration()], + send_default_pii=False, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/__init__.py b/venv/lib/python3.10/site-packages/grpc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..067f66693f39cd45c62002a49f4feccda94486bf --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/__init__.py @@ -0,0 +1,2348 @@ +# Copyright 2015-2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""gRPC's Python API.""" + +import abc +import contextlib +import enum +import logging +import sys + +from grpc import _compression +from grpc._cython import cygrpc as _cygrpc +from grpc._runtime_protos import protos +from grpc._runtime_protos import protos_and_services +from grpc._runtime_protos import services + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + +try: + # pylint: disable=ungrouped-imports + from grpc._grpcio_metadata import __version__ +except ImportError: + __version__ = "dev0" + +############################## Future Interface ############################### + + +class FutureTimeoutError(Exception): + """Indicates that a method call on a Future timed out.""" + + +class FutureCancelledError(Exception): + """Indicates that the computation underlying a Future was cancelled.""" + + +class Future(abc.ABC): + """A representation of a computation in another control flow. + + Computations represented by a Future may be yet to be begun, + may be ongoing, or may have already completed. + """ + + @abc.abstractmethod + def cancel(self): + """Attempts to cancel the computation. + + This method does not block. + + Returns: + bool: + Returns True if the computation was canceled. + + Returns False under all other circumstances, for example: + + 1. computation has begun and could not be canceled. + 2. computation has finished + 3. computation is scheduled for execution and it is impossible + to determine its state without blocking. + """ + raise NotImplementedError() + + @abc.abstractmethod + def cancelled(self): + """Describes whether the computation was cancelled. + + This method does not block. + + Returns: + bool: + Returns True if the computation was cancelled before its result became + available. + + Returns False under all other circumstances, for example: + + 1. computation was not cancelled. + 2. computation's result is available. + """ + raise NotImplementedError() + + @abc.abstractmethod + def running(self): + """Describes whether the computation is taking place. + + This method does not block. + + Returns: + Returns True if the computation is scheduled for execution or + currently executing. 
+
+        Returns False if the computation already executed or was cancelled.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def done(self):
+        """Describes whether the computation has taken place.
+
+        This method does not block.
+
+        Returns:
+          bool:
+          Returns True if the computation already executed or was cancelled.
+          Returns False if the computation is scheduled for execution or
+          currently executing.
+          This is exactly the opposite of the running() method's result.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def result(self, timeout=None):
+        """Returns the result of the computation or raises its exception.
+
+        This method may return immediately or may block.
+
+        Args:
+          timeout: The length of time in seconds to wait for the computation to
+            finish or be cancelled. If None, the call will block until the
+            computation's termination.
+
+        Returns:
+          The return value of the computation.
+
+        Raises:
+          FutureTimeoutError: If a timeout value is passed and the computation
+            does not terminate within the allotted time.
+          FutureCancelledError: If the computation was cancelled.
+          Exception: If the computation raised an exception, this call will
+            raise the same exception.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def exception(self, timeout=None):
+        """Return the exception raised by the computation.
+
+        This method may return immediately or may block.
+
+        Args:
+          timeout: The length of time in seconds to wait for the computation to
+            terminate or be cancelled. If None, the call will block until the
+            computation's termination.
+
+        Returns:
+          The exception raised by the computation, or None if the computation
+          did not raise an exception.
+
+        Raises:
+          FutureTimeoutError: If a timeout value is passed and the computation
+            does not terminate within the allotted time.
+          FutureCancelledError: If the computation was cancelled.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def traceback(self, timeout=None):
+        """Access the traceback of the exception raised by the computation.
+
+        This method may return immediately or may block.
+
+        Args:
+          timeout: The length of time in seconds to wait for the computation
+            to terminate or be cancelled. If None, the call will block until
+            the computation's termination.
+
+        Returns:
+          The traceback of the exception raised by the computation, or None
+          if the computation did not raise an exception.
+
+        Raises:
+          FutureTimeoutError: If a timeout value is passed and the computation
+            does not terminate within the allotted time.
+          FutureCancelledError: If the computation was cancelled.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def add_done_callback(self, fn):
+        """Adds a function to be called at completion of the computation.
+
+        The callback will be passed this Future object describing the outcome
+        of the computation. Callbacks will be invoked after the future is
+        terminated, whether successfully or not.
+
+        If the computation has already completed, the callback will be called
+        immediately.
+
+        Exceptions raised in the callback will be logged at ERROR level, but
+        will not terminate any threads of execution.
+
+        Args:
+          fn: A callable taking this Future object as its single parameter.
+        """
+        raise NotImplementedError()
+
+
+################################ gRPC Enums ##################################
+
+
+@enum.unique
+class ChannelConnectivity(enum.Enum):
+    """Mirrors grpc_connectivity_state in the gRPC Core.
+
+    Attributes:
+      IDLE: The channel is idle. 
+      CONNECTING: The channel is connecting.
+      READY: The channel is ready to conduct RPCs.
+      TRANSIENT_FAILURE: The channel has seen a failure from which it expects
+        to recover.
+      SHUTDOWN: The channel has seen a failure from which it cannot recover.
+    """
+
+    IDLE = (_cygrpc.ConnectivityState.idle, "idle")
+    CONNECTING = (_cygrpc.ConnectivityState.connecting, "connecting")
+    READY = (_cygrpc.ConnectivityState.ready, "ready")
+    TRANSIENT_FAILURE = (
+        _cygrpc.ConnectivityState.transient_failure,
+        "transient failure",
+    )
+    SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, "shutdown")
+
+
+@enum.unique
+class StatusCode(enum.Enum):
+    """Mirrors grpc_status_code in the gRPC Core.
+
+    Attributes:
+      OK: Not an error; returned on success.
+      CANCELLED: The operation was cancelled (typically by the caller).
+      UNKNOWN: Unknown error.
+      INVALID_ARGUMENT: Client specified an invalid argument.
+      DEADLINE_EXCEEDED: Deadline expired before operation could complete.
+      NOT_FOUND: Some requested entity (e.g., file or directory) was not found.
+      ALREADY_EXISTS: Some entity that we attempted to create (e.g., file or
+        directory) already exists.
+      PERMISSION_DENIED: The caller does not have permission to execute the
+        specified operation.
+      UNAUTHENTICATED: The request does not have valid authentication
+        credentials for the operation.
+      RESOURCE_EXHAUSTED: Some resource has been exhausted, perhaps a per-user
+        quota, or perhaps the entire file system is out of space.
+      FAILED_PRECONDITION: Operation was rejected because the system is not in
+        a state required for the operation's execution.
+      ABORTED: The operation was aborted, typically due to a concurrency issue
+        like sequencer check failures, transaction aborts, etc.
+      OUT_OF_RANGE: Operation was attempted past the valid range.
+      UNIMPLEMENTED: Operation is not implemented or not supported/enabled in
+        this service.
+      INTERNAL: Internal errors. Means some invariants expected by the
+        underlying system have been broken.
+      UNAVAILABLE: The service is currently unavailable.
+      DATA_LOSS: Unrecoverable data loss or corruption.
+    """
+
+    OK = (_cygrpc.StatusCode.ok, "ok")
+    CANCELLED = (_cygrpc.StatusCode.cancelled, "cancelled")
+    UNKNOWN = (_cygrpc.StatusCode.unknown, "unknown")
+    INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, "invalid argument")
+    DEADLINE_EXCEEDED = (
+        _cygrpc.StatusCode.deadline_exceeded,
+        "deadline exceeded",
+    )
+    NOT_FOUND = (_cygrpc.StatusCode.not_found, "not found")
+    ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, "already exists")
+    PERMISSION_DENIED = (
+        _cygrpc.StatusCode.permission_denied,
+        "permission denied",
+    )
+    RESOURCE_EXHAUSTED = (
+        _cygrpc.StatusCode.resource_exhausted,
+        "resource exhausted",
+    )
+    FAILED_PRECONDITION = (
+        _cygrpc.StatusCode.failed_precondition,
+        "failed precondition",
+    )
+    ABORTED = (_cygrpc.StatusCode.aborted, "aborted")
+    OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, "out of range")
+    UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, "unimplemented")
+    INTERNAL = (_cygrpc.StatusCode.internal, "internal")
+    UNAVAILABLE = (_cygrpc.StatusCode.unavailable, "unavailable")
+    DATA_LOSS = (_cygrpc.StatusCode.data_loss, "data loss")
+    UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, "unauthenticated")
+
+
+############################# gRPC Status ################################
+
+
+class Status(abc.ABC):
+    """Describes the status of an RPC.
+
+    This is an EXPERIMENTAL API.
+
+    Attributes:
+      code: A StatusCode object to be sent to the client.
+      details: A UTF-8-encodable string to be sent to the client upon
+        termination of the RPC. 
+ trailing_metadata: The trailing :term:`metadata` in the RPC. + """ + + +############################# gRPC Exceptions ################################ + + +class RpcError(Exception): + """Raised by the gRPC library to indicate non-OK-status RPC termination.""" + + +############################## Shared Context ################################ + + +class RpcContext(abc.ABC): + """Provides RPC-related information and action.""" + + @abc.abstractmethod + def is_active(self): + """Describes whether the RPC is active or has terminated. + + Returns: + bool: + True if RPC is active, False otherwise. + """ + raise NotImplementedError() + + @abc.abstractmethod + def time_remaining(self): + """Describes the length of allowed time remaining for the RPC. + + Returns: + A nonnegative float indicating the length of allowed time in seconds + remaining for the RPC to complete before it is considered to have + timed out, or None if no deadline was specified for the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def cancel(self): + """Cancels the RPC. + + Idempotent and has no effect if the RPC has already terminated. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_callback(self, callback): + """Registers a callback to be called on RPC termination. + + Args: + callback: A no-parameter callable to be called on RPC termination. + + Returns: + True if the callback was added and will be called later; False if + the callback was not added and will not be called (because the RPC + already terminated or some other reason). + """ + raise NotImplementedError() + + +######################### Invocation-Side Context ############################ + + +class Call(RpcContext, metaclass=abc.ABCMeta): + """Invocation-side utility object for an RPC.""" + + @abc.abstractmethod + def initial_metadata(self): + """Accesses the initial metadata sent by the server. + + This method blocks until the value is available. + + Returns: + The initial :term:`metadata`. + """ + raise NotImplementedError() + + @abc.abstractmethod + def trailing_metadata(self): + """Accesses the trailing metadata sent by the server. + + This method blocks until the value is available. + + Returns: + The trailing :term:`metadata`. + """ + raise NotImplementedError() + + @abc.abstractmethod + def code(self): + """Accesses the status code sent by the server. + + This method blocks until the value is available. + + Returns: + The StatusCode value for the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def details(self): + """Accesses the details sent by the server. + + This method blocks until the value is available. + + Returns: + The details string of the RPC. + """ + raise NotImplementedError() + + +############## Invocation-Side Interceptor Interfaces & Classes ############## + + +class ClientCallDetails(abc.ABC): + """Describes an RPC to be invoked. + + Attributes: + method: The method name of the RPC. + timeout: An optional duration of time in seconds to allow for the RPC. + metadata: Optional :term:`metadata` to be transmitted to + the service-side of the RPC. + credentials: An optional CallCredentials for the RPC. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. 
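+
+    Example (editor's illustrative sketch, not part of the upstream
+    docstring): ClientCallDetails is abstract, so an interceptor that wants
+    to change an outgoing RPC conventionally rebuilds it as a namedtuple::
+
+        import collections
+
+        class _ClientCallDetails(
+            collections.namedtuple(
+                "_ClientCallDetails",
+                ("method", "timeout", "metadata", "credentials",
+                 "wait_for_ready", "compression"),
+            ),
+            ClientCallDetails,
+        ):
+            pass
+
+        class HeaderClientInterceptor(UnaryUnaryClientInterceptor):
+            def intercept_unary_unary(self, continuation, client_call_details, request):
+                # Copy the existing metadata and append an illustrative header.
+                metadata = list(client_call_details.metadata or ())
+                metadata.append(("x-example-header", "example-value"))
+                new_details = _ClientCallDetails(
+                    client_call_details.method,
+                    client_call_details.timeout,
+                    metadata,
+                    client_call_details.credentials,
+                    client_call_details.wait_for_ready,
+                    client_call_details.compression,
+                )
+                return continuation(new_details, request)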
+ """ + + +class UnaryUnaryClientInterceptor(abc.ABC): + """Affords intercepting unary-unary invocations.""" + + @abc.abstractmethod + def intercept_unary_unary(self, continuation, client_call_details, request): + """Intercepts a unary-unary invocation asynchronously. + + Args: + continuation: A function that proceeds with the invocation by + executing the next interceptor in chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `response_future = continuation(client_call_details, request)` + to continue with the RPC. `continuation` returns an object that is + both a Call for the RPC and a Future. In the event of RPC + completion, the return Call-Future's result value will be + the response message of the RPC. Should the event terminate + with non-OK status, the returned Call-Future's exception value + will be an RpcError. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request: The request value for the RPC. + + Returns: + An object that is both a Call for the RPC and a Future. + In the event of RPC completion, the return Call-Future's + result value will be the response message of the RPC. + Should the event terminate with non-OK status, the returned + Call-Future's exception value will be an RpcError. + """ + raise NotImplementedError() + + +class UnaryStreamClientInterceptor(abc.ABC): + """Affords intercepting unary-stream invocations.""" + + @abc.abstractmethod + def intercept_unary_stream( + self, continuation, client_call_details, request + ): + """Intercepts a unary-stream invocation. + + Args: + continuation: A function that proceeds with the invocation by + executing the next interceptor in chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `response_iterator = continuation(client_call_details, request)` + to continue with the RPC. `continuation` returns an object that is + both a Call for the RPC and an iterator for response values. + Drawing response values from the returned Call-iterator may + raise RpcError indicating termination of the RPC with non-OK + status. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request: The request value for the RPC. + + Returns: + An object that is both a Call for the RPC and an iterator of + response values. Drawing response values from the returned + Call-iterator may raise RpcError indicating termination of + the RPC with non-OK status. This object *should* also fulfill the + Future interface, though it may not. + """ + raise NotImplementedError() + + +class StreamUnaryClientInterceptor(abc.ABC): + """Affords intercepting stream-unary invocations.""" + + @abc.abstractmethod + def intercept_stream_unary( + self, continuation, client_call_details, request_iterator + ): + """Intercepts a stream-unary invocation asynchronously. + + Args: + continuation: A function that proceeds with the invocation by + executing the next interceptor in chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `response_future = continuation(client_call_details, request_iterator)` + to continue with the RPC. `continuation` returns an object that is + both a Call for the RPC and a Future. 
In the event of RPC completion, + the return Call-Future's result value will be the response message + of the RPC. Should the event terminate with non-OK status, the + returned Call-Future's exception value will be an RpcError. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request_iterator: An iterator that yields request values for the RPC. + + Returns: + An object that is both a Call for the RPC and a Future. + In the event of RPC completion, the return Call-Future's + result value will be the response message of the RPC. + Should the event terminate with non-OK status, the returned + Call-Future's exception value will be an RpcError. + """ + raise NotImplementedError() + + +class StreamStreamClientInterceptor(abc.ABC): + """Affords intercepting stream-stream invocations.""" + + @abc.abstractmethod + def intercept_stream_stream( + self, continuation, client_call_details, request_iterator + ): + """Intercepts a stream-stream invocation. + + Args: + continuation: A function that proceeds with the invocation by + executing the next interceptor in chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `response_iterator = continuation(client_call_details, request_iterator)` + to continue with the RPC. `continuation` returns an object that is + both a Call for the RPC and an iterator for response values. + Drawing response values from the returned Call-iterator may + raise RpcError indicating termination of the RPC with non-OK + status. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request_iterator: An iterator that yields request values for the RPC. + + Returns: + An object that is both a Call for the RPC and an iterator of + response values. Drawing response values from the returned + Call-iterator may raise RpcError indicating termination of + the RPC with non-OK status. This object *should* also fulfill the + Future interface, though it may not. + """ + raise NotImplementedError() + + +############ Authentication & Authorization Interfaces & Classes ############# + + +class ChannelCredentials(object): + """An encapsulation of the data required to create a secure Channel. + + This class has no supported interface - it exists to define the type of its + instances and its instances exist to be passed to other functions. For + example, ssl_channel_credentials returns an instance of this class and + secure_channel requires an instance of this class. + """ + + def __init__(self, credentials): + self._credentials = credentials + + +class CallCredentials(object): + """An encapsulation of the data required to assert an identity over a call. + + A CallCredentials has to be used with secure Channel, otherwise the + metadata will not be transmitted to the server. + + A CallCredentials may be composed with ChannelCredentials to always assert + identity for every call over that Channel. + + This class has no supported interface - it exists to define the type of its + instances and its instances exist to be passed to other functions. + """ + + def __init__(self, credentials): + self._credentials = credentials + + +class AuthMetadataContext(abc.ABC): + """Provides information to call credentials metadata plugins. + + Attributes: + service_url: A string URL of the service being called into. + method_name: A string of the fully qualified method name being called. 
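+
+    Example (editor's illustrative sketch, not part of the upstream
+    docstring): an AuthMetadataPlugin receives this context and attaches
+    per-call credentials through its callback::
+
+        class StaticTokenAuthPlugin(AuthMetadataPlugin):
+            def __call__(self, context, callback):
+                # context.service_url and context.method_name identify the
+                # RPC; the token below is a placeholder, not a real secret.
+                callback((("authorization", "Bearer <token>"),), None)
+
+        call_credentials = grpc.metadata_call_credentials(StaticTokenAuthPlugin())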
+ """ + + +class AuthMetadataPluginCallback(abc.ABC): + """Callback object received by a metadata plugin.""" + + def __call__(self, metadata, error): + """Passes to the gRPC runtime authentication metadata for an RPC. + + Args: + metadata: The :term:`metadata` used to construct the CallCredentials. + error: An Exception to indicate error or None to indicate success. + """ + raise NotImplementedError() + + +class AuthMetadataPlugin(abc.ABC): + """A specification for custom authentication.""" + + def __call__(self, context, callback): + """Implements authentication by passing metadata to a callback. + + This method will be invoked asynchronously in a separate thread. + + Args: + context: An AuthMetadataContext providing information on the RPC that + the plugin is being called to authenticate. + callback: An AuthMetadataPluginCallback to be invoked either + synchronously or asynchronously. + """ + raise NotImplementedError() + + +class ServerCredentials(object): + """An encapsulation of the data required to open a secure port on a Server. + + This class has no supported interface - it exists to define the type of its + instances and its instances exist to be passed to other functions. + """ + + def __init__(self, credentials): + self._credentials = credentials + + +class ServerCertificateConfiguration(object): + """A certificate configuration for use with an SSL-enabled Server. + + Instances of this class can be returned in the certificate configuration + fetching callback. + + This class has no supported interface -- it exists to define the + type of its instances and its instances exist to be passed to + other functions. + """ + + def __init__(self, certificate_configuration): + self._certificate_configuration = certificate_configuration + + +######################## Multi-Callable Interfaces ########################### + + +class UnaryUnaryMultiCallable(abc.ABC): + """Affords invoking a unary-unary RPC from client-side.""" + + @abc.abstractmethod + def __call__( + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Synchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional duration of time in seconds to allow + for the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + The response value for the RPC. + + Raises: + RpcError: Indicating that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + raise NotImplementedError() + + @abc.abstractmethod + def with_call( + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Synchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional durating of time in seconds to allow for + the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. 
+ grpc.compression.Gzip. + + Returns: + The response value for the RPC and a Call value for the RPC. + + Raises: + RpcError: Indicating that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future( + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + An object that is both a Call for the RPC and a Future. + In the event of RPC completion, the return Call-Future's result + value will be the response message of the RPC. + Should the event terminate with non-OK status, + the returned Call-Future's exception value will be an RpcError. + """ + raise NotImplementedError() + + +class UnaryStreamMultiCallable(abc.ABC): + """Affords invoking a unary-stream RPC from client-side.""" + + @abc.abstractmethod + def __call__( + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. If None, the timeout is considered infinite. + metadata: An optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + An object that is a Call for the RPC, an iterator of response + values, and a Future for the RPC. Drawing response values from the + returned Call-iterator may raise RpcError indicating termination of + the RPC with non-OK status. + """ + raise NotImplementedError() + + +class StreamUnaryMultiCallable(abc.ABC): + """Affords invoking a stream-unary RPC from client-side.""" + + @abc.abstractmethod + def __call__( + self, + request_iterator, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Synchronously invokes the underlying RPC. + + Args: + request_iterator: An iterator that yields request values for + the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. If None, the timeout is considered infinite. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + The response value for the RPC. + + Raises: + RpcError: Indicating that the RPC terminated with non-OK status. The + raised RpcError will also implement grpc.Call, affording methods + such as metadata, code, and details. 
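# --- Editor's illustration (not part of the patch): the three invocation
# styles offered by the multi-callables above. `stub` and `request` are
# assumed to come from protoc-generated code (e.g. a Greeter service); the
# names are hypothetical.
def demonstrate_invocation_styles(stub, request):
    response = stub.SayHello(request, timeout=5.0)      # __call__: blocks
    response, call = stub.SayHello.with_call(request)   # response plus Call
    print(call.code(), call.details())
    call_future = stub.SayHello.future(request)         # both Call and Future
    return call_future.result(timeout=5.0)              # may raise RpcError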
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def with_call( + self, + request_iterator, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Synchronously invokes the underlying RPC on the client. + + Args: + request_iterator: An iterator that yields request values for + the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. If None, the timeout is considered infinite. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + The response value for the RPC and a Call object for the RPC. + + Raises: + RpcError: Indicating that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future( + self, + request_iterator, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Asynchronously invokes the underlying RPC on the client. + + Args: + request_iterator: An iterator that yields request values for the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. If None, the timeout is considered infinite. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + An object that is both a Call for the RPC and a Future. + In the event of RPC completion, the return Call-Future's result value + will be the response message of the RPC. Should the event terminate + with non-OK status, the returned Call-Future's exception value will + be an RpcError. + """ + raise NotImplementedError() + + +class StreamStreamMultiCallable(abc.ABC): + """Affords invoking a stream-stream RPC on client-side.""" + + @abc.abstractmethod + def __call__( + self, + request_iterator, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None, + ): + """Invokes the underlying RPC on the client. + + Args: + request_iterator: An iterator that yields request values for the RPC. + timeout: An optional duration of time in seconds to allow for + the RPC. If not specified, the timeout is considered infinite. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + An object that is a Call for the RPC, an iterator of response + values, and a Future for the RPC. Drawing response values from the + returned Call-iterator may raise RpcError indicating termination of + the RPC with non-OK status. + """ + raise NotImplementedError() + + +############################# Channel Interface ############################## + + +class Channel(abc.ABC): + """Affords RPC invocation via generic methods on client-side. 
+
+    Channel objects implement the Context Manager type, although they need not
+    support being entered and exited multiple times.
+    """
+
+    @abc.abstractmethod
+    def subscribe(self, callback, try_to_connect=False):
+        """Subscribe to this Channel's connectivity state machine.
+
+        A Channel may be in any of the states described by ChannelConnectivity.
+        This method allows the application to monitor the state transitions.
+        The typical use case is to debug or gain better visibility into gRPC
+        runtime's state.
+
+        Args:
+          callback: A callable to be invoked with ChannelConnectivity argument.
+            ChannelConnectivity describes current state of the channel.
+            The callable will be invoked immediately upon subscription
+            and again for every change to ChannelConnectivity until it
+            is unsubscribed or this Channel object goes out of scope.
+          try_to_connect: A boolean indicating whether or not this Channel
+            should attempt to connect immediately. If set to False, gRPC
+            runtime decides when to connect.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def unsubscribe(self, callback):
+        """Unsubscribes a subscribed callback from this Channel's connectivity.
+
+        Args:
+          callback: A callable previously registered with this Channel by
+            having been passed to its "subscribe" method.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def unary_unary(
+        self,
+        method,
+        request_serializer=None,
+        response_deserializer=None,
+        _registered_method=False,
+    ):
+        """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
+        Args:
+          method: The name of the RPC method.
+          request_serializer: Optional :term:`serializer` for serializing the request
+            message. Request goes unserialized in case None is passed.
+          response_deserializer: Optional :term:`deserializer` for deserializing the
+            response message. Response goes undeserialized in case None
+            is passed.
+          _registered_method: Implementation Private. A bool representing whether the method
+            is registered.
+
+        Returns:
+          A UnaryUnaryMultiCallable value for the named unary-unary method.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def unary_stream(
+        self,
+        method,
+        request_serializer=None,
+        response_deserializer=None,
+        _registered_method=False,
+    ):
+        """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
+        Args:
+          method: The name of the RPC method.
+          request_serializer: Optional :term:`serializer` for serializing the request
+            message. Request goes unserialized in case None is passed.
+          response_deserializer: Optional :term:`deserializer` for deserializing the
+            response message. Response goes undeserialized in case None is
+            passed.
+          _registered_method: Implementation Private. A bool representing whether the method
+            is registered.
+
+        Returns:
+          A UnaryStreamMultiCallable value for the named unary-stream method.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def stream_unary(
+        self,
+        method,
+        request_serializer=None,
+        response_deserializer=None,
+        _registered_method=False,
+    ):
+        """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
+        Args:
+          method: The name of the RPC method.
+          request_serializer: Optional :term:`serializer` for serializing the request
+            message. Request goes unserialized in case None is passed.
+          response_deserializer: Optional :term:`deserializer` for deserializing the
+            response message. Response goes undeserialized in case None is
+            passed.
+          _registered_method: Implementation Private. A bool representing whether the method
+            is registered.
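# --- Editor's illustration (not part of the patch): using the generic
# methods above without generated stubs. With both serializers left as None,
# raw bytes pass through unmodified; the method path and payload are
# assumptions (the payload is a serialized HelloRequest{name: "world"}).
import grpc

with grpc.insecure_channel("localhost:50051") as channel:
    say_hello = channel.unary_unary("/helloworld.Greeter/SayHello")
    reply_bytes = say_hello(b"\n\x05world", timeout=5.0)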
+ + Returns: + A StreamUnaryMultiCallable value for the named stream-unary method. + """ + raise NotImplementedError() + + @abc.abstractmethod + def stream_stream( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): + """Creates a StreamStreamMultiCallable for a stream-stream method. + + Args: + method: The name of the RPC method. + request_serializer: Optional :term:`serializer` for serializing the request + message. Request goes unserialized in case None is passed. + response_deserializer: Optional :term:`deserializer` for deserializing the + response message. Response goes undeserialized in case None + is passed. + _registered_method: Implementation Private. A bool representing whether the method + is registered. + + Returns: + A StreamStreamMultiCallable value for the named stream-stream method. + """ + raise NotImplementedError() + + @abc.abstractmethod + def close(self): + """Closes this Channel and releases all resources held by it. + + Closing the Channel will immediately terminate all RPCs active with the + Channel and it is not valid to invoke new RPCs with the Channel. + + This method is idempotent. + """ + raise NotImplementedError() + + def __enter__(self): + """Enters the runtime context related to the channel object.""" + raise NotImplementedError() + + def __exit__(self, exc_type, exc_val, exc_tb): + """Exits the runtime context related to the channel object.""" + raise NotImplementedError() + + +########################## Service-Side Context ############################## + + +class ServicerContext(RpcContext, metaclass=abc.ABCMeta): + """A context object passed to method implementations.""" + + @abc.abstractmethod + def invocation_metadata(self): + """Accesses the metadata sent by the client. + + Returns: + The invocation :term:`metadata`. + """ + raise NotImplementedError() + + @abc.abstractmethod + def peer(self): + """Identifies the peer that invoked the RPC being serviced. + + Returns: + A string identifying the peer that invoked the RPC being serviced. + The string format is determined by gRPC runtime. + """ + raise NotImplementedError() + + @abc.abstractmethod + def peer_identities(self): + """Gets one or more peer identity(s). + + Equivalent to + servicer_context.auth_context().get(servicer_context.peer_identity_key()) + + Returns: + An iterable of the identities, or None if the call is not + authenticated. Each identity is returned as a raw bytes type. + """ + raise NotImplementedError() + + @abc.abstractmethod + def peer_identity_key(self): + """The auth property used to identify the peer. + + For example, "x509_common_name" or "x509_subject_alternative_name" are + used to identify an SSL peer. + + Returns: + The auth property (string) that indicates the + peer identity, or None if the call is not authenticated. + """ + raise NotImplementedError() + + @abc.abstractmethod + def auth_context(self): + """Gets the auth context for the call. + + Returns: + A map of strings to an iterable of bytes for each auth property. + """ + raise NotImplementedError() + + def set_compression(self, compression): + """Set the compression algorithm to be used for the entire call. + + Args: + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + """ + raise NotImplementedError() + + @abc.abstractmethod + def send_initial_metadata(self, initial_metadata): + """Sends the initial metadata value to the client. 
+
+        This method need not be called by implementations if they have no
+        metadata to add to what the gRPC runtime will transmit.
+
+        Args:
+          initial_metadata: The initial :term:`metadata`.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def set_trailing_metadata(self, trailing_metadata):
+        """Sets the trailing metadata for the RPC.
+
+        Sets the trailing metadata to be sent upon completion of the RPC.
+
+        If this method is invoked multiple times throughout the lifetime of an
+        RPC, the value supplied in the final invocation will be the value sent
+        over the wire.
+
+        This method need not be called by implementations if they have no
+        metadata to add to what the gRPC runtime will transmit.
+
+        Args:
+          trailing_metadata: The trailing :term:`metadata`.
+        """
+        raise NotImplementedError()
+
+    def trailing_metadata(self):
+        """Accesses the value to be used as trailing metadata upon RPC completion.
+
+        This is an EXPERIMENTAL API.
+
+        Returns:
+          The trailing :term:`metadata` for the RPC.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def abort(self, code, details):
+        """Raises an exception to terminate the RPC with a non-OK status.
+
+        The code and details passed as arguments will supersede any existing
+        ones.
+
+        Args:
+          code: A StatusCode object to be sent to the client.
+            It must not be StatusCode.OK.
+          details: A UTF-8-encodable string to be sent to the client upon
+            termination of the RPC.
+
+        Raises:
+          Exception: An exception is always raised to signal the abortion of
+            the RPC to the gRPC runtime.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def abort_with_status(self, status):
+        """Raises an exception to terminate the RPC with a non-OK status.
+
+        The status passed as argument will supersede any existing status code,
+        status message and trailing metadata.
+
+        This is an EXPERIMENTAL API.
+
+        Args:
+          status: A grpc.Status object. The status code in it must not be
+            StatusCode.OK.
+
+        Raises:
+          Exception: An exception is always raised to signal the abortion of
+            the RPC to the gRPC runtime.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def set_code(self, code):
+        """Sets the value to be used as status code upon RPC completion.
+
+        This method need not be called by method implementations if they wish
+        the gRPC runtime to determine the status code of the RPC.
+
+        Args:
+          code: A StatusCode object to be sent to the client.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def set_details(self, details):
+        """Sets the value to be used as detail string upon RPC completion.
+
+        This method need not be called by method implementations if they have
+        no details to transmit.
+
+        Args:
+          details: A UTF-8-encodable string to be sent to the client upon
+            termination of the RPC.
+        """
+        raise NotImplementedError()
+
+    def code(self):
+        """Accesses the value to be used as status code upon RPC completion.
+
+        This is an EXPERIMENTAL API.
+
+        Returns:
+          The StatusCode value for the RPC.
+        """
+        raise NotImplementedError()
+
+    def details(self):
+        """Accesses the value to be used as detail string upon RPC completion.
+
+        This is an EXPERIMENTAL API.
+
+        Returns:
+          The details string of the RPC.
+        """
+        raise NotImplementedError()
+
+    def disable_next_message_compression(self):
+        """Disables compression for the next response message.
+
+        This method will override any compression configuration set during
+        server creation or set on the call.
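# --- Editor's illustration (not part of the patch): a servicer method
# exercising the ServicerContext operations documented above. The request and
# HelloReply message types are hypothetical generated-code names, and
# `import grpc` is assumed at module scope.
def SayHello(self, request, context):
    if not request.name:
        # abort() raises, so execution never continues past this call.
        context.abort(grpc.StatusCode.INVALID_ARGUMENT, "name is required")
    context.set_trailing_metadata((("x-peer", context.peer()),))  # assumed key
    return HelloReply(message="Hello, {}!".format(request.name))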
+ """ + raise NotImplementedError() + + +##################### Service-Side Handler Interfaces ######################## + + +class RpcMethodHandler(abc.ABC): + """An implementation of a single RPC method. + + Attributes: + request_streaming: Whether the RPC supports exactly one request message + or any arbitrary number of request messages. + response_streaming: Whether the RPC supports exactly one response message + or any arbitrary number of response messages. + request_deserializer: A callable :term:`deserializer` that accepts a byte string and + returns an object suitable to be passed to this object's business + logic, or None to indicate that this object's business logic should be + passed the raw request bytes. + response_serializer: A callable :term:`serializer` that accepts an object produced + by this object's business logic and returns a byte string, or None to + indicate that the byte strings produced by this object's business logic + should be transmitted on the wire as they are. + unary_unary: This object's application-specific business logic as a + callable value that takes a request value and a ServicerContext object + and returns a response value. Only non-None if both request_streaming + and response_streaming are False. + unary_stream: This object's application-specific business logic as a + callable value that takes a request value and a ServicerContext object + and returns an iterator of response values. Only non-None if + request_streaming is False and response_streaming is True. + stream_unary: This object's application-specific business logic as a + callable value that takes an iterator of request values and a + ServicerContext object and returns a response value. Only non-None if + request_streaming is True and response_streaming is False. + stream_stream: This object's application-specific business logic as a + callable value that takes an iterator of request values and a + ServicerContext object and returns an iterator of response values. + Only non-None if request_streaming and response_streaming are both + True. + """ + + +class HandlerCallDetails(abc.ABC): + """Describes an RPC that has just arrived for service. + + Attributes: + method: The method name of the RPC. + invocation_metadata: The :term:`metadata` sent by the client. + """ + + +class GenericRpcHandler(abc.ABC): + """An implementation of arbitrarily many RPC methods.""" + + @abc.abstractmethod + def service(self, handler_call_details): + """Returns the handler for servicing the RPC. + + Args: + handler_call_details: A HandlerCallDetails describing the RPC. + + Returns: + An RpcMethodHandler with which the RPC may be serviced if the + implementation chooses to service this RPC, or None otherwise. + """ + raise NotImplementedError() + + +class ServiceRpcHandler(GenericRpcHandler, metaclass=abc.ABCMeta): + """An implementation of RPC methods belonging to a service. + + A service handles RPC methods with structured names of the form + '/Service.Name/Service.Method', where 'Service.Name' is the value + returned by service_name(), and 'Service.Method' is the method + name. A service can have multiple method names, but only a single + service name. + """ + + @abc.abstractmethod + def service_name(self): + """Returns this service's name. + + Returns: + The service name. 
+ """ + raise NotImplementedError() + + +#################### Service-Side Interceptor Interfaces ##################### + + +class ServerInterceptor(abc.ABC): + """Affords intercepting incoming RPCs on the service-side.""" + + @abc.abstractmethod + def intercept_service(self, continuation, handler_call_details): + """Intercepts incoming RPCs before handing them over to a handler. + + State can be passed from an interceptor to downstream interceptors + via contextvars. The first interceptor is called from an empty + contextvars.Context, and the same Context is used for downstream + interceptors and for the final handler call. Note that there are no + guarantees that interceptors and handlers will be called from the + same thread. + + Args: + continuation: A function that takes a HandlerCallDetails and + proceeds to invoke the next interceptor in the chain, if any, + or the RPC handler lookup logic, with the call details passed + as an argument, and returns an RpcMethodHandler instance if + the RPC is considered serviced, or None otherwise. + handler_call_details: A HandlerCallDetails describing the RPC. + + Returns: + An RpcMethodHandler with which the RPC may be serviced if the + interceptor chooses to service this RPC, or None otherwise. + """ + raise NotImplementedError() + + +############################# Server Interface ############################### + + +class Server(abc.ABC): + """Services RPCs.""" + + @abc.abstractmethod + def add_generic_rpc_handlers(self, generic_rpc_handlers): + """Registers GenericRpcHandlers with this Server. + + This method is only safe to call before the server is started. + + Args: + generic_rpc_handlers: An iterable of GenericRpcHandlers that will be + used to service RPCs. + """ + raise NotImplementedError() + + def add_registered_method_handlers(self, service_name, method_handlers): + """Registers GenericRpcHandlers with this Server. + + This method is only safe to call before the server is started. + + If the same method have both generic and registered handler, + registered handler will take precedence. + + Args: + service_name: The service name. + method_handlers: A dictionary that maps method names to corresponding + RpcMethodHandler. + """ + + @abc.abstractmethod + def add_insecure_port(self, address): + """Opens an insecure port for accepting RPCs. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. If the port is 0, + or not specified in the address, then gRPC runtime will choose a port. + + Returns: + An integer port on which server will accept RPC requests. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_secure_port(self, address, server_credentials): + """Opens a secure port for accepting RPCs. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. + if the port is 0, or not specified in the address, then gRPC + runtime will choose a port. + server_credentials: A ServerCredentials object. + + Returns: + An integer port on which server will accept RPC requests. + """ + raise NotImplementedError() + + @abc.abstractmethod + def start(self): + """Starts this Server. + + This method may only be called once. (i.e. it is not idempotent). + """ + raise NotImplementedError() + + @abc.abstractmethod + def stop(self, grace): + """Stops this Server. + + This method immediately stop service of new RPCs in all cases. 
+
+        If a grace period is specified, this method waits until all active
+        RPCs are finished or until the grace period is reached. RPCs that haven't
+        been terminated within the grace period are aborted.
+        If a grace period is not specified (by passing None for `grace`),
+        all existing RPCs are aborted immediately and this method
+        blocks until the last RPC handler terminates.
+
+        This method is idempotent and may be called at any time.
+        Passing a smaller grace value in a subsequent call will have
+        the effect of stopping the Server sooner (passing None will
+        have the effect of stopping the server immediately). Passing
+        a larger grace value in a subsequent call *will not* have the
+        effect of stopping the server later (i.e. the most restrictive
+        grace value is used).
+
+        Args:
+          grace: A duration of time in seconds or None.
+
+        Returns:
+          A threading.Event that will be set when this Server has completely
+          stopped, i.e. when running RPCs either complete or are aborted and
+          all handlers have terminated.
+        """
+        raise NotImplementedError()
+
+    def wait_for_termination(self, timeout=None):
+        """Blocks the current thread until the server stops.
+
+        This is an EXPERIMENTAL API.
+
+        The wait will not consume computational resources during blocking, and
+        it will block until one of the two following conditions is met:
+
+        1) The server is stopped or terminated;
+        2) A timeout occurs if timeout is not `None`.
+
+        The timeout argument works in the same way as `threading.Event.wait()`.
+        https://docs.python.org/3/library/threading.html#threading.Event.wait
+
+        Args:
+          timeout: A floating point number specifying a timeout for the
+            operation in seconds.
+
+        Returns:
+          A bool indicating whether the operation timed out.
+        """
+        raise NotImplementedError()
+
+
+################################# Functions ################################
+
+
+def unary_unary_rpc_method_handler(
+    behavior, request_deserializer=None, response_serializer=None
+):
+    """Creates an RpcMethodHandler for a unary-unary RPC method.
+
+    Args:
+      behavior: The implementation of an RPC that accepts one request
+        and returns one response.
+      request_deserializer: An optional :term:`deserializer` for request deserialization.
+      response_serializer: An optional :term:`serializer` for response serialization.
+
+    Returns:
+      An RpcMethodHandler object that is typically used by grpc.Server.
+    """
+    from grpc import _utilities  # pylint: disable=cyclic-import
+
+    return _utilities.RpcMethodHandler(
+        False,
+        False,
+        request_deserializer,
+        response_serializer,
+        behavior,
+        None,
+        None,
+        None,
+    )
+
+
+def unary_stream_rpc_method_handler(
+    behavior, request_deserializer=None, response_serializer=None
+):
+    """Creates an RpcMethodHandler for a unary-stream RPC method.
+
+    Args:
+      behavior: The implementation of an RPC that accepts one request
+        and returns an iterator of response values.
+      request_deserializer: An optional :term:`deserializer` for request deserialization.
+      response_serializer: An optional :term:`serializer` for response serialization.
+
+    Returns:
+      An RpcMethodHandler object that is typically used by grpc.Server.
+    """
+    from grpc import _utilities  # pylint: disable=cyclic-import
+
+    return _utilities.RpcMethodHandler(
+        False,
+        True,
+        request_deserializer,
+        response_serializer,
+        None,
+        behavior,
+        None,
+        None,
+    )
+
+
+def stream_unary_rpc_method_handler(
+    behavior, request_deserializer=None, response_serializer=None
+):
+    """Creates an RpcMethodHandler for a stream-unary RPC method.
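# --- Editor's illustration (not part of the patch): a typical graceful
# shutdown built from stop() and wait_for_termination() as documented above;
# the 30-second grace period is an arbitrary assumption.
def shutdown_gracefully(server, grace_seconds=30):
    stopped_event = server.stop(grace_seconds)  # rejects new RPCs immediately
    stopped_event.wait()                        # waits for in-flight RPCs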
+ + Args: + behavior: The implementation of an RPC that accepts an iterator of + request values and returns a single response value. + request_deserializer: An optional :term:`deserializer` for request deserialization. + response_serializer: An optional :term:`serializer` for response serialization. + + Returns: + An RpcMethodHandler object that is typically used by grpc.Server. + """ + from grpc import _utilities # pylint: disable=cyclic-import + + return _utilities.RpcMethodHandler( + True, + False, + request_deserializer, + response_serializer, + None, + None, + behavior, + None, + ) + + +def stream_stream_rpc_method_handler( + behavior, request_deserializer=None, response_serializer=None +): + """Creates an RpcMethodHandler for a stream-stream RPC method. + + Args: + behavior: The implementation of an RPC that accepts an iterator of + request values and returns an iterator of response values. + request_deserializer: An optional :term:`deserializer` for request deserialization. + response_serializer: An optional :term:`serializer` for response serialization. + + Returns: + An RpcMethodHandler object that is typically used by grpc.Server. + """ + from grpc import _utilities # pylint: disable=cyclic-import + + return _utilities.RpcMethodHandler( + True, + True, + request_deserializer, + response_serializer, + None, + None, + None, + behavior, + ) + + +def method_handlers_generic_handler(service, method_handlers): + """Creates a GenericRpcHandler from RpcMethodHandlers. + + Args: + service: The name of the service that is implemented by the + method_handlers. + method_handlers: A dictionary that maps method names to corresponding + RpcMethodHandler. + + Returns: + A GenericRpcHandler. This is typically added to the grpc.Server object + with add_generic_rpc_handlers() before starting the server. + """ + from grpc import _utilities # pylint: disable=cyclic-import + + return _utilities.DictionaryGenericHandler(service, method_handlers) + + +def ssl_channel_credentials( + root_certificates=None, private_key=None, certificate_chain=None +): + """Creates a ChannelCredentials for use with an SSL-enabled Channel. + + Args: + root_certificates: The PEM-encoded root certificates as a byte string, + or None to retrieve them from a default location chosen by gRPC + runtime. + private_key: The PEM-encoded private key as a byte string, or None if no + private key should be used. + certificate_chain: The PEM-encoded certificate chain as a byte string + to use or None if no certificate chain should be used. + + Returns: + A ChannelCredentials for use with an SSL-enabled Channel. + """ + return ChannelCredentials( + _cygrpc.SSLChannelCredentials( + root_certificates, private_key, certificate_chain + ) + ) + + +def xds_channel_credentials(fallback_credentials=None): + """Creates a ChannelCredentials for use with xDS. This is an EXPERIMENTAL + API. + + Args: + fallback_credentials: Credentials to use in case it is not possible to + establish a secure connection via xDS. If no fallback_credentials + argument is supplied, a default SSLChannelCredentials is used. + """ + fallback_credentials = ( + ssl_channel_credentials() + if fallback_credentials is None + else fallback_credentials + ) + return ChannelCredentials( + _cygrpc.XDSChannelCredentials(fallback_credentials._credentials) + ) + + +def metadata_call_credentials(metadata_plugin, name=None): + """Construct CallCredentials from an AuthMetadataPlugin. + + Args: + metadata_plugin: An AuthMetadataPlugin to use for authentication. 
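# --- Editor's illustration (not part of the patch): combining the handler
# factories above with method_handlers_generic_handler(). The service name
# and echo behavior are assumptions; serializers are omitted so raw bytes
# flow through.
import grpc

handler = grpc.method_handlers_generic_handler(
    "example.Echo",
    {"Echo": grpc.unary_unary_rpc_method_handler(lambda request, context: request)},
)
# Later, before start(): server.add_generic_rpc_handlers((handler,))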
+ name: An optional name for the plugin. + + Returns: + A CallCredentials. + """ + from grpc import _plugin_wrapping # pylint: disable=cyclic-import + + return _plugin_wrapping.metadata_plugin_call_credentials( + metadata_plugin, name + ) + + +def access_token_call_credentials(access_token): + """Construct CallCredentials from an access token. + + Args: + access_token: A string to place directly in the http request + authorization header, for example + "authorization: Bearer ". + + Returns: + A CallCredentials. + """ + from grpc import _auth # pylint: disable=cyclic-import + from grpc import _plugin_wrapping # pylint: disable=cyclic-import + + return _plugin_wrapping.metadata_plugin_call_credentials( + _auth.AccessTokenAuthMetadataPlugin(access_token), None + ) + + +def composite_call_credentials(*call_credentials): + """Compose multiple CallCredentials to make a new CallCredentials. + + Args: + *call_credentials: At least two CallCredentials objects. + + Returns: + A CallCredentials object composed of the given CallCredentials objects. + """ + return CallCredentials( + _cygrpc.CompositeCallCredentials( + tuple( + single_call_credentials._credentials + for single_call_credentials in call_credentials + ) + ) + ) + + +def composite_channel_credentials(channel_credentials, *call_credentials): + """Compose a ChannelCredentials and one or more CallCredentials objects. + + Args: + channel_credentials: A ChannelCredentials object. + *call_credentials: One or more CallCredentials objects. + + Returns: + A ChannelCredentials composed of the given ChannelCredentials and + CallCredentials objects. + """ + return ChannelCredentials( + _cygrpc.CompositeChannelCredentials( + tuple( + single_call_credentials._credentials + for single_call_credentials in call_credentials + ), + channel_credentials._credentials, + ) + ) + + +def ssl_server_credentials( + private_key_certificate_chain_pairs, + root_certificates=None, + require_client_auth=False, +): + """Creates a ServerCredentials for use with an SSL-enabled Server. + + Args: + private_key_certificate_chain_pairs: A list of pairs of the form + [PEM-encoded private key, PEM-encoded certificate chain]. + root_certificates: An optional byte string of PEM-encoded client root + certificates that the server will use to verify client authentication. + If omitted, require_client_auth must also be False. + require_client_auth: A boolean indicating whether or not to require + clients to be authenticated. May only be True if root_certificates + is not None. + + Returns: + A ServerCredentials for use with an SSL-enabled Server. Typically, this + object is an argument to add_secure_port() method during server setup. + """ + if not private_key_certificate_chain_pairs: + raise ValueError( + "At least one private key-certificate chain pair is required!" + ) + elif require_client_auth and root_certificates is None: + raise ValueError( + "Illegal to require client auth without providing root" + " certificates!" + ) + else: + return ServerCredentials( + _cygrpc.server_credentials_ssl( + root_certificates, + [ + _cygrpc.SslPemKeyCertPair(key, pem) + for key, pem in private_key_certificate_chain_pairs + ], + require_client_auth, + ) + ) + + +def xds_server_credentials(fallback_credentials): + """Creates a ServerCredentials for use with xDS. This is an EXPERIMENTAL + API. + + Args: + fallback_credentials: Credentials to use in case it is not possible to + establish a secure connection via xDS. No default value is provided. 
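# --- Editor's illustration (not part of the patch): composing channel-level
# and call-level credentials as documented above. The file path, token, and
# target are assumptions.
import grpc

with open("roots.pem", "rb") as f:
    channel_credentials = grpc.ssl_channel_credentials(root_certificates=f.read())
call_credentials = grpc.access_token_call_credentials("example-token")
composite = grpc.composite_channel_credentials(channel_credentials, call_credentials)
channel = grpc.secure_channel("greeter.example.com:443", composite)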
+ """ + return ServerCredentials( + _cygrpc.xds_server_credentials(fallback_credentials._credentials) + ) + + +def insecure_server_credentials(): + """Creates a credentials object directing the server to use no credentials. + This is an EXPERIMENTAL API. + + This object cannot be used directly in a call to `add_secure_port`. + Instead, it should be used to construct other credentials objects, e.g. + with xds_server_credentials. + """ + return ServerCredentials(_cygrpc.insecure_server_credentials()) + + +def ssl_server_certificate_configuration( + private_key_certificate_chain_pairs, root_certificates=None +): + """Creates a ServerCertificateConfiguration for use with a Server. + + Args: + private_key_certificate_chain_pairs: A collection of pairs of + the form [PEM-encoded private key, PEM-encoded certificate + chain]. + root_certificates: An optional byte string of PEM-encoded client root + certificates that the server will use to verify client authentication. + + Returns: + A ServerCertificateConfiguration that can be returned in the certificate + configuration fetching callback. + """ + if private_key_certificate_chain_pairs: + return ServerCertificateConfiguration( + _cygrpc.server_certificate_config_ssl( + root_certificates, + [ + _cygrpc.SslPemKeyCertPair(key, pem) + for key, pem in private_key_certificate_chain_pairs + ], + ) + ) + else: + raise ValueError( + "At least one private key-certificate chain pair is required!" + ) + + +def dynamic_ssl_server_credentials( + initial_certificate_configuration, + certificate_configuration_fetcher, + require_client_authentication=False, +): + """Creates a ServerCredentials for use with an SSL-enabled Server. + + Args: + initial_certificate_configuration (ServerCertificateConfiguration): The + certificate configuration with which the server will be initialized. + certificate_configuration_fetcher (callable): A callable that takes no + arguments and should return a ServerCertificateConfiguration to + replace the server's current certificate, or None for no change + (i.e., the server will continue its current certificate + config). The library will call this callback on *every* new + client connection before starting the TLS handshake with the + client, thus allowing the user application to optionally + return a new ServerCertificateConfiguration that the server will then + use for the handshake. + require_client_authentication: A boolean indicating whether or not to + require clients to be authenticated. + + Returns: + A ServerCredentials. + """ + return ServerCredentials( + _cygrpc.server_credentials_ssl_dynamic_cert_config( + initial_certificate_configuration, + certificate_configuration_fetcher, + require_client_authentication, + ) + ) + + +@enum.unique +class LocalConnectionType(enum.Enum): + """Types of local connection for local credential creation. + + Attributes: + UDS: Unix domain socket connections + LOCAL_TCP: Local TCP connections. + """ + + UDS = _cygrpc.LocalConnectionType.uds + LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp + + +def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): + """Creates a local ChannelCredentials used for local connections. + + This is an EXPERIMENTAL API. + + Local credentials are used by local TCP endpoints (e.g. localhost:10000) + also UDS connections. + + The connections created by local channel credentials are not + encrypted, but will be checked if they are local or not. 
+ The UDS connections are considered secure by providing peer authentication + and data confidentiality while TCP connections are considered insecure. + + It is allowed to transmit call credentials over connections created by + local channel credentials. + + Local channel credentials are useful for 1) eliminating insecure_channel usage; + 2) enable unit testing for call credentials without setting up secrets. + + Args: + local_connect_type: Local connection type (either + grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) + + Returns: + A ChannelCredentials for use with a local Channel + """ + return ChannelCredentials( + _cygrpc.channel_credentials_local(local_connect_type.value) + ) + + +def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): + """Creates a local ServerCredentials used for local connections. + + This is an EXPERIMENTAL API. + + Local credentials are used by local TCP endpoints (e.g. localhost:10000) + also UDS connections. + + The connections created by local server credentials are not + encrypted, but will be checked if they are local or not. + The UDS connections are considered secure by providing peer authentication + and data confidentiality while TCP connections are considered insecure. + + It is allowed to transmit call credentials over connections created by local + server credentials. + + Local server credentials are useful for 1) eliminating insecure_channel usage; + 2) enable unit testing for call credentials without setting up secrets. + + Args: + local_connect_type: Local connection type (either + grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) + + Returns: + A ServerCredentials for use with a local Server + """ + return ServerCredentials( + _cygrpc.server_credentials_local(local_connect_type.value) + ) + + +def alts_channel_credentials(service_accounts=None): + """Creates a ChannelCredentials for use with an ALTS-enabled Channel. + + This is an EXPERIMENTAL API. + ALTS credentials API can only be used in GCP environment as it relies on + handshaker service being available. For more info about ALTS see + https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security + + Args: + service_accounts: A list of server identities accepted by the client. + If target service accounts are provided and none of them matches the + peer identity of the server, handshake will fail. The arg can be empty + if the client does not have any information about trusted server + identity. + Returns: + A ChannelCredentials for use with an ALTS-enabled Channel + """ + return ChannelCredentials( + _cygrpc.channel_credentials_alts(service_accounts or []) + ) + + +def alts_server_credentials(): + """Creates a ServerCredentials for use with an ALTS-enabled connection. + + This is an EXPERIMENTAL API. + ALTS credentials API can only be used in GCP environment as it relies on + handshaker service being available. For more info about ALTS see + https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security + + Returns: + A ServerCredentials for use with an ALTS-enabled Server + """ + return ServerCredentials(_cygrpc.server_credentials_alts()) + + +def compute_engine_channel_credentials(call_credentials): + """Creates a compute engine channel credential. + + This credential can only be used in a GCP environment as it relies on + a handshaker service. 
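# --- Editor's illustration (not part of the patch): local credentials for
# tests, as documented above. The UDS socket path is an assumption.
import grpc

server_credentials = grpc.local_server_credentials(grpc.LocalConnectionType.UDS)
channel_credentials = grpc.local_channel_credentials(grpc.LocalConnectionType.UDS)
channel = grpc.secure_channel("unix:/tmp/example.sock", channel_credentials)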
For more info about ALTS, see + https://cloud.google.com/security/encryption-in-transit/application-layer-transport-security + + This channel credential is expected to be used as part of a composite + credential in conjunction with a call credentials that authenticates the + VM's default service account. If used with any other sort of call + credential, the connection may suddenly and unexpectedly begin failing RPCs. + """ + return ChannelCredentials( + _cygrpc.channel_credentials_compute_engine( + call_credentials._credentials + ) + ) + + +def channel_ready_future(channel): + """Creates a Future that tracks when a Channel is ready. + + Cancelling the Future does not affect the channel's state machine. + It merely decouples the Future from channel state machine. + + Args: + channel: A Channel object. + + Returns: + A Future object that matures when the channel connectivity is + ChannelConnectivity.READY. + """ + from grpc import _utilities # pylint: disable=cyclic-import + + return _utilities.channel_ready_future(channel) + + +def insecure_channel(target, options=None, compression=None): + """Creates an insecure Channel to a server. + + The returned Channel is thread-safe. + + Args: + target: The server address + options: An optional list of key-value pairs (:term:`channel_arguments` + in gRPC Core runtime) to configure the channel. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + + Returns: + A Channel. + """ + from grpc import _channel # pylint: disable=cyclic-import + + return _channel.Channel( + target, () if options is None else options, None, compression + ) + + +def secure_channel(target, credentials, options=None, compression=None): + """Creates a secure Channel to a server. + + The returned Channel is thread-safe. + + Args: + target: The server address. + credentials: A ChannelCredentials instance. + options: An optional list of key-value pairs (:term:`channel_arguments` + in gRPC Core runtime) to configure the channel. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + + Returns: + A Channel. + """ + from grpc import _channel # pylint: disable=cyclic-import + from grpc.experimental import _insecure_channel_credentials + + if credentials._credentials is _insecure_channel_credentials: + raise ValueError( + "secure_channel cannot be called with insecure credentials." + + " Call insecure_channel instead." + ) + return _channel.Channel( + target, + () if options is None else options, + credentials._credentials, + compression, + ) + + +def intercept_channel(channel, *interceptors): + """Intercepts a channel through a set of interceptors. + + Args: + channel: A Channel. + interceptors: Zero or more objects of type + UnaryUnaryClientInterceptor, + UnaryStreamClientInterceptor, + StreamUnaryClientInterceptor, or + StreamStreamClientInterceptor. + Interceptors are given control in the order they are listed. + + Returns: + A Channel that intercepts each invocation via the provided interceptors. + + Raises: + TypeError: If interceptor does not derive from any of + UnaryUnaryClientInterceptor, + UnaryStreamClientInterceptor, + StreamUnaryClientInterceptor, or + StreamStreamClientInterceptor. 
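# --- Editor's illustration (not part of the patch): blocking until the
# channel reaches READY before issuing RPCs, per channel_ready_future()
# above; the target address is an assumption.
import grpc

channel = grpc.insecure_channel("localhost:50051")
try:
    grpc.channel_ready_future(channel).result(timeout=10)
except grpc.FutureTimeoutError:
    print("channel did not become READY within 10 seconds")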
+ """ + from grpc import _interceptor # pylint: disable=cyclic-import + + return _interceptor.intercept_channel(channel, *interceptors) + + +def server( + thread_pool, + handlers=None, + interceptors=None, + options=None, + maximum_concurrent_rpcs=None, + compression=None, + xds=False, +): + """Creates a Server with which RPCs can be serviced. + + Args: + thread_pool: A futures.ThreadPoolExecutor to be used by the Server + to execute RPC handlers. + handlers: An optional list of GenericRpcHandlers used for executing RPCs. + More handlers may be added by calling add_generic_rpc_handlers any time + before the server is started. + interceptors: An optional list of ServerInterceptor objects that observe + and optionally manipulate the incoming RPCs before handing them over to + handlers. The interceptors are given control in the order they are + specified. This is an EXPERIMENTAL API. + options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime) + to configure the channel. + maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server + will service before returning RESOURCE_EXHAUSTED status, or None to + indicate no limit. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This compression algorithm will be used for the + lifetime of the server unless overridden. + xds: If set to true, retrieves server configuration via xDS. This is an + EXPERIMENTAL option. + + Returns: + A Server object. + """ + from grpc import _server # pylint: disable=cyclic-import + + return _server.create_server( + thread_pool, + () if handlers is None else handlers, + () if interceptors is None else interceptors, + () if options is None else options, + maximum_concurrent_rpcs, + compression, + xds, + ) + + +@contextlib.contextmanager +def _create_servicer_context(rpc_event, state, request_deserializer): + from grpc import _server # pylint: disable=cyclic-import + + context = _server._Context(rpc_event, state, request_deserializer) + yield context + context._finalize_state() # pylint: disable=protected-access + + +@enum.unique +class Compression(enum.IntEnum): + """Indicates the compression method to be used for an RPC. + + Attributes: + NoCompression: Do not use compression algorithm. + Deflate: Use "Deflate" compression algorithm. + Gzip: Use "Gzip" compression algorithm. 
+ """ + + NoCompression = _compression.NoCompression + Deflate = _compression.Deflate + Gzip = _compression.Gzip + + +################################### __all__ ################################# + +__all__ = ( + "FutureTimeoutError", + "FutureCancelledError", + "Future", + "ChannelConnectivity", + "StatusCode", + "Status", + "RpcError", + "RpcContext", + "Call", + "ChannelCredentials", + "CallCredentials", + "AuthMetadataContext", + "AuthMetadataPluginCallback", + "AuthMetadataPlugin", + "Compression", + "ClientCallDetails", + "ServerCertificateConfiguration", + "ServerCredentials", + "LocalConnectionType", + "UnaryUnaryMultiCallable", + "UnaryStreamMultiCallable", + "StreamUnaryMultiCallable", + "StreamStreamMultiCallable", + "UnaryUnaryClientInterceptor", + "UnaryStreamClientInterceptor", + "StreamUnaryClientInterceptor", + "StreamStreamClientInterceptor", + "Channel", + "ServicerContext", + "RpcMethodHandler", + "HandlerCallDetails", + "GenericRpcHandler", + "ServiceRpcHandler", + "Server", + "ServerInterceptor", + "unary_unary_rpc_method_handler", + "unary_stream_rpc_method_handler", + "stream_unary_rpc_method_handler", + "stream_stream_rpc_method_handler", + "method_handlers_generic_handler", + "ssl_channel_credentials", + "metadata_call_credentials", + "access_token_call_credentials", + "composite_call_credentials", + "composite_channel_credentials", + "compute_engine_channel_credentials", + "local_channel_credentials", + "local_server_credentials", + "alts_channel_credentials", + "alts_server_credentials", + "ssl_server_credentials", + "ssl_server_certificate_configuration", + "dynamic_ssl_server_credentials", + "channel_ready_future", + "insecure_channel", + "secure_channel", + "intercept_channel", + "server", + "protos", + "services", + "protos_and_services", + "xds_channel_credentials", + "xds_server_credentials", + "insecure_server_credentials", +) + +############################### Extension Shims ################################ + +# Here to maintain backwards compatibility; avoid using these in new code! +try: + import grpc_tools + + sys.modules.update({"grpc.tools": grpc_tools}) +except ImportError: + pass +try: + import grpc_health + + sys.modules.update({"grpc.health": grpc_health}) +except ImportError: + pass +try: + import grpc_reflection + + sys.modules.update({"grpc.reflection": grpc_reflection}) +except ImportError: + pass + +# Prevents import order issue in the case of renamed path. 
+if sys.version_info >= (3, 6) and __name__ == "grpc":
+    from grpc import aio  # pylint: disable=ungrouped-imports
+
+    sys.modules.update({"grpc.aio": aio})
diff --git a/venv/lib/python3.10/site-packages/grpc/_auth.py b/venv/lib/python3.10/site-packages/grpc/_auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cef38b69105e0f3d924d20e6c8c5d39907266e7
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/grpc/_auth.py
@@ -0,0 +1,80 @@
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""GRPCAuthMetadataPlugins for standard authentication.""" + +import inspect +from typing import Any, Optional + +import grpc + + +def _sign_request( + callback: grpc.AuthMetadataPluginCallback, + token: Optional[str], + error: Optional[Exception], +): + metadata = (("authorization", "Bearer {}".format(token)),) + callback(metadata, error) + + +class GoogleCallCredentials(grpc.AuthMetadataPlugin): + """Metadata wrapper for GoogleCredentials from the oauth2client library.""" + + _is_jwt: bool + _credentials: Any + + # TODO(xuanwn): Give credentials an actual type. + def __init__(self, credentials: Any): + self._credentials = credentials + # Hack to determine if these are JWT creds and we need to pass + # additional_claims when getting a token + self._is_jwt = ( + "additional_claims" + in inspect.getfullargspec(credentials.get_access_token).args + ) + + def __call__( + self, + context: grpc.AuthMetadataContext, + callback: grpc.AuthMetadataPluginCallback, + ): + try: + if self._is_jwt: + access_token = self._credentials.get_access_token( + additional_claims={ + "aud": context.service_url # pytype: disable=attribute-error + } + ).access_token + else: + access_token = self._credentials.get_access_token().access_token + except Exception as exception: # pylint: disable=broad-except + _sign_request(callback, None, exception) + else: + _sign_request(callback, access_token, None) + + +class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin): + """Metadata wrapper for raw access token credentials.""" + + _access_token: str + + def __init__(self, access_token: str): + self._access_token = access_token + + def __call__( + self, + context: grpc.AuthMetadataContext, + callback: grpc.AuthMetadataPluginCallback, + ): + _sign_request(callback, self._access_token, None) diff --git a/venv/lib/python3.10/site-packages/grpc/_channel.py b/venv/lib/python3.10/site-packages/grpc/_channel.py new file mode 100644 index 0000000000000000000000000000000000000000..66bc7fb2353d1afdb6b7987e52225abe94549d54 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_channel.py @@ -0,0 +1,2252 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Invocation-side implementation of gRPC Python.""" + +import copy +import functools +import logging +import os +import sys +import threading +import time +import types +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Sequence, + Set, + Tuple, + Union, +) + +import grpc # pytype: disable=pyi-error +from grpc import _common # pytype: disable=pyi-error +from grpc import _compression # pytype: disable=pyi-error +from grpc import _grpcio_metadata # pytype: disable=pyi-error +from grpc import _observability # pytype: disable=pyi-error +from grpc._cython import cygrpc +from grpc._typing import ChannelArgumentType +from grpc._typing import DeserializingFunction +from grpc._typing import IntegratedCallFactory +from grpc._typing import MetadataType +from grpc._typing import NullaryCallbackType +from grpc._typing import ResponseType +from grpc._typing import SerializingFunction +from grpc._typing import UserTag +import grpc.experimental # pytype: disable=pyi-error + +_LOGGER = logging.getLogger(__name__) + +_USER_AGENT = "grpc-python/{}".format(_grpcio_metadata.__version__) + +_EMPTY_FLAGS = 0 + +# NOTE(rbellevi): No guarantees are given about the maintenance of this +# environment variable. +_DEFAULT_SINGLE_THREADED_UNARY_STREAM = ( + os.getenv("GRPC_SINGLE_THREADED_UNARY_STREAM") is not None +) + +_UNARY_UNARY_INITIAL_DUE = ( + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.send_message, + cygrpc.OperationType.send_close_from_client, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.receive_status_on_client, +) +_UNARY_STREAM_INITIAL_DUE = ( + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.send_message, + cygrpc.OperationType.send_close_from_client, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_status_on_client, +) +_STREAM_UNARY_INITIAL_DUE = ( + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.receive_status_on_client, +) +_STREAM_STREAM_INITIAL_DUE = ( + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_status_on_client, +) + +_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = ( + "Exception calling channel subscription callback!" 
+) + +_OK_RENDEZVOUS_REPR_FORMAT = ( + '<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>' +) + +_NON_OK_RENDEZVOUS_REPR_FORMAT = ( + "<{} of RPC that terminated with:\n" + "\tstatus = {}\n" + '\tdetails = "{}"\n' + '\tdebug_error_string = "{}"\n' + ">" +) + + +def _deadline(timeout: Optional[float]) -> Optional[float]: + return None if timeout is None else time.time() + timeout + + +def _unknown_code_details( + unknown_cygrpc_code: Optional[grpc.StatusCode], details: Optional[str] +) -> str: + return 'Server sent unknown code {} and details "{}"'.format( + unknown_cygrpc_code, details + ) + + +class _RPCState(object): + condition: threading.Condition + due: Set[cygrpc.OperationType] + initial_metadata: Optional[MetadataType] + response: Any + trailing_metadata: Optional[MetadataType] + code: Optional[grpc.StatusCode] + details: Optional[str] + debug_error_string: Optional[str] + cancelled: bool + callbacks: List[NullaryCallbackType] + fork_epoch: Optional[int] + rpc_start_time: Optional[float] # In relative seconds + rpc_end_time: Optional[float] # In relative seconds + method: Optional[str] + target: Optional[str] + + def __init__( + self, + due: Sequence[cygrpc.OperationType], + initial_metadata: Optional[MetadataType], + trailing_metadata: Optional[MetadataType], + code: Optional[grpc.StatusCode], + details: Optional[str], + ): + # `condition` guards all members of _RPCState. `notify_all` is called on + # `condition` when the state of the RPC has changed. + self.condition = threading.Condition() + + # The cygrpc.OperationType objects representing events due from the RPC's + # completion queue. If an operation is in `due`, it is guaranteed that + # `operate()` has been called on a corresponding operation. But the + # converse is not true. That is, in the case of failed `operate()` + # calls, there may briefly be events in `due` that do not correspond to + # operations submitted to Core. + self.due = set(due) + self.initial_metadata = initial_metadata + self.response = None + self.trailing_metadata = trailing_metadata + self.code = code + self.details = details + self.debug_error_string = None + # The following three fields are used for observability. + # Updates to those fields do not trigger self.condition. + self.rpc_start_time = None + self.rpc_end_time = None + self.method = None + self.target = None + + # The semantics of grpc.Future.cancel and grpc.Future.cancelled are + # slightly wonky, so they have to be tracked separately from the rest of the + # result of the RPC. This field tracks whether cancellation was requested + # prior to termination of the RPC. 
+ self.cancelled = False + self.callbacks = [] + self.fork_epoch = cygrpc.get_fork_epoch() + + def reset_postfork_child(self): + self.condition = threading.Condition() + + +def _abort(state: _RPCState, code: grpc.StatusCode, details: str) -> None: + if state.code is None: + state.code = code + state.details = details + if state.initial_metadata is None: + state.initial_metadata = () + state.trailing_metadata = () + + +def _handle_event( + event: cygrpc.BaseEvent, + state: _RPCState, + response_deserializer: Optional[DeserializingFunction], +) -> List[NullaryCallbackType]: + callbacks = [] + for batch_operation in event.batch_operations: + operation_type = batch_operation.type() + state.due.remove(operation_type) + if operation_type == cygrpc.OperationType.receive_initial_metadata: + state.initial_metadata = batch_operation.initial_metadata() + elif operation_type == cygrpc.OperationType.receive_message: + serialized_response = batch_operation.message() + if serialized_response is not None: + response = _common.deserialize( + serialized_response, response_deserializer + ) + if response is None: + details = "Exception deserializing response!" + _abort(state, grpc.StatusCode.INTERNAL, details) + else: + state.response = response + elif operation_type == cygrpc.OperationType.receive_status_on_client: + state.trailing_metadata = batch_operation.trailing_metadata() + if state.code is None: + code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get( + batch_operation.code() + ) + if code is None: + state.code = grpc.StatusCode.UNKNOWN + state.details = _unknown_code_details( + code, batch_operation.details() + ) + else: + state.code = code + state.details = batch_operation.details() + state.debug_error_string = batch_operation.error_string() + state.rpc_end_time = time.perf_counter() + _observability.maybe_record_rpc_latency(state) + callbacks.extend(state.callbacks) + state.callbacks = None + return callbacks + + +def _event_handler( + state: _RPCState, response_deserializer: Optional[DeserializingFunction] +) -> UserTag: + def handle_event(event): + with state.condition: + callbacks = _handle_event(event, state, response_deserializer) + state.condition.notify_all() + done = not state.due + for callback in callbacks: + try: + callback() + except Exception as e: # pylint: disable=broad-except + # NOTE(rbellevi): We suppress but log errors here so as not to + # kill the channel spin thread. + logging.error( + "Exception in callback %s: %s", repr(callback.func), repr(e) + ) + return done and state.fork_epoch >= cygrpc.get_fork_epoch() + + return handle_event + + +# TODO(xuanwn): Create a base class for IntegratedCall and SegregatedCall. +# pylint: disable=too-many-statements +def _consume_request_iterator( + request_iterator: Iterator, + state: _RPCState, + call: Union[cygrpc.IntegratedCall, cygrpc.SegregatedCall], + request_serializer: SerializingFunction, + event_handler: Optional[UserTag], +) -> None: + """Consume a request supplied by the user.""" + + def consume_request_iterator(): # pylint: disable=too-many-branches + # Iterate over the request iterator until it is exhausted or an error + # condition is encountered. + while True: + return_from_user_request_generator_invoked = False + try: + # The thread may die in user-code. Do not block fork for this. 
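+                # enter_user_request_generator() marks this thread as running
+                # user code so that an in-progress fork() does not wait on it;
+                # the finally clause below restores the fork bookkeeping once
+                # next() returns or raises.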
+ cygrpc.enter_user_request_generator() + request = next(request_iterator) + except StopIteration: + break + except Exception: # pylint: disable=broad-except + cygrpc.return_from_user_request_generator() + return_from_user_request_generator_invoked = True + code = grpc.StatusCode.UNKNOWN + details = "Exception iterating requests!" + _LOGGER.exception(details) + call.cancel( + _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details + ) + _abort(state, code, details) + return + finally: + if not return_from_user_request_generator_invoked: + cygrpc.return_from_user_request_generator() + serialized_request = _common.serialize(request, request_serializer) + with state.condition: + if state.code is None and not state.cancelled: + if serialized_request is None: + code = grpc.StatusCode.INTERNAL + details = "Exception serializing request!" + call.cancel( + _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], + details, + ) + _abort(state, code, details) + return + else: + state.due.add(cygrpc.OperationType.send_message) + operations = ( + cygrpc.SendMessageOperation( + serialized_request, _EMPTY_FLAGS + ), + ) + operating = call.operate(operations, event_handler) + if not operating: + state.due.remove(cygrpc.OperationType.send_message) + return + + def _done(): + return ( + state.code is not None + or cygrpc.OperationType.send_message + not in state.due + ) + + _common.wait( + state.condition.wait, + _done, + spin_cb=functools.partial( + cygrpc.block_if_fork_in_progress, state + ), + ) + if state.code is not None: + return + else: + return + with state.condition: + if state.code is None: + state.due.add(cygrpc.OperationType.send_close_from_client) + operations = ( + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + ) + operating = call.operate(operations, event_handler) + if not operating: + state.due.remove( + cygrpc.OperationType.send_close_from_client + ) + + consumption_thread = cygrpc.ForkManagedThread( + target=consume_request_iterator + ) + consumption_thread.setDaemon(True) + consumption_thread.start() + + +def _rpc_state_string(class_name: str, rpc_state: _RPCState) -> str: + """Calculates error string for RPC.""" + with rpc_state.condition: + if rpc_state.code is None: + return "<{} object>".format(class_name) + elif rpc_state.code is grpc.StatusCode.OK: + return _OK_RENDEZVOUS_REPR_FORMAT.format( + class_name, rpc_state.code, rpc_state.details + ) + else: + return _NON_OK_RENDEZVOUS_REPR_FORMAT.format( + class_name, + rpc_state.code, + rpc_state.details, + rpc_state.debug_error_string, + ) + + +class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future): + """An RPC error not tied to the execution of a particular RPC. + + The RPC represented by the state object must not be in-progress or + cancelled. + + Attributes: + _state: An instance of _RPCState. 
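+
+    Because an instance is simultaneously a grpc.RpcError, a grpc.Call, and a
+    grpc.Future, the raised error can be interrogated for its status. A
+    typical pattern (illustrative only; the stub and method are hypothetical):
+
+        try:
+            response = stub.SomeUnaryMethod(request)
+        except grpc.RpcError as rpc_error:
+            print(rpc_error.code(), rpc_error.details())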
+ """ + + _state: _RPCState + + def __init__(self, state: _RPCState): + with state.condition: + self._state = _RPCState( + (), + copy.deepcopy(state.initial_metadata), + copy.deepcopy(state.trailing_metadata), + state.code, + copy.deepcopy(state.details), + ) + self._state.response = copy.copy(state.response) + self._state.debug_error_string = copy.copy(state.debug_error_string) + + def initial_metadata(self) -> Optional[MetadataType]: + return self._state.initial_metadata + + def trailing_metadata(self) -> Optional[MetadataType]: + return self._state.trailing_metadata + + def code(self) -> Optional[grpc.StatusCode]: + return self._state.code + + def details(self) -> Optional[str]: + return _common.decode(self._state.details) + + def debug_error_string(self) -> Optional[str]: + return _common.decode(self._state.debug_error_string) + + def _repr(self) -> str: + return _rpc_state_string(self.__class__.__name__, self._state) + + def __repr__(self) -> str: + return self._repr() + + def __str__(self) -> str: + return self._repr() + + def cancel(self) -> bool: + """See grpc.Future.cancel.""" + return False + + def cancelled(self) -> bool: + """See grpc.Future.cancelled.""" + return False + + def running(self) -> bool: + """See grpc.Future.running.""" + return False + + def done(self) -> bool: + """See grpc.Future.done.""" + return True + + def result( + self, timeout: Optional[float] = None + ) -> Any: # pylint: disable=unused-argument + """See grpc.Future.result.""" + raise self + + def exception( + self, timeout: Optional[float] = None # pylint: disable=unused-argument + ) -> Optional[Exception]: + """See grpc.Future.exception.""" + return self + + def traceback( + self, timeout: Optional[float] = None # pylint: disable=unused-argument + ) -> Optional[types.TracebackType]: + """See grpc.Future.traceback.""" + try: + raise self + except grpc.RpcError: + return sys.exc_info()[2] + + def add_done_callback( + self, + fn: Callable[[grpc.Future], None], + timeout: Optional[float] = None, # pylint: disable=unused-argument + ) -> None: + """See grpc.Future.add_done_callback.""" + fn(self) + + +class _Rendezvous(grpc.RpcError, grpc.RpcContext): + """An RPC iterator. + + Attributes: + _state: An instance of _RPCState. + _call: An instance of SegregatedCall or IntegratedCall. + In either case, the _call object is expected to have operate, cancel, + and next_event methods. + _response_deserializer: A callable taking bytes and return a Python + object. + _deadline: A float representing the deadline of the RPC in seconds. Or + possibly None, to represent an RPC with no deadline at all. 
+ """ + + _state: _RPCState + _call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall] + _response_deserializer: Optional[DeserializingFunction] + _deadline: Optional[float] + + def __init__( + self, + state: _RPCState, + call: Union[cygrpc.SegregatedCall, cygrpc.IntegratedCall], + response_deserializer: Optional[DeserializingFunction], + deadline: Optional[float], + ): + super(_Rendezvous, self).__init__() + self._state = state + self._call = call + self._response_deserializer = response_deserializer + self._deadline = deadline + + def is_active(self) -> bool: + """See grpc.RpcContext.is_active""" + with self._state.condition: + return self._state.code is None + + def time_remaining(self) -> Optional[float]: + """See grpc.RpcContext.time_remaining""" + with self._state.condition: + if self._deadline is None: + return None + else: + return max(self._deadline - time.time(), 0) + + def cancel(self) -> bool: + """See grpc.RpcContext.cancel""" + with self._state.condition: + if self._state.code is None: + code = grpc.StatusCode.CANCELLED + details = "Locally cancelled by application!" + self._call.cancel( + _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details + ) + self._state.cancelled = True + _abort(self._state, code, details) + self._state.condition.notify_all() + return True + else: + return False + + def add_callback(self, callback: NullaryCallbackType) -> bool: + """See grpc.RpcContext.add_callback""" + with self._state.condition: + if self._state.callbacks is None: + return False + else: + self._state.callbacks.append(callback) + return True + + def __iter__(self): + return self + + def next(self): + return self._next() + + def __next__(self): + return self._next() + + def _next(self): + raise NotImplementedError() + + def debug_error_string(self) -> Optional[str]: + raise NotImplementedError() + + def _repr(self) -> str: + return _rpc_state_string(self.__class__.__name__, self._state) + + def __repr__(self) -> str: + return self._repr() + + def __str__(self) -> str: + return self._repr() + + def __del__(self) -> None: + with self._state.condition: + if self._state.code is None: + self._state.code = grpc.StatusCode.CANCELLED + self._state.details = "Cancelled upon garbage collection!" + self._state.cancelled = True + self._call.cancel( + _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code], + self._state.details, + ) + self._state.condition.notify_all() + + +class _SingleThreadedRendezvous( + _Rendezvous, grpc.Call, grpc.Future +): # pylint: disable=too-many-ancestors + """An RPC iterator operating entirely on a single thread. + + The __next__ method of _SingleThreadedRendezvous does not depend on the + existence of any other thread, including the "channel spin thread". + However, this means that its interface is entirely synchronous. So this + class cannot completely fulfill the grpc.Future interface. The result, + exception, and traceback methods will never block and will instead raise + an exception if calling the method would result in blocking. + + This means that these methods are safe to call from add_done_callback + handlers. 
+ """ + + _state: _RPCState + + def _is_complete(self) -> bool: + return self._state.code is not None + + def cancelled(self) -> bool: + with self._state.condition: + return self._state.cancelled + + def running(self) -> bool: + with self._state.condition: + return self._state.code is None + + def done(self) -> bool: + with self._state.condition: + return self._state.code is not None + + def result(self, timeout: Optional[float] = None) -> Any: + """Returns the result of the computation or raises its exception. + + This method will never block. Instead, it will raise an exception + if calling this method would otherwise result in blocking. + + Since this method will never block, any `timeout` argument passed will + be ignored. + """ + del timeout + with self._state.condition: + if not self._is_complete(): + raise grpc.experimental.UsageError( + "_SingleThreadedRendezvous only supports result() when the" + " RPC is complete." + ) + if self._state.code is grpc.StatusCode.OK: + return self._state.response + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + raise self + + def exception(self, timeout: Optional[float] = None) -> Optional[Exception]: + """Return the exception raised by the computation. + + This method will never block. Instead, it will raise an exception + if calling this method would otherwise result in blocking. + + Since this method will never block, any `timeout` argument passed will + be ignored. + """ + del timeout + with self._state.condition: + if not self._is_complete(): + raise grpc.experimental.UsageError( + "_SingleThreadedRendezvous only supports exception() when" + " the RPC is complete." + ) + if self._state.code is grpc.StatusCode.OK: + return None + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + return self + + def traceback( + self, timeout: Optional[float] = None + ) -> Optional[types.TracebackType]: + """Access the traceback of the exception raised by the computation. + + This method will never block. Instead, it will raise an exception + if calling this method would otherwise result in blocking. + + Since this method will never block, any `timeout` argument passed will + be ignored. + """ + del timeout + with self._state.condition: + if not self._is_complete(): + raise grpc.experimental.UsageError( + "_SingleThreadedRendezvous only supports traceback() when" + " the RPC is complete." + ) + if self._state.code is grpc.StatusCode.OK: + return None + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + try: + raise self + except grpc.RpcError: + return sys.exc_info()[2] + + def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None: + with self._state.condition: + if self._state.code is None: + self._state.callbacks.append(functools.partial(fn, self)) + return + + fn(self) + + def initial_metadata(self) -> Optional[MetadataType]: + """See grpc.Call.initial_metadata""" + with self._state.condition: + # NOTE(gnossen): Based on our initial call batch, we are guaranteed + # to receive initial metadata before any messages. + while self._state.initial_metadata is None: + self._consume_next_event() + return self._state.initial_metadata + + def trailing_metadata(self) -> Optional[MetadataType]: + """See grpc.Call.trailing_metadata""" + with self._state.condition: + if self._state.trailing_metadata is None: + raise grpc.experimental.UsageError( + "Cannot get trailing metadata until RPC is completed." 
+ ) + return self._state.trailing_metadata + + def code(self) -> Optional[grpc.StatusCode]: + """See grpc.Call.code""" + with self._state.condition: + if self._state.code is None: + raise grpc.experimental.UsageError( + "Cannot get code until RPC is completed." + ) + return self._state.code + + def details(self) -> Optional[str]: + """See grpc.Call.details""" + with self._state.condition: + if self._state.details is None: + raise grpc.experimental.UsageError( + "Cannot get details until RPC is completed." + ) + return _common.decode(self._state.details) + + def _consume_next_event(self) -> Optional[cygrpc.BaseEvent]: + event = self._call.next_event() + with self._state.condition: + callbacks = _handle_event( + event, self._state, self._response_deserializer + ) + for callback in callbacks: + # NOTE(gnossen): We intentionally allow exceptions to bubble up + # to the user when running on a single thread. + callback() + return event + + def _next_response(self) -> Any: + while True: + self._consume_next_event() + with self._state.condition: + if self._state.response is not None: + response = self._state.response + self._state.response = None + return response + elif ( + cygrpc.OperationType.receive_message not in self._state.due + ): + if self._state.code is grpc.StatusCode.OK: + raise StopIteration() + elif self._state.code is not None: + raise self + + def _next(self) -> Any: + with self._state.condition: + if self._state.code is None: + # We tentatively add the operation as expected and remove + # it if the enqueue operation fails. This allows us to guarantee that + # if an event has been submitted to the core completion queue, + # it is in `due`. If we waited until after a successful + # enqueue operation then a signal could interrupt this + # thread between the enqueue operation and the addition of the + # operation to `due`. This would cause an exception on the + # channel spin thread when the operation completes and no + # corresponding operation would be present in state.due. + # Note that, since `condition` is held through this block, there is + # no data race on `due`. + self._state.due.add(cygrpc.OperationType.receive_message) + operating = self._call.operate( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None + ) + if not operating: + self._state.due.remove(cygrpc.OperationType.receive_message) + elif self._state.code is grpc.StatusCode.OK: + raise StopIteration() + else: + raise self + return self._next_response() + + def debug_error_string(self) -> Optional[str]: + with self._state.condition: + if self._state.debug_error_string is None: + raise grpc.experimental.UsageError( + "Cannot get debug error string until RPC is completed." + ) + return _common.decode(self._state.debug_error_string) + + +class _MultiThreadedRendezvous( + _Rendezvous, grpc.Call, grpc.Future +): # pylint: disable=too-many-ancestors + """An RPC iterator that depends on a channel spin thread. + + This iterator relies upon a per-channel thread running in the background, + dequeueing events from the completion queue, and notifying threads waiting + on the threading.Condition object in the _RPCState object. + + This extra thread allows _MultiThreadedRendezvous to fulfill the grpc.Future interface + and to mediate a bidirection streaming RPC. 
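+
+    The background delivery is what makes the full grpc.Future surface safe
+    to block on, e.g. (illustrative only; the stub and method are
+    hypothetical):
+
+        future = stub.SomeUnaryMethod.future(request)
+        future.add_done_callback(lambda f: print(f.code()))
+        response = future.result()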
+ """ + + _state: _RPCState + + def initial_metadata(self) -> Optional[MetadataType]: + """See grpc.Call.initial_metadata""" + with self._state.condition: + + def _done(): + return self._state.initial_metadata is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.initial_metadata + + def trailing_metadata(self) -> Optional[MetadataType]: + """See grpc.Call.trailing_metadata""" + with self._state.condition: + + def _done(): + return self._state.trailing_metadata is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.trailing_metadata + + def code(self) -> Optional[grpc.StatusCode]: + """See grpc.Call.code""" + with self._state.condition: + + def _done(): + return self._state.code is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.code + + def details(self) -> Optional[str]: + """See grpc.Call.details""" + with self._state.condition: + + def _done(): + return self._state.details is not None + + _common.wait(self._state.condition.wait, _done) + return _common.decode(self._state.details) + + def debug_error_string(self) -> Optional[str]: + with self._state.condition: + + def _done(): + return self._state.debug_error_string is not None + + _common.wait(self._state.condition.wait, _done) + return _common.decode(self._state.debug_error_string) + + def cancelled(self) -> bool: + with self._state.condition: + return self._state.cancelled + + def running(self) -> bool: + with self._state.condition: + return self._state.code is None + + def done(self) -> bool: + with self._state.condition: + return self._state.code is not None + + def _is_complete(self) -> bool: + return self._state.code is not None + + def result(self, timeout: Optional[float] = None) -> Any: + """Returns the result of the computation or raises its exception. + + See grpc.Future.result for the full API contract. + """ + with self._state.condition: + timed_out = _common.wait( + self._state.condition.wait, self._is_complete, timeout=timeout + ) + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: + return self._state.response + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + raise self + + def exception(self, timeout: Optional[float] = None) -> Optional[Exception]: + """Return the exception raised by the computation. + + See grpc.Future.exception for the full API contract. + """ + with self._state.condition: + timed_out = _common.wait( + self._state.condition.wait, self._is_complete, timeout=timeout + ) + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: + return None + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + return self + + def traceback( + self, timeout: Optional[float] = None + ) -> Optional[types.TracebackType]: + """Access the traceback of the exception raised by the computation. + + See grpc.future.traceback for the full API contract. 
+ """ + with self._state.condition: + timed_out = _common.wait( + self._state.condition.wait, self._is_complete, timeout=timeout + ) + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: + return None + elif self._state.cancelled: + raise grpc.FutureCancelledError() + else: + try: + raise self + except grpc.RpcError: + return sys.exc_info()[2] + + def add_done_callback(self, fn: Callable[[grpc.Future], None]) -> None: + with self._state.condition: + if self._state.code is None: + self._state.callbacks.append(functools.partial(fn, self)) + return + + fn(self) + + def _next(self) -> Any: + with self._state.condition: + if self._state.code is None: + event_handler = _event_handler( + self._state, self._response_deserializer + ) + self._state.due.add(cygrpc.OperationType.receive_message) + operating = self._call.operate( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), + event_handler, + ) + if not operating: + self._state.due.remove(cygrpc.OperationType.receive_message) + elif self._state.code is grpc.StatusCode.OK: + raise StopIteration() + else: + raise self + + def _response_ready(): + return self._state.response is not None or ( + cygrpc.OperationType.receive_message not in self._state.due + and self._state.code is not None + ) + + _common.wait(self._state.condition.wait, _response_ready) + if self._state.response is not None: + response = self._state.response + self._state.response = None + return response + elif cygrpc.OperationType.receive_message not in self._state.due: + if self._state.code is grpc.StatusCode.OK: + raise StopIteration() + elif self._state.code is not None: + raise self + + +def _start_unary_request( + request: Any, + timeout: Optional[float], + request_serializer: SerializingFunction, +) -> Tuple[Optional[float], Optional[bytes], Optional[grpc.RpcError]]: + deadline = _deadline(timeout) + serialized_request = _common.serialize(request, request_serializer) + if serialized_request is None: + state = _RPCState( + (), + (), + (), + grpc.StatusCode.INTERNAL, + "Exception serializing request!", + ) + error = _InactiveRpcError(state) + return deadline, None, error + else: + return deadline, serialized_request, None + + +def _end_unary_response_blocking( + state: _RPCState, + call: cygrpc.SegregatedCall, + with_call: bool, + deadline: Optional[float], +) -> Union[ResponseType, Tuple[ResponseType, grpc.Call]]: + if state.code is grpc.StatusCode.OK: + if with_call: + rendezvous = _MultiThreadedRendezvous(state, call, None, deadline) + return state.response, rendezvous + else: + return state.response + else: + raise _InactiveRpcError(state) # pytype: disable=not-instantiable + + +def _stream_unary_invocation_operations( + metadata: Optional[MetadataType], initial_metadata_flags: int +) -> Sequence[Sequence[cygrpc.Operation]]: + return ( + ( + cygrpc.SendInitialMetadataOperation( + metadata, initial_metadata_flags + ), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ), + (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), + ) + + +def _stream_unary_invocation_operations_and_tags( + metadata: Optional[MetadataType], initial_metadata_flags: int +) -> Sequence[Tuple[Sequence[cygrpc.Operation], Optional[UserTag]]]: + return tuple( + ( + operations, + None, + ) + for operations in _stream_unary_invocation_operations( + metadata, initial_metadata_flags + ) + ) + + +def _determine_deadline(user_deadline: Optional[float]) -> Optional[float]: + parent_deadline = 
cygrpc.get_deadline_from_context() + if parent_deadline is None and user_deadline is None: + return None + elif parent_deadline is not None and user_deadline is None: + return parent_deadline + elif user_deadline is not None and parent_deadline is None: + return user_deadline + else: + return min(parent_deadline, user_deadline) + + +class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): + _channel: cygrpc.Channel + _managed_call: IntegratedCallFactory + _method: bytes + _target: bytes + _request_serializer: Optional[SerializingFunction] + _response_deserializer: Optional[DeserializingFunction] + _context: Any + _registered_call_handle: Optional[int] + + __slots__ = [ + "_channel", + "_managed_call", + "_method", + "_target", + "_request_serializer", + "_response_deserializer", + "_context", + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.Channel, + managed_call: IntegratedCallFactory, + method: bytes, + target: bytes, + request_serializer: Optional[SerializingFunction], + response_deserializer: Optional[DeserializingFunction], + _registered_call_handle: Optional[int], + ): + self._channel = channel + self._managed_call = managed_call + self._method = method + self._target = target + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + self._registered_call_handle = _registered_call_handle + + def _prepare( + self, + request: Any, + timeout: Optional[float], + metadata: Optional[MetadataType], + wait_for_ready: Optional[bool], + compression: Optional[grpc.Compression], + ) -> Tuple[ + Optional[_RPCState], + Optional[Sequence[cygrpc.Operation]], + Optional[float], + Optional[grpc.RpcError], + ]: + deadline, serialized_request, rendezvous = _start_unary_request( + request, timeout, self._request_serializer + ) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + if serialized_request is None: + return None, None, None, rendezvous + else: + state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None) + operations = ( + cygrpc.SendInitialMetadataOperation( + augmented_metadata, initial_metadata_flags + ), + cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ) + return state, operations, deadline, None + + def _blocking( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[_RPCState, cygrpc.SegregatedCall]: + state, operations, deadline, rendezvous = self._prepare( + request, timeout, metadata, wait_for_ready, compression + ) + if state is None: + raise rendezvous # pylint: disable-msg=raising-bad-type + else: + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._channel.segregated_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + _determine_deadline(deadline), + metadata, + None if credentials is None else credentials._credentials, + ( + ( + operations, + None, + ), + 
), + self._context, + self._registered_call_handle, + ) + event = call.next_event() + _handle_event(event, state, self._response_deserializer) + return state, call + + def __call__( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + state, call = self._blocking( + request, timeout, metadata, credentials, wait_for_ready, compression + ) + return _end_unary_response_blocking(state, call, False, None) + + def with_call( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + state, call = self._blocking( + request, timeout, metadata, credentials, wait_for_ready, compression + ) + return _end_unary_response_blocking(state, call, True, None) + + def future( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _MultiThreadedRendezvous: + state, operations, deadline, rendezvous = self._prepare( + request, timeout, metadata, wait_for_ready, compression + ) + if state is None: + raise rendezvous # pylint: disable-msg=raising-bad-type + else: + event_handler = _event_handler(state, self._response_deserializer) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._managed_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + deadline, + metadata, + None if credentials is None else credentials._credentials, + (operations,), + event_handler, + self._context, + self._registered_call_handle, + ) + return _MultiThreadedRendezvous( + state, call, self._response_deserializer, deadline + ) + + +class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): + _channel: cygrpc.Channel + _method: bytes + _target: bytes + _request_serializer: Optional[SerializingFunction] + _response_deserializer: Optional[DeserializingFunction] + _context: Any + _registered_call_handle: Optional[int] + + __slots__ = [ + "_channel", + "_method", + "_target", + "_request_serializer", + "_response_deserializer", + "_context", + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.Channel, + method: bytes, + target: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + _registered_call_handle: Optional[int], + ): + self._channel = channel + self._method = method + self._target = target + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + self._registered_call_handle = _registered_call_handle + + def __call__( # pylint: disable=too-many-locals + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _SingleThreadedRendezvous: + deadline = _deadline(timeout) + serialized_request = _common.serialize( + 
request, self._request_serializer + ) + if serialized_request is None: + state = _RPCState( + (), + (), + (), + grpc.StatusCode.INTERNAL, + "Exception serializing request!", + ) + raise _InactiveRpcError(state) + + state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) + call_credentials = ( + None if credentials is None else credentials._credentials + ) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + operations = ( + ( + cygrpc.SendInitialMetadataOperation( + augmented_metadata, initial_metadata_flags + ), + cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + ), + (cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),), + (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), + ) + operations_and_tags = tuple((ops, None) for ops in operations) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._channel.segregated_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + _determine_deadline(deadline), + metadata, + call_credentials, + operations_and_tags, + self._context, + self._registered_call_handle, + ) + return _SingleThreadedRendezvous( + state, call, self._response_deserializer, deadline + ) + + +class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): + _channel: cygrpc.Channel + _managed_call: IntegratedCallFactory + _method: bytes + _target: bytes + _request_serializer: Optional[SerializingFunction] + _response_deserializer: Optional[DeserializingFunction] + _context: Any + _registered_call_handle: Optional[int] + + __slots__ = [ + "_channel", + "_managed_call", + "_method", + "_target", + "_request_serializer", + "_response_deserializer", + "_context", + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.Channel, + managed_call: IntegratedCallFactory, + method: bytes, + target: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + _registered_call_handle: Optional[int], + ): + self._channel = channel + self._managed_call = managed_call + self._method = method + self._target = target + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + self._registered_call_handle = _registered_call_handle + + def __call__( # pylint: disable=too-many-locals + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _MultiThreadedRendezvous: + deadline, serialized_request, rendezvous = _start_unary_request( + request, timeout, self._request_serializer + ) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + if serialized_request is None: + raise rendezvous # pylint: disable-msg=raising-bad-type + else: + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) + operations = ( + ( + cygrpc.SendInitialMetadataOperation( + augmented_metadata, initial_metadata_flags + ), + cygrpc.SendMessageOperation( + serialized_request, _EMPTY_FLAGS + ), + 
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ), + (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), + ) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._managed_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + _determine_deadline(deadline), + metadata, + None if credentials is None else credentials._credentials, + operations, + _event_handler(state, self._response_deserializer), + self._context, + self._registered_call_handle, + ) + return _MultiThreadedRendezvous( + state, call, self._response_deserializer, deadline + ) + + +class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): + _channel: cygrpc.Channel + _managed_call: IntegratedCallFactory + _method: bytes + _target: bytes + _request_serializer: Optional[SerializingFunction] + _response_deserializer: Optional[DeserializingFunction] + _context: Any + _registered_call_handle: Optional[int] + + __slots__ = [ + "_channel", + "_managed_call", + "_method", + "_target", + "_request_serializer", + "_response_deserializer", + "_context", + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.Channel, + managed_call: IntegratedCallFactory, + method: bytes, + target: bytes, + request_serializer: Optional[SerializingFunction], + response_deserializer: Optional[DeserializingFunction], + _registered_call_handle: Optional[int], + ): + self._channel = channel + self._managed_call = managed_call + self._method = method + self._target = target + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + self._registered_call_handle = _registered_call_handle + + def _blocking( + self, + request_iterator: Iterator, + timeout: Optional[float], + metadata: Optional[MetadataType], + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + compression: Optional[grpc.Compression], + ) -> Tuple[_RPCState, cygrpc.SegregatedCall]: + deadline = _deadline(timeout) + state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._channel.segregated_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + _determine_deadline(deadline), + augmented_metadata, + None if credentials is None else credentials._credentials, + _stream_unary_invocation_operations_and_tags( + augmented_metadata, initial_metadata_flags + ), + self._context, + self._registered_call_handle, + ) + _consume_request_iterator( + request_iterator, state, call, self._request_serializer, None + ) + while True: + event = call.next_event() + with state.condition: + _handle_event(event, state, self._response_deserializer) + state.condition.notify_all() + if not state.due: + break + return state, call + + def __call__( + self, + request_iterator: Iterator, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + 
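+        """See grpc.StreamUnaryMultiCallable.__call__."""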
state, call = self._blocking( + request_iterator, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + return _end_unary_response_blocking(state, call, False, None) + + def with_call( + self, + request_iterator: Iterator, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + state, call = self._blocking( + request_iterator, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + return _end_unary_response_blocking(state, call, True, None) + + def future( + self, + request_iterator: Iterator, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _MultiThreadedRendezvous: + deadline = _deadline(timeout) + state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) + event_handler = _event_handler(state, self._response_deserializer) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._managed_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + deadline, + augmented_metadata, + None if credentials is None else credentials._credentials, + _stream_unary_invocation_operations( + metadata, initial_metadata_flags + ), + event_handler, + self._context, + self._registered_call_handle, + ) + _consume_request_iterator( + request_iterator, + state, + call, + self._request_serializer, + event_handler, + ) + return _MultiThreadedRendezvous( + state, call, self._response_deserializer, deadline + ) + + +class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): + _channel: cygrpc.Channel + _managed_call: IntegratedCallFactory + _method: bytes + _target: bytes + _request_serializer: Optional[SerializingFunction] + _response_deserializer: Optional[DeserializingFunction] + _context: Any + _registered_call_handle: Optional[int] + + __slots__ = [ + "_channel", + "_managed_call", + "_method", + "_target", + "_request_serializer", + "_response_deserializer", + "_context", + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.Channel, + managed_call: IntegratedCallFactory, + method: bytes, + target: bytes, + request_serializer: Optional[SerializingFunction], + response_deserializer: Optional[DeserializingFunction], + _registered_call_handle: Optional[int], + ): + self._channel = channel + self._managed_call = managed_call + self._method = method + self._target = target + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + self._registered_call_handle = _registered_call_handle + + def __call__( + self, + request_iterator: Iterator, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _MultiThreadedRendezvous: + deadline = _deadline(timeout) + state = 
_RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None) + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready + ) + augmented_metadata = _compression.augment_metadata( + metadata, compression + ) + operations = ( + ( + cygrpc.SendInitialMetadataOperation( + augmented_metadata, initial_metadata_flags + ), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ), + (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), + ) + event_handler = _event_handler(state, self._response_deserializer) + state.rpc_start_time = time.perf_counter() + state.method = _common.decode(self._method) + state.target = _common.decode(self._target) + call = self._managed_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, + self._method, + None, + _determine_deadline(deadline), + augmented_metadata, + None if credentials is None else credentials._credentials, + operations, + event_handler, + self._context, + self._registered_call_handle, + ) + _consume_request_iterator( + request_iterator, + state, + call, + self._request_serializer, + event_handler, + ) + return _MultiThreadedRendezvous( + state, call, self._response_deserializer, deadline + ) + + +class _InitialMetadataFlags(int): + """Stores immutable initial metadata flags""" + + def __new__(cls, value: int = _EMPTY_FLAGS): + value &= cygrpc.InitialMetadataFlags.used_mask + return super(_InitialMetadataFlags, cls).__new__(cls, value) + + def with_wait_for_ready(self, wait_for_ready: Optional[bool]) -> int: + if wait_for_ready is not None: + if wait_for_ready: + return self.__class__( + self + | cygrpc.InitialMetadataFlags.wait_for_ready + | cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set + ) + elif not wait_for_ready: + return self.__class__( + self & ~cygrpc.InitialMetadataFlags.wait_for_ready + | cygrpc.InitialMetadataFlags.wait_for_ready_explicitly_set + ) + return self + + +class _ChannelCallState(object): + channel: cygrpc.Channel + managed_calls: int + threading: bool + + def __init__(self, channel: cygrpc.Channel): + self.lock = threading.Lock() + self.channel = channel + self.managed_calls = 0 + self.threading = False + + def reset_postfork_child(self) -> None: + self.managed_calls = 0 + + def __del__(self): + try: + self.channel.close( + cygrpc.StatusCode.cancelled, "Channel deallocated!" + ) + except (TypeError, AttributeError): + pass + + +def _run_channel_spin_thread(state: _ChannelCallState) -> None: + def channel_spin(): + while True: + cygrpc.block_if_fork_in_progress(state) + event = state.channel.next_call_event() + if event.completion_type == cygrpc.CompletionType.queue_timeout: + continue + call_completed = event.tag(event) + if call_completed: + with state.lock: + state.managed_calls -= 1 + if state.managed_calls == 0: + return + + channel_spin_thread = cygrpc.ForkManagedThread(target=channel_spin) + channel_spin_thread.setDaemon(True) + channel_spin_thread.start() + + +def _channel_managed_call_management(state: _ChannelCallState): + # pylint: disable=too-many-arguments + def create( + flags: int, + method: bytes, + host: Optional[str], + deadline: Optional[float], + metadata: Optional[MetadataType], + credentials: Optional[cygrpc.CallCredentials], + operations: Sequence[Sequence[cygrpc.Operation]], + event_handler: UserTag, + context: Any, + _registered_call_handle: Optional[int], + ) -> cygrpc.IntegratedCall: + """Creates a cygrpc.IntegratedCall. + + Args: + flags: An integer bitfield of call flags. + method: The RPC method. + host: A host string for the created call. 
+ deadline: A float to be the deadline of the created call or None if + the call is to have an infinite deadline. + metadata: The metadata for the call or None. + credentials: A cygrpc.CallCredentials or None. + operations: A sequence of sequences of cygrpc.Operations to be + started on the call. + event_handler: A behavior to call to handle the events resultant from + the operations on the call. + context: Context object for distributed tracing. + _registered_call_handle: An int representing the call handle of the + method, or None if the method is not registered. + Returns: + A cygrpc.IntegratedCall with which to conduct an RPC. + """ + operations_and_tags = tuple( + ( + operation, + event_handler, + ) + for operation in operations + ) + with state.lock: + call = state.channel.integrated_call( + flags, + method, + host, + deadline, + metadata, + credentials, + operations_and_tags, + context, + _registered_call_handle, + ) + if state.managed_calls == 0: + state.managed_calls = 1 + _run_channel_spin_thread(state) + else: + state.managed_calls += 1 + return call + + return create + + +class _ChannelConnectivityState(object): + lock: threading.RLock + channel: grpc.Channel + polling: bool + connectivity: grpc.ChannelConnectivity + try_to_connect: bool + # TODO(xuanwn): Refactor this: https://github.com/grpc/grpc/issues/31704 + callbacks_and_connectivities: List[ + Sequence[ + Union[ + Callable[[grpc.ChannelConnectivity], None], + Optional[grpc.ChannelConnectivity], + ] + ] + ] + delivering: bool + + def __init__(self, channel: grpc.Channel): + self.lock = threading.RLock() + self.channel = channel + self.polling = False + self.connectivity = None + self.try_to_connect = False + self.callbacks_and_connectivities = [] + self.delivering = False + + def reset_postfork_child(self) -> None: + self.polling = False + self.connectivity = None + self.try_to_connect = False + self.callbacks_and_connectivities = [] + self.delivering = False + + +def _deliveries( + state: _ChannelConnectivityState, +) -> List[Callable[[grpc.ChannelConnectivity], None]]: + callbacks_needing_update = [] + for callback_and_connectivity in state.callbacks_and_connectivities: + callback, callback_connectivity = callback_and_connectivity + if callback_connectivity is not state.connectivity: + callbacks_needing_update.append(callback) + callback_and_connectivity[1] = state.connectivity + return callbacks_needing_update + + +def _deliver( + state: _ChannelConnectivityState, + initial_connectivity: grpc.ChannelConnectivity, + initial_callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]], +) -> None: + connectivity = initial_connectivity + callbacks = initial_callbacks + while True: + for callback in callbacks: + cygrpc.block_if_fork_in_progress(state) + try: + callback(connectivity) + except Exception: # pylint: disable=broad-except + _LOGGER.exception( + _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE + ) + with state.lock: + callbacks = _deliveries(state) + if callbacks: + connectivity = state.connectivity + else: + state.delivering = False + return + + +def _spawn_delivery( + state: _ChannelConnectivityState, + callbacks: Sequence[Callable[[grpc.ChannelConnectivity], None]], +) -> None: + delivering_thread = cygrpc.ForkManagedThread( + target=_deliver, + args=( + state, + state.connectivity, + callbacks, + ), + ) + delivering_thread.setDaemon(True) + delivering_thread.start() + state.delivering = True + + +# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll. 
+def _poll_connectivity( + state: _ChannelConnectivityState, + channel: grpc.Channel, + initial_try_to_connect: bool, +) -> None: + try_to_connect = initial_try_to_connect + connectivity = channel.check_connectivity_state(try_to_connect) + with state.lock: + state.connectivity = ( + _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[ + connectivity + ] + ) + callbacks = tuple( + callback for callback, _ in state.callbacks_and_connectivities + ) + for callback_and_connectivity in state.callbacks_and_connectivities: + callback_and_connectivity[1] = state.connectivity + if callbacks: + _spawn_delivery(state, callbacks) + while True: + event = channel.watch_connectivity_state( + connectivity, time.time() + 0.2 + ) + cygrpc.block_if_fork_in_progress(state) + with state.lock: + if ( + not state.callbacks_and_connectivities + and not state.try_to_connect + ): + state.polling = False + state.connectivity = None + break + try_to_connect = state.try_to_connect + state.try_to_connect = False + if event.success or try_to_connect: + connectivity = channel.check_connectivity_state(try_to_connect) + with state.lock: + state.connectivity = ( + _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[ + connectivity + ] + ) + if not state.delivering: + callbacks = _deliveries(state) + if callbacks: + _spawn_delivery(state, callbacks) + + +def _subscribe( + state: _ChannelConnectivityState, + callback: Callable[[grpc.ChannelConnectivity], None], + try_to_connect: bool, +) -> None: + with state.lock: + if not state.callbacks_and_connectivities and not state.polling: + polling_thread = cygrpc.ForkManagedThread( + target=_poll_connectivity, + args=(state, state.channel, bool(try_to_connect)), + ) + polling_thread.setDaemon(True) + polling_thread.start() + state.polling = True + state.callbacks_and_connectivities.append([callback, None]) + elif not state.delivering and state.connectivity is not None: + _spawn_delivery(state, (callback,)) + state.try_to_connect |= bool(try_to_connect) + state.callbacks_and_connectivities.append( + [callback, state.connectivity] + ) + else: + state.try_to_connect |= bool(try_to_connect) + state.callbacks_and_connectivities.append([callback, None]) + + +def _unsubscribe( + state: _ChannelConnectivityState, + callback: Callable[[grpc.ChannelConnectivity], None], +) -> None: + with state.lock: + for index, (subscribed_callback, unused_connectivity) in enumerate( + state.callbacks_and_connectivities + ): + if callback == subscribed_callback: + state.callbacks_and_connectivities.pop(index) + break + + +def _augment_options( + base_options: Sequence[ChannelArgumentType], + compression: Optional[grpc.Compression], +) -> Sequence[ChannelArgumentType]: + compression_option = _compression.create_channel_option(compression) + return ( + tuple(base_options) + + compression_option + + ( + ( + cygrpc.ChannelArgKey.primary_user_agent_string, + _USER_AGENT, + ), + ) + ) + + +def _separate_channel_options( + options: Sequence[ChannelArgumentType], +) -> Tuple[Sequence[ChannelArgumentType], Sequence[ChannelArgumentType]]: + """Separates core channel options from Python channel options.""" + core_options = [] + python_options = [] + for pair in options: + if ( + pair[0] + == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream + ): + python_options.append(pair) + else: + core_options.append(pair) + return python_options, core_options + + +class Channel(grpc.Channel): + """A cygrpc.Channel-backed implementation of grpc.Channel.""" + + _single_threaded_unary_stream: bool + _channel: 
cygrpc.Channel + _call_state: _ChannelCallState + _connectivity_state: _ChannelConnectivityState + _target: str + _registered_call_handles: Dict[str, int] + + def __init__( + self, + target: str, + options: Sequence[ChannelArgumentType], + credentials: Optional[grpc.ChannelCredentials], + compression: Optional[grpc.Compression], + ): + """Constructor. + + Args: + target: The target to which to connect. + options: Configuration options for the channel. + credentials: A cygrpc.ChannelCredentials or None. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + """ + python_options, core_options = _separate_channel_options(options) + self._single_threaded_unary_stream = ( + _DEFAULT_SINGLE_THREADED_UNARY_STREAM + ) + self._process_python_options(python_options) + self._channel = cygrpc.Channel( + _common.encode(target), + _augment_options(core_options, compression), + credentials, + ) + self._target = target + self._call_state = _ChannelCallState(self._channel) + self._connectivity_state = _ChannelConnectivityState(self._channel) + cygrpc.fork_register_channel(self) + if cygrpc.g_gevent_activated: + cygrpc.gevent_increment_channel_count() + + def _get_registered_call_handle(self, method: str) -> int: + """ + Get the registered call handle for a method. + + This is a semi-private method. It is intended for use only by gRPC generated code. + + This method is not thread-safe. + + Args: + method: Required, the method name for the RPC. + + Returns: + The registered call handle pointer in the form of a Python Long. + """ + return self._channel.get_registered_call_handle(_common.encode(method)) + + def _process_python_options( + self, python_options: Sequence[ChannelArgumentType] + ) -> None: + """Sets channel attributes according to python-only channel options.""" + for pair in python_options: + if ( + pair[0] + == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream + ): + self._single_threaded_unary_stream = True + + def subscribe( + self, + callback: Callable[[grpc.ChannelConnectivity], None], + try_to_connect: Optional[bool] = None, + ) -> None: + _subscribe(self._connectivity_state, callback, try_to_connect) + + def unsubscribe( + self, callback: Callable[[grpc.ChannelConnectivity], None] + ) -> None: + _unsubscribe(self._connectivity_state, callback) + + # pylint: disable=arguments-differ + def unary_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> grpc.UnaryUnaryMultiCallable: + _registered_call_handle = None + if _registered_method: + _registered_call_handle = self._get_registered_call_handle(method) + return _UnaryUnaryMultiCallable( + self._channel, + _channel_managed_call_management(self._call_state), + _common.encode(method), + _common.encode(self._target), + request_serializer, + response_deserializer, + _registered_call_handle, + ) + + # pylint: disable=arguments-differ + def unary_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> grpc.UnaryStreamMultiCallable: + _registered_call_handle = None + if _registered_method: + _registered_call_handle = self._get_registered_call_handle(method) + # NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC + # on a single Python thread results in an 
appreciable speed-up. However, + # due to slight differences in capability, the multi-threaded variant + # remains the default. + if self._single_threaded_unary_stream: + return _SingleThreadedUnaryStreamMultiCallable( + self._channel, + _common.encode(method), + _common.encode(self._target), + request_serializer, + response_deserializer, + _registered_call_handle, + ) + else: + return _UnaryStreamMultiCallable( + self._channel, + _channel_managed_call_management(self._call_state), + _common.encode(method), + _common.encode(self._target), + request_serializer, + response_deserializer, + _registered_call_handle, + ) + + # pylint: disable=arguments-differ + def stream_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> grpc.StreamUnaryMultiCallable: + _registered_call_handle = None + if _registered_method: + _registered_call_handle = self._get_registered_call_handle(method) + return _StreamUnaryMultiCallable( + self._channel, + _channel_managed_call_management(self._call_state), + _common.encode(method), + _common.encode(self._target), + request_serializer, + response_deserializer, + _registered_call_handle, + ) + + # pylint: disable=arguments-differ + def stream_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> grpc.StreamStreamMultiCallable: + _registered_call_handle = None + if _registered_method: + _registered_call_handle = self._get_registered_call_handle(method) + return _StreamStreamMultiCallable( + self._channel, + _channel_managed_call_management(self._call_state), + _common.encode(method), + _common.encode(self._target), + request_serializer, + response_deserializer, + _registered_call_handle, + ) + + def _unsubscribe_all(self) -> None: + state = self._connectivity_state + if state: + with state.lock: + del state.callbacks_and_connectivities[:] + + def _close(self) -> None: + self._unsubscribe_all() + self._channel.close(cygrpc.StatusCode.cancelled, "Channel closed!") + cygrpc.fork_unregister_channel(self) + if cygrpc.g_gevent_activated: + cygrpc.gevent_decrement_channel_count() + + def _close_on_fork(self) -> None: + self._unsubscribe_all() + self._channel.close_on_fork( + cygrpc.StatusCode.cancelled, "Channel closed due to fork" + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._close() + return False + + def close(self) -> None: + self._close() + + def __del__(self): + # TODO(https://github.com/grpc/grpc/issues/12531): Several releases + # after 1.12 (1.16 or thereabouts?) add a "self._channel.close" call + # here (or more likely, call self._close() here). We don't do this today + # because many valid use cases today allow the channel to be deleted + # immediately after stubs are created. After a sufficient period of time + # has passed for all users to be trusted to freeze out to their channels + # for as long as they are in use and to close them after using them, + # then deletion of this grpc._channel.Channel instance can be made to + # effect closure of the underlying cygrpc.Channel instance. + try: + self._unsubscribe_all() + except: # pylint: disable=bare-except + # Exceptions in __del__ are ignored by Python anyway, but they can + # keep spamming logs. Just silence them. 
+ pass diff --git a/venv/lib/python3.10/site-packages/grpc/_common.py b/venv/lib/python3.10/site-packages/grpc/_common.py new file mode 100644 index 0000000000000000000000000000000000000000..475f0510cf84e1c3647d8b468fb18682e7767dab --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_common.py @@ -0,0 +1,183 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Shared implementation.""" + +import logging +import time +from typing import Any, AnyStr, Callable, Optional, Union + +import grpc +from grpc._cython import cygrpc +from grpc._typing import DeserializingFunction +from grpc._typing import SerializingFunction + +_LOGGER = logging.getLogger(__name__) + +CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = { + cygrpc.ConnectivityState.idle: grpc.ChannelConnectivity.IDLE, + cygrpc.ConnectivityState.connecting: grpc.ChannelConnectivity.CONNECTING, + cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY, + cygrpc.ConnectivityState.transient_failure: grpc.ChannelConnectivity.TRANSIENT_FAILURE, + cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN, +} + +CYGRPC_STATUS_CODE_TO_STATUS_CODE = { + cygrpc.StatusCode.ok: grpc.StatusCode.OK, + cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED, + cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN, + cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT, + cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED, + cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND, + cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS, + cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED, + cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED, + cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED, + cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION, + cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED, + cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE, + cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED, + cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL, + cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE, + cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS, +} +STATUS_CODE_TO_CYGRPC_STATUS_CODE = { + grpc_code: cygrpc_code + for cygrpc_code, grpc_code in CYGRPC_STATUS_CODE_TO_STATUS_CODE.items() +} + +MAXIMUM_WAIT_TIMEOUT = 0.1 + +_ERROR_MESSAGE_PORT_BINDING_FAILED = ( + "Failed to bind to address %s; set " + "GRPC_VERBOSITY=debug environment variable to see detailed error message." 
+) + + +def encode(s: AnyStr) -> bytes: + if isinstance(s, bytes): + return s + else: + return s.encode("utf8") + + +def decode(b: AnyStr) -> str: + if isinstance(b, bytes): + return b.decode("utf-8", "replace") + return b + + +def _transform( + message: Any, + transformer: Union[SerializingFunction, DeserializingFunction, None], + exception_message: str, +) -> Any: + if transformer is None: + return message + else: + try: + return transformer(message) + except Exception: # pylint: disable=broad-except + _LOGGER.exception(exception_message) + return None + + +def serialize(message: Any, serializer: Optional[SerializingFunction]) -> bytes: + return _transform(message, serializer, "Exception serializing message!") + + +def deserialize( + serialized_message: bytes, deserializer: Optional[DeserializingFunction] +) -> Any: + return _transform( + serialized_message, deserializer, "Exception deserializing message!" + ) + + +def fully_qualified_method(group: str, method: str) -> str: + return "/{}/{}".format(group, method) + + +def _wait_once( + wait_fn: Callable[..., bool], + timeout: float, + spin_cb: Optional[Callable[[], None]], +): + wait_fn(timeout=timeout) + if spin_cb is not None: + spin_cb() + + +def wait( + wait_fn: Callable[..., bool], + wait_complete_fn: Callable[[], bool], + timeout: Optional[float] = None, + spin_cb: Optional[Callable[[], None]] = None, +) -> bool: + """Blocks waiting for an event without blocking the thread indefinitely. + + See https://github.com/grpc/grpc/issues/19464 for full context. CPython's + `threading.Event.wait` and `threading.Condition.wait` methods, if invoked + without a timeout kwarg, may block the calling thread indefinitely. If the + call is made from the main thread, this means that signal handlers may not + run for an arbitrarily long period of time. + + This wrapper calls the supplied wait function with an arbitrarily short + timeout to ensure that no signal handler has to wait longer than + MAXIMUM_WAIT_TIMEOUT before executing. + + Args: + wait_fn: A callable accepting a single float-valued kwarg named + `timeout`. This function is expected to be one of `threading.Event.wait` + or `threading.Condition.wait`. + wait_complete_fn: A callable taking no arguments and returning a bool. + When this function returns true, it indicates that waiting should cease. + timeout: An optional float-valued number of seconds after which the wait + should cease. + spin_cb: An optional Callable taking no arguments and returning nothing. + This callback will be called on each iteration of the spin. This may be + used for, e.g., work related to forking. + + Returns: + True if a timeout was supplied and it was reached. False otherwise. + """ + if timeout is None: + while not wait_complete_fn(): + _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) + else: + end = time.time() + timeout + while not wait_complete_fn(): + remaining = min(end - time.time(), MAXIMUM_WAIT_TIMEOUT) + if remaining < 0: + return True + _wait_once(wait_fn, remaining, spin_cb) + return False + + +def validate_port_binding_result(address: str, port: int) -> int: + """Validates whether the port binding succeeded. + + If the port returned by Core is 0, the binding failed. However, in that + case, the Core API doesn't return a detailed failure reason. The best we + can do is to raise an exception to prevent further confusion. + + Args: + address: The address string to be bound. + port: An int returned by Core. + """ + if port == 0: + # The Core API doesn't return a failure message.
The best we can do + # is raising an exception to prevent further confusion. + raise RuntimeError(_ERROR_MESSAGE_PORT_BINDING_FAILED % address) + else: + return port diff --git a/venv/lib/python3.10/site-packages/grpc/_compression.py b/venv/lib/python3.10/site-packages/grpc/_compression.py new file mode 100644 index 0000000000000000000000000000000000000000..07fa6f8434f86db776fa01c8ba52e2085fe3df70 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_compression.py @@ -0,0 +1,71 @@ +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import Optional + +import grpc +from grpc._cython import cygrpc +from grpc._typing import MetadataType + +NoCompression = cygrpc.CompressionAlgorithm.none +Deflate = cygrpc.CompressionAlgorithm.deflate +Gzip = cygrpc.CompressionAlgorithm.gzip + +_METADATA_STRING_MAPPING = { + NoCompression: "identity", + Deflate: "deflate", + Gzip: "gzip", +} + + +def _compression_algorithm_to_metadata_value( + compression: grpc.Compression, +) -> str: + return _METADATA_STRING_MAPPING[compression] + + +def compression_algorithm_to_metadata(compression: grpc.Compression): + return ( + cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY, + _compression_algorithm_to_metadata_value(compression), + ) + + +def create_channel_option(compression: Optional[grpc.Compression]): + return ( + ((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, int(compression)),) + if compression + else () + ) + + +def augment_metadata( + metadata: Optional[MetadataType], compression: Optional[grpc.Compression] +): + if not metadata and not compression: + return None + base_metadata = tuple(metadata) if metadata else () + compression_metadata = ( + (compression_algorithm_to_metadata(compression),) if compression else () + ) + return base_metadata + compression_metadata + + +__all__ = ( + "NoCompression", + "Deflate", + "Gzip", +) diff --git a/venv/lib/python3.10/site-packages/grpc/_cython/__init__.py b/venv/lib/python3.10/site-packages/grpc/_cython/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_cython/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
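The `_InitialMetadataFlags.with_wait_for_ready` logic shown earlier in grpc/_channel.py encodes a tri-state option in two bits: one carrying the wait-for-ready value and one recording that the caller set it explicitly. A minimal standalone sketch of that bit manipulation, with illustrative bit positions standing in for the real cygrpc.InitialMetadataFlags constants:

from typing import Optional

# Illustrative bit positions; the real values come from
# cygrpc.InitialMetadataFlags (an assumption of this sketch).
WAIT_FOR_READY = 0x1
WAIT_FOR_READY_EXPLICITLY_SET = 0x2


def with_wait_for_ready(flags: int, wait_for_ready: Optional[bool]) -> int:
    # None leaves the flags untouched (the tri-state "unset" case).
    if wait_for_ready is None:
        return flags
    if wait_for_ready:
        # Set the value bit and record the explicit choice.
        return flags | WAIT_FOR_READY | WAIT_FOR_READY_EXPLICITLY_SET
    # Clear the value bit but still record the explicit choice.
    return (flags & ~WAIT_FOR_READY) | WAIT_FOR_READY_EXPLICITLY_SET


assert with_wait_for_ready(0x0, True) == 0x3
assert with_wait_for_ready(0x1, False) == 0x2
assert with_wait_for_ready(0x1, None) == 0x1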
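The `_subscribe`/`_poll_connectivity`/`_spawn_delivery` trio in grpc/_channel.py lazily starts a daemon polling thread for the first subscriber and fans connectivity changes out to callbacks from a separate delivery thread. From application code this machinery is reached through `Channel.subscribe`; a short usage sketch (the target address is illustrative):

import time

import grpc


def on_state_change(connectivity: grpc.ChannelConnectivity) -> None:
    # Invoked from the delivery thread spawned by _spawn_delivery.
    print("channel state:", connectivity)


channel = grpc.insecure_channel("localhost:50051")  # illustrative target
# try_to_connect=True sets state.try_to_connect, so the polling thread
# actively initiates a connection instead of only observing state.
channel.subscribe(on_state_change, try_to_connect=True)
time.sleep(1.0)  # let a few connectivity updates arrive
channel.unsubscribe(on_state_change)
channel.close()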
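`Channel.unary_unary` and its siblings return multicallables wired to `_channel_managed_call_management`, which starts the channel's event-draining spin thread on the first managed call. A hedged usage sketch against the public API; the method path and serializers here are illustrative, since generated stubs normally supply protobuf SerializeToString/FromString:

import grpc

channel = grpc.insecure_channel("localhost:50051")  # illustrative target
say_hello = channel.unary_unary(
    "/helloworld.Greeter/SayHello",  # hypothetical fully-qualified method
    request_serializer=lambda request: request,   # bytes-in/bytes-out
    response_deserializer=lambda raw: raw,        # identity for illustration
)
try:
    # with_call returns the deserialized response plus the grpc.Call.
    response, call = say_hello.with_call(b"payload", timeout=5.0)
    print(call.code(), response)
except grpc.RpcError as err:
    # Raised when the RPC terminates with a non-OK status (e.g. no server).
    print("RPC failed:", err.code())
channel.close()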
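grpc/_common.py's `wait` implements the spin-wait workaround described in its docstring: instead of blocking on `threading.Event.wait()` with no timeout, it re-invokes the wait function in MAXIMUM_WAIT_TIMEOUT (0.1s) slices so signal handlers on the main thread stay responsive. A minimal sketch of driving it, assuming the grpc package shown here is importable:

import threading
import time

from grpc import _common  # the module introduced above


done = threading.Event()


def worker():
    time.sleep(0.5)  # simulate work, then signal completion
    done.set()


threading.Thread(target=worker, daemon=True).start()
# wait() calls done.wait(timeout=...) in short slices until done.is_set()
# reports completion, so no single blocking call exceeds 0.1 seconds.
timed_out = _common.wait(done.wait, done.is_set, timeout=5.0)
print("timed out:", timed_out)  # False: the worker set the event in time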
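Per-call compression in grpc/_compression.py reduces to `augment_metadata` appending a single metadata entry naming the algorithm, or returning None when there is nothing to send. A behavior-equivalent sketch; the header-name literal is an assumption standing in for cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY:

from typing import Optional, Sequence, Tuple

# Assumption: the cygrpc metadata-key constant resolves to this header name;
# substitute the real constant where cygrpc is available.
_ALGORITHM_KEY = "grpc-internal-encoding-request"
# Mirrors _METADATA_STRING_MAPPING; 0 (NoCompression) is never emitted
# because the truthiness guard below filters it, as in the original.
_NAMES = {0: "identity", 1: "deflate", 2: "gzip"}


def augment_metadata_sketch(
    metadata: Optional[Sequence[Tuple[str, str]]],
    compression: Optional[int],
) -> Optional[Tuple[Tuple[str, str], ...]]:
    # Same contract as _compression.augment_metadata: returning None means
    # "send no metadata at all" rather than an empty tuple.
    if not metadata and not compression:
        return None
    base = tuple(metadata) if metadata else ()
    extra = ((_ALGORITHM_KEY, _NAMES[compression]),) if compression else ()
    return base + extra


print(augment_metadata_sketch((("k", "v"),), 2))
# -> (('k', 'v'), ('grpc-internal-encoding-request', 'gzip'))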
diff --git a/venv/lib/python3.10/site-packages/grpc/_cython/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/_cython/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a71d9713142d4205832fc3aed67956d9cdf604f1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/_cython/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem b/venv/lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem new file mode 100644 index 0000000000000000000000000000000000000000..e5c84faed8ee030dce5aec08c09d549376a33d08 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_cython/_credentials/roots.pem @@ -0,0 +1,4337 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG 
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ +bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 
Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua 
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og 
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN 
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 +# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 +# Label: "Security Communication Root CA" +# Serial: 0 +# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a +# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 +# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY +MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t +dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 +WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD +VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 +9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ +DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 +Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N +QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ +xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G +A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG +kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr +Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 +Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU +JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot +RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa 
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp +dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy 
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG +9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ 
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx +XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: 
ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT +AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) Főtanúsítvány O=NetLock Kft. OU=Tanúsítványkiadók (Certification Services) +# Label: "NetLock Arany (Class Gold) Főtanúsítvány" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post +# Label: "Hongkong Post Root CA 1" +# Serial: 1000 +# MD5 
Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca +# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 +# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 +-----BEGIN CERTIFICATE----- +MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx +FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg +Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG +A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr +b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ +jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn +PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh +ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 +nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h +q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED +MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC +mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 +7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB +oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs +EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO +fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi +AmvZWg== +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. 
+# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + 
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 6047274297262753887 +# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 +# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa +# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy +MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD +VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp +cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv +ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl +AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF +661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 +am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 +ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 +PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS +3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k +SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF +3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM +ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g +StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz +Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB +jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes +# Label: "EC-ACC" +# Serial: -23701579247955709139626555126524820479 +# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 +# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 +# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 +-----BEGIN CERTIFICATE----- +MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB +8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy +dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 +YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 +dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh +IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD +LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG +EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g +KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD +ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu +bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg +ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R +85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm +4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV +HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd +QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t 
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB +o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 +opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo +dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW +ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN +AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y +/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k +SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy +Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS +Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl +nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2011" +# Serial: 0 +# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 +# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d +# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 +-----BEGIN CERTIFICATE----- +MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix +RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p +YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw +NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK +EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl +cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz +dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ +fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns +bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD +75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP +FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV +HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp +5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu +b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA +A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p +6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 +TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 +dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys +Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI +l7WdmplNsDz4SgCbZN2fOUvRJ9e4 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE 
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 +jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP 
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: 
ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ +1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw +ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea +HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw +Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh +c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E +RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt +dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku +Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp +3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 +nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF +CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na +xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX +KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 +-----END CERTIFICATE----- + +# Issuer: CN=CA Disig Root R2 O=Disig a.s. +# Subject: CN=CA Disig Root R2 O=Disig a.s. 
+# Label: "CA Disig Root R2" +# Serial: 10572350602393338211 +# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 +# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 +# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 +-----BEGIN CERTIFICATE----- +MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV +BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu +MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy +MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx +EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw +ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe +NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH +PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I +x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe +QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR +yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO +QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 +H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ +QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD +i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs +nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 +rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud +DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI +hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM +tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf +GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb +lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka ++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal +TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i +nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 +gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr +G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os +zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x +L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL +-----END CERTIFICATE----- + +# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV +# Label: "ACCVRAIZ1" +# Serial: 6828503384748696800 +# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 +# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 +# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 +-----BEGIN CERTIFICATE----- +MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE +AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw +CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ +BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND +VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb +qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY +HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo +G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA +lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr +IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ +0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH +k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO +m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa +cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl +uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI +KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls +ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG +AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 +VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT +VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG +CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA +cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA +QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA +7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA +cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA +QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA +czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu +aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt +aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud +DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF +BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp +D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU +JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m +AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD +vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms +tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH +7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h +I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA +h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF +d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H +pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA +# Label: "TWCA Global Root CA" +# Serial: 3262 +# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 +# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 +# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx +EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT +VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm 
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx +0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. OU=E-Tugra Sertifikasyon Merkezi +# Subject: CN=E-Tugra Certification Authority O=E-Tuğra EBG Bilişim Teknolojileri ve Hizmetleri A.Ş. 
OU=E-Tugra Sertifikasyon Merkezi +# Label: "E-Tugra Certification Authority" +# Serial: 7667447206703254355 +# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 +# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 +# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV +BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC +aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV +BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 +Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz +MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ +BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp +em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN +ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY +B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH +D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF +Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo +q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D +k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH +fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut +dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM +ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 +zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn +rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX +U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 +Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 +XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF +Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR +HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY +GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c +77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 ++GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK +vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 +FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl +yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P +AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD +y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d +NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy 
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ +4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L +cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi +eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG +A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 +DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j +vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP +DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc +maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D +lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv +KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 1 G3" +# Serial: 687049649626669250736271037606554624078720034195 +# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab +# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 +# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 +MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM 
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV +wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe +rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 +68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh +4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp +UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o +abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc +3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G +KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt +hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO +Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt +zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD +ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC +MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 +cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN +qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 +YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv +b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 +8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k +NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj +ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp +q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt +nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2 G3" +# Serial: 390156079458959257446133169266079962026824725800 +# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 +# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 +# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 +MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf +qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW +n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym +c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ +O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 +o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j +IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq +IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz +8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh +vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l +7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG +cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD +ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 +AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC +roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga 
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n +lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE ++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV +csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd +dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg +KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM +HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 +WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3 G3" +# Serial: 268090761170461462463995952157327242137089239581 +# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 +# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d +# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL +BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc +BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 +MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM +aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR +/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu +FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR +U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c +ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR +FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k +A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw +eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl +sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp +VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q +A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- 
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg +RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO +Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO 
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ 
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT +Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 
4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 14367148294922964480859022125800977897474 +# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e +# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb +# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c +-----BEGIN CERTIFICATE----- +MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ +FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F +uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX +kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs +ewv4n4Q= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc +8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO 
+xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden +# Label: "Staat der Nederlanden EV Root CA" +# Serial: 10000013 +# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba +# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb +# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a +-----BEGIN CERTIFICATE----- +MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO +TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh +dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y +MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg +TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS +b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS +M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC +UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d +Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p +rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l +pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb +j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC +KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS +/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X +cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH +1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP +px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 +MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI +eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u +2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS +v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC +wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy +CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e +vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 +Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa +Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL +eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 +FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc +7uzXLg== +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp 
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN +MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 +MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 +ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy +RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS +bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF +/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R +3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw +EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy +9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V +GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ +2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV +WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD +W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN +AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj +t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV +DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 +TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G +lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW +mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df 
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 ++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ +tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA +GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv +8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G2" +# Serial: 1246989352 +# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 +# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 +# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 +-----BEGIN CERTIFICATE----- +MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 +cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs +IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz +dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy +NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu +dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt +dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 +aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK +AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T +RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN +cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW +wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 +U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 +jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN +BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ +jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ +Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v +1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R +nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH +VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - EC1" +# Serial: 51543124481930649114116133369 +# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc +# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 +# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 +-----BEGIN CERTIFICATE----- +MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG +A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 +d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu +dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq +RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy +MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD +VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 +L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g +Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD +ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi +A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt +ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH +Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O +BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC +R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX +hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G +-----END CERTIFICATE----- + +# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority +# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority +# Label: "CFCA EV ROOT" +# Serial: 407555286 +# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 +# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 +# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd +-----BEGIN CERTIFICATE----- +MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD +TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y +aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx +MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j +aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP +T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 +sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL +TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 +/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp +7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz +EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt +hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP +a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot +aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg +TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV +PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv +cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL +tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd +BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB +ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT +ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL +jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS +ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-1" +# Serial: 15752444095811006489 +# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 +# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a +# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y +IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB +pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h +IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG +A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU +cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid +RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V +seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme +9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV +EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW +hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ +DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD +ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I +/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf +ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ +yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts +L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN +zl/HHk484IkzlQsPpTLWPFp5LBk= +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor RootCert CA-2" +# Serial: 2711694510199101698 +# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 +# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 +# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 +-----BEGIN CERTIFICATE----- +MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig +Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk +MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg +Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD +VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy +dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ +QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq +1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp +2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK +DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape +az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF +3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 +oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM +g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 +mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh +8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd +BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U +nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw +DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX +dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ +MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL +/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX +CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa +ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW +2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 +N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 +Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB +As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp +5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu +1uwJ +-----END CERTIFICATE----- + +# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority +# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority +# Label: "TrustCor ECA-1" +# Serial: 9548242946988625984 +# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c +# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd +# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD +VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk +MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U +cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y +IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV +BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw +IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy +dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig +RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb +3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA +BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 +3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou +owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ +wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF +ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf +BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ +MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv +civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 +AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F +hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 +soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI +WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi +tJ/X5g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud +EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 146587175971765017618439757810265552097 +# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 +# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 +# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM +f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX +mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 +zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P +fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc +vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 +Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp +zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO +Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW +k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ +DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF +lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW +Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 +d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z +XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR +gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 +d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv +J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg +DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM ++SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy +F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 +SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws +E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 146587176055767053814479386953112547951 +# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b +# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d +# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH +MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM +QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy +MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl +cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv +CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg +GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu +XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd +re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
+PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 +mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K +8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj +x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR +nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 +kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok +twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp +8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT +vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT +z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA +pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb +pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB +R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R +RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk +0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC +5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF +izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn +yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 146587176140553309517047991083707763997 +# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 +# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 +# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 +-----BEGIN CERTIFICATE----- +MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout +736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A +DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk +fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA +njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 146587176229350439916519468929765261721 +# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 +# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb +# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd +-----BEGIN CERTIFICATE----- +MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu +hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l +xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
+DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 +CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx +sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy +YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO +ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby +RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: 
"Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa +LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. 
- for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV 
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- diff --git a/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py 
b/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19e569442374556c93e975e35f5c86461d198aba Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/_grpcio_metadata.py b/venv/lib/python3.10/site-packages/grpc/_grpcio_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..19f48ea92aea83761e1ac9ac5d1e08280375e3ae --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_grpcio_metadata.py @@ -0,0 +1 @@ +__version__ = """1.74.0""" \ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/grpc/_interceptor.py b/venv/lib/python3.10/site-packages/grpc/_interceptor.py new file mode 100644 index 0000000000000000000000000000000000000000..94abafebaa61db2fa2072d55b0b50f82d6f10385 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_interceptor.py @@ -0,0 +1,813 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Implementation of gRPC Python interceptors.""" + +import collections +import sys +import types +from typing import Any, Callable, Optional, Sequence, Tuple, Union + +import grpc + +from ._typing import DeserializingFunction +from ._typing import DoneCallbackType +from ._typing import MetadataType +from ._typing import RequestIterableType +from ._typing import SerializingFunction + + +class _ServicePipeline(object): + interceptors: Tuple[grpc.ServerInterceptor] + + def __init__(self, interceptors: Sequence[grpc.ServerInterceptor]): + self.interceptors = tuple(interceptors) + + def _continuation(self, thunk: Callable, index: int) -> Callable: + return lambda context: self._intercept_at(thunk, index, context) + + def _intercept_at( + self, thunk: Callable, index: int, context: grpc.HandlerCallDetails + ) -> grpc.RpcMethodHandler: + if index < len(self.interceptors): + interceptor = self.interceptors[index] + thunk = self._continuation(thunk, index + 1) + return interceptor.intercept_service(thunk, context) + else: + return thunk(context) + + def execute( + self, thunk: Callable, context: grpc.HandlerCallDetails + ) -> grpc.RpcMethodHandler: + return self._intercept_at(thunk, 0, context) + + +def service_pipeline( + interceptors: Optional[Sequence[grpc.ServerInterceptor]], +) -> Optional[_ServicePipeline]: + return _ServicePipeline(interceptors) if interceptors else None + + +class _ClientCallDetails( + collections.namedtuple( + "_ClientCallDetails", + ( + "method", + "timeout", + "metadata", + "credentials", + "wait_for_ready", + "compression", + ), + ), + grpc.ClientCallDetails, +): + pass + + +def _unwrap_client_call_details( + call_details: grpc.ClientCallDetails, + default_details: grpc.ClientCallDetails, +) -> Tuple[ + str, float, MetadataType, grpc.CallCredentials, bool, grpc.Compression +]: + try: + method = call_details.method # pytype: disable=attribute-error + except AttributeError: + method = default_details.method # pytype: disable=attribute-error + + try: + timeout = call_details.timeout # pytype: disable=attribute-error + except AttributeError: + timeout = default_details.timeout # pytype: disable=attribute-error + + try: + metadata = call_details.metadata # pytype: disable=attribute-error + except AttributeError: + metadata = default_details.metadata # pytype: disable=attribute-error + + try: + credentials = ( + call_details.credentials + ) # pytype: disable=attribute-error + except AttributeError: + credentials = ( + default_details.credentials + ) # pytype: disable=attribute-error + + try: + wait_for_ready = ( + call_details.wait_for_ready + ) # pytype: disable=attribute-error + except AttributeError: + wait_for_ready = ( + default_details.wait_for_ready + ) # pytype: disable=attribute-error + + try: + compression = ( + call_details.compression + ) # pytype: disable=attribute-error + except AttributeError: + compression = ( + default_details.compression + ) # pytype: disable=attribute-error + + return method, timeout, metadata, credentials, wait_for_ready, compression + + +class _FailureOutcome( + grpc.RpcError, grpc.Future, grpc.Call +): # pylint: disable=too-many-ancestors + _exception: Exception + _traceback: types.TracebackType + + def __init__(self, exception: Exception, traceback: types.TracebackType): + super(_FailureOutcome, self).__init__() + self._exception = exception + self._traceback = traceback + + def initial_metadata(self) -> Optional[MetadataType]: + return None + + def trailing_metadata(self) -> Optional[MetadataType]: + return None + + def code(self) -> 
Optional[grpc.StatusCode]: + return grpc.StatusCode.INTERNAL + + def details(self) -> Optional[str]: + return "Exception raised while intercepting the RPC" + + def cancel(self) -> bool: + return False + + def cancelled(self) -> bool: + return False + + def is_active(self) -> bool: + return False + + def time_remaining(self) -> Optional[float]: + return None + + def running(self) -> bool: + return False + + def done(self) -> bool: + return True + + def result(self, ignored_timeout: Optional[float] = None): + raise self._exception + + def exception( + self, ignored_timeout: Optional[float] = None + ) -> Optional[Exception]: + return self._exception + + def traceback( + self, ignored_timeout: Optional[float] = None + ) -> Optional[types.TracebackType]: + return self._traceback + + def add_callback(self, unused_callback) -> bool: + return False + + def add_done_callback(self, fn: DoneCallbackType) -> None: + fn(self) + + def __iter__(self): + return self + + def __next__(self): + raise self._exception + + def next(self): + return self.__next__() + + +class _UnaryOutcome(grpc.Call, grpc.Future): + _response: Any + _call: grpc.Call + + def __init__(self, response: Any, call: grpc.Call): + self._response = response + self._call = call + + def initial_metadata(self) -> Optional[MetadataType]: + return self._call.initial_metadata() + + def trailing_metadata(self) -> Optional[MetadataType]: + return self._call.trailing_metadata() + + def code(self) -> Optional[grpc.StatusCode]: + return self._call.code() + + def details(self) -> Optional[str]: + return self._call.details() + + def is_active(self) -> bool: + return self._call.is_active() + + def time_remaining(self) -> Optional[float]: + return self._call.time_remaining() + + def cancel(self) -> bool: + return self._call.cancel() + + def add_callback(self, callback) -> bool: + return self._call.add_callback(callback) + + def cancelled(self) -> bool: + return False + + def running(self) -> bool: + return False + + def done(self) -> bool: + return True + + def result(self, ignored_timeout: Optional[float] = None): + return self._response + + def exception(self, ignored_timeout: Optional[float] = None): + return None + + def traceback(self, ignored_timeout: Optional[float] = None): + return None + + def add_done_callback(self, fn: DoneCallbackType) -> None: + fn(self) + + +class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): + _thunk: Callable + _method: str + _interceptor: grpc.UnaryUnaryClientInterceptor + + def __init__( + self, + thunk: Callable, + method: str, + interceptor: grpc.UnaryUnaryClientInterceptor, + ): + self._thunk = thunk + self._method = method + self._interceptor = interceptor + + def __call__( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + response, ignored_call = self._with_call( + request, + timeout=timeout, + metadata=metadata, + credentials=credentials, + wait_for_ready=wait_for_ready, + compression=compression, + ) + return response + + def _with_call( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + 
credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + try: + response, call = self._thunk(new_method).with_call( + request, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + return _UnaryOutcome(response, call) + except grpc.RpcError as rpc_error: + return rpc_error + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + call = self._interceptor.intercept_unary_unary( + continuation, client_call_details, request + ) + return call.result(), call + + def with_call( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + return self._with_call( + request, + timeout=timeout, + metadata=metadata, + credentials=credentials, + wait_for_ready=wait_for_ready, + compression=compression, + ) + + def future( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + return self._thunk(new_method).future( + request, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + + try: + return self._interceptor.intercept_unary_unary( + continuation, client_call_details, request + ) + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + +class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): + _thunk: Callable + _method: str + _interceptor: grpc.UnaryStreamClientInterceptor + + def __init__( + self, + thunk: Callable, + method: str, + interceptor: grpc.UnaryStreamClientInterceptor, + ): + self._thunk = thunk + self._method = method + self._interceptor = interceptor + + def __call__( + self, + request: Any, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ): + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + return self._thunk(new_method)( + request, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + + try: + return 
self._interceptor.intercept_unary_stream( + continuation, client_call_details, request + ) + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + +class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): + _thunk: Callable + _method: str + _interceptor: grpc.StreamUnaryClientInterceptor + + def __init__( + self, + thunk: Callable, + method: str, + interceptor: grpc.StreamUnaryClientInterceptor, + ): + self._thunk = thunk + self._method = method + self._interceptor = interceptor + + def __call__( + self, + request_iterator: RequestIterableType, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + response, ignored_call = self._with_call( + request_iterator, + timeout=timeout, + metadata=metadata, + credentials=credentials, + wait_for_ready=wait_for_ready, + compression=compression, + ) + return response + + def _with_call( + self, + request_iterator: RequestIterableType, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request_iterator): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + try: + response, call = self._thunk(new_method).with_call( + request_iterator, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + return _UnaryOutcome(response, call) + except grpc.RpcError as rpc_error: + return rpc_error + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + call = self._interceptor.intercept_stream_unary( + continuation, client_call_details, request_iterator + ) + return call.result(), call + + def with_call( + self, + request_iterator: RequestIterableType, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Tuple[Any, grpc.Call]: + return self._with_call( + request_iterator, + timeout=timeout, + metadata=metadata, + credentials=credentials, + wait_for_ready=wait_for_ready, + compression=compression, + ) + + def future( + self, + request_iterator: RequestIterableType, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> Any: + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request_iterator): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + return self._thunk(new_method).future( + 
request_iterator, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + + try: + return self._interceptor.intercept_stream_unary( + continuation, client_call_details, request_iterator + ) + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + +class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): + _thunk: Callable + _method: str + _interceptor: grpc.StreamStreamClientInterceptor + + def __init__( + self, + thunk: Callable, + method: str, + interceptor: grpc.StreamStreamClientInterceptor, + ): + self._thunk = thunk + self._method = method + self._interceptor = interceptor + + def __call__( + self, + request_iterator: RequestIterableType, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ): + client_call_details = _ClientCallDetails( + self._method, + timeout, + metadata, + credentials, + wait_for_ready, + compression, + ) + + def continuation(new_details, request_iterator): + ( + new_method, + new_timeout, + new_metadata, + new_credentials, + new_wait_for_ready, + new_compression, + ) = _unwrap_client_call_details(new_details, client_call_details) + return self._thunk(new_method)( + request_iterator, + timeout=new_timeout, + metadata=new_metadata, + credentials=new_credentials, + wait_for_ready=new_wait_for_ready, + compression=new_compression, + ) + + try: + return self._interceptor.intercept_stream_stream( + continuation, client_call_details, request_iterator + ) + except Exception as exception: # pylint:disable=broad-except + return _FailureOutcome(exception, sys.exc_info()[2]) + + +class _Channel(grpc.Channel): + _channel: grpc.Channel + _interceptor: Union[ + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + ] + + def __init__( + self, + channel: grpc.Channel, + interceptor: Union[ + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + ], + ): + self._channel = channel + self._interceptor = interceptor + + def subscribe( + self, callback: Callable, try_to_connect: Optional[bool] = False + ): + self._channel.subscribe(callback, try_to_connect=try_to_connect) + + def unsubscribe(self, callback: Callable): + self._channel.unsubscribe(callback) + + # pylint: disable=arguments-differ + def unary_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> grpc.UnaryUnaryMultiCallable: + # pytype: disable=wrong-arg-count + thunk = lambda m: self._channel.unary_unary( + m, + request_serializer, + response_deserializer, + _registered_method, + ) + # pytype: enable=wrong-arg-count + if isinstance(self._interceptor, grpc.UnaryUnaryClientInterceptor): + return _UnaryUnaryMultiCallable(thunk, method, self._interceptor) + else: + return thunk(method) + + # pylint: disable=arguments-differ + def unary_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> 
grpc.UnaryStreamMultiCallable:
+        # pytype: disable=wrong-arg-count
+        thunk = lambda m: self._channel.unary_stream(
+            m,
+            request_serializer,
+            response_deserializer,
+            _registered_method,
+        )
+        # pytype: enable=wrong-arg-count
+        if isinstance(self._interceptor, grpc.UnaryStreamClientInterceptor):
+            return _UnaryStreamMultiCallable(thunk, method, self._interceptor)
+        else:
+            return thunk(method)
+
+    # pylint: disable=arguments-differ
+    def stream_unary(
+        self,
+        method: str,
+        request_serializer: Optional[SerializingFunction] = None,
+        response_deserializer: Optional[DeserializingFunction] = None,
+        _registered_method: Optional[bool] = False,
+    ) -> grpc.StreamUnaryMultiCallable:
+        # pytype: disable=wrong-arg-count
+        thunk = lambda m: self._channel.stream_unary(
+            m,
+            request_serializer,
+            response_deserializer,
+            _registered_method,
+        )
+        # pytype: enable=wrong-arg-count
+        if isinstance(self._interceptor, grpc.StreamUnaryClientInterceptor):
+            return _StreamUnaryMultiCallable(thunk, method, self._interceptor)
+        else:
+            return thunk(method)
+
+    # pylint: disable=arguments-differ
+    def stream_stream(
+        self,
+        method: str,
+        request_serializer: Optional[SerializingFunction] = None,
+        response_deserializer: Optional[DeserializingFunction] = None,
+        _registered_method: Optional[bool] = False,
+    ) -> grpc.StreamStreamMultiCallable:
+        # pytype: disable=wrong-arg-count
+        thunk = lambda m: self._channel.stream_stream(
+            m,
+            request_serializer,
+            response_deserializer,
+            _registered_method,
+        )
+        # pytype: enable=wrong-arg-count
+        if isinstance(self._interceptor, grpc.StreamStreamClientInterceptor):
+            return _StreamStreamMultiCallable(thunk, method, self._interceptor)
+        else:
+            return thunk(method)
+
+    def _close(self):
+        self._channel.close()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._close()
+        return False
+
+    def close(self):
+        self._channel.close()
+
+
+def intercept_channel(
+    channel: grpc.Channel,
+    *interceptors: Optional[
+        Sequence[
+            Union[
+                grpc.UnaryUnaryClientInterceptor,
+                grpc.UnaryStreamClientInterceptor,
+                grpc.StreamStreamClientInterceptor,
+                grpc.StreamUnaryClientInterceptor,
+            ]
+        ]
+    ],
+) -> grpc.Channel:
+    for interceptor in reversed(list(interceptors)):
+        if (
+            not isinstance(interceptor, grpc.UnaryUnaryClientInterceptor)
+            and not isinstance(interceptor, grpc.UnaryStreamClientInterceptor)
+            and not isinstance(interceptor, grpc.StreamUnaryClientInterceptor)
+            and not isinstance(interceptor, grpc.StreamStreamClientInterceptor)
+        ):
+            raise TypeError(
+                "interceptor must be "
+                "grpc.UnaryUnaryClientInterceptor or "
+                "grpc.UnaryStreamClientInterceptor or "
+                "grpc.StreamUnaryClientInterceptor or "
+                "grpc.StreamStreamClientInterceptor"
+            )
+        channel = _Channel(channel, interceptor)
+    return channel
diff --git a/venv/lib/python3.10/site-packages/grpc/_observability.py b/venv/lib/python3.10/site-packages/grpc/_observability.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4d474720c680002c7f72bd6d82cac31fdff0155
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/grpc/_observability.py
@@ -0,0 +1,310 @@
+# Copyright 2023 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import abc
+import contextlib
+import logging
+import threading
+from typing import (
+    Any,
+    Generator,
+    Generic,
+    List,
+    Optional,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
+from grpc._cython import cygrpc as _cygrpc
+from grpc._typing import ChannelArgumentType
+
+_LOGGER = logging.getLogger(__name__)
+
+_channel = Any  # _channel.py imports this module.
+ClientCallTracerCapsule = TypeVar("ClientCallTracerCapsule")
+ServerCallTracerFactoryCapsule = TypeVar("ServerCallTracerFactoryCapsule")
+
+_plugin_lock: threading.RLock = threading.RLock()
+_OBSERVABILITY_PLUGIN: Optional["ObservabilityPlugin"] = None
+_SERVICES_TO_EXCLUDE: List[bytes] = [
+    b"google.monitoring.v3.MetricService",
+    b"google.devtools.cloudtrace.v2.TraceService",
+]
+
+
+class ServerCallTracerFactory:
+    """An encapsulation of a ServerCallTracerFactory.
+
+    Instances of this class can be passed to a Channel as values for the
+    grpc.experimental.server_call_tracer_factory option.
+    """
+
+    def __init__(self, address):
+        self._address = address
+
+    def __int__(self):
+        return self._address
+
+
+class ObservabilityPlugin(
+    Generic[ClientCallTracerCapsule, ServerCallTracerFactoryCapsule],
+    metaclass=abc.ABCMeta,
+):
+    """Abstract base class for an observability plugin.
+
+    *This is a semi-private class that was intended for the exclusive use of
+    the gRPC team.*
+
+    The ClientCallTracerCapsule and ServerCallTracerFactoryCapsule created by
+    this plugin should be injected into gRPC core using observability_init at
+    the start of a program, before any channels/servers are built.
+
+    Any future methods added to this interface cannot have the
+    @abc.abstractmethod annotation.
+
+    Attributes:
+      _tracing_enabled: A bool indicating whether tracing is enabled.
+      _stats_enabled: A bool indicating whether stats (metrics) collection is
+        enabled.
+      _registered_methods: A set which stores the registered method names in
+        bytes.
+    """
+
+    _tracing_enabled: bool = False
+    _stats_enabled: bool = False
+
+    @abc.abstractmethod
+    def create_client_call_tracer(
+        self, method_name: bytes, target: bytes
+    ) -> ClientCallTracerCapsule:
+        """Creates a ClientCallTracerCapsule.
+
+        After registering the plugin, if tracing or stats is enabled, this
+        method will be called after a call is created; the ClientCallTracer
+        created by this method will be saved to the call context.
+
+        The ClientCallTracer is an object which implements the
+        `grpc_core::ClientCallTracer` interface and is wrapped in a PyCapsule
+        using `client_call_tracer` as the name.
+
+        Args:
+          method_name: The method name of the call in byte format.
+          target: The channel target of the call in byte format.
+
+        Returns:
+          A PyCapsule which stores a ClientCallTracer object.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def save_trace_context(
+        self, trace_id: str, span_id: str, is_sampled: bool
+    ) -> None:
+        """Saves the trace_id and span_id related to the current span.
+
+        After registering the plugin, if tracing is enabled, this method will
+        be called after the server has finished sending the response.
+
+        This method can be used to propagate census context.
+
+        Args:
+          trace_id: The identifier for the trace associated with the span as a
+            32-character hexadecimal encoded string,
+            e.g. 26ed0036f2eff2b7317bccce3e28d01f
+          span_id: The identifier for the span as a 16-character hexadecimal
+            encoded string, e.g. 113ec879e62583bc
+          is_sampled: A bool indicating whether the span is sampled.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def create_server_call_tracer_factory(
+        self,
+        *,
+        xds: bool = False,
+    ) -> Optional[ServerCallTracerFactoryCapsule]:
+        """Creates a ServerCallTracerFactoryCapsule.
+
+        This method will be called at server initialization time to create a
+        ServerCallTracerFactory, which will be registered to gRPC core.
+
+        The ServerCallTracerFactory is an object which implements the
+        `grpc_core::ServerCallTracerFactory` interface and is wrapped in a
+        PyCapsule using `server_call_tracer_factory` as the name.
+
+        Args:
+          xds: Whether the server is an xDS server.
+
+        Returns:
+          A PyCapsule which stores a ServerCallTracerFactory object, or None
+          if the plugin decides not to create a ServerCallTracerFactory.
+        """
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def record_rpc_latency(
+        self, method: str, target: str, rpc_latency: float, status_code: Any
+    ) -> None:
+        """Records the latency of the RPC.
+
+        After registering the plugin, if stats is enabled, this method will be
+        called at the end of each RPC.
+
+        Args:
+          method: The fully-qualified name of the RPC method being invoked.
+          target: The target name of the RPC method being invoked.
+          rpc_latency: The latency for the RPC in milliseconds (as passed by
+            maybe_record_rpc_latency below), equal to the time between when
+            the client invokes the RPC and when the client receives the
+            status.
+          status_code: An element of grpc.StatusCode in string format
+            representing the final status for the RPC.
+        """
+        raise NotImplementedError()
+
+    def set_tracing(self, enable: bool) -> None:
+        """Enable or disable tracing.
+
+        Args:
+          enable: A bool indicating whether tracing should be enabled.
+        """
+        self._tracing_enabled = enable
+
+    def set_stats(self, enable: bool) -> None:
+        """Enable or disable stats (metrics).
+
+        Args:
+          enable: A bool indicating whether stats should be enabled.
+        """
+        self._stats_enabled = enable
+
+    def save_registered_method(self, method_name: bytes) -> None:
+        """Saves the method name to the registered methods list.
+
+        When exporting metrics, method names for unregistered methods will be
+        replaced with 'other' by default.
+
+        Args:
+          method_name: The method name in bytes.
+        """
+        raise NotImplementedError()
+
+    @property
+    def tracing_enabled(self) -> bool:
+        return self._tracing_enabled
+
+    @property
+    def stats_enabled(self) -> bool:
+        return self._stats_enabled
+
+    @property
+    def observability_enabled(self) -> bool:
+        return self.tracing_enabled or self.stats_enabled
+
+
+@contextlib.contextmanager
+def get_plugin() -> Generator[Optional[ObservabilityPlugin], None, None]:
+    """Gets the ObservabilityPlugin in the _observability module.
+
+    Returns:
+      The ObservabilityPlugin currently registered with the _observability
+      module, or None if no plugin exists at the time of calling this method.
+    """
+    with _plugin_lock:
+        yield _OBSERVABILITY_PLUGIN
+
+
+def set_plugin(observability_plugin: Optional[ObservabilityPlugin]) -> None:
+    """Saves the ObservabilityPlugin to the _observability module.
+
+    Args:
+      observability_plugin: The ObservabilityPlugin to save.
+
+    Raises:
+      ValueError: If an ObservabilityPlugin was already registered at the
+        time of calling this method.
+    """
+    global _OBSERVABILITY_PLUGIN  # pylint: disable=global-statement
+    with _plugin_lock:
+        if observability_plugin and _OBSERVABILITY_PLUGIN:
+            raise ValueError("observability_plugin was already set!")
+        _OBSERVABILITY_PLUGIN = observability_plugin
+
+
+def observability_init(observability_plugin: ObservabilityPlugin) -> None:
+    """Initializes observability with the provided ObservabilityPlugin.
+
+    This method has to be called at the start of a program, before any
+    channels/servers are built.
+
+    Args:
+      observability_plugin: The ObservabilityPlugin to use.
+
+    Raises:
+      ValueError: If an ObservabilityPlugin was already registered at the
+        time of calling this method.
+    """
+    set_plugin(observability_plugin)
+
+
+def observability_deinit() -> None:
+    """Clears the observability context, including the ObservabilityPlugin
+    and the ServerCallTracerFactory.
+
+    This method has to be called after exiting the observability context so
+    that it is possible to re-initialize again.
+    """
+    set_plugin(None)
+    _cygrpc.clear_server_call_tracer_factory()
+
+
+def maybe_record_rpc_latency(state: "_channel._RPCState") -> None:
+    """Records the latency of the RPC, if the plugin is registered and stats
+    is enabled.
+
+    This method will be called at the end of each RPC.
+
+    Args:
+      state: A grpc._channel._RPCState object which contains the stats
+        related to the RPC.
+    """
+    # TODO(xuanwn): use channel args to exclude those metrics.
+    for exclude_prefix in _SERVICES_TO_EXCLUDE:
+        if exclude_prefix in state.method.encode("utf8"):
+            return
+    with get_plugin() as plugin:
+        if plugin and plugin.stats_enabled:
+            rpc_latency_s = state.rpc_end_time - state.rpc_start_time
+            rpc_latency_ms = rpc_latency_s * 1000
+            plugin.record_rpc_latency(
+                state.method, state.target, rpc_latency_ms, state.code
+            )
+
+
+def create_server_call_tracer_factory_option(
+    xds: bool,
+) -> Union[Tuple[ChannelArgumentType], Tuple[()]]:
+    with get_plugin() as plugin:
+        if plugin and plugin.stats_enabled:
+            server_call_tracer_factory_address = (
+                _cygrpc.get_server_call_tracer_factory_address(plugin, xds)
+            )
+            if server_call_tracer_factory_address:
+                return (
+                    (
+                        "grpc.experimental.server_call_tracer_factory",
+                        ServerCallTracerFactory(
+                            server_call_tracer_factory_address
+                        ),
+                    ),
+                )
+    return ()
diff --git a/venv/lib/python3.10/site-packages/grpc/_plugin_wrapping.py b/venv/lib/python3.10/site-packages/grpc/_plugin_wrapping.py
new file mode 100644
index 0000000000000000000000000000000000000000..79900ee1dae35c95c5257591288a5058397db487
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/grpc/_plugin_wrapping.py
@@ -0,0 +1,136 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +import collections +import logging +import threading +from typing import Callable, Optional, Type + +import grpc +from grpc import _common +from grpc._cython import cygrpc +from grpc._typing import MetadataType + +_LOGGER = logging.getLogger(__name__) + + +class _AuthMetadataContext( + collections.namedtuple( + "AuthMetadataContext", + ( + "service_url", + "method_name", + ), + ), + grpc.AuthMetadataContext, +): + pass + + +class _CallbackState(object): + def __init__(self): + self.lock = threading.Lock() + self.called = False + self.exception = None + + +class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback): + _state: _CallbackState + _callback: Callable + + def __init__(self, state: _CallbackState, callback: Callable): + self._state = state + self._callback = callback + + def __call__( + self, metadata: MetadataType, error: Optional[Type[BaseException]] + ): + with self._state.lock: + if self._state.exception is None: + if self._state.called: + raise RuntimeError( + "AuthMetadataPluginCallback invoked more than once!" + ) + else: + self._state.called = True + else: + raise RuntimeError( + 'AuthMetadataPluginCallback raised exception "{}"!'.format( + self._state.exception + ) + ) + if error is None: + self._callback(metadata, cygrpc.StatusCode.ok, None) + else: + self._callback( + None, cygrpc.StatusCode.internal, _common.encode(str(error)) + ) + + +class _Plugin(object): + _metadata_plugin: grpc.AuthMetadataPlugin + + def __init__(self, metadata_plugin: grpc.AuthMetadataPlugin): + self._metadata_plugin = metadata_plugin + self._stored_ctx = None + + try: + import contextvars # pylint: disable=wrong-import-position + + # The plugin may be invoked on a thread created by Core, which will not + # have the context propagated. This context is stored and installed in + # the thread invoking the plugin. + self._stored_ctx = contextvars.copy_context() + except ImportError: + # Support versions predating contextvars. + pass + + def __call__(self, service_url: str, method_name: str, callback: Callable): + context = _AuthMetadataContext( + _common.decode(service_url), _common.decode(method_name) + ) + callback_state = _CallbackState() + try: + self._metadata_plugin( + context, _AuthMetadataPluginCallback(callback_state, callback) + ) + except Exception as exception: # pylint: disable=broad-except + _LOGGER.exception( + 'AuthMetadataPluginCallback "%s" raised exception!', + self._metadata_plugin, + ) + with callback_state.lock: + callback_state.exception = exception + if callback_state.called: + return + callback( + None, cygrpc.StatusCode.internal, _common.encode(str(exception)) + ) + + +def metadata_plugin_call_credentials( + metadata_plugin: grpc.AuthMetadataPlugin, name: Optional[str] +) -> grpc.CallCredentials: + if name is None: + try: + effective_name = metadata_plugin.__name__ + except AttributeError: + effective_name = metadata_plugin.__class__.__name__ + else: + effective_name = name + return grpc.CallCredentials( + cygrpc.MetadataPluginCallCredentials( + _Plugin(metadata_plugin), _common.encode(effective_name) + ) + ) diff --git a/venv/lib/python3.10/site-packages/grpc/_runtime_protos.py b/venv/lib/python3.10/site-packages/grpc/_runtime_protos.py new file mode 100644 index 0000000000000000000000000000000000000000..d0195551dfac91311a36cf3d6fd39201413c9085 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_runtime_protos.py @@ -0,0 +1,165 @@ +# Copyright 2020 The gRPC authors. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import types
+from typing import Tuple, Union
+
+_REQUIRED_SYMBOLS = ("_protos", "_services", "_protos_and_services")
+_MINIMUM_VERSION = (3, 5, 0)
+
+_UNINSTALLED_TEMPLATE = (
+    "Install the grpcio-tools package (1.32.0+) to use the {} function."
+)
+_VERSION_ERROR_TEMPLATE = (
+    "The {} function is only available on Python 3.X interpreters."
+)
+
+
+def _has_runtime_proto_symbols(mod: types.ModuleType) -> bool:
+    return all(hasattr(mod, sym) for sym in _REQUIRED_SYMBOLS)
+
+
+def _is_grpc_tools_importable() -> bool:
+    try:
+        import grpc_tools  # pylint: disable=unused-import # pytype: disable=import-error
+
+        return True
+    except ImportError as e:
+        # NOTE: It's possible that we're encountering a transitive ImportError, so
+        # we check for that and re-raise if so.
+        if "grpc_tools" not in e.args[0]:
+            raise
+        return False
+
+
+def _call_with_lazy_import(
+    fn_name: str, protobuf_path: str
+) -> Union[types.ModuleType, Tuple[types.ModuleType, types.ModuleType]]:
+    """Calls one of the three functions, lazily importing grpc_tools.
+
+    Args:
+      fn_name: The name of the function to import from grpc_tools.protoc.
+      protobuf_path: The path to import.
+
+    Returns:
+      The appropriate module object.
+    """
+    if sys.version_info < _MINIMUM_VERSION:
+        raise NotImplementedError(_VERSION_ERROR_TEMPLATE.format(fn_name))
+    else:
+        if not _is_grpc_tools_importable():
+            raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
+        import grpc_tools.protoc  # pytype: disable=import-error
+
+        if _has_runtime_proto_symbols(grpc_tools.protoc):
+            fn = getattr(grpc_tools.protoc, "_" + fn_name)
+            return fn(protobuf_path)
+        else:
+            raise NotImplementedError(_UNINSTALLED_TEMPLATE.format(fn_name))
+
+
+def protos(protobuf_path):  # pylint: disable=unused-argument
+    """Returns a module generated by the indicated .proto file.
+
+    THIS IS AN EXPERIMENTAL API.
+
+    Use this function to retrieve classes corresponding to message
+    definitions in the .proto file.
+
+    To inspect the contents of the returned module, use the dir function.
+    For example:
+
+    ```
+    protos = grpc.protos("foo.proto")
+    print(dir(protos))
+    ```
+
+    The returned module object corresponds to the _pb2.py file generated
+    by protoc. The path is expected to be relative to an entry on sys.path
+    and all transitive dependencies of the file should also be resolvable
+    from an entry on sys.path.
+
+    To completely disable the machinery behind this function, set the
+    GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true".
+
+    Args:
+      protobuf_path: The path to the .proto file on the filesystem. This path
+        must be resolvable from an entry on sys.path and so must all of its
+        transitive dependencies.
+
+    Returns:
+      A module object corresponding to the message code for the indicated
+      .proto file. Equivalent to a generated _pb2.py file.
+ """ + return _call_with_lazy_import("protos", protobuf_path) + + +def services(protobuf_path): # pylint: disable=unused-argument + """Returns a module generated by the indicated .proto file. + + THIS IS AN EXPERIMENTAL API. + + Use this function to retrieve classes and functions corresponding to + service definitions in the .proto file, including both stub and servicer + definitions. + + To inspect the contents of the returned module, use the dir function. + For example: + + ``` + services = grpc.services("foo.proto") + print(dir(services)) + ``` + + The returned module object corresponds to the _pb2_grpc.py file generated + by protoc. The path is expected to be relative to an entry on sys.path + and all transitive dependencies of the file should also be resolvable + from an entry on sys.path. + + To completely disable the machinery behind this function, set the + GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true". + + Args: + protobuf_path: The path to the .proto file on the filesystem. This path + must be resolvable from an entry on sys.path and so must all of its + transitive dependencies. + + Returns: + A module object corresponding to the stub/service code for the indicated + .proto file. Equivalent to a generated _pb2_grpc.py file. + """ + return _call_with_lazy_import("services", protobuf_path) + + +def protos_and_services(protobuf_path): # pylint: disable=unused-argument + """Returns a 2-tuple of modules corresponding to protos and services. + + THIS IS AN EXPERIMENTAL API. + + The return value of this function is equivalent to a call to protos and a + call to services. + + To completely disable the machinery behind this function, set the + GRPC_PYTHON_DISABLE_DYNAMIC_STUBS environment variable to "true". + + Args: + protobuf_path: The path to the .proto file on the filesystem. This path + must be resolvable from an entry on sys.path and so must all of its + transitive dependencies. + + Returns: + A 2-tuple of module objects corresponding to (protos(path), services(path)). + """ + return _call_with_lazy_import("protos_and_services", protobuf_path) diff --git a/venv/lib/python3.10/site-packages/grpc/_server.py b/venv/lib/python3.10/site-packages/grpc/_server.py new file mode 100644 index 0000000000000000000000000000000000000000..4080201dd0b00df3005741076b12abd697e07253 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_server.py @@ -0,0 +1,1528 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Service-side implementation of gRPC Python.""" + +from __future__ import annotations + +import abc +import collections +from concurrent import futures +import contextvars +import enum +import logging +import threading +import time +import traceback +from typing import ( + Any, + Callable, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Sequence, + Set, + Tuple, + Union, +) + +import grpc # pytype: disable=pyi-error +from grpc import _common # pytype: disable=pyi-error +from grpc import _compression # pytype: disable=pyi-error +from grpc import _interceptor # pytype: disable=pyi-error +from grpc import _observability # pytype: disable=pyi-error +from grpc._cython import cygrpc +from grpc._typing import ArityAgnosticMethodHandler +from grpc._typing import ChannelArgumentType +from grpc._typing import DeserializingFunction +from grpc._typing import MetadataType +from grpc._typing import NullaryCallbackType +from grpc._typing import ResponseType +from grpc._typing import SerializingFunction +from grpc._typing import ServerCallbackTag +from grpc._typing import ServerTagCallbackType + +_LOGGER = logging.getLogger(__name__) + +_SHUTDOWN_TAG = "shutdown" +_REQUEST_CALL_TAG = "request_call" + +_RECEIVE_CLOSE_ON_SERVER_TOKEN = "receive_close_on_server" +_SEND_INITIAL_METADATA_TOKEN = "send_initial_metadata" +_RECEIVE_MESSAGE_TOKEN = "receive_message" +_SEND_MESSAGE_TOKEN = "send_message" +_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = ( + "send_initial_metadata * send_message" +) +_SEND_STATUS_FROM_SERVER_TOKEN = "send_status_from_server" +_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = ( + "send_initial_metadata * send_status_from_server" +) + +_OPEN = "open" +_CLOSED = "closed" +_CANCELLED = "cancelled" + +_EMPTY_FLAGS = 0 + +_DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0 +_INF_TIMEOUT = 1e9 + + +def _serialized_request(request_event: cygrpc.BaseEvent) -> bytes: + return request_event.batch_operations[0].message() + + +def _application_code(code: grpc.StatusCode) -> cygrpc.StatusCode: + cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code) + return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code + + +def _completion_code(state: _RPCState) -> cygrpc.StatusCode: + if state.code is None: + return cygrpc.StatusCode.ok + else: + return _application_code(state.code) + + +def _abortion_code( + state: _RPCState, code: cygrpc.StatusCode +) -> cygrpc.StatusCode: + if state.code is None: + return code + else: + return _application_code(state.code) + + +def _details(state: _RPCState) -> bytes: + return b"" if state.details is None else state.details + + +class _HandlerCallDetails( + collections.namedtuple( + "_HandlerCallDetails", + ( + "method", + "invocation_metadata", + ), + ), + grpc.HandlerCallDetails, +): + pass + + +class _Method(abc.ABC): + @abc.abstractmethod + def name(self) -> Optional[str]: + raise NotImplementedError() + + @abc.abstractmethod + def handler( + self, handler_call_details: _HandlerCallDetails + ) -> Optional[grpc.RpcMethodHandler]: + raise NotImplementedError() + + +class _RegisteredMethod(_Method): + def __init__( + self, + name: str, + registered_handler: Optional[grpc.RpcMethodHandler], + ): + self._name = name + self._registered_handler = registered_handler + + def name(self) -> Optional[str]: + return self._name + + def handler( + self, handler_call_details: _HandlerCallDetails + ) -> Optional[grpc.RpcMethodHandler]: + return self._registered_handler + + +class _GenericMethod(_Method): + def __init__( + self, + 
generic_handlers: List[grpc.GenericRpcHandler],
+    ):
+        self._generic_handlers = generic_handlers
+
+    def name(self) -> Optional[str]:
+        return None
+
+    def handler(
+        self, handler_call_details: _HandlerCallDetails
+    ) -> Optional[grpc.RpcMethodHandler]:
+        # If the same method has both a generic and a registered handler,
+        # the registered handler takes precedence.
+        for generic_handler in self._generic_handlers:
+            method_handler = generic_handler.service(handler_call_details)
+            if method_handler is not None:
+                return method_handler
+        return None
+
+
+class _RPCState(object):
+    context: contextvars.Context
+    condition: threading.Condition
+    due: Set[str]
+    request: Any
+    client: str
+    initial_metadata_allowed: bool
+    compression_algorithm: Optional[grpc.Compression]
+    disable_next_compression: bool
+    trailing_metadata: Optional[MetadataType]
+    code: Optional[grpc.StatusCode]
+    details: Optional[bytes]
+    statused: bool
+    rpc_errors: List[Exception]
+    callbacks: Optional[List[NullaryCallbackType]]
+    aborted: bool
+
+    def __init__(self):
+        self.context = contextvars.Context()
+        self.condition = threading.Condition()
+        self.due = set()
+        self.request = None
+        self.client = _OPEN
+        self.initial_metadata_allowed = True
+        self.compression_algorithm = None
+        self.disable_next_compression = False
+        self.trailing_metadata = None
+        self.code = None
+        self.details = None
+        self.statused = False
+        self.rpc_errors = []
+        self.callbacks = []
+        self.aborted = False
+
+
+def _raise_rpc_error(state: _RPCState) -> None:
+    rpc_error = grpc.RpcError()
+    state.rpc_errors.append(rpc_error)
+    raise rpc_error
+
+
+def _possibly_finish_call(
+    state: _RPCState, token: str
+) -> ServerTagCallbackType:
+    state.due.remove(token)
+    if not _is_rpc_state_active(state) and not state.due:
+        callbacks = state.callbacks
+        state.callbacks = None
+        return state, callbacks
+    else:
+        return None, ()
+
+
+def _send_status_from_server(state: _RPCState, token: str) -> ServerCallbackTag:
+    def send_status_from_server(unused_send_status_from_server_event):
+        with state.condition:
+            return _possibly_finish_call(state, token)
+
+    return send_status_from_server
+
+
+def _get_initial_metadata(
+    state: _RPCState, metadata: Optional[MetadataType]
+) -> Optional[MetadataType]:
+    with state.condition:
+        if state.compression_algorithm:
+            compression_metadata = (
+                _compression.compression_algorithm_to_metadata(
+                    state.compression_algorithm
+                ),
+            )
+            if metadata is None:
+                return compression_metadata
+            else:
+                return compression_metadata + tuple(metadata)
+        else:
+            return metadata
+
+
+def _get_initial_metadata_operation(
+    state: _RPCState, metadata: Optional[MetadataType]
+) -> cygrpc.Operation:
+    operation = cygrpc.SendInitialMetadataOperation(
+        _get_initial_metadata(state, metadata), _EMPTY_FLAGS
+    )
+    return operation
+
+
+def _abort(
+    state: _RPCState, call: cygrpc.Call, code: cygrpc.StatusCode, details: bytes
+) -> None:
+    if state.client is not _CANCELLED:
+        effective_code = _abortion_code(state, code)
+        effective_details = details if state.details is None else state.details
+        if state.initial_metadata_allowed:
+            operations = (
+                _get_initial_metadata_operation(state, None),
+                cygrpc.SendStatusFromServerOperation(
+                    state.trailing_metadata,
+                    effective_code,
+                    effective_details,
+                    _EMPTY_FLAGS,
+                ),
+            )
+            token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
+        else:
+            operations = (
+                cygrpc.SendStatusFromServerOperation(
+                    state.trailing_metadata,
+                    effective_code,
+                    effective_details,
+                    _EMPTY_FLAGS,
+                ),
+            )
+            token = 
_SEND_STATUS_FROM_SERVER_TOKEN + call.start_server_batch( + operations, _send_status_from_server(state, token) + ) + state.statused = True + state.due.add(token) + + +def _receive_close_on_server(state: _RPCState) -> ServerCallbackTag: + def receive_close_on_server(receive_close_on_server_event): + with state.condition: + if receive_close_on_server_event.batch_operations[0].cancelled(): + state.client = _CANCELLED + elif state.client is _OPEN: + state.client = _CLOSED + state.condition.notify_all() + return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN) + + return receive_close_on_server + + +def _receive_message( + state: _RPCState, + call: cygrpc.Call, + request_deserializer: Optional[DeserializingFunction], +) -> ServerCallbackTag: + def receive_message(receive_message_event): + serialized_request = _serialized_request(receive_message_event) + if serialized_request is None: + with state.condition: + if state.client is _OPEN: + state.client = _CLOSED + state.condition.notify_all() + return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN) + else: + request = _common.deserialize( + serialized_request, request_deserializer + ) + with state.condition: + if request is None: + _abort( + state, + call, + cygrpc.StatusCode.internal, + b"Exception deserializing request!", + ) + else: + state.request = request + state.condition.notify_all() + return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN) + + return receive_message + + +def _send_initial_metadata(state: _RPCState) -> ServerCallbackTag: + def send_initial_metadata(unused_send_initial_metadata_event): + with state.condition: + return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN) + + return send_initial_metadata + + +def _send_message(state: _RPCState, token: str) -> ServerCallbackTag: + def send_message(unused_send_message_event): + with state.condition: + state.condition.notify_all() + return _possibly_finish_call(state, token) + + return send_message + + +class _Context(grpc.ServicerContext): + _rpc_event: cygrpc.BaseEvent + _state: _RPCState + request_deserializer: Optional[DeserializingFunction] + + def __init__( + self, + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + request_deserializer: Optional[DeserializingFunction], + ): + self._rpc_event = rpc_event + self._state = state + self._request_deserializer = request_deserializer + + def is_active(self) -> bool: + with self._state.condition: + return _is_rpc_state_active(self._state) + + def time_remaining(self) -> float: + return max(self._rpc_event.call_details.deadline - time.time(), 0) + + def cancel(self) -> None: + self._rpc_event.call.cancel() + + def add_callback(self, callback: NullaryCallbackType) -> bool: + with self._state.condition: + if self._state.callbacks is None: + return False + else: + self._state.callbacks.append(callback) + return True + + def disable_next_message_compression(self) -> None: + with self._state.condition: + self._state.disable_next_compression = True + + def invocation_metadata(self) -> Optional[MetadataType]: + return self._rpc_event.invocation_metadata + + def peer(self) -> str: + return _common.decode(self._rpc_event.call.peer()) + + def peer_identities(self) -> Optional[Sequence[bytes]]: + return cygrpc.peer_identities(self._rpc_event.call) + + def peer_identity_key(self) -> Optional[str]: + id_key = cygrpc.peer_identity_key(self._rpc_event.call) + return id_key if id_key is None else _common.decode(id_key) + + def auth_context(self) -> Mapping[str, Sequence[bytes]]: + auth_context = 
cygrpc.auth_context(self._rpc_event.call) + auth_context_dict = {} if auth_context is None else auth_context + return { + _common.decode(key): value + for key, value in auth_context_dict.items() + } + + def set_compression(self, compression: grpc.Compression) -> None: + with self._state.condition: + self._state.compression_algorithm = compression + + def send_initial_metadata(self, initial_metadata: MetadataType) -> None: + with self._state.condition: + if self._state.client is _CANCELLED: + _raise_rpc_error(self._state) + else: + if self._state.initial_metadata_allowed: + operation = _get_initial_metadata_operation( + self._state, initial_metadata + ) + self._rpc_event.call.start_server_batch( + (operation,), _send_initial_metadata(self._state) + ) + self._state.initial_metadata_allowed = False + self._state.due.add(_SEND_INITIAL_METADATA_TOKEN) + else: + raise ValueError("Initial metadata no longer allowed!") + + def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None: + with self._state.condition: + self._state.trailing_metadata = trailing_metadata + + def trailing_metadata(self) -> Optional[MetadataType]: + return self._state.trailing_metadata + + def abort(self, code: grpc.StatusCode, details: str) -> None: + # treat OK like other invalid arguments: fail the RPC + if code == grpc.StatusCode.OK: + _LOGGER.error( + "abort() called with StatusCode.OK; returning UNKNOWN" + ) + code = grpc.StatusCode.UNKNOWN + details = "" + with self._state.condition: + self._state.code = code + self._state.details = _common.encode(details) + self._state.aborted = True + raise Exception() + + def abort_with_status(self, status: grpc.Status) -> None: + self._state.trailing_metadata = status.trailing_metadata + self.abort(status.code, status.details) + + def set_code(self, code: grpc.StatusCode) -> None: + with self._state.condition: + self._state.code = code + + def code(self) -> grpc.StatusCode: + return self._state.code + + def set_details(self, details: str) -> None: + with self._state.condition: + self._state.details = _common.encode(details) + + def details(self) -> bytes: + return self._state.details + + def _finalize_state(self) -> None: + pass + + +class _RequestIterator(object): + _state: _RPCState + _call: cygrpc.Call + _request_deserializer: Optional[DeserializingFunction] + + def __init__( + self, + state: _RPCState, + call: cygrpc.Call, + request_deserializer: Optional[DeserializingFunction], + ): + self._state = state + self._call = call + self._request_deserializer = request_deserializer + + def _raise_or_start_receive_message(self) -> None: + if self._state.client is _CANCELLED: + _raise_rpc_error(self._state) + elif not _is_rpc_state_active(self._state): + raise StopIteration() + else: + self._call.start_server_batch( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), + _receive_message( + self._state, self._call, self._request_deserializer + ), + ) + self._state.due.add(_RECEIVE_MESSAGE_TOKEN) + + def _look_for_request(self) -> Any: + if self._state.client is _CANCELLED: + _raise_rpc_error(self._state) + elif ( + self._state.request is None + and _RECEIVE_MESSAGE_TOKEN not in self._state.due + ): + raise StopIteration() + else: + request = self._state.request + self._state.request = None + return request + + raise AssertionError() # should never run + + def _next(self) -> Any: + with self._state.condition: + self._raise_or_start_receive_message() + while True: + self._state.condition.wait() + request = self._look_for_request() + if request is not None: + return request + 
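+    # Note: _RequestIterator supports both the Python 3 iterator protocol
+    # (__iter__/__next__) and the legacy next() alias defined below, so
+    # stream-request servicer handlers can consume requests with a plain
+    # for-loop over the iterator.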
+ def __iter__(self) -> _RequestIterator: + return self + + def __next__(self) -> Any: + return self._next() + + def next(self) -> Any: + return self._next() + + +def _unary_request( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + request_deserializer: Optional[DeserializingFunction], +) -> Callable[[], Any]: + def unary_request(): + with state.condition: + if not _is_rpc_state_active(state): + return None + else: + rpc_event.call.start_server_batch( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), + _receive_message( + state, rpc_event.call, request_deserializer + ), + ) + state.due.add(_RECEIVE_MESSAGE_TOKEN) + while True: + state.condition.wait() + if state.request is None: + if state.client is _CLOSED: + details = '"{}" requires exactly one request message.'.format( + rpc_event.call_details.method + ) + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.unimplemented, + _common.encode(details), + ) + return None + elif state.client is _CANCELLED: + return None + else: + request = state.request + state.request = None + return request + + return unary_request + + +def _call_behavior( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + behavior: ArityAgnosticMethodHandler, + argument: Any, + request_deserializer: Optional[DeserializingFunction], + send_response_callback: Optional[Callable[[ResponseType], None]] = None, +) -> Tuple[Union[ResponseType, Iterator[ResponseType]], bool]: + from grpc import _create_servicer_context # pytype: disable=pyi-error + + with _create_servicer_context( + rpc_event, state, request_deserializer + ) as context: + try: + response_or_iterator = None + if send_response_callback is not None: + response_or_iterator = behavior( + argument, context, send_response_callback + ) + else: + response_or_iterator = behavior(argument, context) + return response_or_iterator, True + except Exception as exception: # pylint: disable=broad-except + with state.condition: + if state.aborted: + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.unknown, + b"RPC Aborted", + ) + elif exception not in state.rpc_errors: + try: + details = "Exception calling application: {}".format( + exception + ) + except Exception: # pylint: disable=broad-except + details = ( + "Calling application raised unprintable Exception!" 
+ ) + _LOGGER.exception( + traceback.format_exception( + type(exception), + exception, + exception.__traceback__, + ) + ) + traceback.print_exc() + _LOGGER.exception(details) + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.unknown, + _common.encode(details), + ) + return None, False + + +def _take_response_from_response_iterator( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + response_iterator: Iterator[ResponseType], +) -> Tuple[ResponseType, bool]: + try: + return next(response_iterator), True + except StopIteration: + return None, True + except Exception as exception: # pylint: disable=broad-except + with state.condition: + if state.aborted: + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.unknown, + b"RPC Aborted", + ) + elif exception not in state.rpc_errors: + details = "Exception iterating responses: {}".format(exception) + _LOGGER.exception(details) + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.unknown, + _common.encode(details), + ) + return None, False + + +def _serialize_response( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + response: Any, + response_serializer: Optional[SerializingFunction], +) -> Optional[bytes]: + serialized_response = _common.serialize(response, response_serializer) + if serialized_response is None: + with state.condition: + _abort( + state, + rpc_event.call, + cygrpc.StatusCode.internal, + b"Failed to serialize response!", + ) + return None + else: + return serialized_response + + +def _get_send_message_op_flags_from_state( + state: _RPCState, +) -> Union[int, cygrpc.WriteFlag]: + if state.disable_next_compression: + return cygrpc.WriteFlag.no_compress + else: + return _EMPTY_FLAGS + + +def _reset_per_message_state(state: _RPCState) -> None: + with state.condition: + state.disable_next_compression = False + + +def _send_response( + rpc_event: cygrpc.BaseEvent, state: _RPCState, serialized_response: bytes +) -> bool: + with state.condition: + if not _is_rpc_state_active(state): + return False + else: + if state.initial_metadata_allowed: + operations = ( + _get_initial_metadata_operation(state, None), + cygrpc.SendMessageOperation( + serialized_response, + _get_send_message_op_flags_from_state(state), + ), + ) + state.initial_metadata_allowed = False + token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN + else: + operations = ( + cygrpc.SendMessageOperation( + serialized_response, + _get_send_message_op_flags_from_state(state), + ), + ) + token = _SEND_MESSAGE_TOKEN + rpc_event.call.start_server_batch( + operations, _send_message(state, token) + ) + state.due.add(token) + _reset_per_message_state(state) + while True: + state.condition.wait() + if token not in state.due: + return _is_rpc_state_active(state) + + +def _status( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + serialized_response: Optional[bytes], +) -> None: + with state.condition: + if state.client is not _CANCELLED: + code = _completion_code(state) + details = _details(state) + operations = [ + cygrpc.SendStatusFromServerOperation( + state.trailing_metadata, code, details, _EMPTY_FLAGS + ), + ] + if state.initial_metadata_allowed: + operations.append(_get_initial_metadata_operation(state, None)) + if serialized_response is not None: + operations.append( + cygrpc.SendMessageOperation( + serialized_response, + _get_send_message_op_flags_from_state(state), + ) + ) + rpc_event.call.start_server_batch( + operations, + _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN), + ) + state.statused = True + _reset_per_message_state(state) + 
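+            # `statused` is already True here, so _is_rpc_state_active() now
+            # reports this RPC as finished and nothing further will be sent on
+            # it; the due token below tracks completion of the status batch.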
state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN) + + +def _unary_response_in_pool( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + behavior: ArityAgnosticMethodHandler, + argument_thunk: Callable[[], Any], + request_deserializer: Optional[SerializingFunction], + response_serializer: Optional[SerializingFunction], +) -> None: + cygrpc.install_context_from_request_call_event(rpc_event) + + try: + argument = argument_thunk() + if argument is not None: + response, proceed = _call_behavior( + rpc_event, state, behavior, argument, request_deserializer + ) + if proceed: + serialized_response = _serialize_response( + rpc_event, state, response, response_serializer + ) + if serialized_response is not None: + _status(rpc_event, state, serialized_response) + except Exception: # pylint: disable=broad-except + traceback.print_exc() + finally: + cygrpc.uninstall_context() + + +def _stream_response_in_pool( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + behavior: ArityAgnosticMethodHandler, + argument_thunk: Callable[[], Any], + request_deserializer: Optional[DeserializingFunction], + response_serializer: Optional[SerializingFunction], +) -> None: + cygrpc.install_context_from_request_call_event(rpc_event) + + def send_response(response: Any) -> None: + if response is None: + _status(rpc_event, state, None) + else: + serialized_response = _serialize_response( + rpc_event, state, response, response_serializer + ) + if serialized_response is not None: + _send_response(rpc_event, state, serialized_response) + + try: + argument = argument_thunk() + if argument is not None: + if ( + hasattr(behavior, "experimental_non_blocking") + and behavior.experimental_non_blocking + ): + _call_behavior( + rpc_event, + state, + behavior, + argument, + request_deserializer, + send_response_callback=send_response, + ) + else: + response_iterator, proceed = _call_behavior( + rpc_event, state, behavior, argument, request_deserializer + ) + if proceed: + _send_message_callback_to_blocking_iterator_adapter( + rpc_event, state, send_response, response_iterator + ) + except Exception: # pylint: disable=broad-except + traceback.print_exc() + finally: + cygrpc.uninstall_context() + + +def _is_rpc_state_active(state: _RPCState) -> bool: + return state.client is not _CANCELLED and not state.statused + + +def _send_message_callback_to_blocking_iterator_adapter( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + send_response_callback: Callable[[ResponseType], None], + response_iterator: Iterator[ResponseType], +) -> None: + while True: + response, proceed = _take_response_from_response_iterator( + rpc_event, state, response_iterator + ) + if proceed: + send_response_callback(response) + if not _is_rpc_state_active(state): + break + else: + break + + +def _select_thread_pool_for_behavior( + behavior: ArityAgnosticMethodHandler, + default_thread_pool: futures.ThreadPoolExecutor, +) -> futures.ThreadPoolExecutor: + if hasattr(behavior, "experimental_thread_pool") and isinstance( + behavior.experimental_thread_pool, futures.ThreadPoolExecutor + ): + return behavior.experimental_thread_pool + else: + return default_thread_pool + + +def _handle_unary_unary( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_handler: grpc.RpcMethodHandler, + default_thread_pool: futures.ThreadPoolExecutor, +) -> futures.Future: + unary_request = _unary_request( + rpc_event, state, method_handler.request_deserializer + ) + thread_pool = _select_thread_pool_for_behavior( + method_handler.unary_unary, default_thread_pool + ) + return 
thread_pool.submit( + state.context.run, + _unary_response_in_pool, + rpc_event, + state, + method_handler.unary_unary, + unary_request, + method_handler.request_deserializer, + method_handler.response_serializer, + ) + + +def _handle_unary_stream( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_handler: grpc.RpcMethodHandler, + default_thread_pool: futures.ThreadPoolExecutor, +) -> futures.Future: + unary_request = _unary_request( + rpc_event, state, method_handler.request_deserializer + ) + thread_pool = _select_thread_pool_for_behavior( + method_handler.unary_stream, default_thread_pool + ) + return thread_pool.submit( + state.context.run, + _stream_response_in_pool, + rpc_event, + state, + method_handler.unary_stream, + unary_request, + method_handler.request_deserializer, + method_handler.response_serializer, + ) + + +def _handle_stream_unary( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_handler: grpc.RpcMethodHandler, + default_thread_pool: futures.ThreadPoolExecutor, +) -> futures.Future: + request_iterator = _RequestIterator( + state, rpc_event.call, method_handler.request_deserializer + ) + thread_pool = _select_thread_pool_for_behavior( + method_handler.stream_unary, default_thread_pool + ) + return thread_pool.submit( + state.context.run, + _unary_response_in_pool, + rpc_event, + state, + method_handler.stream_unary, + lambda: request_iterator, + method_handler.request_deserializer, + method_handler.response_serializer, + ) + + +def _handle_stream_stream( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_handler: grpc.RpcMethodHandler, + default_thread_pool: futures.ThreadPoolExecutor, +) -> futures.Future: + request_iterator = _RequestIterator( + state, rpc_event.call, method_handler.request_deserializer + ) + thread_pool = _select_thread_pool_for_behavior( + method_handler.stream_stream, default_thread_pool + ) + return thread_pool.submit( + state.context.run, + _stream_response_in_pool, + rpc_event, + state, + method_handler.stream_stream, + lambda: request_iterator, + method_handler.request_deserializer, + method_handler.response_serializer, + ) + + +def _find_method_handler( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_with_handler: _Method, + interceptor_pipeline: Optional[_interceptor._ServicePipeline], +) -> Optional[grpc.RpcMethodHandler]: + def query_handlers( + handler_call_details: _HandlerCallDetails, + ) -> Optional[grpc.RpcMethodHandler]: + return method_with_handler.handler(handler_call_details) + + method_name = method_with_handler.name() + if not method_name: + method_name = _common.decode(rpc_event.call_details.method) + + handler_call_details = _HandlerCallDetails( + method_name, + rpc_event.invocation_metadata, + ) + + if interceptor_pipeline is not None: + return state.context.run( + interceptor_pipeline.execute, query_handlers, handler_call_details + ) + else: + return state.context.run(query_handlers, handler_call_details) + + +def _reject_rpc( + rpc_event: cygrpc.BaseEvent, + rpc_state: _RPCState, + status: cygrpc.StatusCode, + details: bytes, +): + operations = ( + _get_initial_metadata_operation(rpc_state, None), + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + None, status, details, _EMPTY_FLAGS + ), + ) + rpc_event.call.start_server_batch( + operations, + lambda ignored_event: ( + rpc_state, + (), + ), + ) + + +def _handle_with_method_handler( + rpc_event: cygrpc.BaseEvent, + state: _RPCState, + method_handler: grpc.RpcMethodHandler, + thread_pool: 
futures.ThreadPoolExecutor,
+) -> futures.Future:
+    with state.condition:
+        rpc_event.call.start_server_batch(
+            (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
+            _receive_close_on_server(state),
+        )
+        state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
+        if method_handler.request_streaming:
+            if method_handler.response_streaming:
+                return _handle_stream_stream(
+                    rpc_event, state, method_handler, thread_pool
+                )
+            else:
+                return _handle_stream_unary(
+                    rpc_event, state, method_handler, thread_pool
+                )
+        else:
+            if method_handler.response_streaming:
+                return _handle_unary_stream(
+                    rpc_event, state, method_handler, thread_pool
+                )
+            else:
+                return _handle_unary_unary(
+                    rpc_event, state, method_handler, thread_pool
+                )
+
+
+def _handle_call(
+    rpc_event: cygrpc.BaseEvent,
+    method_with_handler: _Method,
+    interceptor_pipeline: Optional[_interceptor._ServicePipeline],
+    thread_pool: futures.ThreadPoolExecutor,
+    concurrency_exceeded: bool,
+) -> Tuple[Optional[_RPCState], Optional[futures.Future]]:
+    """Handles an RPC based on the provided handlers.
+
+    When a call event for a registered method is received from Core, the
+    event carries the method name as its tag; that name travels here inside
+    method_with_handler, and the handler is looked up by method name in
+    registered_method_handlers.
+
+    For a call event for an unregistered method, the method name is included
+    in rpc_event.call_details.method and the generic handlers are queried to
+    find the actual handler.
+    """
+    if not rpc_event.success:
+        return None, None
+    if rpc_event.call_details.method or method_with_handler.name():
+        rpc_state = _RPCState()
+        try:
+            method_handler = _find_method_handler(
+                rpc_event,
+                rpc_state,
+                method_with_handler,
+                interceptor_pipeline,
+            )
+        except Exception as exception:  # pylint: disable=broad-except
+            details = "Exception servicing handler: {}".format(exception)
+            _LOGGER.exception(details)
+            _reject_rpc(
+                rpc_event,
+                rpc_state,
+                cygrpc.StatusCode.unknown,
+                b"Error in service handler!",
+            )
+            return rpc_state, None
+        if method_handler is None:
+            _reject_rpc(
+                rpc_event,
+                rpc_state,
+                cygrpc.StatusCode.unimplemented,
+                b"Method not found!",
+            )
+            return rpc_state, None
+        elif concurrency_exceeded:
+            _reject_rpc(
+                rpc_event,
+                rpc_state,
+                cygrpc.StatusCode.resource_exhausted,
+                b"Concurrent RPC limit exceeded!",
+            )
+            return rpc_state, None
+        else:
+            return (
+                rpc_state,
+                _handle_with_method_handler(
+                    rpc_event, rpc_state, method_handler, thread_pool
+                ),
+            )
+    else:
+        return None, None
+
+
+@enum.unique
+class _ServerStage(enum.Enum):
+    STOPPED = "stopped"
+    STARTED = "started"
+    GRACE = "grace"
+
+
+class _ServerState(object):
+    lock: threading.RLock
+    completion_queue: cygrpc.CompletionQueue
+    server: cygrpc.Server
+    generic_handlers: List[grpc.GenericRpcHandler]
+    registered_method_handlers: Dict[str, grpc.RpcMethodHandler]
+    interceptor_pipeline: Optional[_interceptor._ServicePipeline]
+    thread_pool: futures.ThreadPoolExecutor
+    stage: _ServerStage
+    termination_event: threading.Event
+    shutdown_events: List[threading.Event]
+    maximum_concurrent_rpcs: Optional[int]
+    active_rpc_count: int
+    rpc_states: Set[_RPCState]
+    due: Set[str]
+    server_deallocated: bool
+
+    # pylint: disable=too-many-arguments
+    def __init__(
+        self,
+        completion_queue: cygrpc.CompletionQueue,
+        server: cygrpc.Server,
+        generic_handlers: Sequence[grpc.GenericRpcHandler],
+        interceptor_pipeline: Optional[_interceptor._ServicePipeline],
+        thread_pool: futures.ThreadPoolExecutor,
+        maximum_concurrent_rpcs:
Optional[int], + ): + self.lock = threading.RLock() + self.completion_queue = completion_queue + self.server = server + self.generic_handlers = list(generic_handlers) + self.interceptor_pipeline = interceptor_pipeline + self.thread_pool = thread_pool + self.stage = _ServerStage.STOPPED + self.termination_event = threading.Event() + self.shutdown_events = [self.termination_event] + self.maximum_concurrent_rpcs = maximum_concurrent_rpcs + self.active_rpc_count = 0 + self.registered_method_handlers = {} + + # TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields. + self.rpc_states = set() + self.due = set() + + # A "volatile" flag to interrupt the daemon serving thread + self.server_deallocated = False + + +def _add_generic_handlers( + state: _ServerState, generic_handlers: Iterable[grpc.GenericRpcHandler] +) -> None: + with state.lock: + state.generic_handlers.extend(generic_handlers) + + +def _add_registered_method_handlers( + state: _ServerState, method_handlers: Dict[str, grpc.RpcMethodHandler] +) -> None: + with state.lock: + state.registered_method_handlers.update(method_handlers) + + +def _add_insecure_port(state: _ServerState, address: bytes) -> int: + with state.lock: + return state.server.add_http2_port(address) + + +def _add_secure_port( + state: _ServerState, + address: bytes, + server_credentials: grpc.ServerCredentials, +) -> int: + with state.lock: + return state.server.add_http2_port( + address, server_credentials._credentials + ) + + +def _request_call(state: _ServerState) -> None: + state.server.request_call( + state.completion_queue, state.completion_queue, _REQUEST_CALL_TAG + ) + state.due.add(_REQUEST_CALL_TAG) + + +def _request_registered_call(state: _ServerState, method: str) -> None: + registered_call_tag = method + state.server.request_registered_call( + state.completion_queue, + state.completion_queue, + method, + registered_call_tag, + ) + state.due.add(registered_call_tag) + + +# TODO(https://github.com/grpc/grpc/issues/6597): delete this function. 
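+# Serving can stop only once every tracked RPC has drained (rpc_states is
+# empty) and no completion-queue tags remain due.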
+def _stop_serving(state: _ServerState) -> bool: + if not state.rpc_states and not state.due: + state.server.destroy() + for shutdown_event in state.shutdown_events: + shutdown_event.set() + state.stage = _ServerStage.STOPPED + return True + else: + return False + + +def _on_call_completed(state: _ServerState) -> None: + with state.lock: + state.active_rpc_count -= 1 + + +# pylint: disable=too-many-branches +def _process_event_and_continue( + state: _ServerState, event: cygrpc.BaseEvent +) -> bool: + should_continue = True + if event.tag is _SHUTDOWN_TAG: + with state.lock: + state.due.remove(_SHUTDOWN_TAG) + if _stop_serving(state): + should_continue = False + elif ( + event.tag is _REQUEST_CALL_TAG + or event.tag in state.registered_method_handlers.keys() + ): + registered_method_name = None + if event.tag in state.registered_method_handlers.keys(): + registered_method_name = event.tag + method_with_handler = _RegisteredMethod( + registered_method_name, + state.registered_method_handlers.get( + registered_method_name, None + ), + ) + else: + method_with_handler = _GenericMethod( + state.generic_handlers, + ) + with state.lock: + state.due.remove(event.tag) + concurrency_exceeded = ( + state.maximum_concurrent_rpcs is not None + and state.active_rpc_count >= state.maximum_concurrent_rpcs + ) + rpc_state, rpc_future = _handle_call( + event, + method_with_handler, + state.interceptor_pipeline, + state.thread_pool, + concurrency_exceeded, + ) + if rpc_state is not None: + state.rpc_states.add(rpc_state) + if rpc_future is not None: + state.active_rpc_count += 1 + rpc_future.add_done_callback( + lambda unused_future: _on_call_completed(state) + ) + if state.stage is _ServerStage.STARTED: + if ( + registered_method_name + in state.registered_method_handlers.keys() + ): + _request_registered_call(state, registered_method_name) + else: + _request_call(state) + elif _stop_serving(state): + should_continue = False + else: + rpc_state, callbacks = event.tag(event) + for callback in callbacks: + try: + callback() + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Exception calling callback!") + if rpc_state is not None: + with state.lock: + state.rpc_states.remove(rpc_state) + if _stop_serving(state): + should_continue = False + return should_continue + + +def _serve(state: _ServerState) -> None: + while True: + timeout = time.time() + _DEALLOCATED_SERVER_CHECK_PERIOD_S + event = state.completion_queue.poll(timeout) + if state.server_deallocated: + _begin_shutdown_once(state) + if event.completion_type != cygrpc.CompletionType.queue_timeout: + if not _process_event_and_continue(state, event): + return + # We want to force the deletion of the previous event + # ~before~ we poll again; if the event has a reference + # to a shutdown Call object, this can induce spinlock. 
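+        # (If `event` kept a shutdown Call alive, poll() could keep returning
+        # immediately and turn this loop into a busy-wait.)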
+ event = None + + +def _begin_shutdown_once(state: _ServerState) -> None: + with state.lock: + if state.stage is _ServerStage.STARTED: + state.server.shutdown(state.completion_queue, _SHUTDOWN_TAG) + state.stage = _ServerStage.GRACE + state.due.add(_SHUTDOWN_TAG) + + +def _stop(state: _ServerState, grace: Optional[float]) -> threading.Event: + with state.lock: + if state.stage is _ServerStage.STOPPED: + shutdown_event = threading.Event() + shutdown_event.set() + return shutdown_event + else: + _begin_shutdown_once(state) + shutdown_event = threading.Event() + state.shutdown_events.append(shutdown_event) + if grace is None: + state.server.cancel_all_calls() + else: + + def cancel_all_calls_after_grace(): + shutdown_event.wait(timeout=grace) + with state.lock: + state.server.cancel_all_calls() + + thread = threading.Thread(target=cancel_all_calls_after_grace) + thread.start() + return shutdown_event + shutdown_event.wait() + return shutdown_event + + +def _start(state: _ServerState) -> None: + with state.lock: + if state.stage is not _ServerStage.STOPPED: + raise ValueError("Cannot start already-started server!") + state.server.start() + state.stage = _ServerStage.STARTED + # Request a call for each registered method so we can handle any of them. + for method in state.registered_method_handlers.keys(): + _request_registered_call(state, method) + # Also request a call for non-registered method. + _request_call(state) + thread = threading.Thread(target=_serve, args=(state,)) + thread.daemon = True + thread.start() + + +def _validate_generic_rpc_handlers( + generic_rpc_handlers: Iterable[grpc.GenericRpcHandler], +) -> None: + for generic_rpc_handler in generic_rpc_handlers: + service_attribute = getattr(generic_rpc_handler, "service", None) + if service_attribute is None: + raise AttributeError( + '"{}" must conform to grpc.GenericRpcHandler type but does ' + 'not have "service" method!'.format(generic_rpc_handler) + ) + + +def _augment_options( + base_options: Sequence[ChannelArgumentType], + compression: Optional[grpc.Compression], + xds: bool, +) -> Sequence[ChannelArgumentType]: + compression_option = _compression.create_channel_option(compression) + maybe_server_call_tracer_factory_option = ( + _observability.create_server_call_tracer_factory_option(xds) + ) + return ( + tuple(base_options) + + compression_option + + maybe_server_call_tracer_factory_option + ) + + +class _Server(grpc.Server): + _state: _ServerState + + # pylint: disable=too-many-arguments + def __init__( + self, + thread_pool: futures.ThreadPoolExecutor, + generic_handlers: Sequence[grpc.GenericRpcHandler], + interceptors: Sequence[grpc.ServerInterceptor], + options: Sequence[ChannelArgumentType], + maximum_concurrent_rpcs: Optional[int], + compression: Optional[grpc.Compression], + xds: bool, + ): + completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server(_augment_options(options, compression, xds), xds) + server.register_completion_queue(completion_queue) + self._state = _ServerState( + completion_queue, + server, + generic_handlers, + _interceptor.service_pipeline(interceptors), + thread_pool, + maximum_concurrent_rpcs, + ) + self._cy_server = server + + def add_generic_rpc_handlers( + self, generic_rpc_handlers: Iterable[grpc.GenericRpcHandler] + ) -> None: + _validate_generic_rpc_handlers(generic_rpc_handlers) + _add_generic_handlers(self._state, generic_rpc_handlers) + + def add_registered_method_handlers( + self, + service_name: str, + method_handlers: Dict[str, grpc.RpcMethodHandler], + ) -> None: + # 
Can't register method once server started. + with self._state.lock: + if self._state.stage is _ServerStage.STARTED: + return + + # TODO(xuanwn): We should validate method_handlers first. + method_to_handlers = { + _common.fully_qualified_method(service_name, method): method_handler + for method, method_handler in method_handlers.items() + } + for fully_qualified_method in method_to_handlers.keys(): + self._cy_server.register_method(fully_qualified_method) + _add_registered_method_handlers(self._state, method_to_handlers) + + def add_insecure_port(self, address: str) -> int: + return _common.validate_port_binding_result( + address, _add_insecure_port(self._state, _common.encode(address)) + ) + + def add_secure_port( + self, address: str, server_credentials: grpc.ServerCredentials + ) -> int: + return _common.validate_port_binding_result( + address, + _add_secure_port( + self._state, _common.encode(address), server_credentials + ), + ) + + def start(self) -> None: + _start(self._state) + + def wait_for_termination(self, timeout: Optional[float] = None) -> bool: + # NOTE(https://bugs.python.org/issue35935) + # Remove this workaround once threading.Event.wait() is working with + # CTRL+C across platforms. + return _common.wait( + self._state.termination_event.wait, + self._state.termination_event.is_set, + timeout=timeout, + ) + + def stop(self, grace: Optional[float]) -> threading.Event: + return _stop(self._state, grace) + + def __del__(self): + if hasattr(self, "_state"): + # We can not grab a lock in __del__(), so set a flag to signal the + # serving daemon thread (if it exists) to initiate shutdown. + self._state.server_deallocated = True + + +def create_server( + thread_pool: futures.ThreadPoolExecutor, + generic_rpc_handlers: Sequence[grpc.GenericRpcHandler], + interceptors: Sequence[grpc.ServerInterceptor], + options: Sequence[ChannelArgumentType], + maximum_concurrent_rpcs: Optional[int], + compression: Optional[grpc.Compression], + xds: bool, +) -> _Server: + _validate_generic_rpc_handlers(generic_rpc_handlers) + return _Server( + thread_pool, + generic_rpc_handlers, + interceptors, + options, + maximum_concurrent_rpcs, + compression, + xds, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/_simple_stubs.py b/venv/lib/python3.10/site-packages/grpc/_simple_stubs.py new file mode 100644 index 0000000000000000000000000000000000000000..3e88670aa089d3ed26a953f98b806f4b410a47d1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_simple_stubs.py @@ -0,0 +1,588 @@ +# Copyright 2020 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
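+# Illustrative usage sketch (not part of the original source; assumes a
+# generated `helloworld_pb2` module with `HelloRequest`/`HelloReply`
+# messages):
+#
+#     from grpc.experimental import unary_unary
+#
+#     reply = unary_unary(
+#         helloworld_pb2.HelloRequest(name="you"),
+#         target="localhost:50051",
+#         method="/helloworld.Greeter/SayHello",
+#         request_serializer=helloworld_pb2.HelloRequest.SerializeToString,
+#         response_deserializer=helloworld_pb2.HelloReply.FromString,
+#         insecure=True,
+#     )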
+"""Functions that obviate explicit stubs and explicit channels.""" + +import collections +import datetime +import logging +import os +import threading +from typing import ( + Any, + AnyStr, + Callable, + Dict, + Iterator, + Optional, + Sequence, + Tuple, + TypeVar, + Union, +) + +import grpc +from grpc.experimental import experimental_api + +RequestType = TypeVar("RequestType") +ResponseType = TypeVar("ResponseType") + +OptionsType = Sequence[Tuple[str, str]] +CacheKey = Tuple[ + str, + OptionsType, + Optional[grpc.ChannelCredentials], + Optional[grpc.Compression], +] + +_LOGGER = logging.getLogger(__name__) + +_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" +if _EVICTION_PERIOD_KEY in os.environ: + _EVICTION_PERIOD = datetime.timedelta( + seconds=float(os.environ[_EVICTION_PERIOD_KEY]) + ) + _LOGGER.debug( + "Setting managed channel eviction period to %s", _EVICTION_PERIOD + ) +else: + _EVICTION_PERIOD = datetime.timedelta(minutes=10) + +_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" +if _MAXIMUM_CHANNELS_KEY in os.environ: + _MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY]) + _LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS) +else: + _MAXIMUM_CHANNELS = 2**8 + +_DEFAULT_TIMEOUT_KEY = "GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS" +if _DEFAULT_TIMEOUT_KEY in os.environ: + _DEFAULT_TIMEOUT = float(os.environ[_DEFAULT_TIMEOUT_KEY]) + _LOGGER.debug("Setting default timeout seconds to %f", _DEFAULT_TIMEOUT) +else: + _DEFAULT_TIMEOUT = 60.0 + + +def _create_channel( + target: str, + options: Sequence[Tuple[str, str]], + channel_credentials: Optional[grpc.ChannelCredentials], + compression: Optional[grpc.Compression], +) -> grpc.Channel: + _LOGGER.debug( + f"Creating secure channel with credentials '{channel_credentials}', " + + f"options '{options}' and compression '{compression}'" + ) + return grpc.secure_channel( + target, + credentials=channel_credentials, + options=options, + compression=compression, + ) + + +class ChannelCache: + # NOTE(rbellevi): Untyped due to reference cycle. + _singleton = None + _lock: threading.RLock = threading.RLock() + _condition: threading.Condition = threading.Condition(lock=_lock) + _eviction_ready: threading.Event = threading.Event() + + _mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]] + _eviction_thread: threading.Thread + + def __init__(self): + self._mapping = collections.OrderedDict() + self._eviction_thread = threading.Thread( + target=ChannelCache._perform_evictions, daemon=True + ) + self._eviction_thread.start() + + @staticmethod + def get(): + with ChannelCache._lock: + if ChannelCache._singleton is None: + ChannelCache._singleton = ChannelCache() + ChannelCache._eviction_ready.wait() + return ChannelCache._singleton + + def _evict_locked(self, key: CacheKey): + channel, _ = self._mapping.pop(key) + _LOGGER.debug( + "Evicting channel %s with configuration %s.", channel, key + ) + channel.close() + del channel + + @staticmethod + def _perform_evictions(): + while True: + with ChannelCache._lock: + ChannelCache._eviction_ready.set() + if not ChannelCache._singleton._mapping: + ChannelCache._condition.wait() + elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS: + key = next(iter(ChannelCache._singleton._mapping.keys())) + ChannelCache._singleton._evict_locked(key) + # And immediately reevaluate. 
+                else:
+                    key, (_, eviction_time) = next(
+                        iter(ChannelCache._singleton._mapping.items())
+                    )
+                    now = datetime.datetime.now()
+                    if eviction_time <= now:
+                        ChannelCache._singleton._evict_locked(key)
+                        continue
+                    else:
+                        time_to_eviction = (eviction_time - now).total_seconds()
+                        # NOTE: We aim to *eventually* coalesce to a state in
+                        # which no overdue channels are in the cache and the
+                        # length of the cache is no longer than
+                        # _MAXIMUM_CHANNELS. We tolerate momentary states in
+                        # which these two criteria are not met.
+                        ChannelCache._condition.wait(timeout=time_to_eviction)
+
+    def get_channel(
+        self,
+        target: str,
+        options: Sequence[Tuple[str, str]],
+        channel_credentials: Optional[grpc.ChannelCredentials],
+        insecure: bool,
+        compression: Optional[grpc.Compression],
+        method: str,
+        _registered_method: bool,
+    ) -> Tuple[grpc.Channel, Optional[int]]:
+        """Gets a channel from the cache or creates a new one.
+
+        This method also takes care of method registration for the channel:
+        a new call handle is registered if a not-yet-registered method is
+        invoked on an existing channel.
+
+        Returns:
+            A tuple of two items: the channel, and the call handle if the
+            method is registered, or None if it is not.
+        """
+        if insecure and channel_credentials:
+            raise ValueError(
+                "The insecure option is mutually exclusive with "
+                + "the channel_credentials option. Please use one "
+                + "or the other."
+            )
+        if insecure:
+            channel_credentials = (
+                grpc.experimental.insecure_channel_credentials()
+            )
+        elif channel_credentials is None:
+            _LOGGER.debug("Defaulting to SSL channel credentials.")
+            channel_credentials = grpc.ssl_channel_credentials()
+        key = (target, options, channel_credentials, compression)
+        with self._lock:
+            channel_data = self._mapping.get(key, None)
+            call_handle = None
+            if channel_data is not None:
+                channel = channel_data[0]
+                # Register a new call handle if the caller requested a
+                # registered method that this existing channel has not yet
+                # registered.
+                if _registered_method:
+                    call_handle = channel._get_registered_call_handle(method)
+                self._mapping.pop(key)
+                self._mapping[key] = (
+                    channel,
+                    datetime.datetime.now() + _EVICTION_PERIOD,
+                )
+                return channel, call_handle
+            else:
+                channel = _create_channel(
+                    target, options, channel_credentials, compression
+                )
+                if _registered_method:
+                    call_handle = channel._get_registered_call_handle(method)
+                self._mapping[key] = (
+                    channel,
+                    datetime.datetime.now() + _EVICTION_PERIOD,
+                )
+                if (
+                    len(self._mapping) == 1
+                    or len(self._mapping) >= _MAXIMUM_CHANNELS
+                ):
+                    self._condition.notify()
+                return channel, call_handle
+
+    def _test_only_channel_count(self) -> int:
+        with self._lock:
+            return len(self._mapping)
+
+
+@experimental_api
+# pylint: disable=too-many-locals
+def unary_unary(
+    request: RequestType,
+    target: str,
+    method: str,
+    request_serializer: Optional[Callable[[Any], bytes]] = None,
+    response_deserializer: Optional[Callable[[bytes], Any]] = None,
+    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+    channel_credentials: Optional[grpc.ChannelCredentials] = None,
+    insecure: bool = False,
+    call_credentials: Optional[grpc.CallCredentials] = None,
+    compression: Optional[grpc.Compression] = None,
+    wait_for_ready: Optional[bool] = None,
+    timeout: Optional[float] = _DEFAULT_TIMEOUT,
+    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
+    _registered_method: Optional[bool] = False,
+) -> ResponseType:
+    """Invokes a unary-unary RPC without an explicitly specified channel.
+
+    THIS IS AN EXPERIMENTAL API.
+
+    This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+    The default eviction period is 10 minutes. One may set the environment
+    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+    The default maximum number of channels is 256. One may set the
+    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+    this.
+
+    Args:
+      request: The request value for the RPC.
+      target: The server address.
+      method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. Request goes unserialized in case None is passed.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. Response goes undeserialized in case None is passed.
+      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+        runtime) to configure the channel.
+      channel_credentials: A credential applied to the whole channel, e.g. the
+        return value of grpc.ssl_channel_credentials() or
+        grpc.insecure_channel_credentials().
+      insecure: If True, specifies channel_credentials as
+        :term:`grpc.insecure_channel_credentials()`. This option is mutually
+        exclusive with the `channel_credentials` option.
+      call_credentials: A call credential applied to each call individually,
+        e.g. the output of grpc.metadata_call_credentials() or
+        grpc.access_token_call_credentials().
+      compression: An optional value indicating the compression method to be
+        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+      wait_for_ready: An optional flag indicating whether the RPC should fail
+        immediately if the connection is not ready at the time the RPC is
+        invoked, or if it should wait until the connection to the server
+        becomes ready. When using this option, the user will likely also want
+        to set a timeout. Defaults to True.
+      timeout: An optional duration of time in seconds to allow for the RPC,
+        after which an exception will be raised. If timeout is unspecified,
+        defaults to a timeout controlled by the
+        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+        unset, defaults to 60 seconds. Supply a value of None to indicate that
+        no timeout should be enforced.
+      metadata: Optional metadata to send to the server.
+
+    Returns:
+      The response to the RPC.
+    """
+    channel, method_handle = ChannelCache.get().get_channel(
+        target,
+        options,
+        channel_credentials,
+        insecure,
+        compression,
+        method,
+        _registered_method,
+    )
+    multicallable = channel.unary_unary(
+        method, request_serializer, response_deserializer, method_handle
+    )
+    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+    return multicallable(
+        request,
+        metadata=metadata,
+        wait_for_ready=wait_for_ready,
+        credentials=call_credentials,
+        timeout=timeout,
+    )
+
+
+@experimental_api
+# pylint: disable=too-many-locals
+def unary_stream(
+    request: RequestType,
+    target: str,
+    method: str,
+    request_serializer: Optional[Callable[[Any], bytes]] = None,
+    response_deserializer: Optional[Callable[[bytes], Any]] = None,
+    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+    channel_credentials: Optional[grpc.ChannelCredentials] = None,
+    insecure: bool = False,
+    call_credentials: Optional[grpc.CallCredentials] = None,
+    compression: Optional[grpc.Compression] = None,
+    wait_for_ready: Optional[bool] = None,
+    timeout: Optional[float] = _DEFAULT_TIMEOUT,
+    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
+    _registered_method: Optional[bool] = False,
+) -> Iterator[ResponseType]:
+    """Invokes a unary-stream RPC without an explicitly specified channel.
+
+    THIS IS AN EXPERIMENTAL API.
+
+    This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+    The default eviction period is 10 minutes. One may set the environment
+    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+    The default maximum number of channels is 256. One may set the
+    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+    this.
+
+    Args:
+      request: The request value for the RPC.
+      target: The server address.
+      method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. Request goes unserialized in case None is passed.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. Response goes undeserialized in case None is passed.
+      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+        runtime) to configure the channel.
+      channel_credentials: A credential applied to the whole channel, e.g. the
+        return value of grpc.ssl_channel_credentials().
+      insecure: If True, specifies channel_credentials as
+        :term:`grpc.insecure_channel_credentials()`. This option is mutually
+        exclusive with the `channel_credentials` option.
+      call_credentials: A call credential applied to each call individually,
+        e.g. the output of grpc.metadata_call_credentials() or
+        grpc.access_token_call_credentials().
+      compression: An optional value indicating the compression method to be
+        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+      wait_for_ready: An optional flag indicating whether the RPC should fail
+        immediately if the connection is not ready at the time the RPC is
+        invoked, or if it should wait until the connection to the server
+        becomes ready. When using this option, the user will likely also want
+        to set a timeout. Defaults to True.
+      timeout: An optional duration of time in seconds to allow for the RPC,
+        after which an exception will be raised. If timeout is unspecified,
+        defaults to a timeout controlled by the
+        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+        unset, defaults to 60 seconds. Supply a value of None to indicate that
+        no timeout should be enforced.
+      metadata: Optional metadata to send to the server.
+
+    Returns:
+      An iterator of responses.
+    """
+    channel, method_handle = ChannelCache.get().get_channel(
+        target,
+        options,
+        channel_credentials,
+        insecure,
+        compression,
+        method,
+        _registered_method,
+    )
+    multicallable = channel.unary_stream(
+        method, request_serializer, response_deserializer, method_handle
+    )
+    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+    return multicallable(
+        request,
+        metadata=metadata,
+        wait_for_ready=wait_for_ready,
+        credentials=call_credentials,
+        timeout=timeout,
+    )
+
+
+@experimental_api
+# pylint: disable=too-many-locals
+def stream_unary(
+    request_iterator: Iterator[RequestType],
+    target: str,
+    method: str,
+    request_serializer: Optional[Callable[[Any], bytes]] = None,
+    response_deserializer: Optional[Callable[[bytes], Any]] = None,
+    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+    channel_credentials: Optional[grpc.ChannelCredentials] = None,
+    insecure: bool = False,
+    call_credentials: Optional[grpc.CallCredentials] = None,
+    compression: Optional[grpc.Compression] = None,
+    wait_for_ready: Optional[bool] = None,
+    timeout: Optional[float] = _DEFAULT_TIMEOUT,
+    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
+    _registered_method: Optional[bool] = False,
+) -> ResponseType:
+    """Invokes a stream-unary RPC without an explicitly specified channel.
+
+    THIS IS AN EXPERIMENTAL API.
+
+    This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+    The default eviction period is 10 minutes. One may set the environment
+    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+    The default maximum number of channels is 256. One may set the
+    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+    this.
+
+    Args:
+      request_iterator: An iterator that yields request values for the RPC.
+      target: The server address.
+      method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. Request goes unserialized in case None is passed.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. Response goes undeserialized in case None is passed.
+      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+        runtime) to configure the channel.
+      channel_credentials: A credential applied to the whole channel, e.g. the
+        return value of grpc.ssl_channel_credentials().
+      call_credentials: A call credential applied to each call individually,
+        e.g. the output of grpc.metadata_call_credentials() or
+        grpc.access_token_call_credentials().
+      insecure: If True, specifies channel_credentials as
+        :term:`grpc.insecure_channel_credentials()`. This option is mutually
+        exclusive with the `channel_credentials` option.
+      compression: An optional value indicating the compression method to be
+        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
+      wait_for_ready: An optional flag indicating whether the RPC should fail
+        immediately if the connection is not ready at the time the RPC is
+        invoked, or if it should wait until the connection to the server
+        becomes ready. When using this option, the user will likely also want
+        to set a timeout. Defaults to True.
+      timeout: An optional duration of time in seconds to allow for the RPC,
+        after which an exception will be raised. If timeout is unspecified,
+        defaults to a timeout controlled by the
+        GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is
+        unset, defaults to 60 seconds. Supply a value of None to indicate that
+        no timeout should be enforced.
+      metadata: Optional metadata to send to the server.
+
+    Returns:
+      The response to the RPC.
+    """
+    channel, method_handle = ChannelCache.get().get_channel(
+        target,
+        options,
+        channel_credentials,
+        insecure,
+        compression,
+        method,
+        _registered_method,
+    )
+    multicallable = channel.stream_unary(
+        method, request_serializer, response_deserializer, method_handle
+    )
+    wait_for_ready = wait_for_ready if wait_for_ready is not None else True
+    return multicallable(
+        request_iterator,
+        metadata=metadata,
+        wait_for_ready=wait_for_ready,
+        credentials=call_credentials,
+        timeout=timeout,
+    )
+
+
+@experimental_api
+# pylint: disable=too-many-locals
+def stream_stream(
+    request_iterator: Iterator[RequestType],
+    target: str,
+    method: str,
+    request_serializer: Optional[Callable[[Any], bytes]] = None,
+    response_deserializer: Optional[Callable[[bytes], Any]] = None,
+    options: Sequence[Tuple[AnyStr, AnyStr]] = (),
+    channel_credentials: Optional[grpc.ChannelCredentials] = None,
+    insecure: bool = False,
+    call_credentials: Optional[grpc.CallCredentials] = None,
+    compression: Optional[grpc.Compression] = None,
+    wait_for_ready: Optional[bool] = None,
+    timeout: Optional[float] = _DEFAULT_TIMEOUT,
+    metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None,
+    _registered_method: Optional[bool] = False,
+) -> Iterator[ResponseType]:
+    """Invokes a stream-stream RPC without an explicitly specified channel.
+
+    THIS IS AN EXPERIMENTAL API.
+
+    This is backed by a per-process cache of channels. Channels are evicted
+    from the cache after a fixed period by a background thread. Channels will
+    also be evicted if more than a configured maximum accumulate.
+
+    The default eviction period is 10 minutes. One may set the environment
+    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
+
+    The default maximum number of channels is 256. One may set the
+    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
+    this.
+
+    Args:
+      request_iterator: An iterator that yields request values for the RPC.
+      target: The server address.
+      method: The name of the RPC method.
+      request_serializer: Optional :term:`serializer` for serializing the request
+        message. Request goes unserialized in case None is passed.
+      response_deserializer: Optional :term:`deserializer` for deserializing the response
+        message. Response goes undeserialized in case None is passed.
+      options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core
+        runtime) to configure the channel.
+ channel_credentials: A credential applied to the whole channel, e.g. the + return value of grpc.ssl_channel_credentials(). + call_credentials: A call credential applied to each call individually, + e.g. the output of grpc.metadata_call_credentials() or + grpc.access_token_call_credentials(). + insecure: If True, specifies channel_credentials as + :term:`grpc.insecure_channel_credentials()`. This option is mutually + exclusive with the `channel_credentials` option. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel, e.g. grpc.Compression.Gzip. + wait_for_ready: An optional flag indicating whether the RPC should fail + immediately if the connection is not ready at the time the RPC is + invoked, or if it should wait until the connection to the server + becomes ready. When using this option, the user will likely also want + to set a timeout. Defaults to True. + timeout: An optional duration of time in seconds to allow for the RPC, + after which an exception will be raised. If timeout is unspecified, + defaults to a timeout controlled by the + GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS environment variable. If that is + unset, defaults to 60 seconds. Supply a value of None to indicate that + no timeout should be enforced. + metadata: Optional metadata to send to the server. + + Returns: + An iterator of responses. + """ + channel, method_handle = ChannelCache.get().get_channel( + target, + options, + channel_credentials, + insecure, + compression, + method, + _registered_method, + ) + multicallable = channel.stream_stream( + method, request_serializer, response_deserializer, method_handle + ) + wait_for_ready = wait_for_ready if wait_for_ready is not None else True + return multicallable( + request_iterator, + metadata=metadata, + wait_for_ready=wait_for_ready, + credentials=call_credentials, + timeout=timeout, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/_typing.py b/venv/lib/python3.10/site-packages/grpc/_typing.py new file mode 100644 index 0000000000000000000000000000000000000000..f77d0ec5d28c408f679122aab28aafb60704ace0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_typing.py @@ -0,0 +1,96 @@ +# Copyright 2022 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
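+# Illustrative note (not part of the original source): the serializing and
+# deserializing aliases defined below match, e.g., the hooks on
+# protobuf-generated messages, here with hypothetical `HelloRequest` /
+# `HelloReply` types:
+#
+#     request_serializer: SerializingFunction = HelloRequest.SerializeToString
+#     response_deserializer: DeserializingFunction = HelloReply.FromString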
+"""Common types for gRPC Sync API""" + +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Iterable, + Iterator, + Optional, + Sequence, + Tuple, + TypeVar, + Union, +) + +from grpc._cython import cygrpc + +if TYPE_CHECKING: + from grpc import ServicerContext + from grpc._server import _RPCState + +RequestType = TypeVar("RequestType") +ResponseType = TypeVar("ResponseType") +SerializingFunction = Callable[[Any], bytes] +DeserializingFunction = Callable[[bytes], Any] +MetadataType = Sequence[Tuple[str, Union[str, bytes]]] +ChannelArgumentType = Tuple[str, Any] +DoneCallbackType = Callable[[Any], None] +NullaryCallbackType = Callable[[], None] +RequestIterableType = Iterable[Any] +ResponseIterableType = Iterable[Any] +UserTag = Callable[[cygrpc.BaseEvent], bool] +IntegratedCallFactory = Callable[ + [ + int, + bytes, + Optional[str], + Optional[float], + Optional[MetadataType], + Optional[cygrpc.CallCredentials], + Sequence[Sequence[cygrpc.Operation]], + UserTag, + Any, + Optional[int], + ], + cygrpc.IntegratedCall, +] +ServerTagCallbackType = Tuple[ + Optional["_RPCState"], Sequence[NullaryCallbackType] +] +ServerCallbackTag = Callable[[cygrpc.BaseEvent], ServerTagCallbackType] +ArityAgnosticMethodHandler = Union[ + Callable[ + [RequestType, "ServicerContext", Callable[[ResponseType], None]], + ResponseType, + ], + Callable[ + [RequestType, "ServicerContext", Callable[[ResponseType], None]], + Iterator[ResponseType], + ], + Callable[ + [ + Iterator[RequestType], + "ServicerContext", + Callable[[ResponseType], None], + ], + ResponseType, + ], + Callable[ + [ + Iterator[RequestType], + "ServicerContext", + Callable[[ResponseType], None], + ], + Iterator[ResponseType], + ], + Callable[[RequestType, "ServicerContext"], ResponseType], + Callable[[RequestType, "ServicerContext"], Iterator[ResponseType]], + Callable[[Iterator[RequestType], "ServicerContext"], ResponseType], + Callable[ + [Iterator[RequestType], "ServicerContext"], Iterator[ResponseType] + ], +] diff --git a/venv/lib/python3.10/site-packages/grpc/_utilities.py b/venv/lib/python3.10/site-packages/grpc/_utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..620cab3838cf0429b6e1c2a5299c308be3b2f945 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/_utilities.py @@ -0,0 +1,222 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Internal utilities for gRPC Python.""" + +import collections +import logging +import threading +import time +from typing import Callable, Dict, Optional, Sequence + +import grpc # pytype: disable=pyi-error +from grpc import _common # pytype: disable=pyi-error +from grpc._typing import DoneCallbackType + +_LOGGER = logging.getLogger(__name__) + +_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = ( + 'Exception calling connectivity future "done" callback!' 
+) + + +class RpcMethodHandler( + collections.namedtuple( + "_RpcMethodHandler", + ( + "request_streaming", + "response_streaming", + "request_deserializer", + "response_serializer", + "unary_unary", + "unary_stream", + "stream_unary", + "stream_stream", + ), + ), + grpc.RpcMethodHandler, +): + pass + + +class DictionaryGenericHandler(grpc.ServiceRpcHandler): + _name: str + _method_handlers: Dict[str, grpc.RpcMethodHandler] + + def __init__( + self, service: str, method_handlers: Dict[str, grpc.RpcMethodHandler] + ): + self._name = service + self._method_handlers = { + _common.fully_qualified_method(service, method): method_handler + for method, method_handler in method_handlers.items() + } + + def service_name(self) -> str: + return self._name + + def service( + self, handler_call_details: grpc.HandlerCallDetails + ) -> Optional[grpc.RpcMethodHandler]: + details_method = handler_call_details.method + return self._method_handlers.get( + details_method + ) # pytype: disable=attribute-error + + +class _ChannelReadyFuture(grpc.Future): + _condition: threading.Condition + _channel: grpc.Channel + _matured: bool + _cancelled: bool + _done_callbacks: Sequence[Callable] + + def __init__(self, channel: grpc.Channel): + self._condition = threading.Condition() + self._channel = channel + + self._matured = False + self._cancelled = False + self._done_callbacks = [] + + def _block(self, timeout: Optional[float]) -> None: + until = None if timeout is None else time.time() + timeout + with self._condition: + while True: + if self._cancelled: + raise grpc.FutureCancelledError() + elif self._matured: + return + else: + if until is None: + self._condition.wait() + else: + remaining = until - time.time() + if remaining < 0: + raise grpc.FutureTimeoutError() + else: + self._condition.wait(timeout=remaining) + + def _update(self, connectivity: Optional[grpc.ChannelConnectivity]) -> None: + with self._condition: + if ( + not self._cancelled + and connectivity is grpc.ChannelConnectivity.READY + ): + self._matured = True + self._channel.unsubscribe(self._update) + self._condition.notify_all() + done_callbacks = tuple(self._done_callbacks) + self._done_callbacks = None + else: + return + + for done_callback in done_callbacks: + try: + done_callback(self) + except Exception: # pylint: disable=broad-except + _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE) + + def cancel(self) -> bool: + with self._condition: + if not self._matured: + self._cancelled = True + self._channel.unsubscribe(self._update) + self._condition.notify_all() + done_callbacks = tuple(self._done_callbacks) + self._done_callbacks = None + else: + return False + + for done_callback in done_callbacks: + try: + done_callback(self) + except Exception: # pylint: disable=broad-except + _LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE) + + return True + + def cancelled(self) -> bool: + with self._condition: + return self._cancelled + + def running(self) -> bool: + with self._condition: + return not self._cancelled and not self._matured + + def done(self) -> bool: + with self._condition: + return self._cancelled or self._matured + + def result(self, timeout: Optional[float] = None) -> None: + self._block(timeout) + + def exception(self, timeout: Optional[float] = None) -> None: + self._block(timeout) + + def traceback(self, timeout: Optional[float] = None) -> None: + self._block(timeout) + + def add_done_callback(self, fn: DoneCallbackType): + with self._condition: + if not self._cancelled and not self._matured: + 
self._done_callbacks.append(fn) + return + + fn(self) + + def start(self): + with self._condition: + self._channel.subscribe(self._update, try_to_connect=True) + + def __del__(self): + with self._condition: + if not self._cancelled and not self._matured: + self._channel.unsubscribe(self._update) + + +def channel_ready_future(channel: grpc.Channel) -> _ChannelReadyFuture: + ready_future = _ChannelReadyFuture(channel) + ready_future.start() + return ready_future + + +def first_version_is_lower(version1: str, version2: str) -> bool: + """ + Compares two versions in the format '1.60.1' or '1.60.1.dev0'. + + This method will be used in all stubs generated by grpcio-tools to check whether + the stub version is compatible with the runtime grpcio. + + Args: + version1: The first version string. + version2: The second version string. + + Returns: + True if version1 is lower, False otherwise. + """ + version1_list = version1.split(".") + version2_list = version2.split(".") + + try: + for i in range(3): + if int(version1_list[i]) < int(version2_list[i]): + return True + elif int(version1_list[i]) > int(version2_list[i]): + return False + except ValueError: + # Return false in case we can't convert version to int. + return False + + # The version without dev0 will be considered lower. + return len(version1_list) < len(version2_list) diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__init__.py b/venv/lib/python3.10/site-packages/grpc/aio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a4e104ad51b5d9c6f6b117de247f39d54db410d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/__init__.py @@ -0,0 +1,95 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""gRPC's Asynchronous Python API. + +gRPC Async API objects may only be used on the thread on which they were +created. AsyncIO doesn't provide thread safety for most of its APIs. 
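+
+Example (illustrative sketch; assumes generated `helloworld_pb2` and
+`helloworld_pb2_grpc` modules):
+
+    async def run() -> None:
+        async with grpc.aio.insecure_channel("localhost:50051") as channel:
+            stub = helloworld_pb2_grpc.GreeterStub(channel)
+            reply = await stub.SayHello(helloworld_pb2.HelloRequest(name="you"))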
+""" + +from typing import Any, Optional, Sequence, Tuple + +import grpc +from grpc._cython.cygrpc import AbortError +from grpc._cython.cygrpc import BaseError +from grpc._cython.cygrpc import EOF +from grpc._cython.cygrpc import InternalError +from grpc._cython.cygrpc import UsageError +from grpc._cython.cygrpc import init_grpc_aio +from grpc._cython.cygrpc import shutdown_grpc_aio + +from ._base_call import Call +from ._base_call import RpcContext +from ._base_call import StreamStreamCall +from ._base_call import StreamUnaryCall +from ._base_call import UnaryStreamCall +from ._base_call import UnaryUnaryCall +from ._base_channel import Channel +from ._base_channel import StreamStreamMultiCallable +from ._base_channel import StreamUnaryMultiCallable +from ._base_channel import UnaryStreamMultiCallable +from ._base_channel import UnaryUnaryMultiCallable +from ._base_server import Server +from ._base_server import ServicerContext +from ._call import AioRpcError +from ._channel import insecure_channel +from ._channel import secure_channel +from ._interceptor import ClientCallDetails +from ._interceptor import ClientInterceptor +from ._interceptor import InterceptedUnaryUnaryCall +from ._interceptor import ServerInterceptor +from ._interceptor import StreamStreamClientInterceptor +from ._interceptor import StreamUnaryClientInterceptor +from ._interceptor import UnaryStreamClientInterceptor +from ._interceptor import UnaryUnaryClientInterceptor +from ._metadata import Metadata +from ._server import server +from ._typing import ChannelArgumentType + +################################### __all__ ################################# + +__all__ = ( + "init_grpc_aio", + "shutdown_grpc_aio", + "AioRpcError", + "RpcContext", + "Call", + "UnaryUnaryCall", + "UnaryStreamCall", + "StreamUnaryCall", + "StreamStreamCall", + "Channel", + "UnaryUnaryMultiCallable", + "UnaryStreamMultiCallable", + "StreamUnaryMultiCallable", + "StreamStreamMultiCallable", + "ClientCallDetails", + "ClientInterceptor", + "UnaryStreamClientInterceptor", + "UnaryUnaryClientInterceptor", + "StreamUnaryClientInterceptor", + "StreamStreamClientInterceptor", + "InterceptedUnaryUnaryCall", + "ServerInterceptor", + "insecure_channel", + "server", + "Server", + "ServicerContext", + "EOF", + "secure_channel", + "AbortError", + "BaseError", + "UsageError", + "InternalError", + "Metadata", +) diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..129888d5f37e8f0bc9024a3c4675e4ee67b1d7de Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_call.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_call.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9c7f1d54b144cd1511c7745e7429f8625a98b8b Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_call.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_channel.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_channel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e468a144a905a5998ea61e666dfd28d829098bd8 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_channel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_server.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_server.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c61e733713b723e761e097dae452066a7b39c9e Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_base_server.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_call.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_call.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..205a446bb0652da26f388c769a49f0ba8301c780 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_call.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_channel.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_channel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dff9266f1289550adc5db3461ce5c824edbeea39 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_channel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_interceptor.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_interceptor.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa071276dbcf92ba81e4ee6a5641e3cdf87fff99 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_interceptor.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_metadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_metadata.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52a4fa90562c4148a3294e1fd88a369be59b35b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_metadata.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_server.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_server.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4aa0c55a69895b670df24e84ef9acbb3144a184 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_server.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_typing.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_typing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..80d1d0edee7b74af4da69fe0304dc1869cb9ee12 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_typing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8512eae9550a4f332993de860b81a1045842a4b Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/aio/__pycache__/_utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_base_call.py b/venv/lib/python3.10/site-packages/grpc/aio/_base_call.py new file mode 100644 index 0000000000000000000000000000000000000000..69b89c0afdbdbd0a627c45e299997191cf7c8a77 --- 
/dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_base_call.py @@ -0,0 +1,257 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Abstract base classes for client-side Call objects. + +Call objects represent the RPC itself, and offer methods to access / modify +its information. They also offer methods to manipulate the life-cycle of the +RPC, e.g. cancellation. +""" + +from abc import ABCMeta +from abc import abstractmethod +from typing import Any, AsyncIterator, Generator, Generic, Optional, Union + +import grpc + +from ._metadata import Metadata +from ._typing import DoneCallbackType +from ._typing import EOFType +from ._typing import RequestType +from ._typing import ResponseType + +__all__ = "RpcContext", "Call", "UnaryUnaryCall", "UnaryStreamCall" + + +class RpcContext(metaclass=ABCMeta): + """Provides RPC-related information and actions.""" + + @abstractmethod + def cancelled(self) -> bool: + """Return True if the RPC is cancelled. + + The RPC is cancelled when the cancellation was requested with cancel(). + + Returns: + A bool indicating whether the RPC is cancelled. + """ + + @abstractmethod + def done(self) -> bool: + """Return True if the RPC is done. + + An RPC is done if the RPC is completed, cancelled or aborted. + + Returns: + A bool indicating whether the RPC is done. + """ + + @abstractmethod + def time_remaining(self) -> Optional[float]: + """Describes the length of allowed time remaining for the RPC. + + Returns: + A nonnegative float indicating the length of allowed time in seconds + remaining for the RPC to complete before it is considered to have + timed out, or None if no deadline was specified for the RPC. + """ + + @abstractmethod + def cancel(self) -> bool: + """Cancels the RPC. + + Idempotent and has no effect if the RPC has already terminated. + + Returns: + A bool indicating whether the cancellation was performed. + """ + + @abstractmethod + def add_done_callback(self, callback: DoneCallbackType) -> None: + """Registers a callback to be called on RPC termination. + + Args: + callback: A callable object that will be called with the call + object as its only argument. + """ + + +class Call(RpcContext, metaclass=ABCMeta): + """The abstract base class of an RPC on the client-side.""" + + @abstractmethod + async def initial_metadata(self) -> Metadata: + """Accesses the initial metadata sent by the server. + + Returns: + The initial :term:`metadata`. + """ + + @abstractmethod + async def trailing_metadata(self) -> Metadata: + """Accesses the trailing metadata sent by the server. + + Returns: + The trailing :term:`metadata`. + """ + + @abstractmethod + async def code(self) -> grpc.StatusCode: + """Accesses the status code sent by the server. + + Returns: + The StatusCode value for the RPC. + """ + + @abstractmethod + async def details(self) -> str: + """Accesses the details sent by the server. + + Returns: + The details string of the RPC.
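Unlike their synchronous counterparts, the Call accessors above are coroutines. A brief hedged usage sketch may help; the stub and method names are invented:

# Hypothetical client-side status inspection with the aio API.
async def log_rpc_status(stub, request):
    call = stub.SayHello(request)          # returns a UnaryUnaryCall
    response = await call                  # await the response message
    print(await call.code())               # e.g. grpc.StatusCode.OK
    print(await call.details())
    print(await call.trailing_metadata())
    return response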
+ """ + + @abstractmethod + async def wait_for_connection(self) -> None: + """Waits until connected to peer and raises aio.AioRpcError if failed. + + This is an EXPERIMENTAL method. + + This method ensures the RPC has been successfully connected. Otherwise, + an AioRpcError will be raised to explain the reason of the connection + failure. + + This method is recommended for building retry mechanisms. + """ + + +class UnaryUnaryCall( + Generic[RequestType, ResponseType], Call, metaclass=ABCMeta +): + """The abstract base class of a unary-unary RPC on the client-side.""" + + @abstractmethod + def __await__(self) -> Generator[Any, None, ResponseType]: + """Await the response message to be ready. + + Returns: + The response message of the RPC. + """ + + +class UnaryStreamCall( + Generic[RequestType, ResponseType], Call, metaclass=ABCMeta +): + @abstractmethod + def __aiter__(self) -> AsyncIterator[ResponseType]: + """Returns the async iterator representation that yields messages. + + Under the hood, it is calling the "read" method. + + Returns: + An async iterator object that yields messages. + """ + + @abstractmethod + async def read(self) -> Union[EOFType, ResponseType]: + """Reads one message from the stream. + + Read operations must be serialized when called from multiple + coroutines. + + Note that the iterator and read/write APIs may not be mixed on + a single RPC. + + Returns: + A response message, or an `grpc.aio.EOF` to indicate the end of the + stream. + """ + + +class StreamUnaryCall( + Generic[RequestType, ResponseType], Call, metaclass=ABCMeta +): + @abstractmethod + async def write(self, request: RequestType) -> None: + """Writes one message to the stream. + + Note that the iterator and read/write APIs may not be mixed on + a single RPC. + + Raises: + An RpcError exception if the write failed. + """ + + @abstractmethod + async def done_writing(self) -> None: + """Notifies server that the client is done sending messages. + + After done_writing is called, any additional invocation to the write + function will fail. This function is idempotent. + """ + + @abstractmethod + def __await__(self) -> Generator[Any, None, ResponseType]: + """Await the response message to be ready. + + Returns: + The response message of the stream. + """ + + +class StreamStreamCall( + Generic[RequestType, ResponseType], Call, metaclass=ABCMeta +): + @abstractmethod + def __aiter__(self) -> AsyncIterator[ResponseType]: + """Returns the async iterator representation that yields messages. + + Under the hood, it is calling the "read" method. + + Returns: + An async iterator object that yields messages. + """ + + @abstractmethod + async def read(self) -> Union[EOFType, ResponseType]: + """Reads one message from the stream. + + Read operations must be serialized when called from multiple + coroutines. + + Note that the iterator and read/write APIs may not be mixed on + a single RPC. + + Returns: + A response message, or an `grpc.aio.EOF` to indicate the end of the + stream. + """ + + @abstractmethod + async def write(self, request: RequestType) -> None: + """Writes one message to the stream. + + Note that the iterator and read/write APIs may not be mixed on + a single RPC. + + Raises: + An RpcError exception if the write failed. + """ + + @abstractmethod + async def done_writing(self) -> None: + """Notifies server that the client is done sending messages. + + After done_writing is called, any additional invocation to the write + function will fail. This function is idempotent. 
+ """ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_base_channel.py b/venv/lib/python3.10/site-packages/grpc/aio/_base_channel.py new file mode 100644 index 0000000000000000000000000000000000000000..0616f243494c34b72a74c12bce9c18ac704c4cd2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_base_channel.py @@ -0,0 +1,364 @@ +# Copyright 2020 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Abstract base classes for Channel objects and Multicallable objects.""" + +import abc +from typing import Generic, Optional + +import grpc + +from . import _base_call +from ._typing import DeserializingFunction +from ._typing import MetadataType +from ._typing import RequestIterableType +from ._typing import RequestType +from ._typing import ResponseType +from ._typing import SerializingFunction + + +class UnaryUnaryMultiCallable(Generic[RequestType, ResponseType], abc.ABC): + """Enables asynchronous invocation of a unary-call RPC.""" + + @abc.abstractmethod + def __call__( + self, + request: RequestType, + *, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]: + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional duration of time in seconds to allow + for the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + A UnaryUnaryCall object. + + Raises: + RpcError: Indicates that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + + +class UnaryStreamMultiCallable(Generic[RequestType, ResponseType], abc.ABC): + """Enables asynchronous invocation of a server-streaming RPC.""" + + @abc.abstractmethod + def __call__( + self, + request: RequestType, + *, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]: + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: An optional duration of time in seconds to allow + for the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. 
+ compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + A UnaryStreamCall object. + + Raises: + RpcError: Indicates that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + + +class StreamUnaryMultiCallable(abc.ABC): + """Enables asynchronous invocation of a client-streaming RPC.""" + + @abc.abstractmethod + def __call__( + self, + request_iterator: Optional[RequestIterableType] = None, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.StreamUnaryCall: + """Asynchronously invokes the underlying RPC. + + Args: + request_iterator: An optional async iterable or iterable of request + messages for the RPC. + timeout: An optional duration of time in seconds to allow + for the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + A StreamUnaryCall object. + + Raises: + RpcError: Indicates that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + + +class StreamStreamMultiCallable(abc.ABC): + """Enables asynchronous invocation of a bidirectional-streaming RPC.""" + + @abc.abstractmethod + def __call__( + self, + request_iterator: Optional[RequestIterableType] = None, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.StreamStreamCall: + """Asynchronously invokes the underlying RPC. + + Args: + request_iterator: An optional async iterable or iterable of request + messages for the RPC. + timeout: An optional duration of time in seconds to allow + for the RPC. + metadata: Optional :term:`metadata` to be transmitted to the + service-side of the RPC. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + + Returns: + A StreamStreamCall object. + + Raises: + RpcError: Indicates that the RPC terminated with non-OK status. The + raised RpcError will also be a Call for the RPC affording the RPC's + metadata, status code, and details. + """ + + +class Channel(abc.ABC): + """Enables asynchronous RPC invocation as a client. + + Channel objects implement the Asynchronous Context Manager (aka. async + with) type, although they are not supported to be entered and exited + multiple times. + """ + + @abc.abstractmethod + async def __aenter__(self): + """Starts an asynchronous context manager. + + Returns: + Channel the channel that was instantiated. + """ + + @abc.abstractmethod + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Finishes the asynchronous context manager by closing the channel. + + Still active RPCs will be cancelled. 
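A minimal sketch of the async context manager contract described above (target address invented; `channel_ready` is declared further below):

import grpc

async def main():
    # Entering the block yields the channel; leaving it closes the channel
    # and cancels any still-active RPCs, per __aexit__ above.
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        await channel.channel_ready()  # optionally wait until READY
        ...  # issue RPCs here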
+ """ + + @abc.abstractmethod + async def close(self, grace: Optional[float] = None): + """Closes this Channel and releases all resources held by it. + + This method immediately stops the channel from executing new RPCs in + all cases. + + If a grace period is specified, this method waits until all active + RPCs are finished or until the grace period is reached. RPCs that haven't + been terminated within the grace period are aborted. + If a grace period is not specified (by passing None for grace), + all existing RPCs are cancelled immediately. + + This method is idempotent. + """ + + @abc.abstractmethod + def get_state( + self, try_to_connect: bool = False + ) -> grpc.ChannelConnectivity: + """Checks the connectivity state of a channel. + + This is an EXPERIMENTAL API. + + If the channel reaches a stable connectivity state, it is guaranteed + that the return value of this function will eventually converge to that + state. + + Args: + try_to_connect: a bool indicate whether the Channel should try to + connect to peer or not. + + Returns: A ChannelConnectivity object. + """ + + @abc.abstractmethod + async def wait_for_state_change( + self, + last_observed_state: grpc.ChannelConnectivity, + ) -> None: + """Waits for a change in connectivity state. + + This is an EXPERIMENTAL API. + + The function blocks until there is a change in the channel connectivity + state from the "last_observed_state". If the state is already + different, this function will return immediately. + + There is an inherent race between the invocation of + "Channel.wait_for_state_change" and "Channel.get_state". The state can + change arbitrary many times during the race, so there is no way to + observe every state transition. + + If there is a need to put a timeout for this function, please refer to + "asyncio.wait_for". + + Args: + last_observed_state: A grpc.ChannelConnectivity object representing + the last known state. + """ + + @abc.abstractmethod + async def channel_ready(self) -> None: + """Creates a coroutine that blocks until the Channel is READY.""" + + @abc.abstractmethod + def unary_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> UnaryUnaryMultiCallable: + """Creates a UnaryUnaryMultiCallable for a unary-unary method. + + Args: + method: The name of the RPC method. + request_serializer: Optional :term:`serializer` for serializing the request + message. Request goes unserialized in case None is passed. + response_deserializer: Optional :term:`deserializer` for deserializing the + response message. Response goes undeserialized in case None + is passed. + _registered_method: Implementation Private. Optional: A bool representing + whether the method is registered. + + Returns: + A UnaryUnaryMultiCallable value for the named unary-unary method. + """ + + @abc.abstractmethod + def unary_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> UnaryStreamMultiCallable: + """Creates a UnaryStreamMultiCallable for a unary-stream method. + + Args: + method: The name of the RPC method. + request_serializer: Optional :term:`serializer` for serializing the request + message. Request goes unserialized in case None is passed. 
+ response_deserializer: Optional :term:`deserializer` for deserializing the + response message. Response goes undeserialized in case None + is passed. + _registered_method: Implementation Private. Optional: A bool representing + whether the method is registered. + + Returns: + A UnaryStreamMultiCallable value for the named unary-stream method. + """ + + @abc.abstractmethod + def stream_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> StreamUnaryMultiCallable: + """Creates a StreamUnaryMultiCallable for a stream-unary method. + + Args: + method: The name of the RPC method. + request_serializer: Optional :term:`serializer` for serializing the request + message. Request goes unserialized in case None is passed. + response_deserializer: Optional :term:`deserializer` for deserializing the + response message. Response goes undeserialized in case None + is passed. + _registered_method: Implementation Private. Optional: A bool representing + whether the method is registered. + + Returns: + A StreamUnaryMultiCallable value for the named stream-unary method. + """ + + @abc.abstractmethod + def stream_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> StreamStreamMultiCallable: + """Creates a StreamStreamMultiCallable for a stream-stream method. + + Args: + method: The name of the RPC method. + request_serializer: Optional :term:`serializer` for serializing the request + message. Request goes unserialized in case None is passed. + response_deserializer: Optional :term:`deserializer` for deserializing the + response message. Response goes undeserialized in case None + is passed. + _registered_method: Implementation Private. Optional: A bool representing + whether the method is registered. + + Returns: + A StreamStreamMultiCallable value for the named stream-stream method. + """ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_base_server.py b/venv/lib/python3.10/site-packages/grpc/aio/_base_server.py new file mode 100644 index 0000000000000000000000000000000000000000..526e21aeb5e38ba32c1036b6afb2db1e9c873122 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_base_server.py @@ -0,0 +1,385 @@ +# Copyright 2020 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
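For illustration, a hedged sketch of the low-level multicallable factory described above; with both serializer arguments left as None, raw bytes travel unmodified (the method name is invented):

import grpc

async def raw_unary_call():
    async with grpc.aio.insecure_channel("localhost:50051") as channel:
        multicallable = channel.unary_unary(
            "/example.Service/Method",   # fully qualified method name
            request_serializer=None,     # request goes unserialized
            response_deserializer=None,  # response goes undeserialized
        )
        return await multicallable(b"raw-request-bytes", timeout=5.0)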
+"""Abstract base classes for server-side classes.""" + +import abc +from typing import Generic, Iterable, Mapping, NoReturn, Optional, Sequence + +import grpc + +from ._metadata import Metadata # pylint: disable=unused-import +from ._typing import DoneCallbackType +from ._typing import MetadataType +from ._typing import RequestType +from ._typing import ResponseType + + +class Server(abc.ABC): + """Serves RPCs.""" + + @abc.abstractmethod + def add_generic_rpc_handlers( + self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler] + ) -> None: + """Registers GenericRpcHandlers with this Server. + + This method is only safe to call before the server is started. + + Args: + generic_rpc_handlers: A sequence of GenericRpcHandlers that will be + used to service RPCs. + """ + + @abc.abstractmethod + def add_insecure_port(self, address: str) -> int: + """Opens an insecure port for accepting RPCs. + + A port is a communication endpoint that used by networking protocols, + like TCP and UDP. To date, we only support TCP. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. If the port is 0, + or not specified in the address, then the gRPC runtime will choose a port. + + Returns: + An integer port on which the server will accept RPC requests. + """ + + @abc.abstractmethod + def add_secure_port( + self, address: str, server_credentials: grpc.ServerCredentials + ) -> int: + """Opens a secure port for accepting RPCs. + + A port is a communication endpoint that used by networking protocols, + like TCP and UDP. To date, we only support TCP. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. + if the port is 0, or not specified in the address, then the gRPC + runtime will choose a port. + server_credentials: A ServerCredentials object. + + Returns: + An integer port on which the server will accept RPC requests. + """ + + @abc.abstractmethod + async def start(self) -> None: + """Starts this Server. + + This method may only be called once. (i.e. it is not idempotent). + """ + + @abc.abstractmethod + async def stop(self, grace: Optional[float]) -> None: + """Stops this Server. + + This method immediately stops the server from servicing new RPCs in + all cases. + + If a grace period is specified, this method waits until all active + RPCs are finished or until the grace period is reached. RPCs that haven't + been terminated within the grace period are aborted. + If a grace period is not specified (by passing None for grace), all + existing RPCs are aborted immediately and this method blocks until + the last RPC handler terminates. + + This method is idempotent and may be called at any time. Passing a + smaller grace value in a subsequent call will have the effect of + stopping the Server sooner (passing None will have the effect of + stopping the server immediately). Passing a larger grace value in a + subsequent call will not have the effect of stopping the server later + (i.e. the most restrictive grace value is used). + + Args: + grace: A duration of time in seconds or None. + """ + + @abc.abstractmethod + async def wait_for_termination( + self, timeout: Optional[float] = None + ) -> bool: + """Continues current coroutine once the server stops. + + This is an EXPERIMENTAL API. 
+ + The wait will not consume computational resources during blocking, and + it will block until one of the two following conditions is met: + + 1) The server is stopped or terminated; + 2) A timeout occurs if timeout is not `None`. + + The timeout argument works in the same way as `threading.Event.wait()`. + https://docs.python.org/3/library/threading.html#threading.Event.wait + + Args: + timeout: A floating point number specifying a timeout for the + operation in seconds. + + Returns: + A bool indicating whether the operation timed out. + """ + + def add_registered_method_handlers(self, service_name, method_handlers): + """Registers method handlers for a service with this Server. + + This method is only safe to call before the server is started. + + Args: + service_name: The service name. + method_handlers: A dictionary that maps method names to corresponding + RpcMethodHandler. + """ + + +# pylint: disable=too-many-public-methods +class ServicerContext(Generic[RequestType, ResponseType], abc.ABC): + """A context object passed to method implementations.""" + + @abc.abstractmethod + async def read(self) -> RequestType: + """Reads one message from the RPC. + + Only one read operation is allowed at a time. + + Returns: + A request message of the RPC. + + Raises: + An RpcError exception if the read failed. + """ + + @abc.abstractmethod + async def write(self, message: ResponseType) -> None: + """Writes one message to the RPC. + + Only one write operation is allowed at a time. + + Raises: + An RpcError exception if the write failed. + """ + + @abc.abstractmethod + async def send_initial_metadata( + self, initial_metadata: MetadataType + ) -> None: + """Sends the initial metadata value to the client. + + This method need not be called by implementations if they have no + metadata to add to what the gRPC runtime will transmit. + + Args: + initial_metadata: The initial :term:`metadata`. + """ + + @abc.abstractmethod + async def abort( + self, + code: grpc.StatusCode, + details: str = "", + trailing_metadata: MetadataType = tuple(), + ) -> NoReturn: + """Raises an exception to terminate the RPC with a non-OK status. + + The code and details passed as arguments will supersede any existing + ones. + + Args: + code: A StatusCode object to be sent to the client. + It must not be StatusCode.OK. + details: A UTF-8-encodable string to be sent to the client upon + termination of the RPC. + trailing_metadata: A sequence of tuples representing the trailing + :term:`metadata`. + + Raises: + Exception: An exception is always raised to signal the abortion of the + RPC to the gRPC runtime. + """ + + @abc.abstractmethod + def set_trailing_metadata(self, trailing_metadata: MetadataType) -> None: + """Sends the trailing metadata for the RPC. + + This method need not be called by implementations if they have no + metadata to add to what the gRPC runtime will transmit. + + Args: + trailing_metadata: The trailing :term:`metadata`. + """ + + @abc.abstractmethod + def invocation_metadata(self) -> Optional[MetadataType]: + """Accesses the metadata sent by the client. + + Returns: + The invocation :term:`metadata`. + """ + + @abc.abstractmethod + def set_code(self, code: grpc.StatusCode) -> None: + """Sets the value to be used as status code upon RPC completion. + + This method need not be called by method implementations if they wish + the gRPC runtime to determine the status code of the RPC. + + Args: + code: A StatusCode object to be sent to the client.
+ """ + + @abc.abstractmethod + def set_details(self, details: str) -> None: + """Sets the value to be used the as detail string upon RPC completion. + + This method need not be called by method implementations if they have + no details to transmit. + + Args: + details: A UTF-8-encodable string to be sent to the client upon + termination of the RPC. + """ + + @abc.abstractmethod + def set_compression(self, compression: grpc.Compression) -> None: + """Set the compression algorithm to be used for the entire call. + + Args: + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + """ + + @abc.abstractmethod + def disable_next_message_compression(self) -> None: + """Disables compression for the next response message. + + This method will override any compression configuration set during + server creation or set on the call. + """ + + @abc.abstractmethod + def peer(self) -> str: + """Identifies the peer that invoked the RPC being serviced. + + Returns: + A string identifying the peer that invoked the RPC being serviced. + The string format is determined by gRPC runtime. + """ + + @abc.abstractmethod + def peer_identities(self) -> Optional[Iterable[bytes]]: + """Gets one or more peer identity(s). + + Equivalent to + servicer_context.auth_context().get(servicer_context.peer_identity_key()) + + Returns: + An iterable of the identities, or None if the call is not + authenticated. Each identity is returned as a raw bytes type. + """ + + @abc.abstractmethod + def peer_identity_key(self) -> Optional[str]: + """The auth property used to identify the peer. + + For example, "x509_common_name" or "x509_subject_alternative_name" are + used to identify an SSL peer. + + Returns: + The auth property (string) that indicates the + peer identity, or None if the call is not authenticated. + """ + + @abc.abstractmethod + def auth_context(self) -> Mapping[str, Iterable[bytes]]: + """Gets the auth context for the call. + + Returns: + A map of strings to an iterable of bytes for each auth property. + """ + + def time_remaining(self) -> float: + """Describes the length of allowed time remaining for the RPC. + + Returns: + A nonnegative float indicating the length of allowed time in seconds + remaining for the RPC to complete before it is considered to have + timed out, or None if no deadline was specified for the RPC. + """ + + def trailing_metadata(self): + """Access value to be used as trailing metadata upon RPC completion. + + This is an EXPERIMENTAL API. + + Returns: + The trailing :term:`metadata` for the RPC. + """ + raise NotImplementedError() + + def code(self): + """Accesses the value to be used as status code upon RPC completion. + + This is an EXPERIMENTAL API. + + Returns: + The StatusCode value for the RPC. + """ + raise NotImplementedError() + + def details(self): + """Accesses the value to be used as detail string upon RPC completion. + + This is an EXPERIMENTAL API. + + Returns: + The details string of the RPC. + """ + raise NotImplementedError() + + def add_done_callback(self, callback: DoneCallbackType) -> None: + """Registers a callback to be called on RPC termination. + + This is an EXPERIMENTAL API. + + Args: + callback: A callable object will be called with the servicer context + object as its only argument. + """ + + def cancelled(self) -> bool: + """Return True if the RPC is cancelled. + + The RPC is cancelled when the cancellation was requested with cancel(). + + This is an EXPERIMENTAL API. + + Returns: + A bool indicates whether the RPC is cancelled or not. 
+ """ + + def done(self) -> bool: + """Return True if the RPC is done. + + An RPC is done if the RPC is completed, cancelled or aborted. + + This is an EXPERIMENTAL API. + + Returns: + A bool indicates if the RPC is done. + """ diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_call.py b/venv/lib/python3.10/site-packages/grpc/aio/_call.py new file mode 100644 index 0000000000000000000000000000000000000000..24f2090651a8afb5564d8dc01995ec93470f953a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_call.py @@ -0,0 +1,764 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Invocation-side implementation of gRPC Asyncio Python.""" + +import asyncio +import enum +from functools import partial +import inspect +import logging +import traceback +from typing import ( + Any, + AsyncIterator, + Generator, + Generic, + Optional, + Tuple, + Union, +) + +import grpc +from grpc import _common +from grpc._cython import cygrpc + +from . import _base_call +from ._metadata import Metadata +from ._typing import DeserializingFunction +from ._typing import DoneCallbackType +from ._typing import EOFType +from ._typing import MetadatumType +from ._typing import RequestIterableType +from ._typing import RequestType +from ._typing import ResponseType +from ._typing import SerializingFunction + +__all__ = "AioRpcError", "Call", "UnaryUnaryCall", "UnaryStreamCall" + +_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!" +_GC_CANCELLATION_DETAILS = "Cancelled upon garbage collection!" +_RPC_ALREADY_FINISHED_DETAILS = "RPC already finished." +_RPC_HALF_CLOSED_DETAILS = 'RPC is half closed after calling "done_writing".' +_API_STYLE_ERROR = ( + "The iterator and read/write APIs may not be mixed on a single RPC." +) + +_OK_CALL_REPRESENTATION = ( + '<{} of RPC that terminated with:\n\tstatus = {}\n\tdetails = "{}"\n>' +) + +_NON_OK_CALL_REPRESENTATION = ( + "<{} of RPC that terminated with:\n" + "\tstatus = {}\n" + '\tdetails = "{}"\n' + '\tdebug_error_string = "{}"\n' + ">" +) + +_LOGGER = logging.getLogger(__name__) + + +class AioRpcError(grpc.RpcError): + """An implementation of RpcError to be used by the asynchronous API. + + Raised RpcError is a snapshot of the final status of the RPC, values are + determined. Hence, its methods no longer needs to be coroutines. + """ + + _code: grpc.StatusCode + _details: Optional[str] + _initial_metadata: Optional[Metadata] + _trailing_metadata: Optional[Metadata] + _debug_error_string: Optional[str] + + def __init__( + self, + code: grpc.StatusCode, + initial_metadata: Metadata, + trailing_metadata: Metadata, + details: Optional[str] = None, + debug_error_string: Optional[str] = None, + ) -> None: + """Constructor. + + Args: + code: The status code with which the RPC has been finalized. + details: Optional details explaining the reason of the error. + initial_metadata: Optional initial metadata that could be sent by the + Server. + trailing_metadata: Optional metadata that could be sent by the Server. 
+ """ + + super().__init__() + self._code = code + self._details = details + self._initial_metadata = initial_metadata + self._trailing_metadata = trailing_metadata + self._debug_error_string = debug_error_string + + def code(self) -> grpc.StatusCode: + """Accesses the status code sent by the server. + + Returns: + The `grpc.StatusCode` status code. + """ + return self._code + + def details(self) -> Optional[str]: + """Accesses the details sent by the server. + + Returns: + The description of the error. + """ + return self._details + + def initial_metadata(self) -> Metadata: + """Accesses the initial metadata sent by the server. + + Returns: + The initial metadata received. + """ + return self._initial_metadata + + def trailing_metadata(self) -> Metadata: + """Accesses the trailing metadata sent by the server. + + Returns: + The trailing metadata received. + """ + return self._trailing_metadata + + def debug_error_string(self) -> str: + """Accesses the debug error string sent by the server. + + Returns: + The debug error string received. + """ + return self._debug_error_string + + def _repr(self) -> str: + """Assembles the error string for the RPC error.""" + return _NON_OK_CALL_REPRESENTATION.format( + self.__class__.__name__, + self._code, + self._details, + self._debug_error_string, + ) + + def __repr__(self) -> str: + return self._repr() + + def __str__(self) -> str: + return self._repr() + + def __reduce__(self): + return ( + type(self), + ( + self._code, + self._initial_metadata, + self._trailing_metadata, + self._details, + self._debug_error_string, + ), + ) + + +def _create_rpc_error( + initial_metadata: Metadata, status: cygrpc.AioRpcStatus +) -> AioRpcError: + return AioRpcError( + _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[status.code()], + Metadata.from_tuple(initial_metadata), + Metadata.from_tuple(status.trailing_metadata()), + details=status.details(), + debug_error_string=status.debug_error_string(), + ) + + +class Call: + """Base implementation of client RPC Call object. + + Implements logic around final status, metadata and cancellation. + """ + + _loop: asyncio.AbstractEventLoop + _code: grpc.StatusCode + _cython_call: cygrpc._AioCall + _metadata: Tuple[MetadatumType, ...] 
+ _request_serializer: SerializingFunction + _response_deserializer: DeserializingFunction + + def __init__( + self, + cython_call: cygrpc._AioCall, + metadata: Metadata, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._cython_call = cython_call + self._metadata = tuple(metadata) + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + + def __del__(self) -> None: + # The '_cython_call' object might be destructed before Call object + if hasattr(self, "_cython_call"): + if not self._cython_call.done(): + self._cancel(_GC_CANCELLATION_DETAILS) + + def cancelled(self) -> bool: + return self._cython_call.cancelled() + + def _cancel(self, details: str) -> bool: + """Forwards the application cancellation reasoning.""" + if not self._cython_call.done(): + self._cython_call.cancel(details) + return True + else: + return False + + def cancel(self) -> bool: + return self._cancel(_LOCAL_CANCELLATION_DETAILS) + + def done(self) -> bool: + return self._cython_call.done() + + def add_done_callback(self, callback: DoneCallbackType) -> None: + cb = partial(callback, self) + self._cython_call.add_done_callback(cb) + + def time_remaining(self) -> Optional[float]: + return self._cython_call.time_remaining() + + async def initial_metadata(self) -> Metadata: + raw_metadata_tuple = await self._cython_call.initial_metadata() + return Metadata.from_tuple(raw_metadata_tuple) + + async def trailing_metadata(self) -> Metadata: + raw_metadata_tuple = ( + await self._cython_call.status() + ).trailing_metadata() + return Metadata.from_tuple(raw_metadata_tuple) + + async def code(self) -> grpc.StatusCode: + cygrpc_code = (await self._cython_call.status()).code() + return _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE[cygrpc_code] + + async def details(self) -> str: + return (await self._cython_call.status()).details() + + async def debug_error_string(self) -> str: + return (await self._cython_call.status()).debug_error_string() + + async def _raise_for_status(self) -> None: + if self._cython_call.is_locally_cancelled(): + raise asyncio.CancelledError() + code = await self.code() + if code != grpc.StatusCode.OK: + raise _create_rpc_error( + await self.initial_metadata(), await self._cython_call.status() + ) + + def _repr(self) -> str: + return repr(self._cython_call) + + def __repr__(self) -> str: + return self._repr() + + def __str__(self) -> str: + return self._repr() + + +class _APIStyle(enum.IntEnum): + UNKNOWN = 0 + ASYNC_GENERATOR = 1 + READER_WRITER = 2 + + +class _UnaryResponseMixin(Call, Generic[ResponseType]): + _call_response: asyncio.Task + + def _init_unary_response_mixin(self, response_task: asyncio.Task): + self._call_response = response_task + + def cancel(self) -> bool: + if super().cancel(): + self._call_response.cancel() + return True + else: + return False + + def __await__(self) -> Generator[Any, None, ResponseType]: + """Wait till the ongoing RPC request finishes.""" + try: + response = yield from self._call_response + except asyncio.CancelledError: + # Even if we caught all other CancelledError, there is still + # this corner case. If the application cancels immediately after + # the Call object is created, we will observe this + # `CancelledError`. + if not self.cancelled(): + self.cancel() + raise + + # NOTE(lidiz) If we raise RpcError in the task, and users doesn't + # 'await' on it. AsyncIO will log 'Task exception was never retrieved'. 
+ # Instead, if we move the exception raising here, the spam stops. + # Unfortunately, there can only be one 'yield from' in '__await__'. So, + # we need to access the private instance variable. + if response is cygrpc.EOF: + if self._cython_call.is_locally_cancelled(): + raise asyncio.CancelledError() + else: + raise _create_rpc_error( + self._cython_call._initial_metadata, + self._cython_call._status, + ) + else: + return response + + +class _StreamResponseMixin(Call): + _message_aiter: AsyncIterator[ResponseType] + _preparation: asyncio.Task + _response_style: _APIStyle + + def _init_stream_response_mixin(self, preparation: asyncio.Task): + self._message_aiter = None + self._preparation = preparation + self._response_style = _APIStyle.UNKNOWN + + def _update_response_style(self, style: _APIStyle): + if self._response_style is _APIStyle.UNKNOWN: + self._response_style = style + elif self._response_style is not style: + raise cygrpc.UsageError(_API_STYLE_ERROR) + + def cancel(self) -> bool: + if super().cancel(): + self._preparation.cancel() + return True + else: + return False + + async def _fetch_stream_responses(self) -> ResponseType: + message = await self._read() + while message is not cygrpc.EOF: + yield message + message = await self._read() + + # If the read operation failed, Core should explain why. + await self._raise_for_status() + + def __aiter__(self) -> AsyncIterator[ResponseType]: + self._update_response_style(_APIStyle.ASYNC_GENERATOR) + if self._message_aiter is None: + self._message_aiter = self._fetch_stream_responses() + return self._message_aiter + + async def _read(self) -> ResponseType: + # Wait for the request being sent + await self._preparation + + # Reads response message from Core + try: + raw_response = await self._cython_call.receive_serialized_message() + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + raise + + if raw_response is cygrpc.EOF: + return cygrpc.EOF + else: + return _common.deserialize( + raw_response, self._response_deserializer + ) + + async def read(self) -> Union[EOFType, ResponseType]: + if self.done(): + await self._raise_for_status() + return cygrpc.EOF + self._update_response_style(_APIStyle.READER_WRITER) + + response_message = await self._read() + + if response_message is cygrpc.EOF: + # If the read operation failed, Core should explain why. + await self._raise_for_status() + return response_message + + +class _StreamRequestMixin(Call): + _metadata_sent: asyncio.Event + _done_writing_flag: bool + _async_request_poller: Optional[asyncio.Task] + _request_style: _APIStyle + + def _init_stream_request_mixin( + self, request_iterator: Optional[RequestIterableType] + ): + self._metadata_sent = asyncio.Event() + self._done_writing_flag = False + + # If user passes in an async iterator, create a consumer Task. 
+ if request_iterator is not None: + self._async_request_poller = self._loop.create_task( + self._consume_request_iterator(request_iterator) + ) + self._request_style = _APIStyle.ASYNC_GENERATOR + else: + self._async_request_poller = None + self._request_style = _APIStyle.READER_WRITER + + def _raise_for_different_style(self, style: _APIStyle): + if self._request_style is not style: + raise cygrpc.UsageError(_API_STYLE_ERROR) + + def cancel(self) -> bool: + if super().cancel(): + if self._async_request_poller is not None: + self._async_request_poller.cancel() + return True + else: + return False + + def _metadata_sent_observer(self): + self._metadata_sent.set() + + async def _consume_request_iterator( + self, request_iterator: RequestIterableType + ) -> None: + try: + if inspect.isasyncgen(request_iterator) or hasattr( + request_iterator, "__aiter__" + ): + async for request in request_iterator: + try: + await self._write(request) + except AioRpcError as rpc_error: + _LOGGER.debug( + ( + "Exception while consuming the" + " request_iterator: %s" + ), + rpc_error, + ) + return + else: + for request in request_iterator: + try: + await self._write(request) + except AioRpcError as rpc_error: + _LOGGER.debug( + ( + "Exception while consuming the" + " request_iterator: %s" + ), + rpc_error, + ) + return + + await self._done_writing() + except: # pylint: disable=bare-except + # Client iterators can raise exceptions, which we should handle by + # cancelling the RPC and logging the client's error. No exceptions + # should escape this function. + _LOGGER.debug( + "Client request_iterator raised exception:\n%s", + traceback.format_exc(), + ) + self.cancel() + + async def _write(self, request: RequestType) -> None: + if self.done(): + raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS) + if self._done_writing_flag: + raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS) + if not self._metadata_sent.is_set(): + await self._metadata_sent.wait() + if self.done(): + await self._raise_for_status() + + serialized_request = _common.serialize( + request, self._request_serializer + ) + try: + await self._cython_call.send_serialized_message(serialized_request) + except cygrpc.InternalError as err: + self._cython_call.set_internal_error(str(err)) + await self._raise_for_status() + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + raise + + async def _done_writing(self) -> None: + if self.done(): + # If the RPC is finished, do nothing. + return + if not self._done_writing_flag: + # If the done writing is not sent before, try to send it. + self._done_writing_flag = True + try: + await self._cython_call.send_receive_close() + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + raise + + async def write(self, request: RequestType) -> None: + self._raise_for_different_style(_APIStyle.READER_WRITER) + await self._write(request) + + async def done_writing(self) -> None: + """Signal peer that client is done writing. + + This method is idempotent. + """ + self._raise_for_different_style(_APIStyle.READER_WRITER) + await self._done_writing() + + async def wait_for_connection(self) -> None: + await self._metadata_sent.wait() + if self.done(): + await self._raise_for_status() + + +class UnaryUnaryCall(_UnaryResponseMixin, Call, _base_call.UnaryUnaryCall): + """Object for managing unary-unary RPC calls. + + Returned when an instance of `UnaryUnaryMultiCallable` object is called. 
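The branch above selects between the two request-supply styles. A hedged sketch of both, for a stream-unary RPC (the stub, method, and message type are invented):

async def generator_style(stub):
    async def requests():
        for i in range(3):
            yield Point(value=i)           # async-generator style
    return await stub.RecordRoute(requests())

async def reader_writer_style(stub):
    call = stub.RecordRoute()              # no iterator: reader-writer style
    for i in range(3):
        await call.write(Point(value=i))
    await call.done_writing()              # half-close; idempotent
    return await call                      # await the single response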
+ """ + + _request: RequestType + _invocation_task: asyncio.Task + + # pylint: disable=too-many-arguments + def __init__( + self, + request: RequestType, + deadline: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + super().__init__( + channel.call(method, deadline, credentials, wait_for_ready), + metadata, + request_serializer, + response_deserializer, + loop, + ) + self._request = request + self._context = cygrpc.build_census_context() + self._invocation_task = loop.create_task(self._invoke()) + self._init_unary_response_mixin(self._invocation_task) + + async def _invoke(self) -> ResponseType: + serialized_request = _common.serialize( + self._request, self._request_serializer + ) + + # NOTE(lidiz) asyncio.CancelledError is not a good transport for status, + # because the asyncio.Task class do not cache the exception object. + # https://github.com/python/cpython/blob/edad4d89e357c92f70c0324b937845d652b20afd/Lib/asyncio/tasks.py#L785 + try: + serialized_response = await self._cython_call.unary_unary( + serialized_request, self._metadata, self._context + ) + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + + if self._cython_call.is_ok(): + return _common.deserialize( + serialized_response, self._response_deserializer + ) + else: + return cygrpc.EOF + + async def wait_for_connection(self) -> None: + await self._invocation_task + if self.done(): + await self._raise_for_status() + + +class UnaryStreamCall(_StreamResponseMixin, Call, _base_call.UnaryStreamCall): + """Object for managing unary-stream RPC calls. + + Returned when an instance of `UnaryStreamMultiCallable` object is called. + """ + + _request: RequestType + _send_unary_request_task: asyncio.Task + + # pylint: disable=too-many-arguments + def __init__( + self, + request: RequestType, + deadline: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + super().__init__( + channel.call(method, deadline, credentials, wait_for_ready), + metadata, + request_serializer, + response_deserializer, + loop, + ) + self._request = request + self._context = cygrpc.build_census_context() + self._send_unary_request_task = loop.create_task( + self._send_unary_request() + ) + self._init_stream_response_mixin(self._send_unary_request_task) + + async def _send_unary_request(self) -> ResponseType: + serialized_request = _common.serialize( + self._request, self._request_serializer + ) + try: + await self._cython_call.initiate_unary_stream( + serialized_request, self._metadata, self._context + ) + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + raise + + async def wait_for_connection(self) -> None: + await self._send_unary_request_task + if self.done(): + await self._raise_for_status() + + +# pylint: disable=too-many-ancestors +class StreamUnaryCall( + _StreamRequestMixin, _UnaryResponseMixin, Call, _base_call.StreamUnaryCall +): + """Object for managing stream-unary RPC calls. + + Returned when an instance of `StreamUnaryMultiCallable` object is called. 
+ """ + + # pylint: disable=too-many-arguments + def __init__( + self, + request_iterator: Optional[RequestIterableType], + deadline: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + super().__init__( + channel.call(method, deadline, credentials, wait_for_ready), + metadata, + request_serializer, + response_deserializer, + loop, + ) + + self._context = cygrpc.build_census_context() + self._init_stream_request_mixin(request_iterator) + self._init_unary_response_mixin(loop.create_task(self._conduct_rpc())) + + async def _conduct_rpc(self) -> ResponseType: + try: + serialized_response = await self._cython_call.stream_unary( + self._metadata, self._metadata_sent_observer, self._context + ) + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + raise + + if self._cython_call.is_ok(): + return _common.deserialize( + serialized_response, self._response_deserializer + ) + else: + return cygrpc.EOF + + +class StreamStreamCall( + _StreamRequestMixin, _StreamResponseMixin, Call, _base_call.StreamStreamCall +): + """Object for managing stream-stream RPC calls. + + Returned when an instance of `StreamStreamMultiCallable` object is called. + """ + + _initializer: asyncio.Task + + # pylint: disable=too-many-arguments + def __init__( + self, + request_iterator: Optional[RequestIterableType], + deadline: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + super().__init__( + channel.call(method, deadline, credentials, wait_for_ready), + metadata, + request_serializer, + response_deserializer, + loop, + ) + self._context = cygrpc.build_census_context() + self._initializer = self._loop.create_task(self._prepare_rpc()) + self._init_stream_request_mixin(request_iterator) + self._init_stream_response_mixin(self._initializer) + + async def _prepare_rpc(self): + """This method prepares the RPC for receiving/sending messages. + + All other operations around the stream should only happen after the + completion of this method. + """ + try: + await self._cython_call.initiate_stream_stream( + self._metadata, self._metadata_sent_observer, self._context + ) + except asyncio.CancelledError: + if not self.cancelled(): + self.cancel() + # No need to raise RpcError here, because no one will `await` this task. diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_channel.py b/venv/lib/python3.10/site-packages/grpc/aio/_channel.py new file mode 100644 index 0000000000000000000000000000000000000000..62537f12670e6b12afb6c0124a0ac0d49eaf4144 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_channel.py @@ -0,0 +1,627 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Invocation-side implementation of gRPC Asyncio Python.""" + +import asyncio +import sys +from typing import Any, Iterable, List, Optional, Sequence + +import grpc +from grpc import _common +from grpc import _compression +from grpc import _grpcio_metadata +from grpc._cython import cygrpc + +from . import _base_call +from . import _base_channel +from ._call import StreamStreamCall +from ._call import StreamUnaryCall +from ._call import UnaryStreamCall +from ._call import UnaryUnaryCall +from ._interceptor import ClientInterceptor +from ._interceptor import InterceptedStreamStreamCall +from ._interceptor import InterceptedStreamUnaryCall +from ._interceptor import InterceptedUnaryStreamCall +from ._interceptor import InterceptedUnaryUnaryCall +from ._interceptor import StreamStreamClientInterceptor +from ._interceptor import StreamUnaryClientInterceptor +from ._interceptor import UnaryStreamClientInterceptor +from ._interceptor import UnaryUnaryClientInterceptor +from ._metadata import Metadata +from ._typing import ChannelArgumentType +from ._typing import DeserializingFunction +from ._typing import MetadataType +from ._typing import RequestIterableType +from ._typing import RequestType +from ._typing import ResponseType +from ._typing import SerializingFunction +from ._utils import _timeout_to_deadline + +_USER_AGENT = "grpc-python-asyncio/{}".format(_grpcio_metadata.__version__) + +if sys.version_info[1] < 7: + + def _all_tasks() -> Iterable[asyncio.Task]: + return asyncio.Task.all_tasks() # pylint: disable=no-member + +else: + + def _all_tasks() -> Iterable[asyncio.Task]: + return asyncio.all_tasks() + + +def _augment_channel_arguments( + base_options: ChannelArgumentType, compression: Optional[grpc.Compression] +): + compression_channel_argument = _compression.create_channel_option( + compression + ) + user_agent_channel_argument = ( + ( + cygrpc.ChannelArgKey.primary_user_agent_string, + _USER_AGENT, + ), + ) + return ( + tuple(base_options) + + compression_channel_argument + + user_agent_channel_argument + ) + + +class _BaseMultiCallable: + """Base class of all multi callable objects. + + Handles the initialization logic and stores common attributes. 
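As a small, hedged sketch of the normalization that `_init_metadata` performs below: plain metadata tuples are upgraded to the public `Metadata` type, and a missing value becomes an empty `Metadata`.

from grpc.aio import Metadata

# Plain (key, value) tuples are accepted and upgraded.
md = Metadata.from_tuple((("x-request-id", "abc123"),))
assert md["x-request-id"] == "abc123"

# No metadata at all normalizes to an empty Metadata instance.
assert len(Metadata.from_tuple(())) == 0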
+ """ + + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + _method: bytes + _request_serializer: SerializingFunction + _response_deserializer: DeserializingFunction + _interceptors: Optional[Sequence[ClientInterceptor]] + _references: List[Any] + _loop: asyncio.AbstractEventLoop + + # pylint: disable=too-many-arguments + def __init__( + self, + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + interceptors: Optional[Sequence[ClientInterceptor]], + references: List[Any], + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._channel = channel + self._method = method + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._interceptors = interceptors + self._references = references + + @staticmethod + def _init_metadata( + metadata: Optional[MetadataType] = None, + compression: Optional[grpc.Compression] = None, + ) -> Metadata: + """Based on the provided values for or initialise the final + metadata, as it should be used for the current call. + """ + metadata = metadata or Metadata() + if not isinstance(metadata, Metadata) and isinstance(metadata, tuple): + metadata = Metadata.from_tuple(metadata) + if compression: + metadata = Metadata( + *_compression.augment_metadata(metadata, compression) + ) + return metadata + + +class UnaryUnaryMultiCallable( + _BaseMultiCallable, _base_channel.UnaryUnaryMultiCallable +): + def __call__( + self, + request: RequestType, + *, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.UnaryUnaryCall[RequestType, ResponseType]: + metadata = self._init_metadata(metadata, compression) + if not self._interceptors: + call = UnaryUnaryCall( + request, + _timeout_to_deadline(timeout), + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + else: + call = InterceptedUnaryUnaryCall( + self._interceptors, + request, + timeout, + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + + return call + + +class UnaryStreamMultiCallable( + _BaseMultiCallable, _base_channel.UnaryStreamMultiCallable +): + def __call__( + self, + request: RequestType, + *, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.UnaryStreamCall[RequestType, ResponseType]: + metadata = self._init_metadata(metadata, compression) + + if not self._interceptors: + call = UnaryStreamCall( + request, + _timeout_to_deadline(timeout), + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + else: + call = InterceptedUnaryStreamCall( + self._interceptors, + request, + timeout, + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + + return call + + +class StreamUnaryMultiCallable( + _BaseMultiCallable, _base_channel.StreamUnaryMultiCallable +): + def __call__( + self, 
+ request_iterator: Optional[RequestIterableType] = None, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.StreamUnaryCall: + metadata = self._init_metadata(metadata, compression) + + if not self._interceptors: + call = StreamUnaryCall( + request_iterator, + _timeout_to_deadline(timeout), + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + else: + call = InterceptedStreamUnaryCall( + self._interceptors, + request_iterator, + timeout, + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + + return call + + +class StreamStreamMultiCallable( + _BaseMultiCallable, _base_channel.StreamStreamMultiCallable +): + def __call__( + self, + request_iterator: Optional[RequestIterableType] = None, + timeout: Optional[float] = None, + metadata: Optional[MetadataType] = None, + credentials: Optional[grpc.CallCredentials] = None, + wait_for_ready: Optional[bool] = None, + compression: Optional[grpc.Compression] = None, + ) -> _base_call.StreamStreamCall: + metadata = self._init_metadata(metadata, compression) + + if not self._interceptors: + call = StreamStreamCall( + request_iterator, + _timeout_to_deadline(timeout), + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + else: + call = InterceptedStreamStreamCall( + self._interceptors, + request_iterator, + timeout, + metadata, + credentials, + wait_for_ready, + self._channel, + self._method, + self._request_serializer, + self._response_deserializer, + self._loop, + ) + + return call + + +class Channel(_base_channel.Channel): + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + _unary_unary_interceptors: List[UnaryUnaryClientInterceptor] + _unary_stream_interceptors: List[UnaryStreamClientInterceptor] + _stream_unary_interceptors: List[StreamUnaryClientInterceptor] + _stream_stream_interceptors: List[StreamStreamClientInterceptor] + + def __init__( + self, + target: str, + options: ChannelArgumentType, + credentials: Optional[grpc.ChannelCredentials], + compression: Optional[grpc.Compression], + interceptors: Optional[Sequence[ClientInterceptor]], + ): + """Constructor. + + Args: + target: The target to which to connect. + options: Configuration options for the channel. + credentials: A cygrpc.ChannelCredentials or None. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + interceptors: An optional list of interceptors that would be used for + intercepting any RPC executed with that channel. 
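In practice the constructor is reached through the module-level factories defined later in this file; a hedged sketch with an assumed endpoint:

import grpc

# Channel arguments are plain (key, value) tuples; the user-agent string
# and any compression option are appended by _augment_channel_arguments.
channel = grpc.aio.insecure_channel(
    "localhost:50051",  # assumed endpoint, for illustration only
    options=(("grpc.max_receive_message_length", 16 * 1024 * 1024),),
    compression=grpc.Compression.Gzip,
)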
+ """ + self._unary_unary_interceptors = [] + self._unary_stream_interceptors = [] + self._stream_unary_interceptors = [] + self._stream_stream_interceptors = [] + + if interceptors is not None: + for interceptor in interceptors: + if isinstance(interceptor, UnaryUnaryClientInterceptor): + self._unary_unary_interceptors.append(interceptor) + elif isinstance(interceptor, UnaryStreamClientInterceptor): + self._unary_stream_interceptors.append(interceptor) + elif isinstance(interceptor, StreamUnaryClientInterceptor): + self._stream_unary_interceptors.append(interceptor) + elif isinstance(interceptor, StreamStreamClientInterceptor): + self._stream_stream_interceptors.append(interceptor) + else: + raise ValueError( + "Interceptor {} must be ".format(interceptor) + + "{} or ".format(UnaryUnaryClientInterceptor.__name__) + + "{} or ".format(UnaryStreamClientInterceptor.__name__) + + "{} or ".format(StreamUnaryClientInterceptor.__name__) + + "{}. ".format(StreamStreamClientInterceptor.__name__) + ) + + self._loop = cygrpc.get_working_loop() + self._channel = cygrpc.AioChannel( + _common.encode(target), + _augment_channel_arguments(options, compression), + credentials, + self._loop, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self._close(None) + + async def _close(self, grace): # pylint: disable=too-many-branches + if self._channel.closed(): + return + + # No new calls will be accepted by the Cython channel. + self._channel.closing() + + # Iterate through running tasks + tasks = _all_tasks() + calls = [] + call_tasks = [] + for task in tasks: + try: + stack = task.get_stack(limit=1) + except AttributeError as attribute_error: + # NOTE(lidiz) tl;dr: If the Task is created with a CPython + # object, it will trigger AttributeError. + # + # In the global finalizer, the event loop schedules + # a CPython PyAsyncGenAThrow object. + # https://github.com/python/cpython/blob/00e45877e33d32bb61aa13a2033e3bba370bda4d/Lib/asyncio/base_events.py#L484 + # + # However, the PyAsyncGenAThrow object is written in C and + # failed to include the normal Python frame objects. Hence, + # this exception is a false negative, and it is safe to ignore + # the failure. It is fixed by https://github.com/python/cpython/pull/18669, + # but not available until 3.9 or 3.8.3. So, we have to keep it + # for a while. + # TODO(lidiz) drop this hack after 3.8 deprecation + if "frame" in str(attribute_error): + continue + else: + raise + + # If the Task is created by a C-extension, the stack will be empty. + if not stack: + continue + + # Locate ones created by `aio.Call`. + frame = stack[0] + candidate = frame.f_locals.get("self") + # Explicitly check for a non-null candidate instead of the more pythonic 'if candidate:' + # because doing 'if candidate:' assumes that the coroutine implements '__bool__' which + # might not always be the case. + if candidate is not None: + if isinstance(candidate, _base_call.Call): + if hasattr(candidate, "_channel"): + # For intercepted Call object + if candidate._channel is not self._channel: + continue + elif hasattr(candidate, "_cython_call"): + # For normal Call object + if candidate._cython_call._channel is not self._channel: + continue + else: + # Unidentified Call object + raise cygrpc.InternalError( + f"Unrecognized call object: {candidate}" + ) + + calls.append(candidate) + call_tasks.append(task) + + # If needed, try to wait for them to finish. + # Call objects are not always awaitables. 
+ if grace and call_tasks: + await asyncio.wait(call_tasks, timeout=grace) + + # Time to cancel existing calls. + for call in calls: + call.cancel() + + # Destroy the channel + self._channel.close() + + async def close(self, grace: Optional[float] = None): + await self._close(grace) + + def __del__(self): + if hasattr(self, "_channel"): + if not self._channel.closed(): + self._channel.close() + + def get_state( + self, try_to_connect: bool = False + ) -> grpc.ChannelConnectivity: + result = self._channel.check_connectivity_state(try_to_connect) + return _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[result] + + async def wait_for_state_change( + self, + last_observed_state: grpc.ChannelConnectivity, + ) -> None: + assert await self._channel.watch_connectivity_state( + last_observed_state.value[0], None + ) + + async def channel_ready(self) -> None: + state = self.get_state(try_to_connect=True) + while state != grpc.ChannelConnectivity.READY: + await self.wait_for_state_change(state) + state = self.get_state(try_to_connect=True) + + # TODO(xuanwn): Implement this method after we have + # observability for Asyncio. + def _get_registered_call_handle(self, method: str) -> int: + pass + + # TODO(xuanwn): Implement _registered_method after we have + # observability for Asyncio. + # pylint: disable=arguments-differ,unused-argument + def unary_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> UnaryUnaryMultiCallable: + return UnaryUnaryMultiCallable( + self._channel, + _common.encode(method), + request_serializer, + response_deserializer, + self._unary_unary_interceptors, + [self], + self._loop, + ) + + # TODO(xuanwn): Implement _registered_method after we have + # observability for Asyncio. + # pylint: disable=arguments-differ,unused-argument + def unary_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> UnaryStreamMultiCallable: + return UnaryStreamMultiCallable( + self._channel, + _common.encode(method), + request_serializer, + response_deserializer, + self._unary_stream_interceptors, + [self], + self._loop, + ) + + # TODO(xuanwn): Implement _registered_method after we have + # observability for Asyncio. + # pylint: disable=arguments-differ,unused-argument + def stream_unary( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> StreamUnaryMultiCallable: + return StreamUnaryMultiCallable( + self._channel, + _common.encode(method), + request_serializer, + response_deserializer, + self._stream_unary_interceptors, + [self], + self._loop, + ) + + # TODO(xuanwn): Implement _registered_method after we have + # observability for Asyncio. 
+ # pylint: disable=arguments-differ,unused-argument + def stream_stream( + self, + method: str, + request_serializer: Optional[SerializingFunction] = None, + response_deserializer: Optional[DeserializingFunction] = None, + _registered_method: Optional[bool] = False, + ) -> StreamStreamMultiCallable: + return StreamStreamMultiCallable( + self._channel, + _common.encode(method), + request_serializer, + response_deserializer, + self._stream_stream_interceptors, + [self], + self._loop, + ) + + +def insecure_channel( + target: str, + options: Optional[ChannelArgumentType] = None, + compression: Optional[grpc.Compression] = None, + interceptors: Optional[Sequence[ClientInterceptor]] = None, +): + """Creates an insecure asynchronous Channel to a server. + + Args: + target: The server address + options: An optional list of key-value pairs (:term:`channel_arguments` + in gRPC Core runtime) to configure the channel. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + interceptors: An optional sequence of interceptors that will be executed for + any call executed with this channel. + + Returns: + A Channel. + """ + return Channel( + target, + () if options is None else options, + None, + compression, + interceptors, + ) + + +def secure_channel( + target: str, + credentials: grpc.ChannelCredentials, + options: Optional[ChannelArgumentType] = None, + compression: Optional[grpc.Compression] = None, + interceptors: Optional[Sequence[ClientInterceptor]] = None, +): + """Creates a secure asynchronous Channel to a server. + + Args: + target: The server address. + credentials: A ChannelCredentials instance. + options: An optional list of key-value pairs (:term:`channel_arguments` + in gRPC Core runtime) to configure the channel. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. + interceptors: An optional sequence of interceptors that will be executed for + any call executed with this channel. + + Returns: + An aio.Channel. + """ + return Channel( + target, + () if options is None else options, + credentials._credentials, + compression, + interceptors, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_interceptor.py b/venv/lib/python3.10/site-packages/grpc/aio/_interceptor.py new file mode 100644 index 0000000000000000000000000000000000000000..1d609534108fe102099e3a35a6c0623f05d0a729 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_interceptor.py @@ -0,0 +1,1178 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Interceptors implementation of gRPC Asyncio Python.""" +from abc import ABCMeta +from abc import abstractmethod +import asyncio +import collections +import functools +from typing import ( + AsyncIterable, + Awaitable, + Callable, + Iterator, + List, + Optional, + Sequence, + Union, +) + +import grpc +from grpc._cython import cygrpc + +from . 
import _base_call +from ._call import AioRpcError +from ._call import StreamStreamCall +from ._call import StreamUnaryCall +from ._call import UnaryStreamCall +from ._call import UnaryUnaryCall +from ._call import _API_STYLE_ERROR +from ._call import _RPC_ALREADY_FINISHED_DETAILS +from ._call import _RPC_HALF_CLOSED_DETAILS +from ._metadata import Metadata +from ._typing import DeserializingFunction +from ._typing import DoneCallbackType +from ._typing import EOFType +from ._typing import RequestIterableType +from ._typing import RequestType +from ._typing import ResponseIterableType +from ._typing import ResponseType +from ._typing import SerializingFunction +from ._utils import _timeout_to_deadline + +_LOCAL_CANCELLATION_DETAILS = "Locally cancelled by application!" + + +class ServerInterceptor(metaclass=ABCMeta): + """Affords intercepting incoming RPCs on the service-side. + + This is an EXPERIMENTAL API. + """ + + @abstractmethod + async def intercept_service( + self, + continuation: Callable[ + [grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler] + ], + handler_call_details: grpc.HandlerCallDetails, + ) -> grpc.RpcMethodHandler: + """Intercepts incoming RPCs before handing them over to a handler. + + State can be passed from an interceptor to downstream interceptors + via contextvars. The first interceptor is called from an empty + contextvars.Context, and the same Context is used for downstream + interceptors and for the final handler call. Note that there are no + guarantees that interceptors and handlers will be called from the + same thread. + + Args: + continuation: A function that takes a HandlerCallDetails and + proceeds to invoke the next interceptor in the chain, if any, + or the RPC handler lookup logic, with the call details passed + as an argument, and returns an RpcMethodHandler instance if + the RPC is considered serviced, or None otherwise. + handler_call_details: A HandlerCallDetails describing the RPC. + + Returns: + An RpcMethodHandler with which the RPC may be serviced if the + interceptor chooses to service this RPC, or None otherwise. + """ + + +class ClientCallDetails( + collections.namedtuple( + "ClientCallDetails", + ("method", "timeout", "metadata", "credentials", "wait_for_ready"), + ), + grpc.ClientCallDetails, +): + """Describes an RPC to be invoked. + + This is an EXPERIMENTAL API. + + Args: + method: The method name of the RPC. + timeout: An optional duration of time in seconds to allow for the RPC. + metadata: Optional metadata to be transmitted to the service-side of + the RPC. + credentials: An optional CallCredentials for the RPC. + wait_for_ready: An optional flag to enable :term:`wait_for_ready` mechanism. + """ + + method: str + timeout: Optional[float] + metadata: Optional[Metadata] + credentials: Optional[grpc.CallCredentials] + wait_for_ready: Optional[bool] + + +class ClientInterceptor(metaclass=ABCMeta): + """Base class used for all Aio Client Interceptor classes""" + + +class UnaryUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta): + """Affords intercepting unary-unary invocations.""" + + @abstractmethod + async def intercept_unary_unary( + self, + continuation: Callable[ + [ClientCallDetails, RequestType], UnaryUnaryCall + ], + client_call_details: ClientCallDetails, + request: RequestType, + ) -> Union[UnaryUnaryCall, ResponseType]: + """Intercepts a unary-unary invocation asynchronously. 
+ + Args: + continuation: A coroutine that proceeds with the invocation by + executing the next interceptor in the chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `call = await continuation(client_call_details, request)` + to continue with the RPC. `continuation` returns the call to the + RPC. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request: The request value for the RPC. + + Returns: + An object with the RPC response. + + Raises: + AioRpcError: Indicating that the RPC terminated with non-OK status. + asyncio.CancelledError: Indicating that the RPC was canceled. + """ + + +class UnaryStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta): + """Affords intercepting unary-stream invocations.""" + + @abstractmethod + async def intercept_unary_stream( + self, + continuation: Callable[ + [ClientCallDetails, RequestType], UnaryStreamCall + ], + client_call_details: ClientCallDetails, + request: RequestType, + ) -> Union[ResponseIterableType, UnaryStreamCall]: + """Intercepts a unary-stream invocation asynchronously. + + The function may return either the call object or an asynchronous + iterator; if an asynchronous iterator is returned, it becomes the + source of the reads done by the caller. + + Args: + continuation: A coroutine that proceeds with the invocation by + executing the next interceptor in the chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `call = await continuation(client_call_details, request)` + to continue with the RPC. `continuation` returns the call to the + RPC. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request: The request value for the RPC. + + Returns: + The RPC Call or an asynchronous iterator. + + Raises: + AioRpcError: Indicating that the RPC terminated with non-OK status. + asyncio.CancelledError: Indicating that the RPC was canceled. + """ + + +class StreamUnaryClientInterceptor(ClientInterceptor, metaclass=ABCMeta): + """Affords intercepting stream-unary invocations.""" + + @abstractmethod + async def intercept_stream_unary( + self, + continuation: Callable[ + [ClientCallDetails, RequestType], StreamUnaryCall + ], + client_call_details: ClientCallDetails, + request_iterator: RequestIterableType, + ) -> StreamUnaryCall: + """Intercepts a stream-unary invocation asynchronously. + + Within the interceptor, using call methods like `write` or even + awaiting the call should be done carefully, since the caller could + be expecting an untouched call, for example to start writing + messages to it. + + Args: + continuation: A coroutine that proceeds with the invocation by + executing the next interceptor in the chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `call = await continuation(client_call_details, request_iterator)` + to continue with the RPC. `continuation` returns the call to the + RPC. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request_iterator: The request iterator that will produce requests + for the RPC. + + Returns: + The RPC Call. + + Raises: + AioRpcError: Indicating that the RPC terminated with non-OK status.
+ asyncio.CancelledError: Indicating that the RPC was canceled. + """ + + +class StreamStreamClientInterceptor(ClientInterceptor, metaclass=ABCMeta): + """Affords intercepting stream-stream invocations.""" + + @abstractmethod + async def intercept_stream_stream( + self, + continuation: Callable[ + [ClientCallDetails, RequestType], StreamStreamCall + ], + client_call_details: ClientCallDetails, + request_iterator: RequestIterableType, + ) -> Union[ResponseIterableType, StreamStreamCall]: + """Intercepts a stream-stream invocation asynchronously. + + Within the interceptor, using call methods like `write` or even + awaiting the call should be done carefully, since the caller could + be expecting an untouched call, for example to start writing + messages to it. + + The function may return either the call object or an asynchronous + iterator; if an asynchronous iterator is returned, it becomes the + source of the reads done by the caller. + + Args: + continuation: A coroutine that proceeds with the invocation by + executing the next interceptor in the chain or invoking the + actual RPC on the underlying Channel. It is the interceptor's + responsibility to call it if it decides to move the RPC forward. + The interceptor can use + `call = await continuation(client_call_details, request_iterator)` + to continue with the RPC. `continuation` returns the call to the + RPC. + client_call_details: A ClientCallDetails object describing the + outgoing RPC. + request_iterator: The request iterator that will produce requests + for the RPC. + + Returns: + The RPC Call or an asynchronous iterator. + + Raises: + AioRpcError: Indicating that the RPC terminated with non-OK status. + asyncio.CancelledError: Indicating that the RPC was canceled. + """ + + +class InterceptedCall: + """Base implementation for all intercepted call arities. + + Interceptors might have some work to do before the RPC invocation, with + the ability to change the invocation parameters, and some work to do + after the RPC invocation, with access to the wrapped `UnaryUnaryCall`. + + It also handles early and late cancellations: when the RPC has not even + started and execution is still held by the interceptors, or when the + RPC has finished but execution is again held by the interceptors. + + Once the RPC is finally executed, all methods are delegated to the + intercepted call, which is at the same time the call returned to the + interceptors. + + As the base class for all intercepted calls, it implements the logic + around final status, metadata and cancellation.
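To make the client interceptor contract above concrete, here is a minimal sketch (illustrative, not part of the vendored source); it logs the method name and delegates to `continuation` unchanged:

import grpc

class LoggingInterceptor(grpc.aio.UnaryUnaryClientInterceptor):
    async def intercept_unary_unary(
        self, continuation, client_call_details, request
    ):
        # Delegate to the next interceptor (or the real RPC) and return
        # the resulting call object untouched.
        print("invoking", client_call_details.method)
        return await continuation(client_call_details, request)

# Interceptors are installed at channel creation time, e.g.:
# channel = grpc.aio.insecure_channel(
#     "localhost:50051", interceptors=[LoggingInterceptor()]
# )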
+ """ + + _interceptors_task: asyncio.Task + _pending_add_done_callbacks: Sequence[DoneCallbackType] + + def __init__(self, interceptors_task: asyncio.Task) -> None: + self._interceptors_task = interceptors_task + self._pending_add_done_callbacks = [] + self._interceptors_task.add_done_callback( + self._fire_or_add_pending_done_callbacks + ) + + def __del__(self): + self.cancel() + + def _fire_or_add_pending_done_callbacks( + self, interceptors_task: asyncio.Task + ) -> None: + if not self._pending_add_done_callbacks: + return + + call_completed = False + + try: + call = interceptors_task.result() + if call.done(): + call_completed = True + except (AioRpcError, asyncio.CancelledError): + call_completed = True + + if call_completed: + for callback in self._pending_add_done_callbacks: + callback(self) + else: + for callback in self._pending_add_done_callbacks: + callback = functools.partial( + self._wrap_add_done_callback, callback + ) + call.add_done_callback(callback) + + self._pending_add_done_callbacks = [] + + def _wrap_add_done_callback( + self, callback: DoneCallbackType, unused_call: _base_call.Call + ) -> None: + callback(self) + + def cancel(self) -> bool: + if not self._interceptors_task.done(): + # There is no yet the intercepted call available, + # Trying to cancel it by using the generic Asyncio + # cancellation method. + return self._interceptors_task.cancel() + + try: + call = self._interceptors_task.result() + except AioRpcError: + return False + except asyncio.CancelledError: + return False + + return call.cancel() + + def cancelled(self) -> bool: + if not self._interceptors_task.done(): + return False + + try: + call = self._interceptors_task.result() + except AioRpcError as err: + return err.code() == grpc.StatusCode.CANCELLED + except asyncio.CancelledError: + return True + + return call.cancelled() + + def done(self) -> bool: + if not self._interceptors_task.done(): + return False + + try: + call = self._interceptors_task.result() + except (AioRpcError, asyncio.CancelledError): + return True + + return call.done() + + def add_done_callback(self, callback: DoneCallbackType) -> None: + if not self._interceptors_task.done(): + self._pending_add_done_callbacks.append(callback) + return + + try: + call = self._interceptors_task.result() + except (AioRpcError, asyncio.CancelledError): + callback(self) + return + + if call.done(): + callback(self) + else: + callback = functools.partial(self._wrap_add_done_callback, callback) + call.add_done_callback(callback) + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + async def initial_metadata(self) -> Optional[Metadata]: + try: + call = await self._interceptors_task + except AioRpcError as err: + return err.initial_metadata() + except asyncio.CancelledError: + return None + + return await call.initial_metadata() + + async def trailing_metadata(self) -> Optional[Metadata]: + try: + call = await self._interceptors_task + except AioRpcError as err: + return err.trailing_metadata() + except asyncio.CancelledError: + return None + + return await call.trailing_metadata() + + async def code(self) -> grpc.StatusCode: + try: + call = await self._interceptors_task + except AioRpcError as err: + return err.code() + except asyncio.CancelledError: + return grpc.StatusCode.CANCELLED + + return await call.code() + + async def details(self) -> str: + try: + call = await self._interceptors_task + except AioRpcError as err: + return err.details() + except asyncio.CancelledError: + return _LOCAL_CANCELLATION_DETAILS + + 
return await call.details() + + async def debug_error_string(self) -> Optional[str]: + try: + call = await self._interceptors_task + except AioRpcError as err: + return err.debug_error_string() + except asyncio.CancelledError: + return "" + + return await call.debug_error_string() + + async def wait_for_connection(self) -> None: + call = await self._interceptors_task + return await call.wait_for_connection() + + +class _InterceptedUnaryResponseMixin: + def __await__(self): + call = yield from self._interceptors_task.__await__() + response = yield from call.__await__() + return response + + +class _InterceptedStreamResponseMixin: + _response_aiter: Optional[AsyncIterable[ResponseType]] + + def _init_stream_response_mixin(self) -> None: + # Initialized lazily; otherwise, if the iterator is never + # consumed, a logging warning is emitted by Asyncio. + self._response_aiter = None + + async def _wait_for_interceptor_task_response_iterator( + self, + ) -> ResponseType: + call = await self._interceptors_task + async for response in call: + yield response + + def __aiter__(self) -> AsyncIterable[ResponseType]: + if self._response_aiter is None: + self._response_aiter = ( + self._wait_for_interceptor_task_response_iterator() + ) + return self._response_aiter + + async def read(self) -> Union[EOFType, ResponseType]: + if self._response_aiter is None: + self._response_aiter = ( + self._wait_for_interceptor_task_response_iterator() + ) + try: + return await self._response_aiter.asend(None) + except StopAsyncIteration: + return cygrpc.EOF + + +class _InterceptedStreamRequestMixin: + _write_to_iterator_async_gen: Optional[AsyncIterable[RequestType]] + _write_to_iterator_queue: Optional[asyncio.Queue] + _status_code_task: Optional[asyncio.Task] + + _FINISH_ITERATOR_SENTINEL = object() + + def _init_stream_request_mixin( + self, request_iterator: Optional[RequestIterableType] + ) -> RequestIterableType: + if request_iterator is None: + # We provide our own request iterator, which is a proxy + # for the future writes that will be done by the caller. + self._write_to_iterator_queue = asyncio.Queue(maxsize=1) + self._write_to_iterator_async_gen = ( + self._proxy_writes_as_request_iterator() + ) + self._status_code_task = None + request_iterator = self._write_to_iterator_async_gen + else: + self._write_to_iterator_queue = None + + return request_iterator + + async def _proxy_writes_as_request_iterator(self): + await self._interceptors_task + + while True: + value = await self._write_to_iterator_queue.get() + if ( + value + is _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL + ): + break + yield value + + async def _write_to_iterator_queue_interruptible( + self, request: RequestType, call: InterceptedCall + ): + # Write the specified 'request' to the request iterator queue using the + # specified 'call' to allow for interruption of the write in the case + # of abrupt termination of the call. + if self._status_code_task is None: + self._status_code_task = self._loop.create_task(call.code()) + + await asyncio.wait( + ( + self._loop.create_task( + self._write_to_iterator_queue.put(request) + ), + self._status_code_task, + ), + return_when=asyncio.FIRST_COMPLETED, + ) + + async def write(self, request: RequestType) -> None: + # If no queue was created, it means that requests + # should be expected through an iterator provided + # by the caller.
+ if self._write_to_iterator_queue is None: + raise cygrpc.UsageError(_API_STYLE_ERROR) + + try: + call = await self._interceptors_task + except (asyncio.CancelledError, AioRpcError): + raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS) + + if call.done(): + raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS) + elif call._done_writing_flag: + raise asyncio.InvalidStateError(_RPC_HALF_CLOSED_DETAILS) + + await self._write_to_iterator_queue_interruptible(request, call) + + if call.done(): + raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS) + + async def done_writing(self) -> None: + """Signal peer that client is done writing. + + This method is idempotent. + """ + # If no queue was created, it means that requests + # should be expected through an iterator provided + # by the caller. + if self._write_to_iterator_queue is None: + raise cygrpc.UsageError(_API_STYLE_ERROR) + + try: + call = await self._interceptors_task + except asyncio.CancelledError: + raise asyncio.InvalidStateError(_RPC_ALREADY_FINISHED_DETAILS) + + await self._write_to_iterator_queue_interruptible( + _InterceptedStreamRequestMixin._FINISH_ITERATOR_SENTINEL, call + ) + + +class InterceptedUnaryUnaryCall( + _InterceptedUnaryResponseMixin, InterceptedCall, _base_call.UnaryUnaryCall +): + """Used for running a `UnaryUnaryCall` wrapped by interceptors. + + The `__await__` method is proxied to the intercepted call only once + the interceptor task is finished. + """ + + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + + # pylint: disable=too-many-arguments + def __init__( + self, + interceptors: Sequence[UnaryUnaryClientInterceptor], + request: RequestType, + timeout: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._channel = channel + interceptors_task = loop.create_task( + self._invoke( + interceptors, + method, + timeout, + metadata, + credentials, + wait_for_ready, + request, + request_serializer, + response_deserializer, + ) + ) + super().__init__(interceptors_task) + + # pylint: disable=too-many-arguments + async def _invoke( + self, + interceptors: Sequence[UnaryUnaryClientInterceptor], + method: bytes, + timeout: Optional[float], + metadata: Optional[Metadata], + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + request: RequestType, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + ) -> UnaryUnaryCall: + """Run the RPC call wrapped in interceptors""" + + async def _run_interceptor( + interceptors: List[UnaryUnaryClientInterceptor], + client_call_details: ClientCallDetails, + request: RequestType, + ) -> _base_call.UnaryUnaryCall: + if interceptors: + continuation = functools.partial( + _run_interceptor, interceptors[1:] + ) + call_or_response = await interceptors[0].intercept_unary_unary( + continuation, client_call_details, request + ) + + if isinstance(call_or_response, _base_call.UnaryUnaryCall): + return call_or_response + else: + return UnaryUnaryCallResponse(call_or_response) + + else: + return UnaryUnaryCall( + request, + _timeout_to_deadline(client_call_details.timeout), + client_call_details.metadata, + client_call_details.credentials, + client_call_details.wait_for_ready, + self._channel, +
client_call_details.method, + request_serializer, + response_deserializer, + self._loop, + ) + + client_call_details = ClientCallDetails( + method, timeout, metadata, credentials, wait_for_ready + ) + return await _run_interceptor( + list(interceptors), client_call_details, request + ) + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + +class InterceptedUnaryStreamCall( + _InterceptedStreamResponseMixin, InterceptedCall, _base_call.UnaryStreamCall +): + """Used for running a `UnaryStreamCall` wrapped by interceptors.""" + + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + _last_returned_call_from_interceptors: Optional[_base_call.UnaryStreamCall] + + # pylint: disable=too-many-arguments + def __init__( + self, + interceptors: Sequence[UnaryStreamClientInterceptor], + request: RequestType, + timeout: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._channel = channel + self._init_stream_response_mixin() + self._last_returned_call_from_interceptors = None + interceptors_task = loop.create_task( + self._invoke( + interceptors, + method, + timeout, + metadata, + credentials, + wait_for_ready, + request, + request_serializer, + response_deserializer, + ) + ) + super().__init__(interceptors_task) + + # pylint: disable=too-many-arguments + async def _invoke( + self, + interceptors: Sequence[UnaryStreamClientInterceptor], + method: bytes, + timeout: Optional[float], + metadata: Optional[Metadata], + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + request: RequestType, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + ) -> UnaryStreamCall: + """Run the RPC call wrapped in interceptors""" + + async def _run_interceptor( + interceptors: List[UnaryStreamClientInterceptor], + client_call_details: ClientCallDetails, + request: RequestType, + ) -> _base_call.UnaryStreamCall: + if interceptors: + continuation = functools.partial( + _run_interceptor, interceptors[1:] + ) + + call_or_response_iterator = await interceptors[ + 0 + ].intercept_unary_stream( + continuation, client_call_details, request + ) + + if isinstance( + call_or_response_iterator, _base_call.UnaryStreamCall + ): + self._last_returned_call_from_interceptors = ( + call_or_response_iterator + ) + else: + self._last_returned_call_from_interceptors = ( + UnaryStreamCallResponseIterator( + self._last_returned_call_from_interceptors, + call_or_response_iterator, + ) + ) + return self._last_returned_call_from_interceptors + else: + self._last_returned_call_from_interceptors = UnaryStreamCall( + request, + _timeout_to_deadline(client_call_details.timeout), + client_call_details.metadata, + client_call_details.credentials, + client_call_details.wait_for_ready, + self._channel, + client_call_details.method, + request_serializer, + response_deserializer, + self._loop, + ) + + return self._last_returned_call_from_interceptors + + client_call_details = ClientCallDetails( + method, timeout, metadata, credentials, wait_for_ready + ) + return await _run_interceptor( + list(interceptors), client_call_details, request + ) + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + +class InterceptedStreamUnaryCall( +
_InterceptedUnaryResponseMixin, + _InterceptedStreamRequestMixin, + InterceptedCall, + _base_call.StreamUnaryCall, +): + """Used for running a `StreamUnaryCall` wrapped by interceptors. + + The `__await__` method is proxied to the intercepted call only once + the interceptor task is finished. + """ + + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + + # pylint: disable=too-many-arguments + def __init__( + self, + interceptors: Sequence[StreamUnaryClientInterceptor], + request_iterator: Optional[RequestIterableType], + timeout: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._channel = channel + request_iterator = self._init_stream_request_mixin(request_iterator) + interceptors_task = loop.create_task( + self._invoke( + interceptors, + method, + timeout, + metadata, + credentials, + wait_for_ready, + request_iterator, + request_serializer, + response_deserializer, + ) + ) + super().__init__(interceptors_task) + + # pylint: disable=too-many-arguments + async def _invoke( + self, + interceptors: Sequence[StreamUnaryClientInterceptor], + method: bytes, + timeout: Optional[float], + metadata: Optional[Metadata], + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + request_iterator: RequestIterableType, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + ) -> StreamUnaryCall: + """Run the RPC call wrapped in interceptors""" + + async def _run_interceptor( + interceptors: Iterator[StreamUnaryClientInterceptor], + client_call_details: ClientCallDetails, + request_iterator: RequestIterableType, + ) -> _base_call.StreamUnaryCall: + if interceptors: + continuation = functools.partial( + _run_interceptor, interceptors[1:] + ) + + return await interceptors[0].intercept_stream_unary( + continuation, client_call_details, request_iterator + ) + else: + return StreamUnaryCall( + request_iterator, + _timeout_to_deadline(client_call_details.timeout), + client_call_details.metadata, + client_call_details.credentials, + client_call_details.wait_for_ready, + self._channel, + client_call_details.method, + request_serializer, + response_deserializer, + self._loop, + ) + + client_call_details = ClientCallDetails( + method, timeout, metadata, credentials, wait_for_ready + ) + return await _run_interceptor( + list(interceptors), client_call_details, request_iterator + ) + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + +class InterceptedStreamStreamCall( + _InterceptedStreamResponseMixin, + _InterceptedStreamRequestMixin, + InterceptedCall, + _base_call.StreamStreamCall, +): + """Used for running a `StreamStreamCall` wrapped by interceptors.""" + + _loop: asyncio.AbstractEventLoop + _channel: cygrpc.AioChannel + _last_returned_call_from_interceptors: Optional[ + _base_call.StreamStreamCall + ] + + # pylint: disable=too-many-arguments + def __init__( + self, + interceptors: Sequence[StreamStreamClientInterceptor], + request_iterator: Optional[RequestIterableType], + timeout: Optional[float], + metadata: Metadata, + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + channel: cygrpc.AioChannel, + method: bytes, + request_serializer: SerializingFunction, + response_deserializer:
DeserializingFunction, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._loop = loop + self._channel = channel + self._init_stream_response_mixin() + request_iterator = self._init_stream_request_mixin(request_iterator) + self._last_returned_call_from_interceptors = None + interceptors_task = loop.create_task( + self._invoke( + interceptors, + method, + timeout, + metadata, + credentials, + wait_for_ready, + request_iterator, + request_serializer, + response_deserializer, + ) + ) + super().__init__(interceptors_task) + + # pylint: disable=too-many-arguments + async def _invoke( + self, + interceptors: Sequence[StreamStreamClientInterceptor], + method: bytes, + timeout: Optional[float], + metadata: Optional[Metadata], + credentials: Optional[grpc.CallCredentials], + wait_for_ready: Optional[bool], + request_iterator: RequestIterableType, + request_serializer: SerializingFunction, + response_deserializer: DeserializingFunction, + ) -> StreamStreamCall: + """Run the RPC call wrapped in interceptors""" + + async def _run_interceptor( + interceptors: List[StreamStreamClientInterceptor], + client_call_details: ClientCallDetails, + request_iterator: RequestIterableType, + ) -> _base_call.StreamStreamCall: + if interceptors: + continuation = functools.partial( + _run_interceptor, interceptors[1:] + ) + + call_or_response_iterator = await interceptors[ + 0 + ].intercept_stream_stream( + continuation, client_call_details, request_iterator + ) + + if isinstance( + call_or_response_iterator, _base_call.StreamStreamCall + ): + self._last_returned_call_from_interceptors = ( + call_or_response_iterator + ) + else: + self._last_returned_call_from_interceptors = ( + StreamStreamCallResponseIterator( + self._last_returned_call_from_interceptors, + call_or_response_iterator, + ) + ) + return self._last_returned_call_from_interceptors + else: + self._last_returned_call_from_interceptors = StreamStreamCall( + request_iterator, + _timeout_to_deadline(client_call_details.timeout), + client_call_details.metadata, + client_call_details.credentials, + client_call_details.wait_for_ready, + self._channel, + client_call_details.method, + request_serializer, + response_deserializer, + self._loop, + ) + return self._last_returned_call_from_interceptors + + client_call_details = ClientCallDetails( + method, timeout, metadata, credentials, wait_for_ready + ) + return await _run_interceptor( + list(interceptors), client_call_details, request_iterator + ) + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + +class UnaryUnaryCallResponse(_base_call.UnaryUnaryCall): + """Final UnaryUnaryCall class finished with a response.""" + + _response: ResponseType + + def __init__(self, response: ResponseType) -> None: + self._response = response + + def cancel(self) -> bool: + return False + + def cancelled(self) -> bool: + return False + + def done(self) -> bool: + return True + + def add_done_callback(self, unused_callback) -> None: + raise NotImplementedError() + + def time_remaining(self) -> Optional[float]: + raise NotImplementedError() + + async def initial_metadata(self) -> Optional[Metadata]: + return None + + async def trailing_metadata(self) -> Optional[Metadata]: + return None + + async def code(self) -> grpc.StatusCode: + return grpc.StatusCode.OK + + async def details(self) -> str: + return "" + + async def debug_error_string(self) -> Optional[str]: + return None + + def __await__(self): + if False: # pylint: disable=using-constant-test + # This code path is never used, but a yield 
statement is needed + # for telling the interpreter that __await__ is a generator. + yield None + return self._response + + async def wait_for_connection(self) -> None: + pass + + +class _StreamCallResponseIterator: + _call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall] + _response_iterator: AsyncIterable[ResponseType] + + def __init__( + self, + call: Union[_base_call.UnaryStreamCall, _base_call.StreamStreamCall], + response_iterator: AsyncIterable[ResponseType], + ) -> None: + self._response_iterator = response_iterator + self._call = call + + def cancel(self) -> bool: + return self._call.cancel() + + def cancelled(self) -> bool: + return self._call.cancelled() + + def done(self) -> bool: + return self._call.done() + + def add_done_callback(self, callback) -> None: + self._call.add_done_callback(callback) + + def time_remaining(self) -> Optional[float]: + return self._call.time_remaining() + + async def initial_metadata(self) -> Optional[Metadata]: + return await self._call.initial_metadata() + + async def trailing_metadata(self) -> Optional[Metadata]: + return await self._call.trailing_metadata() + + async def code(self) -> grpc.StatusCode: + return await self._call.code() + + async def details(self) -> str: + return await self._call.details() + + async def debug_error_string(self) -> Optional[str]: + return await self._call.debug_error_string() + + def __aiter__(self): + return self._response_iterator.__aiter__() + + async def wait_for_connection(self) -> None: + return await self._call.wait_for_connection() + + +class UnaryStreamCallResponseIterator( + _StreamCallResponseIterator, _base_call.UnaryStreamCall +): + """UnaryStreamCall class which uses an alternative response iterator.""" + + async def read(self) -> Union[EOFType, ResponseType]: + # Behind the scenes everything goes through the + # async iterator. So this path should not be reached. + raise NotImplementedError() + + +class StreamStreamCallResponseIterator( + _StreamCallResponseIterator, _base_call.StreamStreamCall +): + """StreamStreamCall class which uses an alternative response iterator.""" + + async def read(self) -> Union[EOFType, ResponseType]: + # Behind the scenes everything goes through the + # async iterator. So this path should not be reached. + raise NotImplementedError() + + async def write(self, request: RequestType) -> None: + # Behind the scenes everything goes through the + # async iterator provided by the InterceptedStreamStreamCall. + # So this path should not be reached. + raise NotImplementedError() + + async def done_writing(self) -> None: + # Behind the scenes everything goes through the + # async iterator provided by the InterceptedStreamStreamCall. + # So this path should not be reached. + raise NotImplementedError() + + @property + def _done_writing_flag(self) -> bool: + return self._call._done_writing_flag diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_metadata.py b/venv/lib/python3.10/site-packages/grpc/aio/_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..6303c175297c158bf4bfe88651b9907b0a21ac6b --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_metadata.py @@ -0,0 +1,137 @@ +# Copyright 2020 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Implementation of the metadata abstraction for gRPC Asyncio Python.""" +from collections import OrderedDict +from collections import abc +from typing import Any, Iterator, List, Optional, Tuple, Union + +MetadataKey = str +MetadataValue = Union[str, bytes] + + +class Metadata(abc.Collection): + """Metadata abstraction for the asynchronous calls and interceptors. + + The metadata is a mapping from str -> List[str] + + Traits + * Multiple entries are allowed for the same key + * The order of the values by key is preserved + * Getting an element by key retrieves the first mapped value + * Supports an immutable view of the data + * Allows partial mutation of the data without recreating the object from scratch. + """ + + def __init__(self, *args: Tuple[MetadataKey, MetadataValue]) -> None: + self._metadata = OrderedDict() + for md_key, md_value in args: + self.add(md_key, md_value) + + @classmethod + def from_tuple(cls, raw_metadata: tuple): + if raw_metadata: + return cls(*raw_metadata) + return cls() + + def add(self, key: MetadataKey, value: MetadataValue) -> None: + self._metadata.setdefault(key, []) + self._metadata[key].append(value) + + def __len__(self) -> int: + """Return the total number of elements in the metadata, + including multiple values for the same key. + """ + return sum(map(len, self._metadata.values())) + + def __getitem__(self, key: MetadataKey) -> MetadataValue: + """When calling ``metadata[key]``, the first of the values + mapped for ``key`` is returned. + """ + try: + return self._metadata[key][0] + except (ValueError, IndexError) as e: + raise KeyError("{0!r}".format(key)) from e + + def __setitem__(self, key: MetadataKey, value: MetadataValue) -> None: + """Calling ``metadata[key] = value`` + maps ``value`` to the first instance of ``key``. + """ + if key not in self: + self._metadata[key] = [value] + else: + current_values = self.get_all(key) + self._metadata[key] = [value, *current_values[1:]] + + def __delitem__(self, key: MetadataKey) -> None: + """``del metadata[key]`` deletes the first mapping for ``key``.""" + current_values = self.get_all(key) + if not current_values: + raise KeyError(repr(key)) + self._metadata[key] = current_values[1:] + + def delete_all(self, key: MetadataKey) -> None: + """Delete all mappings for ``key``.""" + del self._metadata[key] + + def __iter__(self) -> Iterator[Tuple[MetadataKey, MetadataValue]]: + for key, values in self._metadata.items(): + for value in values: + yield (key, value) + + def keys(self) -> abc.KeysView: + return abc.KeysView(self) + + def values(self) -> abc.ValuesView: + return abc.ValuesView(self) + + def items(self) -> abc.ItemsView: + return abc.ItemsView(self) + + def get( + self, key: MetadataKey, default: MetadataValue = None + ) -> Optional[MetadataValue]: + try: + return self[key] + except KeyError: + return default + + def get_all(self, key: MetadataKey) -> List[MetadataValue]: + """For compatibility with other Metadata abstraction objects (like in Java), + this would return all items under the desired ``key``.
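A short illustrative sketch of the multi-value semantics documented above (not part of the vendored source):

from grpc.aio import Metadata

md = Metadata(("trace-id", "abc"), ("tag", "a"))
md.add("tag", "b")            # repeated keys are allowed
assert md["tag"] == "a"       # indexing returns the first mapped value
assert md.get_all("tag") == ["a", "b"]
assert len(md) == 3           # duplicate keys count toward the length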
+ """ + return self._metadata.get(key, []) + + def set_all(self, key: MetadataKey, values: List[MetadataValue]) -> None: + self._metadata[key] = values + + def __contains__(self, key: MetadataKey) -> bool: + return key in self._metadata + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self._metadata == other._metadata + if isinstance(other, tuple): + return tuple(self) == other + return NotImplemented # pytype: disable=bad-return-type + + def __add__(self, other: Any) -> "Metadata": + if isinstance(other, self.__class__): + return Metadata(*(tuple(self) + tuple(other))) + if isinstance(other, tuple): + return Metadata(*(tuple(self) + other)) + return NotImplemented # pytype: disable=bad-return-type + + def __repr__(self) -> str: + view = tuple(self) + return "{0}({1!r})".format(self.__class__.__name__, view) diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_server.py b/venv/lib/python3.10/site-packages/grpc/aio/_server.py new file mode 100644 index 0000000000000000000000000000000000000000..0eee6b1470e6a2975a46c386027d9d5a76fb1946 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_server.py @@ -0,0 +1,239 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Server-side implementation of gRPC Asyncio Python.""" + +from concurrent.futures import Executor +from typing import Any, Dict, Optional, Sequence + +import grpc +from grpc import _common +from grpc import _compression +from grpc._cython import cygrpc + +from . import _base_server +from ._interceptor import ServerInterceptor +from ._typing import ChannelArgumentType + + +def _augment_channel_arguments( + base_options: ChannelArgumentType, compression: Optional[grpc.Compression] +): + compression_option = _compression.create_channel_option(compression) + return tuple(base_options) + compression_option + + +class Server(_base_server.Server): + """Serves RPCs.""" + + def __init__( + self, + thread_pool: Optional[Executor], + generic_handlers: Optional[Sequence[grpc.GenericRpcHandler]], + interceptors: Optional[Sequence[Any]], + options: ChannelArgumentType, + maximum_concurrent_rpcs: Optional[int], + compression: Optional[grpc.Compression], + ): + self._loop = cygrpc.get_working_loop() + if interceptors: + invalid_interceptors = [ + interceptor + for interceptor in interceptors + if not isinstance(interceptor, ServerInterceptor) + ] + if invalid_interceptors: + raise ValueError( + "Interceptor must be ServerInterceptor, the " + f"following are invalid: {invalid_interceptors}" + ) + self._server = cygrpc.AioServer( + self._loop, + thread_pool, + generic_handlers, + interceptors, + _augment_channel_arguments(options, compression), + maximum_concurrent_rpcs, + ) + + def add_generic_rpc_handlers( + self, generic_rpc_handlers: Sequence[grpc.GenericRpcHandler] + ) -> None: + """Registers GenericRpcHandlers with this Server. + + This method is only safe to call before the server is started. 
+ + Args: + generic_rpc_handlers: A sequence of GenericRpcHandlers that will be + used to service RPCs. + """ + self._server.add_generic_rpc_handlers(generic_rpc_handlers) + + def add_registered_method_handlers( + self, + service_name: str, + method_handlers: Dict[str, grpc.RpcMethodHandler], + ) -> None: + # TODO(xuanwn): Implement this for AsyncIO. + pass + + def add_insecure_port(self, address: str) -> int: + """Opens an insecure port for accepting RPCs. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. If the port is 0, + or not specified in the address, then the gRPC runtime will choose a port. + + Returns: + An integer port on which the server will accept RPC requests. + """ + return _common.validate_port_binding_result( + address, self._server.add_insecure_port(_common.encode(address)) + ) + + def add_secure_port( + self, address: str, server_credentials: grpc.ServerCredentials + ) -> int: + """Opens a secure port for accepting RPCs. + + This method may only be called before starting the server. + + Args: + address: The address for which to open a port. + If the port is 0, or not specified in the address, then the gRPC + runtime will choose a port. + server_credentials: A ServerCredentials object. + + Returns: + An integer port on which the server will accept RPC requests. + """ + return _common.validate_port_binding_result( + address, + self._server.add_secure_port( + _common.encode(address), server_credentials + ), + ) + + async def start(self) -> None: + """Starts this Server. + + This method may only be called once (i.e. it is not idempotent). + """ + await self._server.start() + + async def stop(self, grace: Optional[float]) -> None: + """Stops this Server. + + This method immediately stops the server from servicing new RPCs in + all cases. + + If a grace period is specified, this method waits until all active + RPCs are finished or until the grace period is reached. RPCs that haven't + been terminated within the grace period are aborted. + If a grace period is not specified (by passing None for grace), all + existing RPCs are aborted immediately and this method blocks until + the last RPC handler terminates. + + This method is idempotent and may be called at any time. Passing a + smaller grace value in a subsequent call will have the effect of + stopping the Server sooner (passing None will have the effect of + stopping the server immediately). Passing a larger grace value in a + subsequent call will not have the effect of stopping the server later + (i.e. the most restrictive grace value is used). + + Args: + grace: A duration of time in seconds or None. + """ + await self._server.shutdown(grace) + + async def wait_for_termination( + self, timeout: Optional[float] = None + ) -> bool: + """Block the current coroutine until the server stops. + + This is an EXPERIMENTAL API. + + The wait will not consume computational resources during blocking, and + it will block until one of the two following conditions is met: + + 1) The server is stopped or terminated; + 2) A timeout occurs if timeout is not `None`. + + The timeout argument works in the same way as `threading.Event.wait()`. + https://docs.python.org/3/library/threading.html#threading.Event.wait + + Args: + timeout: A floating point number specifying a timeout for the + operation in seconds. + + Returns: + A bool indicating whether the operation timed out.
+ """ + return await self._server.wait_for_termination(timeout) + + def __del__(self): + """Schedules a graceful shutdown in current event loop. + + The Cython AioServer doesn't hold a ref-count to this class. It should + be safe to slightly extend the underlying Cython object's life span. + """ + if hasattr(self, "_server"): + if self._server.is_running(): + cygrpc.schedule_coro_threadsafe( + self._server.shutdown(None), + self._loop, + ) + + +def server( + migration_thread_pool: Optional[Executor] = None, + handlers: Optional[Sequence[grpc.GenericRpcHandler]] = None, + interceptors: Optional[Sequence[Any]] = None, + options: Optional[ChannelArgumentType] = None, + maximum_concurrent_rpcs: Optional[int] = None, + compression: Optional[grpc.Compression] = None, +): + """Creates a Server with which RPCs can be serviced. + + Args: + migration_thread_pool: A futures.ThreadPoolExecutor to be used by the + Server to execute non-AsyncIO RPC handlers for migration purpose. + handlers: An optional list of GenericRpcHandlers used for executing RPCs. + More handlers may be added by calling add_generic_rpc_handlers any time + before the server is started. + interceptors: An optional list of ServerInterceptor objects that observe + and optionally manipulate the incoming RPCs before handing them over to + handlers. The interceptors are given control in the order they are + specified. This is an EXPERIMENTAL API. + options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC runtime) + to configure the channel. + maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server + will service before returning RESOURCE_EXHAUSTED status, or None to + indicate no limit. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This compression algorithm will be used for the + lifetime of the server unless overridden by set_compression. + + Returns: + A Server object. + """ + return Server( + migration_thread_pool, + () if handlers is None else handlers, + () if interceptors is None else interceptors, + () if options is None else options, + maximum_concurrent_rpcs, + compression, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_typing.py b/venv/lib/python3.10/site-packages/grpc/aio/_typing.py new file mode 100644 index 0000000000000000000000000000000000000000..0bc32b22e6fe89a8183df9eeeb3f775a61ebadc4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_typing.py @@ -0,0 +1,43 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Common types for gRPC Async API""" + +from typing import ( + Any, + AsyncIterable, + Callable, + Iterable, + Sequence, + Tuple, + TypeVar, + Union, +) + +from grpc._cython.cygrpc import EOF + +from ._metadata import Metadata +from ._metadata import MetadataKey +from ._metadata import MetadataValue + +RequestType = TypeVar("RequestType") +ResponseType = TypeVar("ResponseType") +SerializingFunction = Callable[[Any], bytes] +DeserializingFunction = Callable[[bytes], Any] +MetadatumType = Tuple[MetadataKey, MetadataValue] +MetadataType = Union[Metadata, Sequence[MetadatumType]] +ChannelArgumentType = Sequence[Tuple[str, Any]] +EOFType = type(EOF) +DoneCallbackType = Callable[[Any], None] +RequestIterableType = Union[Iterable[Any], AsyncIterable[Any]] +ResponseIterableType = AsyncIterable[Any] diff --git a/venv/lib/python3.10/site-packages/grpc/aio/_utils.py b/venv/lib/python3.10/site-packages/grpc/aio/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e5772dce2da1d35f5b76bce36fc3612fd97151b4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/aio/_utils.py @@ -0,0 +1,22 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Internal utilities used by the gRPC Aio module.""" +import time +from typing import Optional + + +def _timeout_to_deadline(timeout: Optional[float]) -> Optional[float]: + if timeout is None: + return None + return time.time() + timeout diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__init__.py b/venv/lib/python3.10/site-packages/grpc/beta/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70b77521b8b29c9a1548eb5b260e1153f0257f20 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46bb2bdb3608148f325702d20ba0a3e259868508 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_metadata.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_metadata.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e16e5b038af54e28673b11d0a485026f5daf83fe Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_metadata.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe95e59c21dfe2215c7fdffd57b7e006ad62913f Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/implementations.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/implementations.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..182a2f8387d3f6e2e53419f07b1ac2dc18939c13 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/implementations.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/interfaces.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/interfaces.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1fd30e7a9362c98a820d125282cb1a9dd01fa888 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/interfaces.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/utilities.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/utilities.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d152dcd3f0cf5906d0243e1311066bdc10eba3da Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/beta/__pycache__/utilities.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/beta/_client_adaptations.py b/venv/lib/python3.10/site-packages/grpc/beta/_client_adaptations.py new file mode 100644 index 0000000000000000000000000000000000000000..012149212a23817e824df9453950a1721f41ed7b --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/_client_adaptations.py @@ -0,0 +1,1015 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Translates gRPC's client-side API into gRPC's client-side Beta API.""" + +import grpc +from grpc import _common +from grpc.beta import _metadata +from grpc.beta import interfaces +from grpc.framework.common import cardinality +from grpc.framework.foundation import future +from grpc.framework.interfaces.face import face + +# pylint: disable=too-many-arguments,too-many-locals,unused-argument + +_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = { + grpc.StatusCode.CANCELLED: ( + face.Abortion.Kind.CANCELLED, + face.CancellationError, + ), + grpc.StatusCode.UNKNOWN: ( + face.Abortion.Kind.REMOTE_FAILURE, + face.RemoteError, + ), + grpc.StatusCode.DEADLINE_EXCEEDED: ( + face.Abortion.Kind.EXPIRED, + face.ExpirationError, + ), + grpc.StatusCode.UNIMPLEMENTED: ( + face.Abortion.Kind.LOCAL_FAILURE, + face.LocalError, + ), +} + + +def _effective_metadata(metadata, metadata_transformer): + non_none_metadata = () if metadata is None else metadata + if metadata_transformer is None: + return non_none_metadata + else: + return metadata_transformer(non_none_metadata) + + +def _credentials(grpc_call_options): + return None if grpc_call_options is None else grpc_call_options.credentials + + +def _abortion(rpc_error_call): + code = rpc_error_call.code() + pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code) + error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0] + return face.Abortion( + error_kind, + rpc_error_call.initial_metadata(), + rpc_error_call.trailing_metadata(), + code, + rpc_error_call.details(), + ) + + +def _abortion_error(rpc_error_call): + code = rpc_error_call.code() + pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code) + exception_class = face.AbortionError if pair is None else pair[1] + return exception_class( + rpc_error_call.initial_metadata(), + rpc_error_call.trailing_metadata(), + code, + rpc_error_call.details(), + ) + + +class _InvocationProtocolContext(interfaces.GRPCInvocationContext): + def disable_next_request_compression(self): + pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement. 
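# --- Annotation (not part of the vendored diff) ------------------------------
# A sketch of how the status-code table above drives _abortion_error(): a
# terminated call whose code() is DEADLINE_EXCEEDED surfaces to Beta-API
# callers as face.ExpirationError. The _FakeCall stand-in below is
# hypothetical and only implements the accessors the helper reads; it also
# exercises a private module, so this is illustrative rather than supported
# usage.
import grpc
from grpc.beta import _client_adaptations
from grpc.framework.interfaces.face import face


class _FakeCall:
    def code(self):
        return grpc.StatusCode.DEADLINE_EXCEEDED

    def initial_metadata(self):
        return ()

    def trailing_metadata(self):
        return ()

    def details(self):
        return "deadline exceeded"


error = _client_adaptations._abortion_error(_FakeCall())
assert isinstance(error, face.ExpirationError)
# Codes missing from the table fall back to face.AbortionError itself.
# --- End annotation -----------------------------------------------------------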
+ + +class _Rendezvous(future.Future, face.Call): + def __init__(self, response_future, response_iterator, call): + self._future = response_future + self._iterator = response_iterator + self._call = call + + def cancel(self): + return self._call.cancel() + + def cancelled(self): + return self._future.cancelled() + + def running(self): + return self._future.running() + + def done(self): + return self._future.done() + + def result(self, timeout=None): + try: + return self._future.result(timeout=timeout) + except grpc.RpcError as rpc_error_call: + raise _abortion_error(rpc_error_call) + except grpc.FutureTimeoutError: + raise future.TimeoutError() + except grpc.FutureCancelledError: + raise future.CancelledError() + + def exception(self, timeout=None): + try: + rpc_error_call = self._future.exception(timeout=timeout) + if rpc_error_call is None: + return None + else: + return _abortion_error(rpc_error_call) + except grpc.FutureTimeoutError: + raise future.TimeoutError() + except grpc.FutureCancelledError: + raise future.CancelledError() + + def traceback(self, timeout=None): + try: + return self._future.traceback(timeout=timeout) + except grpc.FutureTimeoutError: + raise future.TimeoutError() + except grpc.FutureCancelledError: + raise future.CancelledError() + + def add_done_callback(self, fn): + self._future.add_done_callback(lambda ignored_callback: fn(self)) + + def __iter__(self): + return self + + def _next(self): + try: + return next(self._iterator) + except grpc.RpcError as rpc_error_call: + raise _abortion_error(rpc_error_call) + + def __next__(self): + return self._next() + + def next(self): + return self._next() + + def is_active(self): + return self._call.is_active() + + def time_remaining(self): + return self._call.time_remaining() + + def add_abortion_callback(self, abortion_callback): + def done_callback(): + if self.code() is not grpc.StatusCode.OK: + abortion_callback(_abortion(self._call)) + + registered = self._call.add_callback(done_callback) + return None if registered else done_callback() + + def protocol_context(self): + return _InvocationProtocolContext() + + def initial_metadata(self): + return _metadata.beta(self._call.initial_metadata()) + + def terminal_metadata(self): + return _metadata.beta(self._call.terminal_metadata()) + + def code(self): + return self._call.code() + + def details(self): + return self._call.details() + + +def _blocking_unary_unary( + channel, + group, + method, + timeout, + with_call, + protocol_options, + metadata, + metadata_transformer, + request, + request_serializer, + response_deserializer, +): + try: + multi_callable = channel.unary_unary( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + if with_call: + response, call = multi_callable.with_call( + request, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return response, _Rendezvous(None, None, call) + else: + return multi_callable( + request, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + except grpc.RpcError as rpc_error_call: + raise _abortion_error(rpc_error_call) + + +def _future_unary_unary( + channel, + group, + method, + timeout, + protocol_options, + metadata, + metadata_transformer, + request, + request_serializer, + response_deserializer, +): + multi_callable = 
channel.unary_unary( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + response_future = multi_callable.future( + request, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return _Rendezvous(response_future, None, response_future) + + +def _unary_stream( + channel, + group, + method, + timeout, + protocol_options, + metadata, + metadata_transformer, + request, + request_serializer, + response_deserializer, +): + multi_callable = channel.unary_stream( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + response_iterator = multi_callable( + request, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return _Rendezvous(None, response_iterator, response_iterator) + + +def _blocking_stream_unary( + channel, + group, + method, + timeout, + with_call, + protocol_options, + metadata, + metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, +): + try: + multi_callable = channel.stream_unary( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + if with_call: + response, call = multi_callable.with_call( + request_iterator, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return response, _Rendezvous(None, None, call) + else: + return multi_callable( + request_iterator, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + except grpc.RpcError as rpc_error_call: + raise _abortion_error(rpc_error_call) + + +def _future_stream_unary( + channel, + group, + method, + timeout, + protocol_options, + metadata, + metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, +): + multi_callable = channel.stream_unary( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + response_future = multi_callable.future( + request_iterator, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return _Rendezvous(response_future, None, response_future) + + +def _stream_stream( + channel, + group, + method, + timeout, + protocol_options, + metadata, + metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, +): + multi_callable = channel.stream_stream( + _common.fully_qualified_method(group, method), + request_serializer=request_serializer, + response_deserializer=response_deserializer, + ) + effective_metadata = _effective_metadata(metadata, metadata_transformer) + response_iterator = multi_callable( + request_iterator, + timeout=timeout, + metadata=_metadata.unbeta(effective_metadata), + credentials=_credentials(protocol_options), + ) + return _Rendezvous(None, response_iterator, response_iterator) + + +class 
_UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable): + def __init__( + self, + channel, + group, + method, + metadata_transformer, + request_serializer, + response_deserializer, + ): + self._channel = channel + self._group = group + self._method = method + self._metadata_transformer = metadata_transformer + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + + def __call__( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + return _blocking_unary_unary( + self._channel, + self._group, + self._method, + timeout, + with_call, + protocol_options, + metadata, + self._metadata_transformer, + request, + self._request_serializer, + self._response_deserializer, + ) + + def future(self, request, timeout, metadata=None, protocol_options=None): + return _future_unary_unary( + self._channel, + self._group, + self._method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request, + self._request_serializer, + self._response_deserializer, + ) + + def event( + self, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + +class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable): + def __init__( + self, + channel, + group, + method, + metadata_transformer, + request_serializer, + response_deserializer, + ): + self._channel = channel + self._group = group + self._method = method + self._metadata_transformer = metadata_transformer + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + + def __call__(self, request, timeout, metadata=None, protocol_options=None): + return _unary_stream( + self._channel, + self._group, + self._method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request, + self._request_serializer, + self._response_deserializer, + ) + + def event( + self, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + +class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable): + def __init__( + self, + channel, + group, + method, + metadata_transformer, + request_serializer, + response_deserializer, + ): + self._channel = channel + self._group = group + self._method = method + self._metadata_transformer = metadata_transformer + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + + def __call__( + self, + request_iterator, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + return _blocking_stream_unary( + self._channel, + self._group, + self._method, + timeout, + with_call, + protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + self._request_serializer, + self._response_deserializer, + ) + + def future( + self, request_iterator, timeout, metadata=None, protocol_options=None + ): + return _future_stream_unary( + self._channel, + self._group, + self._method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + self._request_serializer, + self._response_deserializer, + ) + + def event( + self, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + +class _StreamStreamMultiCallable(face.StreamStreamMultiCallable): + def __init__( + self, + channel, + group, + method, + metadata_transformer, + request_serializer, + 
response_deserializer, + ): + self._channel = channel + self._group = group + self._method = method + self._metadata_transformer = metadata_transformer + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + + def __call__( + self, request_iterator, timeout, metadata=None, protocol_options=None + ): + return _stream_stream( + self._channel, + self._group, + self._method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + self._request_serializer, + self._response_deserializer, + ) + + def event( + self, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + +class _GenericStub(face.GenericStub): + def __init__( + self, + channel, + metadata_transformer, + request_serializers, + response_deserializers, + ): + self._channel = channel + self._metadata_transformer = metadata_transformer + self._request_serializers = request_serializers or {} + self._response_deserializers = response_deserializers or {} + + def blocking_unary_unary( + self, + group, + method, + request, + timeout, + metadata=None, + with_call=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _blocking_unary_unary( + self._channel, + group, + method, + timeout, + with_call, + protocol_options, + metadata, + self._metadata_transformer, + request, + request_serializer, + response_deserializer, + ) + + def future_unary_unary( + self, + group, + method, + request, + timeout, + metadata=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _future_unary_unary( + self._channel, + group, + method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request, + request_serializer, + response_deserializer, + ) + + def inline_unary_stream( + self, + group, + method, + request, + timeout, + metadata=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _unary_stream( + self._channel, + group, + method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request, + request_serializer, + response_deserializer, + ) + + def blocking_stream_unary( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + with_call=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _blocking_stream_unary( + self._channel, + group, + method, + timeout, + with_call, + protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, + ) + + def future_stream_unary( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _future_stream_unary( + self._channel, + group, + method, + timeout, + 
protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, + ) + + def inline_stream_stream( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + protocol_options=None, + ): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _stream_stream( + self._channel, + group, + method, + timeout, + protocol_options, + metadata, + self._metadata_transformer, + request_iterator, + request_serializer, + response_deserializer, + ) + + def event_unary_unary( + self, + group, + method, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + def event_unary_stream( + self, + group, + method, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + def event_stream_unary( + self, + group, + method, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + def event_stream_stream( + self, + group, + method, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + raise NotImplementedError() + + def unary_unary(self, group, method): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _UnaryUnaryMultiCallable( + self._channel, + group, + method, + self._metadata_transformer, + request_serializer, + response_deserializer, + ) + + def unary_stream(self, group, method): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _UnaryStreamMultiCallable( + self._channel, + group, + method, + self._metadata_transformer, + request_serializer, + response_deserializer, + ) + + def stream_unary(self, group, method): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _StreamUnaryMultiCallable( + self._channel, + group, + method, + self._metadata_transformer, + request_serializer, + response_deserializer, + ) + + def stream_stream(self, group, method): + request_serializer = self._request_serializers.get( + ( + group, + method, + ) + ) + response_deserializer = self._response_deserializers.get( + ( + group, + method, + ) + ) + return _StreamStreamMultiCallable( + self._channel, + group, + method, + self._metadata_transformer, + request_serializer, + response_deserializer, + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + return False + + +class _DynamicStub(face.DynamicStub): + def __init__(self, backing_generic_stub, group, cardinalities): + self._generic_stub = backing_generic_stub + self._group = group + self._cardinalities = cardinalities + + def __getattr__(self, attr): + method_cardinality = self._cardinalities.get(attr) + if method_cardinality is cardinality.Cardinality.UNARY_UNARY: + return self._generic_stub.unary_unary(self._group, attr) + elif method_cardinality is cardinality.Cardinality.UNARY_STREAM: + return self._generic_stub.unary_stream(self._group, attr) + elif method_cardinality is 
cardinality.Cardinality.STREAM_UNARY: + return self._generic_stub.stream_unary(self._group, attr) + elif method_cardinality is cardinality.Cardinality.STREAM_STREAM: + return self._generic_stub.stream_stream(self._group, attr) + else: + raise AttributeError( + '_DynamicStub object has no attribute "%s"!' % attr + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + return False + + +def generic_stub( + channel, + host, + metadata_transformer, + request_serializers, + response_deserializers, +): + return _GenericStub( + channel, + metadata_transformer, + request_serializers, + response_deserializers, + ) + + +def dynamic_stub( + channel, + service, + cardinalities, + host, + metadata_transformer, + request_serializers, + response_deserializers, +): + return _DynamicStub( + _GenericStub( + channel, + metadata_transformer, + request_serializers, + response_deserializers, + ), + service, + cardinalities, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/beta/_metadata.py b/venv/lib/python3.10/site-packages/grpc/beta/_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..301010878d67f4a87cc09e051a70e058dd4f34e4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/_metadata.py @@ -0,0 +1,56 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""API metadata conversion utilities.""" + +import collections + +_Metadatum = collections.namedtuple( + "_Metadatum", + ( + "key", + "value", + ), +) + + +def _beta_metadatum(key, value): + beta_key = key if isinstance(key, (bytes,)) else key.encode("ascii") + beta_value = value if isinstance(value, (bytes,)) else value.encode("ascii") + return _Metadatum(beta_key, beta_value) + + +def _metadatum(beta_key, beta_value): + key = beta_key if isinstance(beta_key, (str,)) else beta_key.decode("utf8") + if isinstance(beta_value, (str,)) or key[-4:] == "-bin": + value = beta_value + else: + value = beta_value.decode("utf8") + return _Metadatum(key, value) + + +def beta(metadata): + if metadata is None: + return () + else: + return tuple(_beta_metadatum(key, value) for key, value in metadata) + + +def unbeta(beta_metadata): + if beta_metadata is None: + return () + else: + return tuple( + _metadatum(beta_key, beta_value) + for beta_key, beta_value in beta_metadata + ) diff --git a/venv/lib/python3.10/site-packages/grpc/beta/_server_adaptations.py b/venv/lib/python3.10/site-packages/grpc/beta/_server_adaptations.py new file mode 100644 index 0000000000000000000000000000000000000000..a6f730bb29bf6d18efd446cb8e6a453015c00e67 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/_server_adaptations.py @@ -0,0 +1,465 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
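# --- Annotation (not part of the vendored diff) ------------------------------
# Round-trip sketch for the conversion helpers in grpc/beta/_metadata.py
# above: beta() encodes text keys/values to ascii bytes for the Beta API,
# while unbeta() decodes them back, leaving values of "-bin"-suffixed keys as
# bytes. Illustrative use of a private module.
from grpc.beta import _metadata

beta_md = _metadata.beta((("user-agent", "demo"),))
assert beta_md[0].key == b"user-agent" and beta_md[0].value == b"demo"

plain = _metadata.unbeta(beta_md)
assert plain[0].key == "user-agent" and plain[0].value == "demo"

# Binary metadata keys keep their byte values on the way back:
binary = _metadata.unbeta(((b"trace-bin", b"\x00\x01"),))
assert binary[0].value == b"\x00\x01"
# --- End annotation -----------------------------------------------------------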
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Translates gRPC's server-side API into gRPC's server-side Beta API.""" + +import collections +import threading + +import grpc +from grpc import _common +from grpc.beta import _metadata +from grpc.beta import interfaces +from grpc.framework.common import cardinality +from grpc.framework.common import style +from grpc.framework.foundation import abandonment +from grpc.framework.foundation import logging_pool +from grpc.framework.foundation import stream +from grpc.framework.interfaces.face import face + +# pylint: disable=too-many-return-statements + +_DEFAULT_POOL_SIZE = 8 + + +class _ServerProtocolContext(interfaces.GRPCServicerContext): + def __init__(self, servicer_context): + self._servicer_context = servicer_context + + def peer(self): + return self._servicer_context.peer() + + def disable_next_response_compression(self): + pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement. + + +class _FaceServicerContext(face.ServicerContext): + def __init__(self, servicer_context): + self._servicer_context = servicer_context + + def is_active(self): + return self._servicer_context.is_active() + + def time_remaining(self): + return self._servicer_context.time_remaining() + + def add_abortion_callback(self, abortion_callback): + raise NotImplementedError( + "add_abortion_callback no longer supported server-side!" + ) + + def cancel(self): + self._servicer_context.cancel() + + def protocol_context(self): + return _ServerProtocolContext(self._servicer_context) + + def invocation_metadata(self): + return _metadata.beta(self._servicer_context.invocation_metadata()) + + def initial_metadata(self, initial_metadata): + self._servicer_context.send_initial_metadata( + _metadata.unbeta(initial_metadata) + ) + + def terminal_metadata(self, terminal_metadata): + self._servicer_context.set_terminal_metadata( + _metadata.unbeta(terminal_metadata) + ) + + def code(self, code): + self._servicer_context.set_code(code) + + def details(self, details): + self._servicer_context.set_details(details) + + +def _adapt_unary_request_inline(unary_request_inline): + def adaptation(request, servicer_context): + return unary_request_inline( + request, _FaceServicerContext(servicer_context) + ) + + return adaptation + + +def _adapt_stream_request_inline(stream_request_inline): + def adaptation(request_iterator, servicer_context): + return stream_request_inline( + request_iterator, _FaceServicerContext(servicer_context) + ) + + return adaptation + + +class _Callback(stream.Consumer): + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._terminated = False + self._cancelled = False + + def consume(self, value): + with self._condition: + self._values.append(value) + self._condition.notify_all() + + def terminate(self): + with self._condition: + self._terminated = True + self._condition.notify_all() + + def consume_and_terminate(self, value): + with self._condition: + self._values.append(value) + self._terminated = True + self._condition.notify_all() + + def cancel(self): + with self._condition: + self._cancelled = True + self._condition.notify_all() + + def 
draw_one_value(self): + with self._condition: + while True: + if self._cancelled: + raise abandonment.Abandoned() + elif self._values: + return self._values.pop(0) + elif self._terminated: + return None + else: + self._condition.wait() + + def draw_all_values(self): + with self._condition: + while True: + if self._cancelled: + raise abandonment.Abandoned() + elif self._terminated: + all_values = tuple(self._values) + self._values = None + return all_values + else: + self._condition.wait() + + +def _run_request_pipe_thread( + request_iterator, request_consumer, servicer_context +): + thread_joined = threading.Event() + + def pipe_requests(): + for request in request_iterator: + if not servicer_context.is_active() or thread_joined.is_set(): + return + request_consumer.consume(request) + if not servicer_context.is_active() or thread_joined.is_set(): + return + request_consumer.terminate() + + request_pipe_thread = threading.Thread(target=pipe_requests) + request_pipe_thread.daemon = True + request_pipe_thread.start() + + +def _adapt_unary_unary_event(unary_unary_event): + def adaptation(request, servicer_context): + callback = _Callback() + if not servicer_context.add_callback(callback.cancel): + raise abandonment.Abandoned() + unary_unary_event( + request, + callback.consume_and_terminate, + _FaceServicerContext(servicer_context), + ) + return callback.draw_all_values()[0] + + return adaptation + + +def _adapt_unary_stream_event(unary_stream_event): + def adaptation(request, servicer_context): + callback = _Callback() + if not servicer_context.add_callback(callback.cancel): + raise abandonment.Abandoned() + unary_stream_event( + request, callback, _FaceServicerContext(servicer_context) + ) + while True: + response = callback.draw_one_value() + if response is None: + return + else: + yield response + + return adaptation + + +def _adapt_stream_unary_event(stream_unary_event): + def adaptation(request_iterator, servicer_context): + callback = _Callback() + if not servicer_context.add_callback(callback.cancel): + raise abandonment.Abandoned() + request_consumer = stream_unary_event( + callback.consume_and_terminate, + _FaceServicerContext(servicer_context), + ) + _run_request_pipe_thread( + request_iterator, request_consumer, servicer_context + ) + return callback.draw_all_values()[0] + + return adaptation + + +def _adapt_stream_stream_event(stream_stream_event): + def adaptation(request_iterator, servicer_context): + callback = _Callback() + if not servicer_context.add_callback(callback.cancel): + raise abandonment.Abandoned() + request_consumer = stream_stream_event( + callback, _FaceServicerContext(servicer_context) + ) + _run_request_pipe_thread( + request_iterator, request_consumer, servicer_context + ) + while True: + response = callback.draw_one_value() + if response is None: + return + else: + yield response + + return adaptation + + +class _SimpleMethodHandler( + collections.namedtuple( + "_MethodHandler", + ( + "request_streaming", + "response_streaming", + "request_deserializer", + "response_serializer", + "unary_unary", + "unary_stream", + "stream_unary", + "stream_stream", + ), + ), + grpc.RpcMethodHandler, +): + pass + + +def _simple_method_handler( + implementation, request_deserializer, response_serializer +): + if implementation.style is style.Service.INLINE: + if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY: + return _SimpleMethodHandler( + False, + False, + request_deserializer, + response_serializer, + 
_adapt_unary_request_inline(implementation.unary_unary_inline), + None, + None, + None, + ) + elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM: + return _SimpleMethodHandler( + False, + True, + request_deserializer, + response_serializer, + None, + _adapt_unary_request_inline(implementation.unary_stream_inline), + None, + None, + ) + elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY: + return _SimpleMethodHandler( + True, + False, + request_deserializer, + response_serializer, + None, + None, + _adapt_stream_request_inline( + implementation.stream_unary_inline + ), + None, + ) + elif ( + implementation.cardinality is cardinality.Cardinality.STREAM_STREAM + ): + return _SimpleMethodHandler( + True, + True, + request_deserializer, + response_serializer, + None, + None, + None, + _adapt_stream_request_inline( + implementation.stream_stream_inline + ), + ) + elif implementation.style is style.Service.EVENT: + if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY: + return _SimpleMethodHandler( + False, + False, + request_deserializer, + response_serializer, + _adapt_unary_unary_event(implementation.unary_unary_event), + None, + None, + None, + ) + elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM: + return _SimpleMethodHandler( + False, + True, + request_deserializer, + response_serializer, + None, + _adapt_unary_stream_event(implementation.unary_stream_event), + None, + None, + ) + elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY: + return _SimpleMethodHandler( + True, + False, + request_deserializer, + response_serializer, + None, + None, + _adapt_stream_unary_event(implementation.stream_unary_event), + None, + ) + elif ( + implementation.cardinality is cardinality.Cardinality.STREAM_STREAM + ): + return _SimpleMethodHandler( + True, + True, + request_deserializer, + response_serializer, + None, + None, + None, + _adapt_stream_stream_event(implementation.stream_stream_event), + ) + raise ValueError() + + +def _flatten_method_pair_map(method_pair_map): + method_pair_map = method_pair_map or {} + flat_map = {} + for method_pair in method_pair_map: + method = _common.fully_qualified_method(method_pair[0], method_pair[1]) + flat_map[method] = method_pair_map[method_pair] + return flat_map + + +class _GenericRpcHandler(grpc.GenericRpcHandler): + def __init__( + self, + method_implementations, + multi_method_implementation, + request_deserializers, + response_serializers, + ): + self._method_implementations = _flatten_method_pair_map( + method_implementations + ) + self._request_deserializers = _flatten_method_pair_map( + request_deserializers + ) + self._response_serializers = _flatten_method_pair_map( + response_serializers + ) + self._multi_method_implementation = multi_method_implementation + + def service(self, handler_call_details): + method_implementation = self._method_implementations.get( + handler_call_details.method + ) + if method_implementation is not None: + return _simple_method_handler( + method_implementation, + self._request_deserializers.get(handler_call_details.method), + self._response_serializers.get(handler_call_details.method), + ) + elif self._multi_method_implementation is None: + return None + else: + try: + return None # TODO(nathaniel): call the multimethod. 
+ except face.NoSuchMethodError: + return None + + +class _Server(interfaces.Server): + def __init__(self, grpc_server): + self._grpc_server = grpc_server + + def add_insecure_port(self, address): + return self._grpc_server.add_insecure_port(address) + + def add_secure_port(self, address, server_credentials): + return self._grpc_server.add_secure_port(address, server_credentials) + + def start(self): + self._grpc_server.start() + + def stop(self, grace): + return self._grpc_server.stop(grace) + + def __enter__(self): + self._grpc_server.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._grpc_server.stop(None) + return False + + +def server( + service_implementations, + multi_method_implementation, + request_deserializers, + response_serializers, + thread_pool, + thread_pool_size, +): + generic_rpc_handler = _GenericRpcHandler( + service_implementations, + multi_method_implementation, + request_deserializers, + response_serializers, + ) + if thread_pool is None: + effective_thread_pool = logging_pool.pool( + _DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size + ) + else: + effective_thread_pool = thread_pool + return _Server( + grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)) + ) diff --git a/venv/lib/python3.10/site-packages/grpc/beta/implementations.py b/venv/lib/python3.10/site-packages/grpc/beta/implementations.py new file mode 100644 index 0000000000000000000000000000000000000000..ffa4f0d4bfe00a8cd743afbaf99cea97077680ac --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/implementations.py @@ -0,0 +1,345 @@ +# Copyright 2015-2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Entry points into the Beta API of gRPC Python.""" + +# threading is referenced from specification in this module. +import threading # pylint: disable=unused-import + +# interfaces, cardinality, and face are referenced from specification in this +# module. +import grpc +from grpc import _auth +from grpc.beta import _client_adaptations +from grpc.beta import _metadata +from grpc.beta import _server_adaptations +from grpc.beta import interfaces # pylint: disable=unused-import +from grpc.framework.common import cardinality # pylint: disable=unused-import +from grpc.framework.interfaces.face import face # pylint: disable=unused-import + +# pylint: disable=too-many-arguments + +ChannelCredentials = grpc.ChannelCredentials +ssl_channel_credentials = grpc.ssl_channel_credentials +CallCredentials = grpc.CallCredentials + + +def metadata_call_credentials(metadata_plugin, name=None): + def plugin(context, callback): + def wrapped_callback(beta_metadata, error): + callback(_metadata.unbeta(beta_metadata), error) + + metadata_plugin(context, wrapped_callback) + + return grpc.metadata_call_credentials(plugin, name=name) + + +def google_call_credentials(credentials): + """Construct CallCredentials from GoogleCredentials. + + Args: + credentials: A GoogleCredentials object from the oauth2client library. 
+ + Returns: + A CallCredentials object for use in a GRPCCallOptions object. + """ + return metadata_call_credentials(_auth.GoogleCallCredentials(credentials)) + + +access_token_call_credentials = grpc.access_token_call_credentials +composite_call_credentials = grpc.composite_call_credentials +composite_channel_credentials = grpc.composite_channel_credentials + + +class Channel(object): + """A channel to a remote host through which RPCs may be conducted. + + Only the "subscribe" and "unsubscribe" methods are supported for application + use. This class' instance constructor and all other attributes are + unsupported. + """ + + def __init__(self, channel): + self._channel = channel + + def subscribe(self, callback, try_to_connect=None): + """Subscribes to this Channel's connectivity. + + Args: + callback: A callable to be invoked and passed an + interfaces.ChannelConnectivity identifying this Channel's connectivity. + The callable will be invoked immediately upon subscription and again for + every change to this Channel's connectivity thereafter until it is + unsubscribed. + try_to_connect: A boolean indicating whether or not this Channel should + attempt to connect if it is not already connected and ready to conduct + RPCs. + """ + self._channel.subscribe(callback, try_to_connect=try_to_connect) + + def unsubscribe(self, callback): + """Unsubscribes a callback from this Channel's connectivity. + + Args: + callback: A callable previously registered with this Channel from having + been passed to its "subscribe" method. + """ + self._channel.unsubscribe(callback) + + +def insecure_channel(host, port): + """Creates an insecure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + If None only the 'host' part will be used. + + Returns: + A Channel to the remote host through which RPCs may be conducted. + """ + channel = grpc.insecure_channel( + host if port is None else "%s:%d" % (host, port) + ) + return Channel(channel) + + +def secure_channel(host, port, channel_credentials): + """Creates a secure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + If None only the 'host' part will be used. + channel_credentials: A ChannelCredentials. + + Returns: + A secure Channel to the remote host through which RPCs may be conducted. + """ + channel = grpc.secure_channel( + host if port is None else "%s:%d" % (host, port), channel_credentials + ) + return Channel(channel) + + +class StubOptions(object): + """A value encapsulating the various options for creation of a Stub. + + This class and its instances have no supported interface - it exists to define + the type of its instances and its instances exist to be passed to other + functions. 
+ """ + + def __init__( + self, + host, + request_serializers, + response_deserializers, + metadata_transformer, + thread_pool, + thread_pool_size, + ): + self.host = host + self.request_serializers = request_serializers + self.response_deserializers = response_deserializers + self.metadata_transformer = metadata_transformer + self.thread_pool = thread_pool + self.thread_pool_size = thread_pool_size + + +_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None) + + +def stub_options( + host=None, + request_serializers=None, + response_deserializers=None, + metadata_transformer=None, + thread_pool=None, + thread_pool_size=None, +): + """Creates a StubOptions value to be passed at stub creation. + + All parameters are optional and should always be passed by keyword. + + Args: + host: A host string to set on RPC calls. + request_serializers: A dictionary from service name-method name pair to + request serialization behavior. + response_deserializers: A dictionary from service name-method name pair to + response deserialization behavior. + metadata_transformer: A callable that given a metadata object produces + another metadata object to be used in the underlying communication on the + wire. + thread_pool: A thread pool to use in stubs. + thread_pool_size: The size of thread pool to create for use in stubs; + ignored if thread_pool has been passed. + + Returns: + A StubOptions value created from the passed parameters. + """ + return StubOptions( + host, + request_serializers, + response_deserializers, + metadata_transformer, + thread_pool, + thread_pool_size, + ) + + +def generic_stub(channel, options=None): + """Creates a face.GenericStub on which RPCs can be made. + + Args: + channel: A Channel for use by the created stub. + options: A StubOptions customizing the created stub. + + Returns: + A face.GenericStub on which RPCs can be made. + """ + effective_options = _EMPTY_STUB_OPTIONS if options is None else options + return _client_adaptations.generic_stub( + channel._channel, # pylint: disable=protected-access + effective_options.host, + effective_options.metadata_transformer, + effective_options.request_serializers, + effective_options.response_deserializers, + ) + + +def dynamic_stub(channel, service, cardinalities, options=None): + """Creates a face.DynamicStub with which RPCs can be invoked. + + Args: + channel: A Channel for the returned face.DynamicStub to use. + service: The package-qualified full name of the service. + cardinalities: A dictionary from RPC method name to cardinality.Cardinality + value identifying the cardinality of the RPC method. + options: An optional StubOptions value further customizing the functionality + of the returned face.DynamicStub. + + Returns: + A face.DynamicStub with which RPCs can be invoked. + """ + effective_options = _EMPTY_STUB_OPTIONS if options is None else options + return _client_adaptations.dynamic_stub( + channel._channel, # pylint: disable=protected-access + service, + cardinalities, + effective_options.host, + effective_options.metadata_transformer, + effective_options.request_serializers, + effective_options.response_deserializers, + ) + + +ServerCredentials = grpc.ServerCredentials +ssl_server_credentials = grpc.ssl_server_credentials + + +class ServerOptions(object): + """A value encapsulating the various options for creation of a Server. + + This class and its instances have no supported interface - it exists to define + the type of its instances and its instances exist to be passed to other + functions. 
+ """ + + def __init__( + self, + multi_method_implementation, + request_deserializers, + response_serializers, + thread_pool, + thread_pool_size, + default_timeout, + maximum_timeout, + ): + self.multi_method_implementation = multi_method_implementation + self.request_deserializers = request_deserializers + self.response_serializers = response_serializers + self.thread_pool = thread_pool + self.thread_pool_size = thread_pool_size + self.default_timeout = default_timeout + self.maximum_timeout = maximum_timeout + + +_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None) + + +def server_options( + multi_method_implementation=None, + request_deserializers=None, + response_serializers=None, + thread_pool=None, + thread_pool_size=None, + default_timeout=None, + maximum_timeout=None, +): + """Creates a ServerOptions value to be passed at server creation. + + All parameters are optional and should always be passed by keyword. + + Args: + multi_method_implementation: A face.MultiMethodImplementation to be called + to service an RPC if the server has no specific method implementation for + the name of the RPC for which service was requested. + request_deserializers: A dictionary from service name-method name pair to + request deserialization behavior. + response_serializers: A dictionary from service name-method name pair to + response serialization behavior. + thread_pool: A thread pool to use in stubs. + thread_pool_size: The size of thread pool to create for use in stubs; + ignored if thread_pool has been passed. + default_timeout: A duration in seconds to allow for RPC service when + servicing RPCs that did not include a timeout value when invoked. + maximum_timeout: A duration in seconds to allow for RPC service when + servicing RPCs no matter what timeout value was passed when the RPC was + invoked. + + Returns: + A StubOptions value created from the passed parameters. + """ + return ServerOptions( + multi_method_implementation, + request_deserializers, + response_serializers, + thread_pool, + thread_pool_size, + default_timeout, + maximum_timeout, + ) + + +def server(service_implementations, options=None): + """Creates an interfaces.Server with which RPCs can be serviced. + + Args: + service_implementations: A dictionary from service name-method name pair to + face.MethodImplementation. + options: An optional ServerOptions value further customizing the + functionality of the returned Server. + + Returns: + An interfaces.Server with which RPCs can be serviced. + """ + effective_options = _EMPTY_SERVER_OPTIONS if options is None else options + return _server_adaptations.server( + service_implementations, + effective_options.multi_method_implementation, + effective_options.request_deserializers, + effective_options.response_serializers, + effective_options.thread_pool, + effective_options.thread_pool_size, + ) diff --git a/venv/lib/python3.10/site-packages/grpc/beta/interfaces.py b/venv/lib/python3.10/site-packages/grpc/beta/interfaces.py new file mode 100644 index 0000000000000000000000000000000000000000..c29b2915854919b46691046f8135da53553e055b --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/interfaces.py @@ -0,0 +1,163 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants and interfaces of the Beta API of gRPC Python.""" + +import abc + +import grpc + +ChannelConnectivity = grpc.ChannelConnectivity +# FATAL_FAILURE was a Beta-API name for SHUTDOWN +ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN + +StatusCode = grpc.StatusCode + + +class GRPCCallOptions(object): + """A value encapsulating gRPC-specific options passed on RPC invocation. + + This class and its instances have no supported interface - it exists to + define the type of its instances and its instances exist to be passed to + other functions. + """ + + def __init__(self, disable_compression, subcall_of, credentials): + self.disable_compression = disable_compression + self.subcall_of = subcall_of + self.credentials = credentials + + +def grpc_call_options(disable_compression=False, credentials=None): + """Creates a GRPCCallOptions value to be passed at RPC invocation. + + All parameters are optional and should always be passed by keyword. + + Args: + disable_compression: A boolean indicating whether or not compression should + be disabled for the request object of the RPC. Only valid for + request-unary RPCs. + credentials: A CallCredentials object to use for the invoked RPC. + """ + return GRPCCallOptions(disable_compression, None, credentials) + + +GRPCAuthMetadataContext = grpc.AuthMetadataContext +GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback +GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin + + +class GRPCServicerContext(abc.ABC): + """Exposes gRPC-specific options and behaviors to code servicing RPCs.""" + + @abc.abstractmethod + def peer(self): + """Identifies the peer that invoked the RPC being serviced. + + Returns: + A string identifying the peer that invoked the RPC being serviced. + """ + raise NotImplementedError() + + @abc.abstractmethod + def disable_next_response_compression(self): + """Disables compression of the next response passed by the application.""" + raise NotImplementedError() + + +class GRPCInvocationContext(abc.ABC): + """Exposes gRPC-specific options and behaviors to code invoking RPCs.""" + + @abc.abstractmethod + def disable_next_request_compression(self): + """Disables compression of the next request passed by the application.""" + raise NotImplementedError() + + +class Server(abc.ABC): + """Services RPCs.""" + + @abc.abstractmethod + def add_insecure_port(self, address): + """Reserves a port for insecure RPC service once this Server becomes active. + + This method may only be called before this Server's start method is + called. + + Args: + address: The address for which to open a port. + + Returns: + An integer port on which RPCs will be serviced after this link has been + started. This is typically the same number as the port number contained + in the passed address, but will likely be different if the port number + contained in the passed address was zero. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_secure_port(self, address, server_credentials): + """Reserves a port for secure RPC service after this Server becomes active.
+ + This method may only be called before this Server's start method is + called. + + Args: + address: The address for which to open a port. + server_credentials: A ServerCredentials. + + Returns: + An integer port on which RPCs will be serviced after this link has been + started. This is typically the same number as the port number contained + in the passed address, but will likely be different if the port number + contained in the passed address was zero. + """ + raise NotImplementedError() + + @abc.abstractmethod + def start(self): + """Starts this Server's service of RPCs. + + This method may only be called while the server is not serving RPCs (i.e. it + is not idempotent). + """ + raise NotImplementedError() + + @abc.abstractmethod + def stop(self, grace): + """Stops this Server's service of RPCs. + + All calls to this method immediately stop service of new RPCs. When existing + RPCs are aborted is controlled by the grace period parameter passed to this + method. + + This method may be called at any time and is idempotent. Passing a smaller + grace value than has been passed in a previous call will have the effect of + stopping the Server sooner. Passing a larger grace value than has been + passed in a previous call will not have the effect of stopping the server + later. + + Args: + grace: A duration of time in seconds to allow existing RPCs to complete + before being aborted by this Server's stopping. May be zero for + immediate abortion of all in-progress RPCs. + + Returns: + A threading.Event that will be set when this Server has completely + stopped. The returned event may not be set until after the full grace + period (if some ongoing RPC continues for the full length of the period) + or it may be set much sooner (such as if this Server had no RPCs underway + at the time it was stopped or if all RPCs that it had underway completed + very early in the grace period). + """ + raise NotImplementedError() diff --git a/venv/lib/python3.10/site-packages/grpc/beta/utilities.py b/venv/lib/python3.10/site-packages/grpc/beta/utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..90e54715cff72002ef4cfdce61867e0c3098e3d0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/beta/utilities.py @@ -0,0 +1,153 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for the gRPC Python Beta API.""" + +import threading +import time + +# implementations is referenced from specification in this module. +from grpc.beta import implementations # pylint: disable=unused-import +from grpc.beta import interfaces +from grpc.framework.foundation import callable_util +from grpc.framework.foundation import future + +_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = ( + 'Exception calling connectivity future "done" callback!'
+) + + +class _ChannelReadyFuture(future.Future): + def __init__(self, channel): + self._condition = threading.Condition() + self._channel = channel + + self._matured = False + self._cancelled = False + self._done_callbacks = [] + + def _block(self, timeout): + until = None if timeout is None else time.time() + timeout + with self._condition: + while True: + if self._cancelled: + raise future.CancelledError() + elif self._matured: + return + else: + if until is None: + self._condition.wait() + else: + remaining = until - time.time() + if remaining < 0: + raise future.TimeoutError() + else: + self._condition.wait(timeout=remaining) + + def _update(self, connectivity): + with self._condition: + if ( + not self._cancelled + and connectivity is interfaces.ChannelConnectivity.READY + ): + self._matured = True + self._channel.unsubscribe(self._update) + self._condition.notify_all() + done_callbacks = tuple(self._done_callbacks) + self._done_callbacks = None + else: + return + + for done_callback in done_callbacks: + callable_util.call_logging_exceptions( + done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self + ) + + def cancel(self): + with self._condition: + if not self._matured: + self._cancelled = True + self._channel.unsubscribe(self._update) + self._condition.notify_all() + done_callbacks = tuple(self._done_callbacks) + self._done_callbacks = None + else: + return False + + for done_callback in done_callbacks: + callable_util.call_logging_exceptions( + done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self + ) + + return True + + def cancelled(self): + with self._condition: + return self._cancelled + + def running(self): + with self._condition: + return not self._cancelled and not self._matured + + def done(self): + with self._condition: + return self._cancelled or self._matured + + def result(self, timeout=None): + self._block(timeout) + return None + + def exception(self, timeout=None): + self._block(timeout) + return None + + def traceback(self, timeout=None): + self._block(timeout) + return None + + def add_done_callback(self, fn): + with self._condition: + if not self._cancelled and not self._matured: + self._done_callbacks.append(fn) + return + + fn(self) + + def start(self): + with self._condition: + self._channel.subscribe(self._update, try_to_connect=True) + + def __del__(self): + with self._condition: + if not self._cancelled and not self._matured: + self._channel.unsubscribe(self._update) + + +def channel_ready_future(channel): + """Creates a future.Future tracking when an implementations.Channel is ready. + + Cancelling the returned future.Future does not tell the given + implementations.Channel to abandon attempts it may have been making to + connect; cancelling merely deactivates the returned future.Future's + subscription to the given implementations.Channel's connectivity. + + Args: + channel: An implementations.Channel. + + Returns: + A future.Future that matures when the given Channel has connectivity + interfaces.ChannelConnectivity.READY. + """ + ready_future = _ChannelReadyFuture(channel) + ready_future.start() + return ready_future diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/__init__.py b/venv/lib/python3.10/site-packages/grpc/experimental/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..32a53bf7f3434d855ba46e2d5a5924172735d2cd --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/experimental/__init__.py @@ -0,0 +1,134 @@ +# Copyright 2018 gRPC authors.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""gRPC's experimental APIs. + +These APIs are subject to removal during any minor version release. +""" + +import copy +import functools +import sys +import warnings + +import grpc +from grpc._cython import cygrpc as _cygrpc + +_EXPERIMENTAL_APIS_USED = set() + + +class ChannelOptions(object): + """Indicates a channel option unique to gRPC Python. + + This enumeration is part of an EXPERIMENTAL API. + + Attributes: + SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread. + """ + + SingleThreadedUnaryStream = "SingleThreadedUnaryStream" + + +class UsageError(Exception): + """Raised by the gRPC library to indicate usage not allowed by the API.""" + + +# It's important that there be a single insecure credentials object so that its +# hash is deterministic and can be used for indexing in the simple stubs cache. +_insecure_channel_credentials = grpc.ChannelCredentials( + _cygrpc.channel_credentials_insecure() +) + + +def insecure_channel_credentials(): + """Creates a ChannelCredentials for use with an insecure channel. + + THIS IS AN EXPERIMENTAL API. + """ + return _insecure_channel_credentials + + +class ExperimentalApiWarning(Warning): + """A warning that an API is experimental.""" + + +def _warn_experimental(api_name, stack_offset): + if api_name not in _EXPERIMENTAL_APIS_USED: + _EXPERIMENTAL_APIS_USED.add(api_name) + msg = ( + "'{}' is an experimental API. It is subject to change or ".format( + api_name + ) + + "removal between minor releases. Proceed with caution." + ) + warnings.warn(msg, ExperimentalApiWarning, stacklevel=2 + stack_offset) + + +def experimental_api(f): + @functools.wraps(f) + def _wrapper(*args, **kwargs): + _warn_experimental(f.__name__, 1) + return f(*args, **kwargs) + + return _wrapper + + +def wrap_server_method_handler(wrapper, handler): + """Wraps the server method handler function. + + The server implementation requires all server handlers to be wrapped as + RpcMethodHandler objects. This helper function eases the pain of writing + server handler wrappers. + + Args: + wrapper: A wrapper function that takes in a method handler behavior + (the actual function) and returns a wrapped function. + handler: A RpcMethodHandler object to be wrapped. + + Returns: + A newly created RpcMethodHandler.
+ """ + if not handler: + return None + + if not handler.request_streaming: + if not handler.response_streaming: + # NOTE(lidiz) _replace is a public API: + # https://docs.python.org/dev/library/collections.html + return handler._replace(unary_unary=wrapper(handler.unary_unary)) + else: + return handler._replace(unary_stream=wrapper(handler.unary_stream)) + else: + if not handler.response_streaming: + return handler._replace(stream_unary=wrapper(handler.stream_unary)) + else: + return handler._replace( + stream_stream=wrapper(handler.stream_stream) + ) + + +__all__ = ( + "ChannelOptions", + "ExperimentalApiWarning", + "UsageError", + "insecure_channel_credentials", + "wrap_server_method_handler", +) + +if sys.version_info > (3, 6): + from grpc._simple_stubs import stream_stream + from grpc._simple_stubs import stream_unary + from grpc._simple_stubs import unary_stream + from grpc._simple_stubs import unary_unary + + __all__ = __all__ + (unary_unary, unary_stream, stream_unary, stream_stream) diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6089453928207fb0dc84a69c3853d53ef605128d Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/gevent.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/gevent.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c65c53c090d4804af27b9eae47aec1f78a10b03 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/gevent.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/session_cache.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/session_cache.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33255a55b43101b99a867729dffa783bd922d19c Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/experimental/__pycache__/session_cache.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/aio/__init__.py b/venv/lib/python3.10/site-packages/grpc/experimental/aio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..576cb8dcde410bcf33ebd2721ae2eaef5c4c4e0a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/experimental/aio/__init__.py @@ -0,0 +1,16 @@ +# Copyright 2020 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Alias of grpc.aio to keep backward compatibility.""" + +from grpc.aio import * diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc500a5ebe2ce6ce50366e30ee362ff182c97c2d Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/experimental/aio/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/gevent.py b/venv/lib/python3.10/site-packages/grpc/experimental/gevent.py new file mode 100644 index 0000000000000000000000000000000000000000..159d612b4ed1fe3debf7a96d04f6e8057262a62f --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/experimental/gevent.py @@ -0,0 +1,27 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""gRPC's Python gEvent APIs.""" + +from grpc._cython import cygrpc as _cygrpc + + +def init_gevent(): + """Patches gRPC's libraries to be compatible with gevent. + + This must be called AFTER the python standard lib has been patched, + but BEFORE creating and gRPC objects. + + In order for progress to be made, the application must drive the event loop. + """ + _cygrpc.init_grpc_gevent() diff --git a/venv/lib/python3.10/site-packages/grpc/experimental/session_cache.py b/venv/lib/python3.10/site-packages/grpc/experimental/session_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..5c55f7c327c5062f232c41f09046465fb4f77214 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/experimental/session_cache.py @@ -0,0 +1,45 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""gRPC's APIs for TLS Session Resumption support""" + +from grpc._cython import cygrpc as _cygrpc + + +def ssl_session_cache_lru(capacity): + """Creates an SSLSessionCache with LRU replacement policy + + Args: + capacity: Size of the cache + + Returns: + An SSLSessionCache with LRU replacement policy that can be passed as a value for + the grpc.ssl_session_cache option to a grpc.Channel. SSL session caches are used + to store session tickets, which clients can present to resume previous TLS sessions + with a server. + """ + return SSLSessionCache(_cygrpc.SSLSessionCacheLRU(capacity)) + + +class SSLSessionCache(object): + """An encapsulation of a session cache used for TLS session resumption. 
+ + Instances of this class can be passed to a Channel as values for the + grpc.ssl_session_cache option + """ + + def __init__(self, cache): + self._cache = cache + + def __int__(self): + return int(self._cache) diff --git a/venv/lib/python3.10/site-packages/grpc/framework/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/grpc/framework/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8e9a4fa9f5677f18b5427298b55ca235d53711b Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/common/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/common/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
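# A minimal usage sketch for the ssl_session_cache_lru helper defined in
# grpc/experimental/session_cache.py above; the target address is a
# hypothetical illustration, and session resumption additionally requires a
# server that issues session tickets.
import grpc
from grpc.experimental.session_cache import ssl_session_cache_lru

cache = ssl_session_cache_lru(1024)  # room for 1024 cached TLS sessions
channel = grpc.secure_channel(
    "example.com:443",
    grpc.ssl_channel_credentials(),
    # SSLSessionCache defines __int__, so the object can be supplied directly
    # as the value of the grpc.ssl_session_cache channel option.
    options=(("grpc.ssl_session_cache", cache),),
)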
diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbe5978858adea831a92282e6d578d2d444b729d Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d067a37ed959833260fdbaa66e9f421c2056a23f Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/cardinality.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/style.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/style.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..987ccdff0e3694f73508fabcd2a409482b7abbba Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/common/__pycache__/style.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/cardinality.py b/venv/lib/python3.10/site-packages/grpc/framework/common/cardinality.py new file mode 100644 index 0000000000000000000000000000000000000000..3d3d4d3427cb821cae762ffea7cbd4a7d55600a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/common/cardinality.py @@ -0,0 +1,26 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines an enum for classifying RPC methods by streaming semantics.""" + +import enum + + +@enum.unique +class Cardinality(enum.Enum): + """Describes the streaming semantics of an RPC method.""" + + UNARY_UNARY = "request-unary/response-unary" + UNARY_STREAM = "request-unary/response-streaming" + STREAM_UNARY = "request-streaming/response-unary" + STREAM_STREAM = "request-streaming/response-streaming" diff --git a/venv/lib/python3.10/site-packages/grpc/framework/common/style.py b/venv/lib/python3.10/site-packages/grpc/framework/common/style.py new file mode 100644 index 0000000000000000000000000000000000000000..10bf5f17697afd3788cef3f22af8341decd7a4f8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/common/style.py @@ -0,0 +1,24 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines an enum for classifying RPC methods by control flow semantics.""" + +import enum + + +@enum.unique +class Service(enum.Enum): + """Describes the control flow style of RPC method implementation.""" + + INLINE = "inline" + EVENT = "event" diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2f67b94cda38beecd2c8a4fff7313cdd7903927 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..176c1474d7d5caf4335d5aa1f27267d28801e63e Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/abandonment.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..712b9584ba7689b77d58765e51b80bd40b25838f Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/callable_util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/future.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/future.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..670fcdbfa648a3650dc7298cafe89c72e9665aa7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/future.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..671003501054fbb631438699863c8ecdc1c0acc9 Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/logging_pool.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a3a0a8727f5d0e97c1fdd18b5af9dd36557e3e6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b3f19e66e73c200332c006a2752a8a12a51d2c4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/foundation/__pycache__/stream_util.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/abandonment.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/abandonment.py new file mode 100644 index 0000000000000000000000000000000000000000..c4cb7d5c0725474f8d00a930d9d25fe6565ec060 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/abandonment.py @@ -0,0 +1,22 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for indicating abandonment of computation.""" + + +class Abandoned(Exception): + """Indicates that some computation is being abandoned. + + Abandoning a computation is different than returning a value or raising + an exception indicating some operational or programming defect. + """ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/callable_util.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/callable_util.py new file mode 100644 index 0000000000000000000000000000000000000000..b64131b40294f97bb3d4891b70fc86c5155261cf --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/callable_util.py @@ -0,0 +1,98 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for working with callables.""" + +from abc import ABC +import collections +import enum +import functools +import logging + +_LOGGER = logging.getLogger(__name__) + + +class Outcome(ABC): + """A sum type describing the outcome of some call. 
+ + Attributes: + kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the + call returned a value or raised an exception. + return_value: The value returned by the call. Must be present if kind is + Kind.RETURNED. + exception: The exception raised by the call. Must be present if kind is + Kind.RAISED. + """ + + @enum.unique + class Kind(enum.Enum): + """Identifies the general kind of the outcome of some call.""" + + RETURNED = object() + RAISED = object() + + +class _EasyOutcome( + collections.namedtuple( + "_EasyOutcome", ["kind", "return_value", "exception"] + ), + Outcome, +): + """A trivial implementation of Outcome.""" + + +def _call_logging_exceptions(behavior, message, *args, **kwargs): + try: + return _EasyOutcome( + Outcome.Kind.RETURNED, behavior(*args, **kwargs), None + ) + except Exception as e: # pylint: disable=broad-except + _LOGGER.exception(message) + return _EasyOutcome(Outcome.Kind.RAISED, None, e) + + +def with_exceptions_logged(behavior, message): + """Wraps a callable in a try-except that logs any exceptions it raises. + + Args: + behavior: Any callable. + message: A string to log if the behavior raises an exception. + + Returns: + A callable that when executed invokes the given behavior. The returned + callable takes the same arguments as the given behavior but returns + an Outcome describing whether the given behavior returned a value or + raised an exception. + """ + + @functools.wraps(behavior) + def wrapped_behavior(*args, **kwargs): + return _call_logging_exceptions(behavior, message, *args, **kwargs) + + return wrapped_behavior + + +def call_logging_exceptions(behavior, message, *args, **kwargs): + """Calls a behavior in a try-except that logs any exceptions it raises. + + Args: + behavior: Any callable. + message: A string to log if the behavior raises an exception. + *args: Positional arguments to pass to the given behavior. + **kwargs: Keyword arguments to pass to the given behavior. + + Returns: + An Outcome describing whether the given behavior returned a value or raised + an exception. + """ + return _call_logging_exceptions(behavior, message, *args, **kwargs) diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/future.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/future.py new file mode 100644 index 0000000000000000000000000000000000000000..73b0d0bdbe11877922464838a62cfea7b8584331 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/future.py @@ -0,0 +1,219 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""A Future interface. + +Python doesn't have a Future interface in its standard library. In the absence +of such a standard, three separate, incompatible implementations +(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This +interface attempts to be as compatible as possible with +concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor +method.
+ +Unlike the concrete and implemented Future classes listed above, the Future +class defined in this module is an entirely abstract interface that anyone may +implement and use. + +The one known incompatibility between this interface and the interface of +concurrent.futures.Future is that this interface defines its own CancelledError +and TimeoutError exceptions rather than raising the implementation-private +concurrent.futures._base.CancelledError and the +built-in-but-only-in-3.3-and-later TimeoutError. +""" + +import abc + + +class TimeoutError(Exception): + """Indicates that a particular call timed out.""" + + +class CancelledError(Exception): + """Indicates that the computation underlying a Future was cancelled.""" + + +class Future(abc.ABC): + """A representation of a computation in another control flow. + + Computations represented by a Future may be yet to be begun, may be ongoing, + or may have already completed. + """ + + # NOTE(nathaniel): This isn't the return type that I would want to have if it + # were up to me. Were this interface being written from scratch, the return + # type of this method would probably be a sum type like: + # + # NOT_COMMENCED + # COMMENCED_AND_NOT_COMPLETED + # PARTIAL_RESULT + # COMPLETED + # UNCANCELLABLE + # NOT_IMMEDIATELY_DETERMINABLE + @abc.abstractmethod + def cancel(self): + """Attempts to cancel the computation. + + This method does not block. + + Returns: + True if the computation has not yet begun, will not be allowed to take + place, and determination of both was possible without blocking. False + under all other circumstances including but not limited to the + computation's already having begun, the computation's already having + finished, and the computation's having been scheduled for execution on a + remote system for which a determination of whether or not it commenced + before being cancelled cannot be made without blocking. + """ + raise NotImplementedError() + + # NOTE(nathaniel): Here too this isn't the return type that I'd want this + # method to have if it were up to me. I think I'd go with another sum type + # like: + # + # NOT_CANCELLED (this object's cancel method hasn't been called) + # NOT_COMMENCED + # COMMENCED_AND_NOT_COMPLETED + # PARTIAL_RESULT + # COMPLETED + # UNCANCELLABLE + # NOT_IMMEDIATELY_DETERMINABLE + # + # Notice how giving the cancel method the right semantics obviates most + # reasons for this method to exist. + @abc.abstractmethod + def cancelled(self): + """Describes whether the computation was cancelled. + + This method does not block. + + Returns: + True if the computation was cancelled any time before its result became + immediately available. False under all other circumstances including but + not limited to this object's cancel method not having been called and + the computation's result having become immediately available. + """ + raise NotImplementedError() + + @abc.abstractmethod + def running(self): + """Describes whether the computation is taking place. + + This method does not block. + + Returns: + True if the computation is scheduled to take place in the future or is + taking place now, or False if the computation took place in the past or + was cancelled. + """ + raise NotImplementedError() + + # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I + # would rather this only returned True in cases in which the underlying + # computation completed successfully. A computation's having been cancelled + # conflicts with considering that computation "done". 
+ @abc.abstractmethod + def done(self): + """Describes whether the computation has taken place. + + This method does not block. + + Returns: + True if the computation is known to have either completed or have been + unscheduled or interrupted. False if the computation may possibly be + executing or scheduled to execute later. + """ + raise NotImplementedError() + + @abc.abstractmethod + def result(self, timeout=None): + """Accesses the outcome of the computation or raises its exception. + + This method may return immediately or may block. + + Args: + timeout: The length of time in seconds to wait for the computation to + finish or be cancelled, or None if this method should block until the + computation has finished or is cancelled no matter how long that takes. + + Returns: + The return value of the computation. + + Raises: + TimeoutError: If a timeout value is passed and the computation does not + terminate within the allotted time. + CancelledError: If the computation was cancelled. + Exception: If the computation raised an exception, this call will raise + the same exception. + """ + raise NotImplementedError() + + @abc.abstractmethod + def exception(self, timeout=None): + """Return the exception raised by the computation. + + This method may return immediately or may block. + + Args: + timeout: The length of time in seconds to wait for the computation to + terminate or be cancelled, or None if this method should block until + the computation is terminated or is cancelled no matter how long that + takes. + + Returns: + The exception raised by the computation, or None if the computation did + not raise an exception. + + Raises: + TimeoutError: If a timeout value is passed and the computation does not + terminate within the allotted time. + CancelledError: If the computation was cancelled. + """ + raise NotImplementedError() + + @abc.abstractmethod + def traceback(self, timeout=None): + """Access the traceback of the exception raised by the computation. + + This method may return immediately or may block. + + Args: + timeout: The length of time in seconds to wait for the computation to + terminate or be cancelled, or None if this method should block until + the computation is terminated or is cancelled no matter how long that + takes. + + Returns: + The traceback of the exception raised by the computation, or None if the + computation did not raise an exception. + + Raises: + TimeoutError: If a timeout value is passed and the computation does not + terminate within the allotted time. + CancelledError: If the computation was cancelled. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_done_callback(self, fn): + """Adds a function to be called at completion of the computation. + + The callback will be passed this Future object describing the outcome of + the computation. + + If the computation has already completed, the callback will be called + immediately. + + Args: + fn: A callable taking this Future object as its single parameter. + """ + raise NotImplementedError() diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/logging_pool.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/logging_pool.py new file mode 100644 index 0000000000000000000000000000000000000000..a4e140f174de5eecab864928f3252b0078cca7d6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/logging_pool.py @@ -0,0 +1,72 @@ +# Copyright 2015 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""A thread pool that logs exceptions raised by tasks executed within it.""" + +from concurrent import futures +import logging + +_LOGGER = logging.getLogger(__name__) + + +def _wrap(behavior): + """Wraps an arbitrary callable behavior in exception-logging.""" + + def _wrapping(*args, **kwargs): + try: + return behavior(*args, **kwargs) + except Exception: + _LOGGER.exception( + "Unexpected exception from %s executed in logging pool!", + behavior, + ) + raise + + return _wrapping + + +class _LoggingPool(object): + """An exception-logging futures.ThreadPoolExecutor-compatible thread pool.""" + + def __init__(self, backing_pool): + self._backing_pool = backing_pool + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._backing_pool.shutdown(wait=True) + + def submit(self, fn, *args, **kwargs): + return self._backing_pool.submit(_wrap(fn), *args, **kwargs) + + def map(self, func, *iterables, **kwargs): + return self._backing_pool.map( + _wrap(func), *iterables, timeout=kwargs.get("timeout", None) + ) + + def shutdown(self, wait=True): + self._backing_pool.shutdown(wait=wait) + + +def pool(max_workers): + """Creates a thread pool that logs exceptions raised by the tasks within it. + + Args: + max_workers: The maximum number of worker threads to allow in the pool. + + Returns: + A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions + raised by the tasks executed within it. + """ + return _LoggingPool(futures.ThreadPoolExecutor(max_workers)) diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream.py new file mode 100644 index 0000000000000000000000000000000000000000..70ca1d915756a0605d1b85a72ef5fb80590816d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream.py @@ -0,0 +1,43 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Interfaces related to streams of values or objects.""" + +import abc + + +class Consumer(abc.ABC): + """Interface for consumers of finite streams of values or objects.""" + + @abc.abstractmethod + def consume(self, value): + """Accepts a value. + + Args: + value: Any value accepted by this Consumer.
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def terminate(self): + """Indicates to this Consumer that no more values will be supplied.""" + raise NotImplementedError() + + @abc.abstractmethod + def consume_and_terminate(self, value): + """Supplies a value and signals that no more values will be supplied. + + Args: + value: Any value accepted by this Consumer. + """ + raise NotImplementedError() diff --git a/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream_util.py b/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream_util.py new file mode 100644 index 0000000000000000000000000000000000000000..1faaf29bd7e3c69006d825ea6f0a21dbc8f5234d --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/foundation/stream_util.py @@ -0,0 +1,148 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Helpful utilities related to the stream module.""" + +import logging +import threading + +from grpc.framework.foundation import stream + +_NO_VALUE = object() +_LOGGER = logging.getLogger(__name__) + + +class TransformingConsumer(stream.Consumer): + """A stream.Consumer that passes a transformation of its input to another.""" + + def __init__(self, transformation, downstream): + self._transformation = transformation + self._downstream = downstream + + def consume(self, value): + self._downstream.consume(self._transformation(value)) + + def terminate(self): + self._downstream.terminate() + + def consume_and_terminate(self, value): + self._downstream.consume_and_terminate(self._transformation(value)) + + +class IterableConsumer(stream.Consumer): + """A Consumer that when iterated over emits the values it has consumed.""" + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._active = True + + def consume(self, value): + with self._condition: + if self._active: + self._values.append(value) + self._condition.notify() + + def terminate(self): + with self._condition: + self._active = False + self._condition.notify() + + def consume_and_terminate(self, value): + with self._condition: + if self._active: + self._values.append(value) + self._active = False + self._condition.notify() + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while self._active and not self._values: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + +class ThreadSwitchingConsumer(stream.Consumer): + """A Consumer decorator that affords serialization and asynchrony.""" + + def __init__(self, sink, pool): + self._lock = threading.Lock() + self._sink = sink + self._pool = pool + # True if self._spin has been submitted to the pool to be called once and + # that call has not yet returned, False otherwise. 
+ self._spinning = False + self._values = [] + self._active = True + + def _spin(self, sink, value, terminate): + while True: + try: + if value is _NO_VALUE: + sink.terminate() + elif terminate: + sink.consume_and_terminate(value) + else: + sink.consume(value) + except Exception as e: # pylint:disable=broad-except + _LOGGER.exception(e) + + with self._lock: + if terminate: + self._spinning = False + return + elif self._values: + value = self._values.pop(0) + terminate = not self._values and not self._active + elif not self._active: + value = _NO_VALUE + terminate = True + else: + self._spinning = False + return + + def consume(self, value): + with self._lock: + if self._active: + if self._spinning: + self._values.append(value) + else: + self._pool.submit(self._spin, self._sink, value, False) + self._spinning = True + + def terminate(self): + with self._lock: + if self._active: + self._active = False + if not self._spinning: + self._pool.submit(self._spin, self._sink, _NO_VALUE, True) + self._spinning = True + + def consume_and_terminate(self, value): + with self._lock: + if self._active: + self._active = False + if self._spinning: + self._values.append(value) + else: + self._pool.submit(self._spin, self._sink, value, True) + self._spinning = True diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59cf95793c97c4d3902f6fb028781c89c56f082d Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48f1a53350aea2c6b37bc4a349a9a6b3340d4ab5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6ee84a4aecfd9a7bf08cb950ee72312d2017abc Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/base.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbcc7fcb6f01c5dea89d53a563992d7c702a1e4f Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/__pycache__/utilities.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/base.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/base.py new file mode 100644 index 0000000000000000000000000000000000000000..ea71ff6a181bea0bf8c3f48fddbd15a64c6d37e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/base.py @@ -0,0 +1,328 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The base interface of RPC Framework. + +Implementations of this interface support the conduct of "operations": +exchanges between two distinct ends of an arbitrary number of data payloads +and metadata such as a name for the operation, initial and terminal metadata +in each direction, and flow control. These operations may be used for transfers +of data, remote procedure calls, status indication, or anything else +applications choose. +""" + +# threading is referenced from specification in this module. +import abc +import enum +import threading # pylint: disable=unused-import + +# pylint: disable=too-many-arguments + + +class NoSuchMethodError(Exception): + """Indicates that an unrecognized operation has been called. 
+ + Attributes: + code: A code value to communicate to the other side of the operation + along with indication of operation termination. May be None. + details: A details value to communicate to the other side of the + operation along with indication of operation termination. May be None. + """ + + def __init__(self, code, details): + """Constructor. + + Args: + code: A code value to communicate to the other side of the operation + along with indication of operation termination. May be None. + details: A details value to communicate to the other side of the + operation along with indication of operation termination. May be None. + """ + super(NoSuchMethodError, self).__init__() + self.code = code + self.details = details + + +class Outcome(object): + """The outcome of an operation. + + Attributes: + kind: A Kind value coarsely identifying how the operation terminated. + code: An application-specific code value or None if no such value was + provided. + details: An application-specific details value or None if no such value was + provided. + """ + + @enum.unique + class Kind(enum.Enum): + """Ways in which an operation can terminate.""" + + COMPLETED = "completed" + CANCELLED = "cancelled" + EXPIRED = "expired" + LOCAL_SHUTDOWN = "local shutdown" + REMOTE_SHUTDOWN = "remote shutdown" + RECEPTION_FAILURE = "reception failure" + TRANSMISSION_FAILURE = "transmission failure" + LOCAL_FAILURE = "local failure" + REMOTE_FAILURE = "remote failure" + + +class Completion(abc.ABC): + """An aggregate of the values exchanged upon operation completion. + + Attributes: + terminal_metadata: A terminal metadata value for the operation. + code: A code value for the operation. + message: A message value for the operation. + """ + + +class OperationContext(abc.ABC): + """Provides operation-related information and action.""" + + @abc.abstractmethod + def outcome(self): + """Indicates the operation's outcome (or that the operation is ongoing). + + Returns: + None if the operation is still active or the Outcome value for the + operation if it has terminated. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_termination_callback(self, callback): + """Adds a function to be called upon operation termination. + + Args: + callback: A callable to be passed an Outcome value on operation + termination. + + Returns: + None if the operation has not yet terminated (in which case the passed + callback will later be called when it does terminate), or an Outcome + value describing the operation termination if the operation has already + terminated (in which case the passed callback will not be called as a + result of this method call). + """ + raise NotImplementedError() + + @abc.abstractmethod + def time_remaining(self): + """Describes the length of allowed time remaining for the operation. + + Returns: + A nonnegative float indicating the length of allowed time in seconds + remaining for the operation to complete before it is considered to have + timed out. Zero is returned if the operation has terminated. + """ + raise NotImplementedError() + + @abc.abstractmethod + def cancel(self): + """Cancels the operation if the operation has not yet terminated.""" + raise NotImplementedError() + + @abc.abstractmethod + def fail(self, exception): + """Indicates that the operation has failed. + + Args: + exception: An exception germane to the operation failure. May be None.
+ """ + raise NotImplementedError() + + +class Operator(abc.ABC): + """An interface through which to participate in an operation.""" + + @abc.abstractmethod + def advance( + self, + initial_metadata=None, + payload=None, + completion=None, + allowance=None, + ): + """Progresses the operation. + + Args: + initial_metadata: An initial metadata value. Only one may ever be + communicated in each direction for an operation, and they must be + communicated no later than either the first payload or the completion. + payload: A payload value. + completion: A Completion value. May only ever be non-None once in either + direction, and no payloads may be passed after it has been communicated. + allowance: A positive integer communicating the number of additional + payloads allowed to be passed by the remote side of the operation. + """ + raise NotImplementedError() + + +class ProtocolReceiver(abc.ABC): + """A means of receiving protocol values during an operation.""" + + @abc.abstractmethod + def context(self, protocol_context): + """Accepts the protocol context object for the operation. + + Args: + protocol_context: The protocol context object for the operation. + """ + raise NotImplementedError() + + +class Subscription(abc.ABC): + """Describes customer code's interest in values from the other side. + + Attributes: + kind: A Kind value describing the overall kind of this value. + termination_callback: A callable to be passed the Outcome associated with + the operation after it has terminated. Must be non-None if kind is + Kind.TERMINATION_ONLY. Must be None otherwise. + allowance: A callable behavior that accepts positive integers representing + the number of additional payloads allowed to be passed to the other side + of the operation. Must be None if kind is Kind.FULL. Must not be None + otherwise. + operator: An Operator to be passed values from the other side of the + operation. Must be non-None if kind is Kind.FULL. Must be None otherwise. + protocol_receiver: A ProtocolReceiver to be passed protocol objects as they + become available during the operation. Must be non-None if kind is + Kind.FULL. + """ + + @enum.unique + class Kind(enum.Enum): + NONE = "none" + TERMINATION_ONLY = "termination only" + FULL = "full" + + +class Servicer(abc.ABC): + """Interface for service implementations.""" + + @abc.abstractmethod + def service(self, group, method, context, output_operator): + """Services an operation. + + Args: + group: The group identifier of the operation to be serviced. + method: The method identifier of the operation to be serviced. + context: An OperationContext object affording contextual information and + actions. + output_operator: An Operator that will accept output values of the + operation. + + Returns: + A Subscription via which this object may or may not accept more values of + the operation. + + Raises: + NoSuchMethodError: If this Servicer does not handle operations with the + given group and method. + abandonment.Abandoned: If the operation has been aborted and there no + longer is any reason to service the operation. + """ + raise NotImplementedError() + + +class End(abc.ABC): + """Common type for entry-point objects on both sides of an operation.""" + + @abc.abstractmethod + def start(self): + """Starts this object's service of operations.""" + raise NotImplementedError() + + @abc.abstractmethod + def stop(self, grace): + """Stops this object's service of operations. 
+ + This object will refuse service of new operations as soon as this method is + called but operations under way at the time of the call may be given a + grace period during which they are allowed to finish. + + Args: + grace: A duration of time in seconds to allow ongoing operations to + terminate before being forcefully terminated by the stopping of this + End. May be zero to terminate all ongoing operations and immediately + stop. + + Returns: + A threading.Event that will be set to indicate all operations having + terminated and this End having completely stopped. The returned event + may not be set until after the full grace period (if some ongoing + operation continues for the full length of the period) or it may be set + much sooner (if for example this End had no operations in progress at + the time its stop method was called). + """ + raise NotImplementedError() + + @abc.abstractmethod + def operate( + self, + group, + method, + subscription, + timeout, + initial_metadata=None, + payload=None, + completion=None, + protocol_options=None, + ): + """Commences an operation. + + Args: + group: The group identifier of the invoked operation. + method: The method identifier of the invoked operation. + subscription: A Subscription to which the results of the operation will be + passed. + timeout: A length of time in seconds to allow for the operation. + initial_metadata: An initial metadata value to be sent to the other side + of the operation. May be None if the initial metadata will be later + passed via the returned operator or if there will be no initial metadata + passed at all. + payload: An initial payload for the operation. + completion: A Completion value indicating the end of transmission to the + other side of the operation. + protocol_options: A value specified by the provider of a Base interface + implementation affording custom state and behavior. + + Returns: + A pair of objects affording information about the operation and action + continuing the operation. The first element of the returned pair is an + OperationContext for the operation and the second element of the + returned pair is an Operator to which operation values not passed in + this call should later be passed. + """ + raise NotImplementedError() + + @abc.abstractmethod + def operation_stats(self): + """Reports the number of terminated operations broken down by outcome. + + Returns: + A dictionary from Outcome.Kind value to an integer identifying the number + of operations that terminated with that outcome kind. + """ + raise NotImplementedError() + + @abc.abstractmethod + def add_idle_action(self, action): + """Adds an action to be called when this End has no ongoing operations. + + Args: + action: A callable that accepts no arguments. + """ + raise NotImplementedError() diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/utilities.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..5b601f3bde839ab8293ae132a58d3b99dfb50c36 --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/base/utilities.py @@ -0,0 +1,83 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for use with the base interface of RPC Framework.""" + +import collections + +from grpc.framework.interfaces.base import base + + +class _Completion( + base.Completion, + collections.namedtuple( + "_Completion", + ( + "terminal_metadata", + "code", + "message", + ), + ), +): + """A trivial implementation of base.Completion.""" + + +class _Subscription( + base.Subscription, + collections.namedtuple( + "_Subscription", + ( + "kind", + "termination_callback", + "allowance", + "operator", + "protocol_receiver", + ), + ), +): + """A trivial implementation of base.Subscription.""" + + +_NONE_SUBSCRIPTION = _Subscription( + base.Subscription.Kind.NONE, None, None, None, None +) + + +def completion(terminal_metadata, code, message): + """Creates a base.Completion aggregating the given operation values. + + Args: + terminal_metadata: A terminal metadata value for an operation. + code: A code value for an operation. + message: A message value for an operation. + + Returns: + A base.Completion aggregating the given operation values. + """ + return _Completion(terminal_metadata, code, message) + + +def full_subscription(operator, protocol_receiver): + """Creates a "full" base.Subscription for the given base.Operator. + + Args: + operator: A base.Operator to be used in an operation. + protocol_receiver: A base.ProtocolReceiver to be used in an operation. + + Returns: + A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given + base.Operator and base.ProtocolReceiver. + """ + return _Subscription( + base.Subscription.Kind.FULL, None, None, operator, protocol_receiver + ) diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__init__.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4f3c3cfd5622f4067f3dd22eb49318855325a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
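The base/utilities.py helpers above are thin namedtuple-backed constructors over the abstract types defined in base.py. As a rough sketch of how they compose, the following pairs a hypothetical Operator and ProtocolReceiver with `full_subscription` and `completion`; the `_RecordingOperator` and `_IgnoringProtocolReceiver` classes are illustrative assumptions, not part of the vendored sources:

```python
from grpc.framework.interfaces.base import base
from grpc.framework.interfaces.base import utilities


class _RecordingOperator(base.Operator):
    # Hypothetical Operator that records every value the other side advances.
    def __init__(self):
        self.advances = []

    def advance(
        self, initial_metadata=None, payload=None, completion=None, allowance=None
    ):
        # Per the base.Operator specification, at most one initial metadata
        # value and at most one Completion are communicated per direction.
        self.advances.append((initial_metadata, payload, completion, allowance))


class _IgnoringProtocolReceiver(base.ProtocolReceiver):
    # Hypothetical ProtocolReceiver that discards the protocol context.
    def context(self, protocol_context):
        pass


# A Kind.FULL subscription routing all values from the other side of the
# operation into the recording operator.
subscription = utilities.full_subscription(
    _RecordingOperator(), _IgnoringProtocolReceiver()
)

# A Completion aggregating terminal metadata, a code, and a message; a value
# like this is what Operator.advance() accepts in its `completion` argument.
terminal = utilities.completion((("trailing-key", "trailing-value"),), 0, "OK")
```

Because `_Subscription` is a plain namedtuple, `subscription.kind` here is `base.Subscription.Kind.FULL` and the unused `termination_callback` and `allowance` fields are simply None, which keeps these helpers cheap and declarative.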
diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f288e4a34305d3a0461130e1d93408c2ea37558 Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..571fdb6e1f40d5f5730f02fc41542b33822d9d5b Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/face.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-310.pyc b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e05d93b7001c08dcc4d86639f19d92e47765ed0b Binary files /dev/null and b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/__pycache__/utilities.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/face.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/face.py new file mode 100644 index 0000000000000000000000000000000000000000..9239fcc9eb9967a3d303e7ebaafbcb7dae82baed --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/face.py @@ -0,0 +1,1084 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Interfaces defining the Face layer of RPC Framework.""" + +import abc +import collections +import enum + +# cardinality, style, abandonment, future, and stream are +# referenced from specification in this module. +from grpc.framework.common import cardinality # pylint: disable=unused-import +from grpc.framework.common import style # pylint: disable=unused-import +from grpc.framework.foundation import future # pylint: disable=unused-import +from grpc.framework.foundation import stream # pylint: disable=unused-import + +# pylint: disable=too-many-arguments + + +class NoSuchMethodError(Exception): + """Raised by customer code to indicate an unrecognized method. + + Attributes: + group: The group of the unrecognized method. + method: The method identifier of the unrecognized method. + """ + + def __init__(self, group, method): + """Constructor. + + Args: + group: The group identifier of the unrecognized RPC name. + method: The method identifier of the unrecognized RPC name.
+ """ + super(NoSuchMethodError, self).__init__() + self.group = group + self.method = method + + def __repr__(self): + return "face.NoSuchMethodError(%s, %s)" % ( + self.group, + self.method, + ) + + +class Abortion( + collections.namedtuple( + "Abortion", + ( + "kind", + "initial_metadata", + "terminal_metadata", + "code", + "details", + ), + ) +): + """A value describing RPC abortion. + + Attributes: + kind: A Kind value identifying how the RPC failed. + initial_metadata: The initial metadata from the other side of the RPC or + None if no initial metadata value was received. + terminal_metadata: The terminal metadata from the other side of the RPC or + None if no terminal metadata value was received. + code: The code value from the other side of the RPC or None if no code value + was received. + details: The details value from the other side of the RPC or None if no + details value was received. + """ + + @enum.unique + class Kind(enum.Enum): + """Types of RPC abortion.""" + + CANCELLED = "cancelled" + EXPIRED = "expired" + LOCAL_SHUTDOWN = "local shutdown" + REMOTE_SHUTDOWN = "remote shutdown" + NETWORK_FAILURE = "network failure" + LOCAL_FAILURE = "local failure" + REMOTE_FAILURE = "remote failure" + + +class AbortionError(Exception, metaclass=abc.ABCMeta): + """Common super type for exceptions indicating RPC abortion. + + initial_metadata: The initial metadata from the other side of the RPC or + None if no initial metadata value was received. + terminal_metadata: The terminal metadata from the other side of the RPC or + None if no terminal metadata value was received. + code: The code value from the other side of the RPC or None if no code value + was received. + details: The details value from the other side of the RPC or None if no + details value was received. + """ + + def __init__(self, initial_metadata, terminal_metadata, code, details): + super(AbortionError, self).__init__() + self.initial_metadata = initial_metadata + self.terminal_metadata = terminal_metadata + self.code = code + self.details = details + + def __str__(self): + return '%s(code=%s, details="%s")' % ( + self.__class__.__name__, + self.code, + self.details, + ) + + +class CancellationError(AbortionError): + """Indicates that an RPC has been cancelled.""" + + +class ExpirationError(AbortionError): + """Indicates that an RPC has expired ("timed out").""" + + +class LocalShutdownError(AbortionError): + """Indicates that an RPC has terminated due to local shutdown of RPCs.""" + + +class RemoteShutdownError(AbortionError): + """Indicates that an RPC has terminated due to remote shutdown of RPCs.""" + + +class NetworkError(AbortionError): + """Indicates that some error occurred on the network.""" + + +class LocalError(AbortionError): + """Indicates that an RPC has terminated due to a local defect.""" + + +class RemoteError(AbortionError): + """Indicates that an RPC has terminated due to a remote defect.""" + + +class RpcContext(abc.ABC): + """Provides RPC-related information and action.""" + + @abc.abstractmethod + def is_active(self): + """Describes whether the RPC is active or has terminated.""" + raise NotImplementedError() + + @abc.abstractmethod + def time_remaining(self): + """Describes the length of allowed time remaining for the RPC. + + Returns: + A nonnegative float indicating the length of allowed time in seconds + remaining for the RPC to complete before it is considered to have timed + out. 
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def add_abortion_callback(self, abortion_callback): + """Registers a callback to be called if the RPC is aborted. + + Args: + abortion_callback: A callable to be called and passed an Abortion value + in the event of RPC abortion. + """ + raise NotImplementedError() + + @abc.abstractmethod + def cancel(self): + """Cancels the RPC. + + Idempotent and has no effect if the RPC has already terminated. + """ + raise NotImplementedError() + + @abc.abstractmethod + def protocol_context(self): + """Accesses a custom object specified by an implementation provider. + + Returns: + A value specified by the provider of a Face interface implementation + affording custom state and behavior. + """ + raise NotImplementedError() + + +class Call(RpcContext, metaclass=abc.ABCMeta): + """Invocation-side utility object for an RPC.""" + + @abc.abstractmethod + def initial_metadata(self): + """Accesses the initial metadata from the service-side of the RPC. + + This method blocks until the value is available or is known not to have been + emitted from the service-side of the RPC. + + Returns: + The initial metadata object emitted by the service-side of the RPC, or + None if there was no such value. + """ + raise NotImplementedError() + + @abc.abstractmethod + def terminal_metadata(self): + """Accesses the terminal metadata from the service-side of the RPC. + + This method blocks until the value is available or is known not to have been + emitted from the service-side of the RPC. + + Returns: + The terminal metadata object emitted by the service-side of the RPC, or + None if there was no such value. + """ + raise NotImplementedError() + + @abc.abstractmethod + def code(self): + """Accesses the code emitted by the service-side of the RPC. + + This method blocks until the value is available or is known not to have been + emitted from the service-side of the RPC. + + Returns: + The code object emitted by the service-side of the RPC, or None if there + was no such value. + """ + raise NotImplementedError() + + @abc.abstractmethod + def details(self): + """Accesses the details value emitted by the service-side of the RPC. + + This method blocks until the value is available or is known not to have been + emitted from the service-side of the RPC. + + Returns: + The details value emitted by the service-side of the RPC, or None if there + was no such value. + """ + raise NotImplementedError() + + +class ServicerContext(RpcContext, metaclass=abc.ABCMeta): + """A context object passed to method implementations.""" + + @abc.abstractmethod + def invocation_metadata(self): + """Accesses the metadata from the invocation-side of the RPC. + + This method blocks until the value is available or is known not to have been + emitted from the invocation-side of the RPC. + + Returns: + The metadata object emitted by the invocation-side of the RPC, or None if + there was no such value. + """ + raise NotImplementedError() + + @abc.abstractmethod + def initial_metadata(self, initial_metadata): + """Accepts the service-side initial metadata value of the RPC. + + This method need not be called by method implementations if they have no + service-side initial metadata to transmit. + + Args: + initial_metadata: The service-side initial metadata value of the RPC to + be transmitted to the invocation side of the RPC. 
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def terminal_metadata(self, terminal_metadata): + """Accepts the service-side terminal metadata value of the RPC. + + This method need not be called by method implementations if they have no + service-side terminal metadata to transmit. + + Args: + terminal_metadata: The service-side terminal metadata value of the RPC to + be transmitted to the invocation side of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def code(self, code): + """Accepts the service-side code of the RPC. + + This method need not be called by method implementations if they have no + code to transmit. + + Args: + code: The code of the RPC to be transmitted to the invocation side of the + RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def details(self, details): + """Accepts the service-side details of the RPC. + + This method need not be called by method implementations if they have no + service-side details to transmit. + + Args: + details: The service-side details value of the RPC to be transmitted to + the invocation side of the RPC. + """ + raise NotImplementedError() + + +class ResponseReceiver(abc.ABC): + """Invocation-side object used to accept the output of an RPC.""" + + @abc.abstractmethod + def initial_metadata(self, initial_metadata): + """Receives the initial metadata from the service-side of the RPC. + + Args: + initial_metadata: The initial metadata object emitted from the + service-side of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def response(self, response): + """Receives a response from the service-side of the RPC. + + Args: + response: A response object emitted from the service-side of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def complete(self, terminal_metadata, code, details): + """Receives the completion values emitted from the service-side of the RPC. + + Args: + terminal_metadata: The terminal metadata object emitted from the + service-side of the RPC. + code: The code object emitted from the service-side of the RPC. + details: The details object emitted from the service-side of the RPC. + """ + raise NotImplementedError() + + +class UnaryUnaryMultiCallable(abc.ABC): + """Affords invoking a unary-unary RPC in any call style.""" + + @abc.abstractmethod + def __call__( + self, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Synchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + with_call: Whether or not to include return a Call for the RPC in addition + to the response. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + The response value for the RPC, and a Call for the RPC if with_call was + set to True at invocation. + + Raises: + AbortionError: Indicating that the RPC was aborted. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future(self, request, timeout, metadata=None, protocol_options=None): + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. 
+ protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and a future.Future. In the + event of RPC completion, the returned Future's result value will be the + response value of the RPC. In the event of RPC abortion, the returned + Future's exception value will be an AbortionError. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event( + self, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A Call for the RPC. + """ + raise NotImplementedError() + + +class UnaryStreamMultiCallable(abc.ABC): + """Affords invoking a unary-stream RPC in any call style.""" + + @abc.abstractmethod + def __call__(self, request, timeout, metadata=None, protocol_options=None): + """Invokes the underlying RPC. + + Args: + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and an iterator of response + values. Drawing response values from the returned iterator may raise + AbortionError indicating abortion of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event( + self, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Asynchronously invokes the underlying RPC. + + Args: + request: The request value for the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A Call object for the RPC. + """ + raise NotImplementedError() + + +class StreamUnaryMultiCallable(abc.ABC): + """Affords invoking a stream-unary RPC in any call style.""" + + @abc.abstractmethod + def __call__( + self, + request_iterator, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Synchronously invokes the underlying RPC. + + Args: + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + with_call: Whether or not to return a Call for the RPC in addition + to the response. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior.
+ + Returns: + The response value for the RPC, and a Call for the RPC if with_call was + set to True at invocation. + + Raises: + AbortionError: Indicating that the RPC was aborted. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future( + self, request_iterator, timeout, metadata=None, protocol_options=None + ): + """Asynchronously invokes the underlying RPC. + + Args: + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and a future.Future. In the + event of RPC completion, the returned Future's result value will be the + response value of the RPC. In the event of RPC abortion, the returned + Future's exception value will be an AbortionError. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event( + self, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Asynchronously invokes the underlying RPC. + + Args: + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A single object that is both a Call object for the RPC and a + stream.Consumer to which the request values of the RPC should be passed. + """ + raise NotImplementedError() + + +class StreamStreamMultiCallable(abc.ABC): + """Affords invoking a stream-stream RPC in any call style.""" + + @abc.abstractmethod + def __call__( + self, request_iterator, timeout, metadata=None, protocol_options=None + ): + """Invokes the underlying RPC. + + Args: + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and an iterator of response + values. Drawing response values from the returned iterator may raise + AbortionError indicating abortion of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event( + self, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Asynchronously invokes the underlying RPC. + + Args: + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of + the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A single object that is both a Call object for the RPC and a + stream.Consumer to which the request values of the RPC should be passed.
+ """ + raise NotImplementedError() + + +class MethodImplementation(abc.ABC): + """A sum type that describes a method implementation. + + Attributes: + cardinality: A cardinality.Cardinality value. + style: A style.Service value. + unary_unary_inline: The implementation of the method as a callable value + that takes a request value and a ServicerContext object and returns a + response value. Only non-None if cardinality is + cardinality.Cardinality.UNARY_UNARY and style is style.Service.INLINE. + unary_stream_inline: The implementation of the method as a callable value + that takes a request value and a ServicerContext object and returns an + iterator of response values. Only non-None if cardinality is + cardinality.Cardinality.UNARY_STREAM and style is style.Service.INLINE. + stream_unary_inline: The implementation of the method as a callable value + that takes an iterator of request values and a ServicerContext object and + returns a response value. Only non-None if cardinality is + cardinality.Cardinality.STREAM_UNARY and style is style.Service.INLINE. + stream_stream_inline: The implementation of the method as a callable value + that takes an iterator of request values and a ServicerContext object and + returns an iterator of response values. Only non-None if cardinality is + cardinality.Cardinality.STREAM_STREAM and style is style.Service.INLINE. + unary_unary_event: The implementation of the method as a callable value that + takes a request value, a response callback to which to pass the response + value of the RPC, and a ServicerContext. Only non-None if cardinality is + cardinality.Cardinality.UNARY_UNARY and style is style.Service.EVENT. + unary_stream_event: The implementation of the method as a callable value + that takes a request value, a stream.Consumer to which to pass the + response values of the RPC, and a ServicerContext. Only non-None if + cardinality is cardinality.Cardinality.UNARY_STREAM and style is + style.Service.EVENT. + stream_unary_event: The implementation of the method as a callable value + that takes a response callback to which to pass the response value of the + RPC and a ServicerContext and returns a stream.Consumer to which the + request values of the RPC should be passed. Only non-None if cardinality + is cardinality.Cardinality.STREAM_UNARY and style is style.Service.EVENT. + stream_stream_event: The implementation of the method as a callable value + that takes a stream.Consumer to which to pass the response values of the + RPC and a ServicerContext and returns a stream.Consumer to which the + request values of the RPC should be passed. Only non-None if cardinality + is cardinality.Cardinality.STREAM_STREAM and style is + style.Service.EVENT. + """ + + +class MultiMethodImplementation(abc.ABC): + """A general type able to service many methods.""" + + @abc.abstractmethod + def service(self, group, method, response_consumer, context): + """Services an RPC. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + response_consumer: A stream.Consumer to be called to accept the response + values of the RPC. + context: a ServicerContext object. + + Returns: + A stream.Consumer with which to accept the request values of the RPC. The + consumer returned from this method may or may not be invoked to + completion: in the case of RPC abortion, RPC Framework will simply stop + passing values to this object. 
Implementations must not assume that this + object will be called to completion of the request stream or even called + at all. + + Raises: + abandonment.Abandoned: May or may not be raised when the RPC has been + aborted. + NoSuchMethodError: If this MultiMethodImplementation does not recognize + the given group and method for the RPC and is not able to service the + RPC. + """ + raise NotImplementedError() + + +class GenericStub(abc.ABC): + """Affords RPC invocation via generic methods.""" + + @abc.abstractmethod + def blocking_unary_unary( + self, + group, + method, + request, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Invokes a unary-request-unary-response method. + + This method blocks until either returning the response value of the RPC + (in the event of RPC completion) or raising an exception (in the event of + RPC abortion). + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + with_call: Whether or not to return a Call for the RPC in addition + to the response. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + The response value for the RPC, and a Call for the RPC if with_call was + set to True at invocation. + + Raises: + AbortionError: Indicating that the RPC was aborted. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future_unary_unary( + self, + group, + method, + request, + timeout, + metadata=None, + protocol_options=None, + ): + """Invokes a unary-request-unary-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and a future.Future. In the + event of RPC completion, the returned Future's result value will be the + response value of the RPC. In the event of RPC abortion, the returned + Future's exception value will be an AbortionError. + """ + raise NotImplementedError() + + @abc.abstractmethod + def inline_unary_stream( + self, + group, + method, + request, + timeout, + metadata=None, + protocol_options=None, + ): + """Invokes a unary-request-stream-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request: The request value for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and an iterator of response + values. Drawing response values from the returned iterator may raise + AbortionError indicating abortion of the RPC.
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def blocking_stream_unary( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + with_call=False, + protocol_options=None, + ): + """Invokes a stream-request-unary-response method. + + This method blocks until either returning the response value of the RPC + (in the event of RPC completion) or raising an exception (in the event of + RPC abortion). + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + with_call: Whether or not to include return a Call for the RPC in addition + to the response. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + The response value for the RPC, and a Call for the RPC if with_call was + set to True at invocation. + + Raises: + AbortionError: Indicating that the RPC was aborted. + """ + raise NotImplementedError() + + @abc.abstractmethod + def future_stream_unary( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + protocol_options=None, + ): + """Invokes a stream-request-unary-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and a future.Future. In the + event of RPC completion, the return Future's result value will be the + response value of the RPC. In the event of RPC abortion, the returned + Future's exception value will be an AbortionError. + """ + raise NotImplementedError() + + @abc.abstractmethod + def inline_stream_stream( + self, + group, + method, + request_iterator, + timeout, + metadata=None, + protocol_options=None, + ): + """Invokes a stream-request-stream-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request_iterator: An iterator that yields request values for the RPC. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + An object that is both a Call for the RPC and an iterator of response + values. Drawing response values from the returned iterator may raise + AbortionError indicating abortion of the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event_unary_unary( + self, + group, + method, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Event-driven invocation of a unary-request-unary-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request: The request value for the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. 
+ abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A Call for the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event_unary_stream( + self, + group, + method, + request, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Event-driven invocation of a unary-request-stream-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + request: The request value for the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A Call for the RPC. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event_stream_unary( + self, + group, + method, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Event-driven invocation of a stream-request-unary-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A pair of a Call object for the RPC and a stream.Consumer to which the + request values of the RPC should be passed. + """ + raise NotImplementedError() + + @abc.abstractmethod + def event_stream_stream( + self, + group, + method, + receiver, + abortion_callback, + timeout, + metadata=None, + protocol_options=None, + ): + """Event-driven invocation of a stream-request-stream-response method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + receiver: A ResponseReceiver to be passed the response data of the RPC. + abortion_callback: A callback to be called and passed an Abortion value + in the event of RPC abortion. + timeout: A duration of time in seconds to allow for the RPC. + metadata: A metadata value to be passed to the service-side of the RPC. + protocol_options: A value specified by the provider of a Face interface + implementation affording custom state and behavior. + + Returns: + A pair of a Call object for the RPC and a stream.Consumer to which the + request values of the RPC should be passed. + """ + raise NotImplementedError() + + @abc.abstractmethod + def unary_unary(self, group, method): + """Creates a UnaryUnaryMultiCallable for a unary-unary method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + + Returns: + A UnaryUnaryMultiCallable value for the named unary-unary method.
+ """ + raise NotImplementedError() + + @abc.abstractmethod + def unary_stream(self, group, method): + """Creates a UnaryStreamMultiCallable for a unary-stream method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + + Returns: + A UnaryStreamMultiCallable value for the name unary-stream method. + """ + raise NotImplementedError() + + @abc.abstractmethod + def stream_unary(self, group, method): + """Creates a StreamUnaryMultiCallable for a stream-unary method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + + Returns: + A StreamUnaryMultiCallable value for the named stream-unary method. + """ + raise NotImplementedError() + + @abc.abstractmethod + def stream_stream(self, group, method): + """Creates a StreamStreamMultiCallable for a stream-stream method. + + Args: + group: The group identifier of the RPC. + method: The method identifier of the RPC. + + Returns: + A StreamStreamMultiCallable value for the named stream-stream method. + """ + raise NotImplementedError() + + +class DynamicStub(abc.ABC): + """Affords RPC invocation via attributes corresponding to afforded methods. + + Instances of this type may be scoped to a single group so that attribute + access is unambiguous. + + Instances of this type respond to attribute access as follows: if the + requested attribute is the name of a unary-unary method, the value of the + attribute will be a UnaryUnaryMultiCallable with which to invoke an RPC; if + the requested attribute is the name of a unary-stream method, the value of the + attribute will be a UnaryStreamMultiCallable with which to invoke an RPC; if + the requested attribute is the name of a stream-unary method, the value of the + attribute will be a StreamUnaryMultiCallable with which to invoke an RPC; and + if the requested attribute is the name of a stream-stream method, the value of + the attribute will be a StreamStreamMultiCallable with which to invoke an RPC. + """ diff --git a/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/utilities.py b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..b02ea530963147242af170ae895c9e7ba152592a --- /dev/null +++ b/venv/lib/python3.10/site-packages/grpc/framework/interfaces/face/utilities.py @@ -0,0 +1,245 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for RPC Framework's Face interface.""" + +import collections + +# stream is referenced from specification in this module. 
+from grpc.framework.common import cardinality +from grpc.framework.common import style +from grpc.framework.foundation import stream # pylint: disable=unused-import +from grpc.framework.interfaces.face import face + + +class _MethodImplementation( + face.MethodImplementation, + collections.namedtuple( + "_MethodImplementation", + [ + "cardinality", + "style", + "unary_unary_inline", + "unary_stream_inline", + "stream_unary_inline", + "stream_stream_inline", + "unary_unary_event", + "unary_stream_event", + "stream_unary_event", + "stream_stream_event", + ], + ), +): + pass + + +def unary_unary_inline(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a unary-unary RPC method as a callable value + that takes a request value and a face.ServicerContext object and + returns a response value. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.UNARY_UNARY, + style.Service.INLINE, + behavior, + None, + None, + None, + None, + None, + None, + None, + ) + + +def unary_stream_inline(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a unary-stream RPC method as a callable + value that takes a request value and a face.ServicerContext object and + returns an iterator of response values. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.UNARY_STREAM, + style.Service.INLINE, + None, + behavior, + None, + None, + None, + None, + None, + None, + ) + + +def stream_unary_inline(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a stream-unary RPC method as a callable + value that takes an iterator of request values and a + face.ServicerContext object and returns a response value. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.STREAM_UNARY, + style.Service.INLINE, + None, + None, + behavior, + None, + None, + None, + None, + None, + ) + + +def stream_stream_inline(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a stream-stream RPC method as a callable + value that takes an iterator of request values and a + face.ServicerContext object and returns an iterator of response values. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.STREAM_STREAM, + style.Service.INLINE, + None, + None, + None, + behavior, + None, + None, + None, + None, + ) + + +def unary_unary_event(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a unary-unary RPC method as a callable + value that takes a request value, a response callback to which to pass + the response value of the RPC, and a face.ServicerContext. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.UNARY_UNARY, + style.Service.EVENT, + None, + None, + None, + None, + behavior, + None, + None, + None, + ) + + +def unary_stream_event(behavior): + """Creates a face.MethodImplementation for the given behavior.
+ + Args: + behavior: The implementation of a unary-stream RPC method as a callable + value that takes a request value, a stream.Consumer to which to pass the + response values of the RPC, and a face.ServicerContext. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.UNARY_STREAM, + style.Service.EVENT, + None, + None, + None, + None, + None, + behavior, + None, + None, + ) + + +def stream_unary_event(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a stream-unary RPC method as a callable + value that takes a response callback to which to pass the response value + of the RPC and a face.ServicerContext and returns a stream.Consumer to + which the request values of the RPC should be passed. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.STREAM_UNARY, + style.Service.EVENT, + None, + None, + None, + None, + None, + None, + behavior, + None, + ) + + +def stream_stream_event(behavior): + """Creates a face.MethodImplementation for the given behavior. + + Args: + behavior: The implementation of a stream-stream RPC method as a callable + value that takes a stream.Consumer to which to pass the response values + of the RPC and a face.ServicerContext and returns a stream.Consumer to + which the request values of the RPC should be passed. + + Returns: + A face.MethodImplementation derived from the given behavior. + """ + return _MethodImplementation( + cardinality.Cardinality.STREAM_STREAM, + style.Service.EVENT, + None, + None, + None, + None, + None, + None, + None, + behavior, + ) diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/LICENSE b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..79a03ca513a7c94562cb1f8d83aa8bfc736ba0c2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2015 MagicStack Inc. http://magic.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/METADATA b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..57c5f5c27548ff066a3641292a6688101873ae4a --- /dev/null +++ b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/METADATA @@ -0,0 +1,133 @@ +Metadata-Version: 2.1 +Name: httptools +Version: 0.6.4 +Summary: A collection of framework independent HTTP protocol utils. +Home-page: https://github.com/MagicStack/httptools +Author: Yury Selivanov +Author-email: yury@magic.io +License: MIT +Platform: macOS +Platform: POSIX +Platform: Windows +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Operating System :: POSIX +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Environment :: Web Environment +Classifier: Development Status :: 5 - Production/Stable +Requires-Python: >=3.8.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: test +Requires-Dist: Cython>=0.29.24; extra == "test" + +![Tests](https://github.com/MagicStack/httptools/workflows/Tests/badge.svg) + +httptools is a Python binding for the Node.js HTTP parser. + +The package is available on PyPI: `pip install httptools`. + + +# APIs + +httptools contains two classes, `httptools.HttpRequestParser` and +`httptools.HttpResponseParser` (implemented on top of +[llhttp](https://github.com/nodejs/llhttp)), and a function for +parsing URLs, `httptools.parse_url` (backed by +[http-parser](https://github.com/nodejs/http-parser) for now). +See the unit tests for examples. + + +```python + +class HttpRequestParser: + + def __init__(self, protocol): + """HttpRequestParser + + protocol -- a Python object with the following methods + (all optional): + + - on_message_begin() + - on_url(url: bytes) + - on_header(name: bytes, value: bytes) + - on_headers_complete() + - on_body(body: bytes) + - on_message_complete() + - on_chunk_header() + - on_chunk_complete() + - on_status(status: bytes) + """ + + def get_http_version(self) -> str: + """Return the HTTP protocol version.""" + + def should_keep_alive(self) -> bool: + """Return ``True`` if keep-alive mode is preferred.""" + + def should_upgrade(self) -> bool: + """Return ``True`` if the parsed request is a valid Upgrade request. + The method exposes a flag set just before on_headers_complete. + Calling this method earlier will only yield `False`. + """ + + def feed_data(self, data: bytes): + """Feed data to the parser. + + Will eventually trigger callbacks on the ``protocol`` + object. + + On HTTP upgrade, this method will raise an + ``HttpParserUpgrade`` exception, with its sole argument + set to the offset of the non-HTTP data in ``data``. + """ + + def get_method(self) -> bytes: + """Return HTTP request method (GET, HEAD, etc)""" + + +class HttpResponseParser: + + """Has all methods except ``get_method()`` that + HttpRequestParser has.""" + + def get_status_code(self) -> int: + """Return the status code of the HTTP response""" + + +def parse_url(url: bytes): + """Parse URL strings into a structured Python object.
+ + Returns an instance of ``httptools.URL`` class with the + following attributes: + + - schema: bytes + - host: bytes + - port: int + - path: bytes + - query: bytes + - fragment: bytes + - userinfo: bytes + """ +``` + + +# Development + +1. Clone this repository with + `git clone --recursive git@github.com:MagicStack/httptools.git` + +2. Create a virtual environment with Python 3: + `python3 -m venv envname` + +3. Activate the environment with `source envname/bin/activate` + +4. Install development requirements with `pip install -e .[test]` + +5. Run `make` and `make test`. + + +# License + +MIT. diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/RECORD b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..4cde9d8527404be23aef35d58fec4d47be779845 --- /dev/null +++ b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/RECORD @@ -0,0 +1,21 @@ +httptools-0.6.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httptools-0.6.4.dist-info/LICENSE,sha256=9Fc-fLdnZ0X7W402-lSKqT45HPtoct2s1lEwxF6mqS0,1093 +httptools-0.6.4.dist-info/METADATA,sha256=U4VRvayKRJbdHp18Fagk4_L7RvIQJ4anhwtwS64rO9A,3583 +httptools-0.6.4.dist-info/RECORD,, +httptools-0.6.4.dist-info/WHEEL,sha256=2JiXcU0-Qfy9gwog3WAIw4e5slUs9ReZ9xqrxqqbExM,224 +httptools-0.6.4.dist-info/top_level.txt,sha256=APjJKTbZcj0OQ4fdgf2eTCk82nK1n2BFXOD7ky41MPY,10 +httptools/__init__.py,sha256=plt3MIbueJdco9Dy7zoH3ksLNeyirqWagat5rwRmAjo,147 +httptools/__pycache__/__init__.cpython-310.pyc,, +httptools/__pycache__/_version.cpython-310.pyc,, +httptools/_version.py,sha256=ASqOB8fLS7jwZsM551Lc49WxYPyjteqnz1iDWmka-KA,575 +httptools/parser/__init__.py,sha256=fWyconPEHZlJojzRwmBKSn4C85OGXmKEwiEcdjHqXO8,166 +httptools/parser/__pycache__/__init__.cpython-310.pyc,, +httptools/parser/__pycache__/errors.cpython-310.pyc,, +httptools/parser/cparser.pxd,sha256=4qBxnma83Vz86Z9sOZRxjqYj20A-aLSWVGXZgTVLJqE,4977 +httptools/parser/errors.py,sha256=ZVrtN1smPIb_opQ2Ud3uCbGlNLMlECYM2-6S7r5LnHs,566 +httptools/parser/parser.cpython-310-x86_64-linux-gnu.so,sha256=gUJVhgrfY6kfy5lOsa1_8lIFPtbRLot_obcQUnrfRuk,1028176 +httptools/parser/parser.pyx,sha256=x0BUY9EzHNKCDaw-U8bkZ1MaKGtrOQ8iVCm1IuOtEQI,15140 +httptools/parser/python.pxd,sha256=zWCdGZh34fyQNt3BUHIUjPqY8a5sodRUkfdABxqYHgQ,138 +httptools/parser/url_cparser.pxd,sha256=X5dDI8A7T0l5HL_Czt0mTs0l_d2lXnUDHx1TN8LeiCM,779 +httptools/parser/url_parser.cpython-310-x86_64-linux-gnu.so,sha256=rG1YT8DYa7kfGmsTITKzLi1H-ZJShJEkJWXnziUo0JU,426712 +httptools/parser/url_parser.pyx,sha256=ZJVUZqrIDdhzVodA7tTtoFb570av-SczIyh2oAZXKzM,3758 diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/WHEEL b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..5d79f5676bcd9b2cd48440916083f8129fc7be69 --- /dev/null +++ b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.1.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_5_x86_64 +Tag: cp310-cp310-manylinux1_x86_64 +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..bef3b40b2f33a46ae13d46456df480f77ab712e3 --- /dev/null +++ 
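The METADATA above documents the parser's callback protocol but defers to the unit tests for usage. A small, self-contained sketch of feeding one request through `HttpRequestParser` (the request bytes are a made-up example):

```python
import httptools


class Protocol:
    # Only the callbacks you need have to be defined; all are optional.
    def on_url(self, url: bytes):
        print("url:", url)

    def on_header(self, name: bytes, value: bytes):
        print("header:", name, value)

    def on_message_complete(self):
        print("message complete")


parser = httptools.HttpRequestParser(Protocol())
parser.feed_data(b"GET /ping HTTP/1.1\r\nHost: example.com\r\n\r\n")

print(parser.get_method())         # b'GET'
print(parser.get_http_version())   # '1.1'
print(parser.should_keep_alive())  # True: HTTP/1.1 without 'Connection: close'
```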
b/venv/lib/python3.10/site-packages/httptools-0.6.4.dist-info/top_level.txt @@ -0,0 +1 @@ +httptools diff --git a/venv/lib/python3.10/site-packages/idna/__init__.py b/venv/lib/python3.10/site-packages/idna/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cfdc030a751b089fc7e38fc88093b791605d501d --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..99998f8201d9288edcc3ac3d7ac697767913a571 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8837a0b92473d3cf8700cc28cf686e702241a4f4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/codec.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14d0937cf8f4e65cb5429a5a2293ef352ce6e873 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..442712db808996b52b9787be4ad918219cdedd96 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/core.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51fca686bce72913468f4b07c19cac34273ee03b Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/intranges.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc b/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19d7cb69f81c8d67665469432c84c6d0f29e6fc3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/idna/__pycache__/package_data.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/idna/codec.py b/venv/lib/python3.10/site-packages/idna/codec.py new file mode 100644 index 0000000000000000000000000000000000000000..913abfd6a23ce547f84de2adc41221012f1007d6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return "", 0 + + return decode(data), len(data) + + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b"" + if labels: + if not labels[-1]: + trailing_dot = b"." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b"." + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b".".join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return ("", 0) + + if not isinstance(data, str): + data = str(data, "ascii") + + labels = _unicode_dots_re.split(data) + trailing_dot = "" + if labels: + if not labels[-1]: + trailing_dot = "." + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = "." 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != "idna2008": + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + + +codecs.register(search_function) diff --git a/venv/lib/python3.10/site-packages/idna/compat.py b/venv/lib/python3.10/site-packages/idna/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..1df9f2a70e6815908f2784e88897a9a359eef84c --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/compat.py @@ -0,0 +1,15 @@ +from typing import Any, Union + +from .core import decode, encode + + +def ToASCII(label: str) -> bytes: + return encode(label) + + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + + +def nameprep(s: Any) -> None: + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/venv/lib/python3.10/site-packages/idna/core.py b/venv/lib/python3.10/site-packages/idna/core.py new file mode 100644 index 0000000000000000000000000000000000000000..9115f123f0274832af5ba1cf3c5481cc5353eecd --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/core.py @@ -0,0 +1,437 @@ +import bisect +import re +import unicodedata +from typing import Optional, Union + +from . import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in 
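Once `idna/codec.py` above has been imported, its `search_function` is registered with the standard `codecs` machinery under the name `idna2008`, so the codec can be driven through the usual `codecs` entry points. A short sketch, with expected values taken from the well-known `königsgäßchen` example in idna's documentation:

```python
import codecs

import idna.codec  # noqa: F401 -- importing this module registers "idna2008"

print(codecs.encode("königsgäßchen", "idna2008"))
# b'xn--knigsgchen-b4a3dun'
print(codecs.decode(b"xn--knigsgchen-b4a3dun", "idna2008"))
# 'königsgäßchen'
```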
["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif 
cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char 
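`check_label` above is the gatekeeper used by both `alabel` and `ulabel`: after the NFC, hyphen, and initial-combiner checks, every codepoint must be PVALID or satisfy its CONTEXTJ/CONTEXTO rule. A sketch of the observable behavior (the A-label value is the well-known bücher.de example):

```python
from idna.core import InvalidCodepoint, alabel, check_label, ulabel

check_label("bücher")  # passes silently: every codepoint is PVALID

try:
    check_label("Bücher")  # 'B' (U+0042) is DISALLOWED, not PVALID
except InvalidCodepoint as exc:
    print(exc)

# alabel/ulabel round-trip a valid label through its Punycode A-label form.
assert alabel("bücher") == b"xn--bcher-kva"
assert ulabel(b"xn--bcher-kva") == "bücher"
```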
+ elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/venv/lib/python3.10/site-packages/idna/idnadata.py b/venv/lib/python3.10/site-packages/idna/idnadata.py new file mode 100644 index 0000000000000000000000000000000000000000..4be6004622efcdc36a8d15efc0ac3e138a4bae02 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/idnadata.py @@ -0,0 +1,4243 @@ +# This file is automatically generated by tools/idna-data + +__version__ = "15.1.0" +scripts = { + "Greek": ( + 0x37000000374, + 0x37500000378, + 0x37A0000037E, + 0x37F00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038B, + 0x38C0000038D, + 0x38E000003A2, + 0x3A3000003E2, + 0x3F000000400, + 0x1D2600001D2B, + 0x1D5D00001D62, + 0x1D6600001D6B, + 0x1DBF00001DC0, + 0x1F0000001F16, + 0x1F1800001F1E, + 0x1F2000001F46, + 0x1F4800001F4E, + 0x1F5000001F58, + 0x1F5900001F5A, + 0x1F5B00001F5C, + 0x1F5D00001F5E, + 0x1F5F00001F7E, + 0x1F8000001FB5, + 0x1FB600001FC5, + 0x1FC600001FD4, + 0x1FD600001FDC, + 0x1FDD00001FF0, + 0x1FF200001FF5, + 0x1FF600001FFF, + 0x212600002127, + 0xAB650000AB66, + 0x101400001018F, + 0x101A0000101A1, + 0x1D2000001D246, + ), + "Han": ( + 0x2E8000002E9A, + 0x2E9B00002EF4, + 0x2F0000002FD6, + 0x300500003006, + 0x300700003008, + 0x30210000302A, + 0x30380000303C, + 0x340000004DC0, + 0x4E000000A000, + 0xF9000000FA6E, + 0xFA700000FADA, + 0x16FE200016FE4, + 0x16FF000016FF2, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x2F8000002FA1E, + 0x300000003134B, + 
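`encode` and `decode` above split the domain on dots (including the Unicode dot variants unless `strict`), run each label through `alabel`/`ulabel`, and re-join, enforcing the 63-byte label and 253/254-byte domain limits. With `uts46=True` the input is first passed through `uts46_remap`, which is what makes mixed-case input acceptable. A short sketch, using known-good values from idna's own documentation:

```python
from idna.core import IDNAError, decode, encode

assert encode("ドメイン.テスト") == b"xn--eckwd4c7c.xn--zckzah"
assert decode(b"xn--eckwd4c7c.xn--zckzah") == "ドメイン.テスト"

# Without UTS #46 remapping, the uppercase 'B' is rejected outright...
try:
    encode("Bücher.example")
except IDNAError as exc:
    print(exc)

# ...but uts46=True remaps/case-folds the input first, so this succeeds.
assert encode("Bücher.example", uts46=True) == b"xn--bcher-kva.example"
```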
0x31350000323B0, + ), + "Hebrew": ( + 0x591000005C8, + 0x5D0000005EB, + 0x5EF000005F5, + 0xFB1D0000FB37, + 0xFB380000FB3D, + 0xFB3E0000FB3F, + 0xFB400000FB42, + 0xFB430000FB45, + 0xFB460000FB50, + ), + "Hiragana": ( + 0x304100003097, + 0x309D000030A0, + 0x1B0010001B120, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1F2000001F201, + ), + "Katakana": ( + 0x30A1000030FB, + 0x30FD00003100, + 0x31F000003200, + 0x32D0000032FF, + 0x330000003358, + 0xFF660000FF70, + 0xFF710000FF9E, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B001, + 0x1B1200001B123, + 0x1B1550001B156, + 0x1B1640001B168, + ), +} +joining_types = { + 0xAD: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30A: 84, + 0x30B: 84, + 0x30C: 84, + 0x30D: 84, + 0x30E: 84, + 0x30F: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31A: 84, + 0x31B: 84, + 0x31C: 84, + 0x31D: 84, + 0x31E: 84, + 0x31F: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32A: 84, + 0x32B: 84, + 0x32C: 84, + 0x32D: 84, + 0x32E: 84, + 0x32F: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33A: 84, + 0x33B: 84, + 0x33C: 84, + 0x33D: 84, + 0x33E: 84, + 0x33F: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34A: 84, + 0x34B: 84, + 0x34C: 84, + 0x34D: 84, + 0x34E: 84, + 0x34F: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 84, + 0x35A: 84, + 0x35B: 84, + 0x35C: 84, + 0x35D: 84, + 0x35E: 84, + 0x35F: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36A: 84, + 0x36B: 84, + 0x36C: 84, + 0x36D: 84, + 0x36E: 84, + 0x36F: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59A: 84, + 0x59B: 84, + 0x59C: 84, + 0x59D: 84, + 0x59E: 84, + 0x59F: 84, + 0x5A0: 84, + 0x5A1: 84, + 0x5A2: 84, + 0x5A3: 84, + 0x5A4: 84, + 0x5A5: 84, + 0x5A6: 84, + 0x5A7: 84, + 0x5A8: 84, + 0x5A9: 84, + 0x5AA: 84, + 0x5AB: 84, + 0x5AC: 84, + 0x5AD: 84, + 0x5AE: 84, + 0x5AF: 84, + 0x5B0: 84, + 0x5B1: 84, + 0x5B2: 84, + 0x5B3: 84, + 0x5B4: 84, + 0x5B5: 84, + 0x5B6: 84, + 0x5B7: 84, + 0x5B8: 84, + 0x5B9: 84, + 0x5BA: 84, + 0x5BB: 84, + 0x5BC: 84, + 0x5BD: 84, + 0x5BF: 84, + 0x5C1: 84, + 0x5C2: 84, + 0x5C4: 84, + 0x5C5: 84, + 0x5C7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61A: 84, + 0x61C: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62A: 68, + 0x62B: 68, + 0x62C: 68, + 0x62D: 68, + 0x62E: 68, + 0x62F: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63A: 68, + 0x63B: 68, + 0x63C: 68, + 0x63D: 68, + 0x63E: 68, + 0x63F: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64A: 
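These generated tables use two compact encodings: each tuple entry in `scripts` packs a half-open codepoint range [start, end) into a single integer as `(start << 32) | end`, the form that `idna.intranges` bisects over, and `joining_types` maps a codepoint to the `ord()` of its Unicode Joining_Type letter, which is why `valid_contextj` above compares entries against `ord("T")` and `ord("D")`. A small decoding sketch:

```python
def unpack_range(packed: int) -> tuple[int, int]:
    # Inverse of the (start << 32) | end packing used by the tables above.
    return packed >> 32, packed & 0xFFFFFFFF

# First Greek entry above: codepoints U+0370..U+0373 (the end is exclusive).
assert unpack_range(0x37000000374) == (0x370, 0x374)

# joining_types values are the ASCII codes of the Joining_Type letters:
assert ord("D") == 68  # dual-joining   (e.g. 0x628, ARABIC LETTER BEH)
assert ord("R") == 82  # right-joining  (e.g. 0x627, ARABIC LETTER ALEF)
assert ord("T") == 84  # transparent    (combining marks)
assert ord("C") == 67  # join-causing   (e.g. 0x640, ARABIC TATWEEL)
assert ord("L") == 76  # left-joining
```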
68, + 0x64B: 84, + 0x64C: 84, + 0x64D: 84, + 0x64E: 84, + 0x64F: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65A: 84, + 0x65B: 84, + 0x65C: 84, + 0x65D: 84, + 0x65E: 84, + 0x65F: 84, + 0x66E: 68, + 0x66F: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67A: 68, + 0x67B: 68, + 0x67C: 68, + 0x67D: 68, + 0x67E: 68, + 0x67F: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68A: 82, + 0x68B: 82, + 0x68C: 82, + 0x68D: 82, + 0x68E: 82, + 0x68F: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69A: 68, + 0x69B: 68, + 0x69C: 68, + 0x69D: 68, + 0x69E: 68, + 0x69F: 68, + 0x6A0: 68, + 0x6A1: 68, + 0x6A2: 68, + 0x6A3: 68, + 0x6A4: 68, + 0x6A5: 68, + 0x6A6: 68, + 0x6A7: 68, + 0x6A8: 68, + 0x6A9: 68, + 0x6AA: 68, + 0x6AB: 68, + 0x6AC: 68, + 0x6AD: 68, + 0x6AE: 68, + 0x6AF: 68, + 0x6B0: 68, + 0x6B1: 68, + 0x6B2: 68, + 0x6B3: 68, + 0x6B4: 68, + 0x6B5: 68, + 0x6B6: 68, + 0x6B7: 68, + 0x6B8: 68, + 0x6B9: 68, + 0x6BA: 68, + 0x6BB: 68, + 0x6BC: 68, + 0x6BD: 68, + 0x6BE: 68, + 0x6BF: 68, + 0x6C0: 82, + 0x6C1: 68, + 0x6C2: 68, + 0x6C3: 82, + 0x6C4: 82, + 0x6C5: 82, + 0x6C6: 82, + 0x6C7: 82, + 0x6C8: 82, + 0x6C9: 82, + 0x6CA: 82, + 0x6CB: 82, + 0x6CC: 68, + 0x6CD: 82, + 0x6CE: 68, + 0x6CF: 82, + 0x6D0: 68, + 0x6D1: 68, + 0x6D2: 82, + 0x6D3: 82, + 0x6D5: 82, + 0x6D6: 84, + 0x6D7: 84, + 0x6D8: 84, + 0x6D9: 84, + 0x6DA: 84, + 0x6DB: 84, + 0x6DC: 84, + 0x6DF: 84, + 0x6E0: 84, + 0x6E1: 84, + 0x6E2: 84, + 0x6E3: 84, + 0x6E4: 84, + 0x6E7: 84, + 0x6E8: 84, + 0x6EA: 84, + 0x6EB: 84, + 0x6EC: 84, + 0x6ED: 84, + 0x6EE: 82, + 0x6EF: 82, + 0x6FA: 68, + 0x6FB: 68, + 0x6FC: 68, + 0x6FF: 68, + 0x70F: 84, + 0x710: 82, + 0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71A: 68, + 0x71B: 68, + 0x71C: 68, + 0x71D: 68, + 0x71E: 82, + 0x71F: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72A: 82, + 0x72B: 68, + 0x72C: 82, + 0x72D: 68, + 0x72E: 68, + 0x72F: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73A: 84, + 0x73B: 84, + 0x73C: 84, + 0x73D: 84, + 0x73E: 84, + 0x73F: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74A: 84, + 0x74D: 82, + 0x74E: 68, + 0x74F: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75A: 82, + 0x75B: 82, + 0x75C: 68, + 0x75D: 68, + 0x75E: 68, + 0x75F: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76A: 68, + 0x76B: 82, + 0x76C: 82, + 0x76D: 68, + 0x76E: 68, + 0x76F: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77A: 68, + 0x77B: 68, + 0x77C: 68, + 0x77D: 68, + 0x77E: 68, + 0x77F: 68, + 0x7A6: 84, + 0x7A7: 84, + 0x7A8: 84, + 0x7A9: 84, + 0x7AA: 84, + 0x7AB: 84, + 0x7AC: 84, + 0x7AD: 84, + 0x7AE: 84, + 0x7AF: 84, + 0x7B0: 84, + 0x7CA: 68, + 0x7CB: 68, + 0x7CC: 68, + 
0x7CD: 68, + 0x7CE: 68, + 0x7CF: 68, + 0x7D0: 68, + 0x7D1: 68, + 0x7D2: 68, + 0x7D3: 68, + 0x7D4: 68, + 0x7D5: 68, + 0x7D6: 68, + 0x7D7: 68, + 0x7D8: 68, + 0x7D9: 68, + 0x7DA: 68, + 0x7DB: 68, + 0x7DC: 68, + 0x7DD: 68, + 0x7DE: 68, + 0x7DF: 68, + 0x7E0: 68, + 0x7E1: 68, + 0x7E2: 68, + 0x7E3: 68, + 0x7E4: 68, + 0x7E5: 68, + 0x7E6: 68, + 0x7E7: 68, + 0x7E8: 68, + 0x7E9: 68, + 0x7EA: 68, + 0x7EB: 84, + 0x7EC: 84, + 0x7ED: 84, + 0x7EE: 84, + 0x7EF: 84, + 0x7F0: 84, + 0x7F1: 84, + 0x7F2: 84, + 0x7F3: 84, + 0x7FA: 67, + 0x7FD: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81B: 84, + 0x81C: 84, + 0x81D: 84, + 0x81E: 84, + 0x81F: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82A: 84, + 0x82B: 84, + 0x82C: 84, + 0x82D: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84A: 68, + 0x84B: 68, + 0x84C: 68, + 0x84D: 68, + 0x84E: 68, + 0x84F: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85A: 84, + 0x85B: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86A: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87A: 82, + 0x87B: 82, + 0x87C: 82, + 0x87D: 82, + 0x87E: 82, + 0x87F: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88A: 68, + 0x88B: 68, + 0x88C: 68, + 0x88D: 68, + 0x88E: 82, + 0x898: 84, + 0x899: 84, + 0x89A: 84, + 0x89B: 84, + 0x89C: 84, + 0x89D: 84, + 0x89E: 84, + 0x89F: 84, + 0x8A0: 68, + 0x8A1: 68, + 0x8A2: 68, + 0x8A3: 68, + 0x8A4: 68, + 0x8A5: 68, + 0x8A6: 68, + 0x8A7: 68, + 0x8A8: 68, + 0x8A9: 68, + 0x8AA: 82, + 0x8AB: 82, + 0x8AC: 82, + 0x8AE: 82, + 0x8AF: 68, + 0x8B0: 68, + 0x8B1: 82, + 0x8B2: 82, + 0x8B3: 68, + 0x8B4: 68, + 0x8B5: 68, + 0x8B6: 68, + 0x8B7: 68, + 0x8B8: 68, + 0x8B9: 82, + 0x8BA: 68, + 0x8BB: 68, + 0x8BC: 68, + 0x8BD: 68, + 0x8BE: 68, + 0x8BF: 68, + 0x8C0: 68, + 0x8C1: 68, + 0x8C2: 68, + 0x8C3: 68, + 0x8C4: 68, + 0x8C5: 68, + 0x8C6: 68, + 0x8C7: 68, + 0x8C8: 68, + 0x8CA: 84, + 0x8CB: 84, + 0x8CC: 84, + 0x8CD: 84, + 0x8CE: 84, + 0x8CF: 84, + 0x8D0: 84, + 0x8D1: 84, + 0x8D2: 84, + 0x8D3: 84, + 0x8D4: 84, + 0x8D5: 84, + 0x8D6: 84, + 0x8D7: 84, + 0x8D8: 84, + 0x8D9: 84, + 0x8DA: 84, + 0x8DB: 84, + 0x8DC: 84, + 0x8DD: 84, + 0x8DE: 84, + 0x8DF: 84, + 0x8E0: 84, + 0x8E1: 84, + 0x8E3: 84, + 0x8E4: 84, + 0x8E5: 84, + 0x8E6: 84, + 0x8E7: 84, + 0x8E8: 84, + 0x8E9: 84, + 0x8EA: 84, + 0x8EB: 84, + 0x8EC: 84, + 0x8ED: 84, + 0x8EE: 84, + 0x8EF: 84, + 0x8F0: 84, + 0x8F1: 84, + 0x8F2: 84, + 0x8F3: 84, + 0x8F4: 84, + 0x8F5: 84, + 0x8F6: 84, + 0x8F7: 84, + 0x8F8: 84, + 0x8F9: 84, + 0x8FA: 84, + 0x8FB: 84, + 0x8FC: 84, + 0x8FD: 84, + 0x8FE: 84, + 0x8FF: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93A: 84, + 0x93C: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94D: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9BC: 84, + 0x9C1: 84, + 0x9C2: 84, + 0x9C3: 84, + 0x9C4: 84, + 0x9CD: 84, + 0x9E2: 84, + 0x9E3: 84, + 0x9FE: 84, + 0xA01: 84, + 0xA02: 84, + 0xA3C: 84, + 0xA41: 84, + 0xA42: 84, + 0xA47: 84, + 0xA48: 84, + 0xA4B: 84, + 0xA4C: 84, + 0xA4D: 84, + 0xA51: 84, + 
0xA70: 84, + 0xA71: 84, + 0xA75: 84, + 0xA81: 84, + 0xA82: 84, + 0xABC: 84, + 0xAC1: 84, + 0xAC2: 84, + 0xAC3: 84, + 0xAC4: 84, + 0xAC5: 84, + 0xAC7: 84, + 0xAC8: 84, + 0xACD: 84, + 0xAE2: 84, + 0xAE3: 84, + 0xAFA: 84, + 0xAFB: 84, + 0xAFC: 84, + 0xAFD: 84, + 0xAFE: 84, + 0xAFF: 84, + 0xB01: 84, + 0xB3C: 84, + 0xB3F: 84, + 0xB41: 84, + 0xB42: 84, + 0xB43: 84, + 0xB44: 84, + 0xB4D: 84, + 0xB55: 84, + 0xB56: 84, + 0xB62: 84, + 0xB63: 84, + 0xB82: 84, + 0xBC0: 84, + 0xBCD: 84, + 0xC00: 84, + 0xC04: 84, + 0xC3C: 84, + 0xC3E: 84, + 0xC3F: 84, + 0xC40: 84, + 0xC46: 84, + 0xC47: 84, + 0xC48: 84, + 0xC4A: 84, + 0xC4B: 84, + 0xC4C: 84, + 0xC4D: 84, + 0xC55: 84, + 0xC56: 84, + 0xC62: 84, + 0xC63: 84, + 0xC81: 84, + 0xCBC: 84, + 0xCBF: 84, + 0xCC6: 84, + 0xCCC: 84, + 0xCCD: 84, + 0xCE2: 84, + 0xCE3: 84, + 0xD00: 84, + 0xD01: 84, + 0xD3B: 84, + 0xD3C: 84, + 0xD41: 84, + 0xD42: 84, + 0xD43: 84, + 0xD44: 84, + 0xD4D: 84, + 0xD62: 84, + 0xD63: 84, + 0xD81: 84, + 0xDCA: 84, + 0xDD2: 84, + 0xDD3: 84, + 0xDD4: 84, + 0xDD6: 84, + 0xE31: 84, + 0xE34: 84, + 0xE35: 84, + 0xE36: 84, + 0xE37: 84, + 0xE38: 84, + 0xE39: 84, + 0xE3A: 84, + 0xE47: 84, + 0xE48: 84, + 0xE49: 84, + 0xE4A: 84, + 0xE4B: 84, + 0xE4C: 84, + 0xE4D: 84, + 0xE4E: 84, + 0xEB1: 84, + 0xEB4: 84, + 0xEB5: 84, + 0xEB6: 84, + 0xEB7: 84, + 0xEB8: 84, + 0xEB9: 84, + 0xEBA: 84, + 0xEBB: 84, + 0xEBC: 84, + 0xEC8: 84, + 0xEC9: 84, + 0xECA: 84, + 0xECB: 84, + 0xECC: 84, + 0xECD: 84, + 0xECE: 84, + 0xF18: 84, + 0xF19: 84, + 0xF35: 84, + 0xF37: 84, + 0xF39: 84, + 0xF71: 84, + 0xF72: 84, + 0xF73: 84, + 0xF74: 84, + 0xF75: 84, + 0xF76: 84, + 0xF77: 84, + 0xF78: 84, + 0xF79: 84, + 0xF7A: 84, + 0xF7B: 84, + 0xF7C: 84, + 0xF7D: 84, + 0xF7E: 84, + 0xF80: 84, + 0xF81: 84, + 0xF82: 84, + 0xF83: 84, + 0xF84: 84, + 0xF86: 84, + 0xF87: 84, + 0xF8D: 84, + 0xF8E: 84, + 0xF8F: 84, + 0xF90: 84, + 0xF91: 84, + 0xF92: 84, + 0xF93: 84, + 0xF94: 84, + 0xF95: 84, + 0xF96: 84, + 0xF97: 84, + 0xF99: 84, + 0xF9A: 84, + 0xF9B: 84, + 0xF9C: 84, + 0xF9D: 84, + 0xF9E: 84, + 0xF9F: 84, + 0xFA0: 84, + 0xFA1: 84, + 0xFA2: 84, + 0xFA3: 84, + 0xFA4: 84, + 0xFA5: 84, + 0xFA6: 84, + 0xFA7: 84, + 0xFA8: 84, + 0xFA9: 84, + 0xFAA: 84, + 0xFAB: 84, + 0xFAC: 84, + 0xFAD: 84, + 0xFAE: 84, + 0xFAF: 84, + 0xFB0: 84, + 0xFB1: 84, + 0xFB2: 84, + 0xFB3: 84, + 0xFB4: 84, + 0xFB5: 84, + 0xFB6: 84, + 0xFB7: 84, + 0xFB8: 84, + 0xFB9: 84, + 0xFBA: 84, + 0xFBB: 84, + 0xFBC: 84, + 0xFC6: 84, + 0x102D: 84, + 0x102E: 84, + 0x102F: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103A: 84, + 0x103D: 84, + 0x103E: 84, + 0x1058: 84, + 0x1059: 84, + 0x105E: 84, + 0x105F: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108D: 84, + 0x109D: 84, + 0x135D: 84, + 0x135E: 84, + 0x135F: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17B4: 84, + 0x17B5: 84, + 0x17B7: 84, + 0x17B8: 84, + 0x17B9: 84, + 0x17BA: 84, + 0x17BB: 84, + 0x17BC: 84, + 0x17BD: 84, + 0x17C6: 84, + 0x17C9: 84, + 0x17CA: 84, + 0x17CB: 84, + 0x17CC: 84, + 0x17CD: 84, + 0x17CE: 84, + 0x17CF: 84, + 0x17D0: 84, + 0x17D1: 84, + 0x17D2: 84, + 0x17D3: 84, + 0x17DD: 84, + 0x1807: 68, + 0x180A: 67, + 0x180B: 84, + 0x180C: 84, + 0x180D: 84, + 0x180F: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182A: 68, + 0x182B: 68, + 0x182C: 68, + 
0x182D: 68, + 0x182E: 68, + 0x182F: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183A: 68, + 0x183B: 68, + 0x183C: 68, + 0x183D: 68, + 0x183E: 68, + 0x183F: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184A: 68, + 0x184B: 68, + 0x184C: 68, + 0x184D: 68, + 0x184E: 68, + 0x184F: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185A: 68, + 0x185B: 68, + 0x185C: 68, + 0x185D: 68, + 0x185E: 68, + 0x185F: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186A: 68, + 0x186B: 68, + 0x186C: 68, + 0x186D: 68, + 0x186E: 68, + 0x186F: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188A: 68, + 0x188B: 68, + 0x188C: 68, + 0x188D: 68, + 0x188E: 68, + 0x188F: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189A: 68, + 0x189B: 68, + 0x189C: 68, + 0x189D: 68, + 0x189E: 68, + 0x189F: 68, + 0x18A0: 68, + 0x18A1: 68, + 0x18A2: 68, + 0x18A3: 68, + 0x18A4: 68, + 0x18A5: 68, + 0x18A6: 68, + 0x18A7: 68, + 0x18A8: 68, + 0x18A9: 84, + 0x18AA: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193A: 84, + 0x193B: 84, + 0x1A17: 84, + 0x1A18: 84, + 0x1A1B: 84, + 0x1A56: 84, + 0x1A58: 84, + 0x1A59: 84, + 0x1A5A: 84, + 0x1A5B: 84, + 0x1A5C: 84, + 0x1A5D: 84, + 0x1A5E: 84, + 0x1A60: 84, + 0x1A62: 84, + 0x1A65: 84, + 0x1A66: 84, + 0x1A67: 84, + 0x1A68: 84, + 0x1A69: 84, + 0x1A6A: 84, + 0x1A6B: 84, + 0x1A6C: 84, + 0x1A73: 84, + 0x1A74: 84, + 0x1A75: 84, + 0x1A76: 84, + 0x1A77: 84, + 0x1A78: 84, + 0x1A79: 84, + 0x1A7A: 84, + 0x1A7B: 84, + 0x1A7C: 84, + 0x1A7F: 84, + 0x1AB0: 84, + 0x1AB1: 84, + 0x1AB2: 84, + 0x1AB3: 84, + 0x1AB4: 84, + 0x1AB5: 84, + 0x1AB6: 84, + 0x1AB7: 84, + 0x1AB8: 84, + 0x1AB9: 84, + 0x1ABA: 84, + 0x1ABB: 84, + 0x1ABC: 84, + 0x1ABD: 84, + 0x1ABE: 84, + 0x1ABF: 84, + 0x1AC0: 84, + 0x1AC1: 84, + 0x1AC2: 84, + 0x1AC3: 84, + 0x1AC4: 84, + 0x1AC5: 84, + 0x1AC6: 84, + 0x1AC7: 84, + 0x1AC8: 84, + 0x1AC9: 84, + 0x1ACA: 84, + 0x1ACB: 84, + 0x1ACC: 84, + 0x1ACD: 84, + 0x1ACE: 84, + 0x1B00: 84, + 0x1B01: 84, + 0x1B02: 84, + 0x1B03: 84, + 0x1B34: 84, + 0x1B36: 84, + 0x1B37: 84, + 0x1B38: 84, + 0x1B39: 84, + 0x1B3A: 84, + 0x1B3C: 84, + 0x1B42: 84, + 0x1B6B: 84, + 0x1B6C: 84, + 0x1B6D: 84, + 0x1B6E: 84, + 0x1B6F: 84, + 0x1B70: 84, + 0x1B71: 84, + 0x1B72: 84, + 0x1B73: 84, + 0x1B80: 84, + 0x1B81: 84, + 0x1BA2: 84, + 0x1BA3: 84, + 0x1BA4: 84, + 0x1BA5: 84, + 0x1BA8: 84, + 0x1BA9: 84, + 0x1BAB: 84, + 0x1BAC: 84, + 0x1BAD: 84, + 0x1BE6: 84, + 0x1BE8: 84, + 0x1BE9: 84, + 0x1BED: 84, + 0x1BEF: 84, + 0x1BF0: 84, + 0x1BF1: 84, + 0x1C2C: 84, + 0x1C2D: 84, + 0x1C2E: 84, + 0x1C2F: 84, + 0x1C30: 84, + 0x1C31: 84, + 0x1C32: 84, + 0x1C33: 84, + 0x1C36: 84, + 0x1C37: 84, + 0x1CD0: 84, + 0x1CD1: 84, + 0x1CD2: 84, + 0x1CD4: 84, + 0x1CD5: 84, + 0x1CD6: 84, + 0x1CD7: 84, + 0x1CD8: 84, + 0x1CD9: 84, + 0x1CDA: 84, + 0x1CDB: 84, + 0x1CDC: 84, + 0x1CDD: 84, + 0x1CDE: 84, + 0x1CDF: 84, + 0x1CE0: 84, + 0x1CE2: 84, + 0x1CE3: 84, + 0x1CE4: 84, 
+ 0x1CE5: 84, + 0x1CE6: 84, + 0x1CE7: 84, + 0x1CE8: 84, + 0x1CED: 84, + 0x1CF4: 84, + 0x1CF8: 84, + 0x1CF9: 84, + 0x1DC0: 84, + 0x1DC1: 84, + 0x1DC2: 84, + 0x1DC3: 84, + 0x1DC4: 84, + 0x1DC5: 84, + 0x1DC6: 84, + 0x1DC7: 84, + 0x1DC8: 84, + 0x1DC9: 84, + 0x1DCA: 84, + 0x1DCB: 84, + 0x1DCC: 84, + 0x1DCD: 84, + 0x1DCE: 84, + 0x1DCF: 84, + 0x1DD0: 84, + 0x1DD1: 84, + 0x1DD2: 84, + 0x1DD3: 84, + 0x1DD4: 84, + 0x1DD5: 84, + 0x1DD6: 84, + 0x1DD7: 84, + 0x1DD8: 84, + 0x1DD9: 84, + 0x1DDA: 84, + 0x1DDB: 84, + 0x1DDC: 84, + 0x1DDD: 84, + 0x1DDE: 84, + 0x1DDF: 84, + 0x1DE0: 84, + 0x1DE1: 84, + 0x1DE2: 84, + 0x1DE3: 84, + 0x1DE4: 84, + 0x1DE5: 84, + 0x1DE6: 84, + 0x1DE7: 84, + 0x1DE8: 84, + 0x1DE9: 84, + 0x1DEA: 84, + 0x1DEB: 84, + 0x1DEC: 84, + 0x1DED: 84, + 0x1DEE: 84, + 0x1DEF: 84, + 0x1DF0: 84, + 0x1DF1: 84, + 0x1DF2: 84, + 0x1DF3: 84, + 0x1DF4: 84, + 0x1DF5: 84, + 0x1DF6: 84, + 0x1DF7: 84, + 0x1DF8: 84, + 0x1DF9: 84, + 0x1DFA: 84, + 0x1DFB: 84, + 0x1DFC: 84, + 0x1DFD: 84, + 0x1DFE: 84, + 0x1DFF: 84, + 0x200B: 84, + 0x200D: 67, + 0x200E: 84, + 0x200F: 84, + 0x202A: 84, + 0x202B: 84, + 0x202C: 84, + 0x202D: 84, + 0x202E: 84, + 0x2060: 84, + 0x2061: 84, + 0x2062: 84, + 0x2063: 84, + 0x2064: 84, + 0x206A: 84, + 0x206B: 84, + 0x206C: 84, + 0x206D: 84, + 0x206E: 84, + 0x206F: 84, + 0x20D0: 84, + 0x20D1: 84, + 0x20D2: 84, + 0x20D3: 84, + 0x20D4: 84, + 0x20D5: 84, + 0x20D6: 84, + 0x20D7: 84, + 0x20D8: 84, + 0x20D9: 84, + 0x20DA: 84, + 0x20DB: 84, + 0x20DC: 84, + 0x20DD: 84, + 0x20DE: 84, + 0x20DF: 84, + 0x20E0: 84, + 0x20E1: 84, + 0x20E2: 84, + 0x20E3: 84, + 0x20E4: 84, + 0x20E5: 84, + 0x20E6: 84, + 0x20E7: 84, + 0x20E8: 84, + 0x20E9: 84, + 0x20EA: 84, + 0x20EB: 84, + 0x20EC: 84, + 0x20ED: 84, + 0x20EE: 84, + 0x20EF: 84, + 0x20F0: 84, + 0x2CEF: 84, + 0x2CF0: 84, + 0x2CF1: 84, + 0x2D7F: 84, + 0x2DE0: 84, + 0x2DE1: 84, + 0x2DE2: 84, + 0x2DE3: 84, + 0x2DE4: 84, + 0x2DE5: 84, + 0x2DE6: 84, + 0x2DE7: 84, + 0x2DE8: 84, + 0x2DE9: 84, + 0x2DEA: 84, + 0x2DEB: 84, + 0x2DEC: 84, + 0x2DED: 84, + 0x2DEE: 84, + 0x2DEF: 84, + 0x2DF0: 84, + 0x2DF1: 84, + 0x2DF2: 84, + 0x2DF3: 84, + 0x2DF4: 84, + 0x2DF5: 84, + 0x2DF6: 84, + 0x2DF7: 84, + 0x2DF8: 84, + 0x2DF9: 84, + 0x2DFA: 84, + 0x2DFB: 84, + 0x2DFC: 84, + 0x2DFD: 84, + 0x2DFE: 84, + 0x2DFF: 84, + 0x302A: 84, + 0x302B: 84, + 0x302C: 84, + 0x302D: 84, + 0x3099: 84, + 0x309A: 84, + 0xA66F: 84, + 0xA670: 84, + 0xA671: 84, + 0xA672: 84, + 0xA674: 84, + 0xA675: 84, + 0xA676: 84, + 0xA677: 84, + 0xA678: 84, + 0xA679: 84, + 0xA67A: 84, + 0xA67B: 84, + 0xA67C: 84, + 0xA67D: 84, + 0xA69E: 84, + 0xA69F: 84, + 0xA6F0: 84, + 0xA6F1: 84, + 0xA802: 84, + 0xA806: 84, + 0xA80B: 84, + 0xA825: 84, + 0xA826: 84, + 0xA82C: 84, + 0xA840: 68, + 0xA841: 68, + 0xA842: 68, + 0xA843: 68, + 0xA844: 68, + 0xA845: 68, + 0xA846: 68, + 0xA847: 68, + 0xA848: 68, + 0xA849: 68, + 0xA84A: 68, + 0xA84B: 68, + 0xA84C: 68, + 0xA84D: 68, + 0xA84E: 68, + 0xA84F: 68, + 0xA850: 68, + 0xA851: 68, + 0xA852: 68, + 0xA853: 68, + 0xA854: 68, + 0xA855: 68, + 0xA856: 68, + 0xA857: 68, + 0xA858: 68, + 0xA859: 68, + 0xA85A: 68, + 0xA85B: 68, + 0xA85C: 68, + 0xA85D: 68, + 0xA85E: 68, + 0xA85F: 68, + 0xA860: 68, + 0xA861: 68, + 0xA862: 68, + 0xA863: 68, + 0xA864: 68, + 0xA865: 68, + 0xA866: 68, + 0xA867: 68, + 0xA868: 68, + 0xA869: 68, + 0xA86A: 68, + 0xA86B: 68, + 0xA86C: 68, + 0xA86D: 68, + 0xA86E: 68, + 0xA86F: 68, + 0xA870: 68, + 0xA871: 68, + 0xA872: 76, + 0xA8C4: 84, + 0xA8C5: 84, + 0xA8E0: 84, + 0xA8E1: 84, + 0xA8E2: 84, + 0xA8E3: 84, + 0xA8E4: 84, + 0xA8E5: 84, + 0xA8E6: 84, + 0xA8E7: 84, + 0xA8E8: 84, + 0xA8E9: 
84, + 0xA8EA: 84, + 0xA8EB: 84, + 0xA8EC: 84, + 0xA8ED: 84, + 0xA8EE: 84, + 0xA8EF: 84, + 0xA8F0: 84, + 0xA8F1: 84, + 0xA8FF: 84, + 0xA926: 84, + 0xA927: 84, + 0xA928: 84, + 0xA929: 84, + 0xA92A: 84, + 0xA92B: 84, + 0xA92C: 84, + 0xA92D: 84, + 0xA947: 84, + 0xA948: 84, + 0xA949: 84, + 0xA94A: 84, + 0xA94B: 84, + 0xA94C: 84, + 0xA94D: 84, + 0xA94E: 84, + 0xA94F: 84, + 0xA950: 84, + 0xA951: 84, + 0xA980: 84, + 0xA981: 84, + 0xA982: 84, + 0xA9B3: 84, + 0xA9B6: 84, + 0xA9B7: 84, + 0xA9B8: 84, + 0xA9B9: 84, + 0xA9BC: 84, + 0xA9BD: 84, + 0xA9E5: 84, + 0xAA29: 84, + 0xAA2A: 84, + 0xAA2B: 84, + 0xAA2C: 84, + 0xAA2D: 84, + 0xAA2E: 84, + 0xAA31: 84, + 0xAA32: 84, + 0xAA35: 84, + 0xAA36: 84, + 0xAA43: 84, + 0xAA4C: 84, + 0xAA7C: 84, + 0xAAB0: 84, + 0xAAB2: 84, + 0xAAB3: 84, + 0xAAB4: 84, + 0xAAB7: 84, + 0xAAB8: 84, + 0xAABE: 84, + 0xAABF: 84, + 0xAAC1: 84, + 0xAAEC: 84, + 0xAAED: 84, + 0xAAF6: 84, + 0xABE5: 84, + 0xABE8: 84, + 0xABED: 84, + 0xFB1E: 84, + 0xFE00: 84, + 0xFE01: 84, + 0xFE02: 84, + 0xFE03: 84, + 0xFE04: 84, + 0xFE05: 84, + 0xFE06: 84, + 0xFE07: 84, + 0xFE08: 84, + 0xFE09: 84, + 0xFE0A: 84, + 0xFE0B: 84, + 0xFE0C: 84, + 0xFE0D: 84, + 0xFE0E: 84, + 0xFE0F: 84, + 0xFE20: 84, + 0xFE21: 84, + 0xFE22: 84, + 0xFE23: 84, + 0xFE24: 84, + 0xFE25: 84, + 0xFE26: 84, + 0xFE27: 84, + 0xFE28: 84, + 0xFE29: 84, + 0xFE2A: 84, + 0xFE2B: 84, + 0xFE2C: 84, + 0xFE2D: 84, + 0xFE2E: 84, + 0xFE2F: 84, + 0xFEFF: 84, + 0xFFF9: 84, + 0xFFFA: 84, + 0xFFFB: 84, + 0x101FD: 84, + 0x102E0: 84, + 0x10376: 84, + 0x10377: 84, + 0x10378: 84, + 0x10379: 84, + 0x1037A: 84, + 0x10A01: 84, + 0x10A02: 84, + 0x10A03: 84, + 0x10A05: 84, + 0x10A06: 84, + 0x10A0C: 84, + 0x10A0D: 84, + 0x10A0E: 84, + 0x10A0F: 84, + 0x10A38: 84, + 0x10A39: 84, + 0x10A3A: 84, + 0x10A3F: 84, + 0x10AC0: 68, + 0x10AC1: 68, + 0x10AC2: 68, + 0x10AC3: 68, + 0x10AC4: 68, + 0x10AC5: 82, + 0x10AC7: 82, + 0x10AC9: 82, + 0x10ACA: 82, + 0x10ACD: 76, + 0x10ACE: 82, + 0x10ACF: 82, + 0x10AD0: 82, + 0x10AD1: 82, + 0x10AD2: 82, + 0x10AD3: 68, + 0x10AD4: 68, + 0x10AD5: 68, + 0x10AD6: 68, + 0x10AD7: 76, + 0x10AD8: 68, + 0x10AD9: 68, + 0x10ADA: 68, + 0x10ADB: 68, + 0x10ADC: 68, + 0x10ADD: 82, + 0x10ADE: 68, + 0x10ADF: 68, + 0x10AE0: 68, + 0x10AE1: 82, + 0x10AE4: 82, + 0x10AE5: 84, + 0x10AE6: 84, + 0x10AEB: 68, + 0x10AEC: 68, + 0x10AED: 68, + 0x10AEE: 68, + 0x10AEF: 82, + 0x10B80: 68, + 0x10B81: 82, + 0x10B82: 68, + 0x10B83: 82, + 0x10B84: 82, + 0x10B85: 82, + 0x10B86: 68, + 0x10B87: 68, + 0x10B88: 68, + 0x10B89: 82, + 0x10B8A: 68, + 0x10B8B: 68, + 0x10B8C: 82, + 0x10B8D: 68, + 0x10B8E: 82, + 0x10B8F: 82, + 0x10B90: 68, + 0x10B91: 82, + 0x10BA9: 82, + 0x10BAA: 82, + 0x10BAB: 82, + 0x10BAC: 82, + 0x10BAD: 68, + 0x10BAE: 68, + 0x10D00: 76, + 0x10D01: 68, + 0x10D02: 68, + 0x10D03: 68, + 0x10D04: 68, + 0x10D05: 68, + 0x10D06: 68, + 0x10D07: 68, + 0x10D08: 68, + 0x10D09: 68, + 0x10D0A: 68, + 0x10D0B: 68, + 0x10D0C: 68, + 0x10D0D: 68, + 0x10D0E: 68, + 0x10D0F: 68, + 0x10D10: 68, + 0x10D11: 68, + 0x10D12: 68, + 0x10D13: 68, + 0x10D14: 68, + 0x10D15: 68, + 0x10D16: 68, + 0x10D17: 68, + 0x10D18: 68, + 0x10D19: 68, + 0x10D1A: 68, + 0x10D1B: 68, + 0x10D1C: 68, + 0x10D1D: 68, + 0x10D1E: 68, + 0x10D1F: 68, + 0x10D20: 68, + 0x10D21: 68, + 0x10D22: 82, + 0x10D23: 68, + 0x10D24: 84, + 0x10D25: 84, + 0x10D26: 84, + 0x10D27: 84, + 0x10EAB: 84, + 0x10EAC: 84, + 0x10EFD: 84, + 0x10EFE: 84, + 0x10EFF: 84, + 0x10F30: 68, + 0x10F31: 68, + 0x10F32: 68, + 0x10F33: 82, + 0x10F34: 68, + 0x10F35: 68, + 0x10F36: 68, + 0x10F37: 68, + 0x10F38: 68, + 0x10F39: 68, + 0x10F3A: 68, + 0x10F3B: 68, + 
0x10F3C: 68, + 0x10F3D: 68, + 0x10F3E: 68, + 0x10F3F: 68, + 0x10F40: 68, + 0x10F41: 68, + 0x10F42: 68, + 0x10F43: 68, + 0x10F44: 68, + 0x10F46: 84, + 0x10F47: 84, + 0x10F48: 84, + 0x10F49: 84, + 0x10F4A: 84, + 0x10F4B: 84, + 0x10F4C: 84, + 0x10F4D: 84, + 0x10F4E: 84, + 0x10F4F: 84, + 0x10F50: 84, + 0x10F51: 68, + 0x10F52: 68, + 0x10F53: 68, + 0x10F54: 82, + 0x10F70: 68, + 0x10F71: 68, + 0x10F72: 68, + 0x10F73: 68, + 0x10F74: 82, + 0x10F75: 82, + 0x10F76: 68, + 0x10F77: 68, + 0x10F78: 68, + 0x10F79: 68, + 0x10F7A: 68, + 0x10F7B: 68, + 0x10F7C: 68, + 0x10F7D: 68, + 0x10F7E: 68, + 0x10F7F: 68, + 0x10F80: 68, + 0x10F81: 68, + 0x10F82: 84, + 0x10F83: 84, + 0x10F84: 84, + 0x10F85: 84, + 0x10FB0: 68, + 0x10FB2: 68, + 0x10FB3: 68, + 0x10FB4: 82, + 0x10FB5: 82, + 0x10FB6: 82, + 0x10FB8: 68, + 0x10FB9: 82, + 0x10FBA: 82, + 0x10FBB: 68, + 0x10FBC: 68, + 0x10FBD: 82, + 0x10FBE: 68, + 0x10FBF: 68, + 0x10FC1: 68, + 0x10FC2: 82, + 0x10FC3: 82, + 0x10FC4: 68, + 0x10FC9: 82, + 0x10FCA: 68, + 0x10FCB: 76, + 0x11001: 84, + 0x11038: 84, + 0x11039: 84, + 0x1103A: 84, + 0x1103B: 84, + 0x1103C: 84, + 0x1103D: 84, + 0x1103E: 84, + 0x1103F: 84, + 0x11040: 84, + 0x11041: 84, + 0x11042: 84, + 0x11043: 84, + 0x11044: 84, + 0x11045: 84, + 0x11046: 84, + 0x11070: 84, + 0x11073: 84, + 0x11074: 84, + 0x1107F: 84, + 0x11080: 84, + 0x11081: 84, + 0x110B3: 84, + 0x110B4: 84, + 0x110B5: 84, + 0x110B6: 84, + 0x110B9: 84, + 0x110BA: 84, + 0x110C2: 84, + 0x11100: 84, + 0x11101: 84, + 0x11102: 84, + 0x11127: 84, + 0x11128: 84, + 0x11129: 84, + 0x1112A: 84, + 0x1112B: 84, + 0x1112D: 84, + 0x1112E: 84, + 0x1112F: 84, + 0x11130: 84, + 0x11131: 84, + 0x11132: 84, + 0x11133: 84, + 0x11134: 84, + 0x11173: 84, + 0x11180: 84, + 0x11181: 84, + 0x111B6: 84, + 0x111B7: 84, + 0x111B8: 84, + 0x111B9: 84, + 0x111BA: 84, + 0x111BB: 84, + 0x111BC: 84, + 0x111BD: 84, + 0x111BE: 84, + 0x111C9: 84, + 0x111CA: 84, + 0x111CB: 84, + 0x111CC: 84, + 0x111CF: 84, + 0x1122F: 84, + 0x11230: 84, + 0x11231: 84, + 0x11234: 84, + 0x11236: 84, + 0x11237: 84, + 0x1123E: 84, + 0x11241: 84, + 0x112DF: 84, + 0x112E3: 84, + 0x112E4: 84, + 0x112E5: 84, + 0x112E6: 84, + 0x112E7: 84, + 0x112E8: 84, + 0x112E9: 84, + 0x112EA: 84, + 0x11300: 84, + 0x11301: 84, + 0x1133B: 84, + 0x1133C: 84, + 0x11340: 84, + 0x11366: 84, + 0x11367: 84, + 0x11368: 84, + 0x11369: 84, + 0x1136A: 84, + 0x1136B: 84, + 0x1136C: 84, + 0x11370: 84, + 0x11371: 84, + 0x11372: 84, + 0x11373: 84, + 0x11374: 84, + 0x11438: 84, + 0x11439: 84, + 0x1143A: 84, + 0x1143B: 84, + 0x1143C: 84, + 0x1143D: 84, + 0x1143E: 84, + 0x1143F: 84, + 0x11442: 84, + 0x11443: 84, + 0x11444: 84, + 0x11446: 84, + 0x1145E: 84, + 0x114B3: 84, + 0x114B4: 84, + 0x114B5: 84, + 0x114B6: 84, + 0x114B7: 84, + 0x114B8: 84, + 0x114BA: 84, + 0x114BF: 84, + 0x114C0: 84, + 0x114C2: 84, + 0x114C3: 84, + 0x115B2: 84, + 0x115B3: 84, + 0x115B4: 84, + 0x115B5: 84, + 0x115BC: 84, + 0x115BD: 84, + 0x115BF: 84, + 0x115C0: 84, + 0x115DC: 84, + 0x115DD: 84, + 0x11633: 84, + 0x11634: 84, + 0x11635: 84, + 0x11636: 84, + 0x11637: 84, + 0x11638: 84, + 0x11639: 84, + 0x1163A: 84, + 0x1163D: 84, + 0x1163F: 84, + 0x11640: 84, + 0x116AB: 84, + 0x116AD: 84, + 0x116B0: 84, + 0x116B1: 84, + 0x116B2: 84, + 0x116B3: 84, + 0x116B4: 84, + 0x116B5: 84, + 0x116B7: 84, + 0x1171D: 84, + 0x1171E: 84, + 0x1171F: 84, + 0x11722: 84, + 0x11723: 84, + 0x11724: 84, + 0x11725: 84, + 0x11727: 84, + 0x11728: 84, + 0x11729: 84, + 0x1172A: 84, + 0x1172B: 84, + 0x1182F: 84, + 0x11830: 84, + 0x11831: 84, + 0x11832: 84, + 0x11833: 84, + 0x11834: 84, + 0x11835: 84, + 0x11836: 84, + 
0x11837: 84, + 0x11839: 84, + 0x1183A: 84, + 0x1193B: 84, + 0x1193C: 84, + 0x1193E: 84, + 0x11943: 84, + 0x119D4: 84, + 0x119D5: 84, + 0x119D6: 84, + 0x119D7: 84, + 0x119DA: 84, + 0x119DB: 84, + 0x119E0: 84, + 0x11A01: 84, + 0x11A02: 84, + 0x11A03: 84, + 0x11A04: 84, + 0x11A05: 84, + 0x11A06: 84, + 0x11A07: 84, + 0x11A08: 84, + 0x11A09: 84, + 0x11A0A: 84, + 0x11A33: 84, + 0x11A34: 84, + 0x11A35: 84, + 0x11A36: 84, + 0x11A37: 84, + 0x11A38: 84, + 0x11A3B: 84, + 0x11A3C: 84, + 0x11A3D: 84, + 0x11A3E: 84, + 0x11A47: 84, + 0x11A51: 84, + 0x11A52: 84, + 0x11A53: 84, + 0x11A54: 84, + 0x11A55: 84, + 0x11A56: 84, + 0x11A59: 84, + 0x11A5A: 84, + 0x11A5B: 84, + 0x11A8A: 84, + 0x11A8B: 84, + 0x11A8C: 84, + 0x11A8D: 84, + 0x11A8E: 84, + 0x11A8F: 84, + 0x11A90: 84, + 0x11A91: 84, + 0x11A92: 84, + 0x11A93: 84, + 0x11A94: 84, + 0x11A95: 84, + 0x11A96: 84, + 0x11A98: 84, + 0x11A99: 84, + 0x11C30: 84, + 0x11C31: 84, + 0x11C32: 84, + 0x11C33: 84, + 0x11C34: 84, + 0x11C35: 84, + 0x11C36: 84, + 0x11C38: 84, + 0x11C39: 84, + 0x11C3A: 84, + 0x11C3B: 84, + 0x11C3C: 84, + 0x11C3D: 84, + 0x11C3F: 84, + 0x11C92: 84, + 0x11C93: 84, + 0x11C94: 84, + 0x11C95: 84, + 0x11C96: 84, + 0x11C97: 84, + 0x11C98: 84, + 0x11C99: 84, + 0x11C9A: 84, + 0x11C9B: 84, + 0x11C9C: 84, + 0x11C9D: 84, + 0x11C9E: 84, + 0x11C9F: 84, + 0x11CA0: 84, + 0x11CA1: 84, + 0x11CA2: 84, + 0x11CA3: 84, + 0x11CA4: 84, + 0x11CA5: 84, + 0x11CA6: 84, + 0x11CA7: 84, + 0x11CAA: 84, + 0x11CAB: 84, + 0x11CAC: 84, + 0x11CAD: 84, + 0x11CAE: 84, + 0x11CAF: 84, + 0x11CB0: 84, + 0x11CB2: 84, + 0x11CB3: 84, + 0x11CB5: 84, + 0x11CB6: 84, + 0x11D31: 84, + 0x11D32: 84, + 0x11D33: 84, + 0x11D34: 84, + 0x11D35: 84, + 0x11D36: 84, + 0x11D3A: 84, + 0x11D3C: 84, + 0x11D3D: 84, + 0x11D3F: 84, + 0x11D40: 84, + 0x11D41: 84, + 0x11D42: 84, + 0x11D43: 84, + 0x11D44: 84, + 0x11D45: 84, + 0x11D47: 84, + 0x11D90: 84, + 0x11D91: 84, + 0x11D95: 84, + 0x11D97: 84, + 0x11EF3: 84, + 0x11EF4: 84, + 0x11F00: 84, + 0x11F01: 84, + 0x11F36: 84, + 0x11F37: 84, + 0x11F38: 84, + 0x11F39: 84, + 0x11F3A: 84, + 0x11F40: 84, + 0x11F42: 84, + 0x13430: 84, + 0x13431: 84, + 0x13432: 84, + 0x13433: 84, + 0x13434: 84, + 0x13435: 84, + 0x13436: 84, + 0x13437: 84, + 0x13438: 84, + 0x13439: 84, + 0x1343A: 84, + 0x1343B: 84, + 0x1343C: 84, + 0x1343D: 84, + 0x1343E: 84, + 0x1343F: 84, + 0x13440: 84, + 0x13447: 84, + 0x13448: 84, + 0x13449: 84, + 0x1344A: 84, + 0x1344B: 84, + 0x1344C: 84, + 0x1344D: 84, + 0x1344E: 84, + 0x1344F: 84, + 0x13450: 84, + 0x13451: 84, + 0x13452: 84, + 0x13453: 84, + 0x13454: 84, + 0x13455: 84, + 0x16AF0: 84, + 0x16AF1: 84, + 0x16AF2: 84, + 0x16AF3: 84, + 0x16AF4: 84, + 0x16B30: 84, + 0x16B31: 84, + 0x16B32: 84, + 0x16B33: 84, + 0x16B34: 84, + 0x16B35: 84, + 0x16B36: 84, + 0x16F4F: 84, + 0x16F8F: 84, + 0x16F90: 84, + 0x16F91: 84, + 0x16F92: 84, + 0x16FE4: 84, + 0x1BC9D: 84, + 0x1BC9E: 84, + 0x1BCA0: 84, + 0x1BCA1: 84, + 0x1BCA2: 84, + 0x1BCA3: 84, + 0x1CF00: 84, + 0x1CF01: 84, + 0x1CF02: 84, + 0x1CF03: 84, + 0x1CF04: 84, + 0x1CF05: 84, + 0x1CF06: 84, + 0x1CF07: 84, + 0x1CF08: 84, + 0x1CF09: 84, + 0x1CF0A: 84, + 0x1CF0B: 84, + 0x1CF0C: 84, + 0x1CF0D: 84, + 0x1CF0E: 84, + 0x1CF0F: 84, + 0x1CF10: 84, + 0x1CF11: 84, + 0x1CF12: 84, + 0x1CF13: 84, + 0x1CF14: 84, + 0x1CF15: 84, + 0x1CF16: 84, + 0x1CF17: 84, + 0x1CF18: 84, + 0x1CF19: 84, + 0x1CF1A: 84, + 0x1CF1B: 84, + 0x1CF1C: 84, + 0x1CF1D: 84, + 0x1CF1E: 84, + 0x1CF1F: 84, + 0x1CF20: 84, + 0x1CF21: 84, + 0x1CF22: 84, + 0x1CF23: 84, + 0x1CF24: 84, + 0x1CF25: 84, + 0x1CF26: 84, + 0x1CF27: 84, + 0x1CF28: 84, + 0x1CF29: 84, + 0x1CF2A: 84, + 
0x1CF2B: 84, + 0x1CF2C: 84, + 0x1CF2D: 84, + 0x1CF30: 84, + 0x1CF31: 84, + 0x1CF32: 84, + 0x1CF33: 84, + 0x1CF34: 84, + 0x1CF35: 84, + 0x1CF36: 84, + 0x1CF37: 84, + 0x1CF38: 84, + 0x1CF39: 84, + 0x1CF3A: 84, + 0x1CF3B: 84, + 0x1CF3C: 84, + 0x1CF3D: 84, + 0x1CF3E: 84, + 0x1CF3F: 84, + 0x1CF40: 84, + 0x1CF41: 84, + 0x1CF42: 84, + 0x1CF43: 84, + 0x1CF44: 84, + 0x1CF45: 84, + 0x1CF46: 84, + 0x1D167: 84, + 0x1D168: 84, + 0x1D169: 84, + 0x1D173: 84, + 0x1D174: 84, + 0x1D175: 84, + 0x1D176: 84, + 0x1D177: 84, + 0x1D178: 84, + 0x1D179: 84, + 0x1D17A: 84, + 0x1D17B: 84, + 0x1D17C: 84, + 0x1D17D: 84, + 0x1D17E: 84, + 0x1D17F: 84, + 0x1D180: 84, + 0x1D181: 84, + 0x1D182: 84, + 0x1D185: 84, + 0x1D186: 84, + 0x1D187: 84, + 0x1D188: 84, + 0x1D189: 84, + 0x1D18A: 84, + 0x1D18B: 84, + 0x1D1AA: 84, + 0x1D1AB: 84, + 0x1D1AC: 84, + 0x1D1AD: 84, + 0x1D242: 84, + 0x1D243: 84, + 0x1D244: 84, + 0x1DA00: 84, + 0x1DA01: 84, + 0x1DA02: 84, + 0x1DA03: 84, + 0x1DA04: 84, + 0x1DA05: 84, + 0x1DA06: 84, + 0x1DA07: 84, + 0x1DA08: 84, + 0x1DA09: 84, + 0x1DA0A: 84, + 0x1DA0B: 84, + 0x1DA0C: 84, + 0x1DA0D: 84, + 0x1DA0E: 84, + 0x1DA0F: 84, + 0x1DA10: 84, + 0x1DA11: 84, + 0x1DA12: 84, + 0x1DA13: 84, + 0x1DA14: 84, + 0x1DA15: 84, + 0x1DA16: 84, + 0x1DA17: 84, + 0x1DA18: 84, + 0x1DA19: 84, + 0x1DA1A: 84, + 0x1DA1B: 84, + 0x1DA1C: 84, + 0x1DA1D: 84, + 0x1DA1E: 84, + 0x1DA1F: 84, + 0x1DA20: 84, + 0x1DA21: 84, + 0x1DA22: 84, + 0x1DA23: 84, + 0x1DA24: 84, + 0x1DA25: 84, + 0x1DA26: 84, + 0x1DA27: 84, + 0x1DA28: 84, + 0x1DA29: 84, + 0x1DA2A: 84, + 0x1DA2B: 84, + 0x1DA2C: 84, + 0x1DA2D: 84, + 0x1DA2E: 84, + 0x1DA2F: 84, + 0x1DA30: 84, + 0x1DA31: 84, + 0x1DA32: 84, + 0x1DA33: 84, + 0x1DA34: 84, + 0x1DA35: 84, + 0x1DA36: 84, + 0x1DA3B: 84, + 0x1DA3C: 84, + 0x1DA3D: 84, + 0x1DA3E: 84, + 0x1DA3F: 84, + 0x1DA40: 84, + 0x1DA41: 84, + 0x1DA42: 84, + 0x1DA43: 84, + 0x1DA44: 84, + 0x1DA45: 84, + 0x1DA46: 84, + 0x1DA47: 84, + 0x1DA48: 84, + 0x1DA49: 84, + 0x1DA4A: 84, + 0x1DA4B: 84, + 0x1DA4C: 84, + 0x1DA4D: 84, + 0x1DA4E: 84, + 0x1DA4F: 84, + 0x1DA50: 84, + 0x1DA51: 84, + 0x1DA52: 84, + 0x1DA53: 84, + 0x1DA54: 84, + 0x1DA55: 84, + 0x1DA56: 84, + 0x1DA57: 84, + 0x1DA58: 84, + 0x1DA59: 84, + 0x1DA5A: 84, + 0x1DA5B: 84, + 0x1DA5C: 84, + 0x1DA5D: 84, + 0x1DA5E: 84, + 0x1DA5F: 84, + 0x1DA60: 84, + 0x1DA61: 84, + 0x1DA62: 84, + 0x1DA63: 84, + 0x1DA64: 84, + 0x1DA65: 84, + 0x1DA66: 84, + 0x1DA67: 84, + 0x1DA68: 84, + 0x1DA69: 84, + 0x1DA6A: 84, + 0x1DA6B: 84, + 0x1DA6C: 84, + 0x1DA75: 84, + 0x1DA84: 84, + 0x1DA9B: 84, + 0x1DA9C: 84, + 0x1DA9D: 84, + 0x1DA9E: 84, + 0x1DA9F: 84, + 0x1DAA1: 84, + 0x1DAA2: 84, + 0x1DAA3: 84, + 0x1DAA4: 84, + 0x1DAA5: 84, + 0x1DAA6: 84, + 0x1DAA7: 84, + 0x1DAA8: 84, + 0x1DAA9: 84, + 0x1DAAA: 84, + 0x1DAAB: 84, + 0x1DAAC: 84, + 0x1DAAD: 84, + 0x1DAAE: 84, + 0x1DAAF: 84, + 0x1E000: 84, + 0x1E001: 84, + 0x1E002: 84, + 0x1E003: 84, + 0x1E004: 84, + 0x1E005: 84, + 0x1E006: 84, + 0x1E008: 84, + 0x1E009: 84, + 0x1E00A: 84, + 0x1E00B: 84, + 0x1E00C: 84, + 0x1E00D: 84, + 0x1E00E: 84, + 0x1E00F: 84, + 0x1E010: 84, + 0x1E011: 84, + 0x1E012: 84, + 0x1E013: 84, + 0x1E014: 84, + 0x1E015: 84, + 0x1E016: 84, + 0x1E017: 84, + 0x1E018: 84, + 0x1E01B: 84, + 0x1E01C: 84, + 0x1E01D: 84, + 0x1E01E: 84, + 0x1E01F: 84, + 0x1E020: 84, + 0x1E021: 84, + 0x1E023: 84, + 0x1E024: 84, + 0x1E026: 84, + 0x1E027: 84, + 0x1E028: 84, + 0x1E029: 84, + 0x1E02A: 84, + 0x1E08F: 84, + 0x1E130: 84, + 0x1E131: 84, + 0x1E132: 84, + 0x1E133: 84, + 0x1E134: 84, + 0x1E135: 84, + 0x1E136: 84, + 0x1E2AE: 84, + 0x1E2EC: 84, + 0x1E2ED: 84, + 0x1E2EE: 84, + 0x1E2EF: 84, + 
0x1E4EC: 84, + 0x1E4ED: 84, + 0x1E4EE: 84, + 0x1E4EF: 84, + 0x1E8D0: 84, + 0x1E8D1: 84, + 0x1E8D2: 84, + 0x1E8D3: 84, + 0x1E8D4: 84, + 0x1E8D5: 84, + 0x1E8D6: 84, + 0x1E900: 68, + 0x1E901: 68, + 0x1E902: 68, + 0x1E903: 68, + 0x1E904: 68, + 0x1E905: 68, + 0x1E906: 68, + 0x1E907: 68, + 0x1E908: 68, + 0x1E909: 68, + 0x1E90A: 68, + 0x1E90B: 68, + 0x1E90C: 68, + 0x1E90D: 68, + 0x1E90E: 68, + 0x1E90F: 68, + 0x1E910: 68, + 0x1E911: 68, + 0x1E912: 68, + 0x1E913: 68, + 0x1E914: 68, + 0x1E915: 68, + 0x1E916: 68, + 0x1E917: 68, + 0x1E918: 68, + 0x1E919: 68, + 0x1E91A: 68, + 0x1E91B: 68, + 0x1E91C: 68, + 0x1E91D: 68, + 0x1E91E: 68, + 0x1E91F: 68, + 0x1E920: 68, + 0x1E921: 68, + 0x1E922: 68, + 0x1E923: 68, + 0x1E924: 68, + 0x1E925: 68, + 0x1E926: 68, + 0x1E927: 68, + 0x1E928: 68, + 0x1E929: 68, + 0x1E92A: 68, + 0x1E92B: 68, + 0x1E92C: 68, + 0x1E92D: 68, + 0x1E92E: 68, + 0x1E92F: 68, + 0x1E930: 68, + 0x1E931: 68, + 0x1E932: 68, + 0x1E933: 68, + 0x1E934: 68, + 0x1E935: 68, + 0x1E936: 68, + 0x1E937: 68, + 0x1E938: 68, + 0x1E939: 68, + 0x1E93A: 68, + 0x1E93B: 68, + 0x1E93C: 68, + 0x1E93D: 68, + 0x1E93E: 68, + 0x1E93F: 68, + 0x1E940: 68, + 0x1E941: 68, + 0x1E942: 68, + 0x1E943: 68, + 0x1E944: 84, + 0x1E945: 84, + 0x1E946: 84, + 0x1E947: 84, + 0x1E948: 84, + 0x1E949: 84, + 0x1E94A: 84, + 0x1E94B: 84, + 0xE0001: 84, + 0xE0020: 84, + 0xE0021: 84, + 0xE0022: 84, + 0xE0023: 84, + 0xE0024: 84, + 0xE0025: 84, + 0xE0026: 84, + 0xE0027: 84, + 0xE0028: 84, + 0xE0029: 84, + 0xE002A: 84, + 0xE002B: 84, + 0xE002C: 84, + 0xE002D: 84, + 0xE002E: 84, + 0xE002F: 84, + 0xE0030: 84, + 0xE0031: 84, + 0xE0032: 84, + 0xE0033: 84, + 0xE0034: 84, + 0xE0035: 84, + 0xE0036: 84, + 0xE0037: 84, + 0xE0038: 84, + 0xE0039: 84, + 0xE003A: 84, + 0xE003B: 84, + 0xE003C: 84, + 0xE003D: 84, + 0xE003E: 84, + 0xE003F: 84, + 0xE0040: 84, + 0xE0041: 84, + 0xE0042: 84, + 0xE0043: 84, + 0xE0044: 84, + 0xE0045: 84, + 0xE0046: 84, + 0xE0047: 84, + 0xE0048: 84, + 0xE0049: 84, + 0xE004A: 84, + 0xE004B: 84, + 0xE004C: 84, + 0xE004D: 84, + 0xE004E: 84, + 0xE004F: 84, + 0xE0050: 84, + 0xE0051: 84, + 0xE0052: 84, + 0xE0053: 84, + 0xE0054: 84, + 0xE0055: 84, + 0xE0056: 84, + 0xE0057: 84, + 0xE0058: 84, + 0xE0059: 84, + 0xE005A: 84, + 0xE005B: 84, + 0xE005C: 84, + 0xE005D: 84, + 0xE005E: 84, + 0xE005F: 84, + 0xE0060: 84, + 0xE0061: 84, + 0xE0062: 84, + 0xE0063: 84, + 0xE0064: 84, + 0xE0065: 84, + 0xE0066: 84, + 0xE0067: 84, + 0xE0068: 84, + 0xE0069: 84, + 0xE006A: 84, + 0xE006B: 84, + 0xE006C: 84, + 0xE006D: 84, + 0xE006E: 84, + 0xE006F: 84, + 0xE0070: 84, + 0xE0071: 84, + 0xE0072: 84, + 0xE0073: 84, + 0xE0074: 84, + 0xE0075: 84, + 0xE0076: 84, + 0xE0077: 84, + 0xE0078: 84, + 0xE0079: 84, + 0xE007A: 84, + 0xE007B: 84, + 0xE007C: 84, + 0xE007D: 84, + 0xE007E: 84, + 0xE007F: 84, + 0xE0100: 84, + 0xE0101: 84, + 0xE0102: 84, + 0xE0103: 84, + 0xE0104: 84, + 0xE0105: 84, + 0xE0106: 84, + 0xE0107: 84, + 0xE0108: 84, + 0xE0109: 84, + 0xE010A: 84, + 0xE010B: 84, + 0xE010C: 84, + 0xE010D: 84, + 0xE010E: 84, + 0xE010F: 84, + 0xE0110: 84, + 0xE0111: 84, + 0xE0112: 84, + 0xE0113: 84, + 0xE0114: 84, + 0xE0115: 84, + 0xE0116: 84, + 0xE0117: 84, + 0xE0118: 84, + 0xE0119: 84, + 0xE011A: 84, + 0xE011B: 84, + 0xE011C: 84, + 0xE011D: 84, + 0xE011E: 84, + 0xE011F: 84, + 0xE0120: 84, + 0xE0121: 84, + 0xE0122: 84, + 0xE0123: 84, + 0xE0124: 84, + 0xE0125: 84, + 0xE0126: 84, + 0xE0127: 84, + 0xE0128: 84, + 0xE0129: 84, + 0xE012A: 84, + 0xE012B: 84, + 0xE012C: 84, + 0xE012D: 84, + 0xE012E: 84, + 0xE012F: 84, + 0xE0130: 84, + 0xE0131: 84, + 0xE0132: 84, + 0xE0133: 84, + 0xE0134: 84, + 
0xE0135: 84, + 0xE0136: 84, + 0xE0137: 84, + 0xE0138: 84, + 0xE0139: 84, + 0xE013A: 84, + 0xE013B: 84, + 0xE013C: 84, + 0xE013D: 84, + 0xE013E: 84, + 0xE013F: 84, + 0xE0140: 84, + 0xE0141: 84, + 0xE0142: 84, + 0xE0143: 84, + 0xE0144: 84, + 0xE0145: 84, + 0xE0146: 84, + 0xE0147: 84, + 0xE0148: 84, + 0xE0149: 84, + 0xE014A: 84, + 0xE014B: 84, + 0xE014C: 84, + 0xE014D: 84, + 0xE014E: 84, + 0xE014F: 84, + 0xE0150: 84, + 0xE0151: 84, + 0xE0152: 84, + 0xE0153: 84, + 0xE0154: 84, + 0xE0155: 84, + 0xE0156: 84, + 0xE0157: 84, + 0xE0158: 84, + 0xE0159: 84, + 0xE015A: 84, + 0xE015B: 84, + 0xE015C: 84, + 0xE015D: 84, + 0xE015E: 84, + 0xE015F: 84, + 0xE0160: 84, + 0xE0161: 84, + 0xE0162: 84, + 0xE0163: 84, + 0xE0164: 84, + 0xE0165: 84, + 0xE0166: 84, + 0xE0167: 84, + 0xE0168: 84, + 0xE0169: 84, + 0xE016A: 84, + 0xE016B: 84, + 0xE016C: 84, + 0xE016D: 84, + 0xE016E: 84, + 0xE016F: 84, + 0xE0170: 84, + 0xE0171: 84, + 0xE0172: 84, + 0xE0173: 84, + 0xE0174: 84, + 0xE0175: 84, + 0xE0176: 84, + 0xE0177: 84, + 0xE0178: 84, + 0xE0179: 84, + 0xE017A: 84, + 0xE017B: 84, + 0xE017C: 84, + 0xE017D: 84, + 0xE017E: 84, + 0xE017F: 84, + 0xE0180: 84, + 0xE0181: 84, + 0xE0182: 84, + 0xE0183: 84, + 0xE0184: 84, + 0xE0185: 84, + 0xE0186: 84, + 0xE0187: 84, + 0xE0188: 84, + 0xE0189: 84, + 0xE018A: 84, + 0xE018B: 84, + 0xE018C: 84, + 0xE018D: 84, + 0xE018E: 84, + 0xE018F: 84, + 0xE0190: 84, + 0xE0191: 84, + 0xE0192: 84, + 0xE0193: 84, + 0xE0194: 84, + 0xE0195: 84, + 0xE0196: 84, + 0xE0197: 84, + 0xE0198: 84, + 0xE0199: 84, + 0xE019A: 84, + 0xE019B: 84, + 0xE019C: 84, + 0xE019D: 84, + 0xE019E: 84, + 0xE019F: 84, + 0xE01A0: 84, + 0xE01A1: 84, + 0xE01A2: 84, + 0xE01A3: 84, + 0xE01A4: 84, + 0xE01A5: 84, + 0xE01A6: 84, + 0xE01A7: 84, + 0xE01A8: 84, + 0xE01A9: 84, + 0xE01AA: 84, + 0xE01AB: 84, + 0xE01AC: 84, + 0xE01AD: 84, + 0xE01AE: 84, + 0xE01AF: 84, + 0xE01B0: 84, + 0xE01B1: 84, + 0xE01B2: 84, + 0xE01B3: 84, + 0xE01B4: 84, + 0xE01B5: 84, + 0xE01B6: 84, + 0xE01B7: 84, + 0xE01B8: 84, + 0xE01B9: 84, + 0xE01BA: 84, + 0xE01BB: 84, + 0xE01BC: 84, + 0xE01BD: 84, + 0xE01BE: 84, + 0xE01BF: 84, + 0xE01C0: 84, + 0xE01C1: 84, + 0xE01C2: 84, + 0xE01C3: 84, + 0xE01C4: 84, + 0xE01C5: 84, + 0xE01C6: 84, + 0xE01C7: 84, + 0xE01C8: 84, + 0xE01C9: 84, + 0xE01CA: 84, + 0xE01CB: 84, + 0xE01CC: 84, + 0xE01CD: 84, + 0xE01CE: 84, + 0xE01CF: 84, + 0xE01D0: 84, + 0xE01D1: 84, + 0xE01D2: 84, + 0xE01D3: 84, + 0xE01D4: 84, + 0xE01D5: 84, + 0xE01D6: 84, + 0xE01D7: 84, + 0xE01D8: 84, + 0xE01D9: 84, + 0xE01DA: 84, + 0xE01DB: 84, + 0xE01DC: 84, + 0xE01DD: 84, + 0xE01DE: 84, + 0xE01DF: 84, + 0xE01E0: 84, + 0xE01E1: 84, + 0xE01E2: 84, + 0xE01E3: 84, + 0xE01E4: 84, + 0xE01E5: 84, + 0xE01E6: 84, + 0xE01E7: 84, + 0xE01E8: 84, + 0xE01E9: 84, + 0xE01EA: 84, + 0xE01EB: 84, + 0xE01EC: 84, + 0xE01ED: 84, + 0xE01EE: 84, + 0xE01EF: 84, +} +codepoint_classes = { + "PVALID": ( + 0x2D0000002E, + 0x300000003A, + 0x610000007B, + 0xDF000000F7, + 0xF800000100, + 0x10100000102, + 0x10300000104, + 0x10500000106, + 0x10700000108, + 0x1090000010A, + 0x10B0000010C, + 0x10D0000010E, + 0x10F00000110, + 0x11100000112, + 0x11300000114, + 0x11500000116, + 0x11700000118, + 0x1190000011A, + 0x11B0000011C, + 0x11D0000011E, + 0x11F00000120, + 0x12100000122, + 0x12300000124, + 0x12500000126, + 0x12700000128, + 0x1290000012A, + 0x12B0000012C, + 0x12D0000012E, + 0x12F00000130, + 0x13100000132, + 0x13500000136, + 0x13700000139, + 0x13A0000013B, + 0x13C0000013D, + 0x13E0000013F, + 0x14200000143, + 0x14400000145, + 0x14600000147, + 0x14800000149, + 0x14B0000014C, + 0x14D0000014E, + 0x14F00000150, + 
0x15100000152, + 0x15300000154, + 0x15500000156, + 0x15700000158, + 0x1590000015A, + 0x15B0000015C, + 0x15D0000015E, + 0x15F00000160, + 0x16100000162, + 0x16300000164, + 0x16500000166, + 0x16700000168, + 0x1690000016A, + 0x16B0000016C, + 0x16D0000016E, + 0x16F00000170, + 0x17100000172, + 0x17300000174, + 0x17500000176, + 0x17700000178, + 0x17A0000017B, + 0x17C0000017D, + 0x17E0000017F, + 0x18000000181, + 0x18300000184, + 0x18500000186, + 0x18800000189, + 0x18C0000018E, + 0x19200000193, + 0x19500000196, + 0x1990000019C, + 0x19E0000019F, + 0x1A1000001A2, + 0x1A3000001A4, + 0x1A5000001A6, + 0x1A8000001A9, + 0x1AA000001AC, + 0x1AD000001AE, + 0x1B0000001B1, + 0x1B4000001B5, + 0x1B6000001B7, + 0x1B9000001BC, + 0x1BD000001C4, + 0x1CE000001CF, + 0x1D0000001D1, + 0x1D2000001D3, + 0x1D4000001D5, + 0x1D6000001D7, + 0x1D8000001D9, + 0x1DA000001DB, + 0x1DC000001DE, + 0x1DF000001E0, + 0x1E1000001E2, + 0x1E3000001E4, + 0x1E5000001E6, + 0x1E7000001E8, + 0x1E9000001EA, + 0x1EB000001EC, + 0x1ED000001EE, + 0x1EF000001F1, + 0x1F5000001F6, + 0x1F9000001FA, + 0x1FB000001FC, + 0x1FD000001FE, + 0x1FF00000200, + 0x20100000202, + 0x20300000204, + 0x20500000206, + 0x20700000208, + 0x2090000020A, + 0x20B0000020C, + 0x20D0000020E, + 0x20F00000210, + 0x21100000212, + 0x21300000214, + 0x21500000216, + 0x21700000218, + 0x2190000021A, + 0x21B0000021C, + 0x21D0000021E, + 0x21F00000220, + 0x22100000222, + 0x22300000224, + 0x22500000226, + 0x22700000228, + 0x2290000022A, + 0x22B0000022C, + 0x22D0000022E, + 0x22F00000230, + 0x23100000232, + 0x2330000023A, + 0x23C0000023D, + 0x23F00000241, + 0x24200000243, + 0x24700000248, + 0x2490000024A, + 0x24B0000024C, + 0x24D0000024E, + 0x24F000002B0, + 0x2B9000002C2, + 0x2C6000002D2, + 0x2EC000002ED, + 0x2EE000002EF, + 0x30000000340, + 0x34200000343, + 0x3460000034F, + 0x35000000370, + 0x37100000372, + 0x37300000374, + 0x37700000378, + 0x37B0000037E, + 0x39000000391, + 0x3AC000003CF, + 0x3D7000003D8, + 0x3D9000003DA, + 0x3DB000003DC, + 0x3DD000003DE, + 0x3DF000003E0, + 0x3E1000003E2, + 0x3E3000003E4, + 0x3E5000003E6, + 0x3E7000003E8, + 0x3E9000003EA, + 0x3EB000003EC, + 0x3ED000003EE, + 0x3EF000003F0, + 0x3F3000003F4, + 0x3F8000003F9, + 0x3FB000003FD, + 0x43000000460, + 0x46100000462, + 0x46300000464, + 0x46500000466, + 0x46700000468, + 0x4690000046A, + 0x46B0000046C, + 0x46D0000046E, + 0x46F00000470, + 0x47100000472, + 0x47300000474, + 0x47500000476, + 0x47700000478, + 0x4790000047A, + 0x47B0000047C, + 0x47D0000047E, + 0x47F00000480, + 0x48100000482, + 0x48300000488, + 0x48B0000048C, + 0x48D0000048E, + 0x48F00000490, + 0x49100000492, + 0x49300000494, + 0x49500000496, + 0x49700000498, + 0x4990000049A, + 0x49B0000049C, + 0x49D0000049E, + 0x49F000004A0, + 0x4A1000004A2, + 0x4A3000004A4, + 0x4A5000004A6, + 0x4A7000004A8, + 0x4A9000004AA, + 0x4AB000004AC, + 0x4AD000004AE, + 0x4AF000004B0, + 0x4B1000004B2, + 0x4B3000004B4, + 0x4B5000004B6, + 0x4B7000004B8, + 0x4B9000004BA, + 0x4BB000004BC, + 0x4BD000004BE, + 0x4BF000004C0, + 0x4C2000004C3, + 0x4C4000004C5, + 0x4C6000004C7, + 0x4C8000004C9, + 0x4CA000004CB, + 0x4CC000004CD, + 0x4CE000004D0, + 0x4D1000004D2, + 0x4D3000004D4, + 0x4D5000004D6, + 0x4D7000004D8, + 0x4D9000004DA, + 0x4DB000004DC, + 0x4DD000004DE, + 0x4DF000004E0, + 0x4E1000004E2, + 0x4E3000004E4, + 0x4E5000004E6, + 0x4E7000004E8, + 0x4E9000004EA, + 0x4EB000004EC, + 0x4ED000004EE, + 0x4EF000004F0, + 0x4F1000004F2, + 0x4F3000004F4, + 0x4F5000004F6, + 0x4F7000004F8, + 0x4F9000004FA, + 0x4FB000004FC, + 0x4FD000004FE, + 0x4FF00000500, + 0x50100000502, + 0x50300000504, + 0x50500000506, + 
0x50700000508, + 0x5090000050A, + 0x50B0000050C, + 0x50D0000050E, + 0x50F00000510, + 0x51100000512, + 0x51300000514, + 0x51500000516, + 0x51700000518, + 0x5190000051A, + 0x51B0000051C, + 0x51D0000051E, + 0x51F00000520, + 0x52100000522, + 0x52300000524, + 0x52500000526, + 0x52700000528, + 0x5290000052A, + 0x52B0000052C, + 0x52D0000052E, + 0x52F00000530, + 0x5590000055A, + 0x56000000587, + 0x58800000589, + 0x591000005BE, + 0x5BF000005C0, + 0x5C1000005C3, + 0x5C4000005C6, + 0x5C7000005C8, + 0x5D0000005EB, + 0x5EF000005F3, + 0x6100000061B, + 0x62000000640, + 0x64100000660, + 0x66E00000675, + 0x679000006D4, + 0x6D5000006DD, + 0x6DF000006E9, + 0x6EA000006F0, + 0x6FA00000700, + 0x7100000074B, + 0x74D000007B2, + 0x7C0000007F6, + 0x7FD000007FE, + 0x8000000082E, + 0x8400000085C, + 0x8600000086B, + 0x87000000888, + 0x8890000088F, + 0x898000008E2, + 0x8E300000958, + 0x96000000964, + 0x96600000970, + 0x97100000984, + 0x9850000098D, + 0x98F00000991, + 0x993000009A9, + 0x9AA000009B1, + 0x9B2000009B3, + 0x9B6000009BA, + 0x9BC000009C5, + 0x9C7000009C9, + 0x9CB000009CF, + 0x9D7000009D8, + 0x9E0000009E4, + 0x9E6000009F2, + 0x9FC000009FD, + 0x9FE000009FF, + 0xA0100000A04, + 0xA0500000A0B, + 0xA0F00000A11, + 0xA1300000A29, + 0xA2A00000A31, + 0xA3200000A33, + 0xA3500000A36, + 0xA3800000A3A, + 0xA3C00000A3D, + 0xA3E00000A43, + 0xA4700000A49, + 0xA4B00000A4E, + 0xA5100000A52, + 0xA5C00000A5D, + 0xA6600000A76, + 0xA8100000A84, + 0xA8500000A8E, + 0xA8F00000A92, + 0xA9300000AA9, + 0xAAA00000AB1, + 0xAB200000AB4, + 0xAB500000ABA, + 0xABC00000AC6, + 0xAC700000ACA, + 0xACB00000ACE, + 0xAD000000AD1, + 0xAE000000AE4, + 0xAE600000AF0, + 0xAF900000B00, + 0xB0100000B04, + 0xB0500000B0D, + 0xB0F00000B11, + 0xB1300000B29, + 0xB2A00000B31, + 0xB3200000B34, + 0xB3500000B3A, + 0xB3C00000B45, + 0xB4700000B49, + 0xB4B00000B4E, + 0xB5500000B58, + 0xB5F00000B64, + 0xB6600000B70, + 0xB7100000B72, + 0xB8200000B84, + 0xB8500000B8B, + 0xB8E00000B91, + 0xB9200000B96, + 0xB9900000B9B, + 0xB9C00000B9D, + 0xB9E00000BA0, + 0xBA300000BA5, + 0xBA800000BAB, + 0xBAE00000BBA, + 0xBBE00000BC3, + 0xBC600000BC9, + 0xBCA00000BCE, + 0xBD000000BD1, + 0xBD700000BD8, + 0xBE600000BF0, + 0xC0000000C0D, + 0xC0E00000C11, + 0xC1200000C29, + 0xC2A00000C3A, + 0xC3C00000C45, + 0xC4600000C49, + 0xC4A00000C4E, + 0xC5500000C57, + 0xC5800000C5B, + 0xC5D00000C5E, + 0xC6000000C64, + 0xC6600000C70, + 0xC8000000C84, + 0xC8500000C8D, + 0xC8E00000C91, + 0xC9200000CA9, + 0xCAA00000CB4, + 0xCB500000CBA, + 0xCBC00000CC5, + 0xCC600000CC9, + 0xCCA00000CCE, + 0xCD500000CD7, + 0xCDD00000CDF, + 0xCE000000CE4, + 0xCE600000CF0, + 0xCF100000CF4, + 0xD0000000D0D, + 0xD0E00000D11, + 0xD1200000D45, + 0xD4600000D49, + 0xD4A00000D4F, + 0xD5400000D58, + 0xD5F00000D64, + 0xD6600000D70, + 0xD7A00000D80, + 0xD8100000D84, + 0xD8500000D97, + 0xD9A00000DB2, + 0xDB300000DBC, + 0xDBD00000DBE, + 0xDC000000DC7, + 0xDCA00000DCB, + 0xDCF00000DD5, + 0xDD600000DD7, + 0xDD800000DE0, + 0xDE600000DF0, + 0xDF200000DF4, + 0xE0100000E33, + 0xE3400000E3B, + 0xE4000000E4F, + 0xE5000000E5A, + 0xE8100000E83, + 0xE8400000E85, + 0xE8600000E8B, + 0xE8C00000EA4, + 0xEA500000EA6, + 0xEA700000EB3, + 0xEB400000EBE, + 0xEC000000EC5, + 0xEC600000EC7, + 0xEC800000ECF, + 0xED000000EDA, + 0xEDE00000EE0, + 0xF0000000F01, + 0xF0B00000F0C, + 0xF1800000F1A, + 0xF2000000F2A, + 0xF3500000F36, + 0xF3700000F38, + 0xF3900000F3A, + 0xF3E00000F43, + 0xF4400000F48, + 0xF4900000F4D, + 0xF4E00000F52, + 0xF5300000F57, + 0xF5800000F5C, + 0xF5D00000F69, + 0xF6A00000F6D, + 0xF7100000F73, + 0xF7400000F75, + 0xF7A00000F81, + 0xF8200000F85, + 
0xF8600000F93, + 0xF9400000F98, + 0xF9900000F9D, + 0xF9E00000FA2, + 0xFA300000FA7, + 0xFA800000FAC, + 0xFAD00000FB9, + 0xFBA00000FBD, + 0xFC600000FC7, + 0x10000000104A, + 0x10500000109E, + 0x10D0000010FB, + 0x10FD00001100, + 0x120000001249, + 0x124A0000124E, + 0x125000001257, + 0x125800001259, + 0x125A0000125E, + 0x126000001289, + 0x128A0000128E, + 0x1290000012B1, + 0x12B2000012B6, + 0x12B8000012BF, + 0x12C0000012C1, + 0x12C2000012C6, + 0x12C8000012D7, + 0x12D800001311, + 0x131200001316, + 0x13180000135B, + 0x135D00001360, + 0x138000001390, + 0x13A0000013F6, + 0x14010000166D, + 0x166F00001680, + 0x16810000169B, + 0x16A0000016EB, + 0x16F1000016F9, + 0x170000001716, + 0x171F00001735, + 0x174000001754, + 0x17600000176D, + 0x176E00001771, + 0x177200001774, + 0x1780000017B4, + 0x17B6000017D4, + 0x17D7000017D8, + 0x17DC000017DE, + 0x17E0000017EA, + 0x18100000181A, + 0x182000001879, + 0x1880000018AB, + 0x18B0000018F6, + 0x19000000191F, + 0x19200000192C, + 0x19300000193C, + 0x19460000196E, + 0x197000001975, + 0x1980000019AC, + 0x19B0000019CA, + 0x19D0000019DA, + 0x1A0000001A1C, + 0x1A2000001A5F, + 0x1A6000001A7D, + 0x1A7F00001A8A, + 0x1A9000001A9A, + 0x1AA700001AA8, + 0x1AB000001ABE, + 0x1ABF00001ACF, + 0x1B0000001B4D, + 0x1B5000001B5A, + 0x1B6B00001B74, + 0x1B8000001BF4, + 0x1C0000001C38, + 0x1C4000001C4A, + 0x1C4D00001C7E, + 0x1CD000001CD3, + 0x1CD400001CFB, + 0x1D0000001D2C, + 0x1D2F00001D30, + 0x1D3B00001D3C, + 0x1D4E00001D4F, + 0x1D6B00001D78, + 0x1D7900001D9B, + 0x1DC000001E00, + 0x1E0100001E02, + 0x1E0300001E04, + 0x1E0500001E06, + 0x1E0700001E08, + 0x1E0900001E0A, + 0x1E0B00001E0C, + 0x1E0D00001E0E, + 0x1E0F00001E10, + 0x1E1100001E12, + 0x1E1300001E14, + 0x1E1500001E16, + 0x1E1700001E18, + 0x1E1900001E1A, + 0x1E1B00001E1C, + 0x1E1D00001E1E, + 0x1E1F00001E20, + 0x1E2100001E22, + 0x1E2300001E24, + 0x1E2500001E26, + 0x1E2700001E28, + 0x1E2900001E2A, + 0x1E2B00001E2C, + 0x1E2D00001E2E, + 0x1E2F00001E30, + 0x1E3100001E32, + 0x1E3300001E34, + 0x1E3500001E36, + 0x1E3700001E38, + 0x1E3900001E3A, + 0x1E3B00001E3C, + 0x1E3D00001E3E, + 0x1E3F00001E40, + 0x1E4100001E42, + 0x1E4300001E44, + 0x1E4500001E46, + 0x1E4700001E48, + 0x1E4900001E4A, + 0x1E4B00001E4C, + 0x1E4D00001E4E, + 0x1E4F00001E50, + 0x1E5100001E52, + 0x1E5300001E54, + 0x1E5500001E56, + 0x1E5700001E58, + 0x1E5900001E5A, + 0x1E5B00001E5C, + 0x1E5D00001E5E, + 0x1E5F00001E60, + 0x1E6100001E62, + 0x1E6300001E64, + 0x1E6500001E66, + 0x1E6700001E68, + 0x1E6900001E6A, + 0x1E6B00001E6C, + 0x1E6D00001E6E, + 0x1E6F00001E70, + 0x1E7100001E72, + 0x1E7300001E74, + 0x1E7500001E76, + 0x1E7700001E78, + 0x1E7900001E7A, + 0x1E7B00001E7C, + 0x1E7D00001E7E, + 0x1E7F00001E80, + 0x1E8100001E82, + 0x1E8300001E84, + 0x1E8500001E86, + 0x1E8700001E88, + 0x1E8900001E8A, + 0x1E8B00001E8C, + 0x1E8D00001E8E, + 0x1E8F00001E90, + 0x1E9100001E92, + 0x1E9300001E94, + 0x1E9500001E9A, + 0x1E9C00001E9E, + 0x1E9F00001EA0, + 0x1EA100001EA2, + 0x1EA300001EA4, + 0x1EA500001EA6, + 0x1EA700001EA8, + 0x1EA900001EAA, + 0x1EAB00001EAC, + 0x1EAD00001EAE, + 0x1EAF00001EB0, + 0x1EB100001EB2, + 0x1EB300001EB4, + 0x1EB500001EB6, + 0x1EB700001EB8, + 0x1EB900001EBA, + 0x1EBB00001EBC, + 0x1EBD00001EBE, + 0x1EBF00001EC0, + 0x1EC100001EC2, + 0x1EC300001EC4, + 0x1EC500001EC6, + 0x1EC700001EC8, + 0x1EC900001ECA, + 0x1ECB00001ECC, + 0x1ECD00001ECE, + 0x1ECF00001ED0, + 0x1ED100001ED2, + 0x1ED300001ED4, + 0x1ED500001ED6, + 0x1ED700001ED8, + 0x1ED900001EDA, + 0x1EDB00001EDC, + 0x1EDD00001EDE, + 0x1EDF00001EE0, + 0x1EE100001EE2, + 0x1EE300001EE4, + 0x1EE500001EE6, + 0x1EE700001EE8, + 0x1EE900001EEA, + 
0x1EEB00001EEC, + 0x1EED00001EEE, + 0x1EEF00001EF0, + 0x1EF100001EF2, + 0x1EF300001EF4, + 0x1EF500001EF6, + 0x1EF700001EF8, + 0x1EF900001EFA, + 0x1EFB00001EFC, + 0x1EFD00001EFE, + 0x1EFF00001F08, + 0x1F1000001F16, + 0x1F2000001F28, + 0x1F3000001F38, + 0x1F4000001F46, + 0x1F5000001F58, + 0x1F6000001F68, + 0x1F7000001F71, + 0x1F7200001F73, + 0x1F7400001F75, + 0x1F7600001F77, + 0x1F7800001F79, + 0x1F7A00001F7B, + 0x1F7C00001F7D, + 0x1FB000001FB2, + 0x1FB600001FB7, + 0x1FC600001FC7, + 0x1FD000001FD3, + 0x1FD600001FD8, + 0x1FE000001FE3, + 0x1FE400001FE8, + 0x1FF600001FF7, + 0x214E0000214F, + 0x218400002185, + 0x2C3000002C60, + 0x2C6100002C62, + 0x2C6500002C67, + 0x2C6800002C69, + 0x2C6A00002C6B, + 0x2C6C00002C6D, + 0x2C7100002C72, + 0x2C7300002C75, + 0x2C7600002C7C, + 0x2C8100002C82, + 0x2C8300002C84, + 0x2C8500002C86, + 0x2C8700002C88, + 0x2C8900002C8A, + 0x2C8B00002C8C, + 0x2C8D00002C8E, + 0x2C8F00002C90, + 0x2C9100002C92, + 0x2C9300002C94, + 0x2C9500002C96, + 0x2C9700002C98, + 0x2C9900002C9A, + 0x2C9B00002C9C, + 0x2C9D00002C9E, + 0x2C9F00002CA0, + 0x2CA100002CA2, + 0x2CA300002CA4, + 0x2CA500002CA6, + 0x2CA700002CA8, + 0x2CA900002CAA, + 0x2CAB00002CAC, + 0x2CAD00002CAE, + 0x2CAF00002CB0, + 0x2CB100002CB2, + 0x2CB300002CB4, + 0x2CB500002CB6, + 0x2CB700002CB8, + 0x2CB900002CBA, + 0x2CBB00002CBC, + 0x2CBD00002CBE, + 0x2CBF00002CC0, + 0x2CC100002CC2, + 0x2CC300002CC4, + 0x2CC500002CC6, + 0x2CC700002CC8, + 0x2CC900002CCA, + 0x2CCB00002CCC, + 0x2CCD00002CCE, + 0x2CCF00002CD0, + 0x2CD100002CD2, + 0x2CD300002CD4, + 0x2CD500002CD6, + 0x2CD700002CD8, + 0x2CD900002CDA, + 0x2CDB00002CDC, + 0x2CDD00002CDE, + 0x2CDF00002CE0, + 0x2CE100002CE2, + 0x2CE300002CE5, + 0x2CEC00002CED, + 0x2CEE00002CF2, + 0x2CF300002CF4, + 0x2D0000002D26, + 0x2D2700002D28, + 0x2D2D00002D2E, + 0x2D3000002D68, + 0x2D7F00002D97, + 0x2DA000002DA7, + 0x2DA800002DAF, + 0x2DB000002DB7, + 0x2DB800002DBF, + 0x2DC000002DC7, + 0x2DC800002DCF, + 0x2DD000002DD7, + 0x2DD800002DDF, + 0x2DE000002E00, + 0x2E2F00002E30, + 0x300500003008, + 0x302A0000302E, + 0x303C0000303D, + 0x304100003097, + 0x30990000309B, + 0x309D0000309F, + 0x30A1000030FB, + 0x30FC000030FF, + 0x310500003130, + 0x31A0000031C0, + 0x31F000003200, + 0x340000004DC0, + 0x4E000000A48D, + 0xA4D00000A4FE, + 0xA5000000A60D, + 0xA6100000A62C, + 0xA6410000A642, + 0xA6430000A644, + 0xA6450000A646, + 0xA6470000A648, + 0xA6490000A64A, + 0xA64B0000A64C, + 0xA64D0000A64E, + 0xA64F0000A650, + 0xA6510000A652, + 0xA6530000A654, + 0xA6550000A656, + 0xA6570000A658, + 0xA6590000A65A, + 0xA65B0000A65C, + 0xA65D0000A65E, + 0xA65F0000A660, + 0xA6610000A662, + 0xA6630000A664, + 0xA6650000A666, + 0xA6670000A668, + 0xA6690000A66A, + 0xA66B0000A66C, + 0xA66D0000A670, + 0xA6740000A67E, + 0xA67F0000A680, + 0xA6810000A682, + 0xA6830000A684, + 0xA6850000A686, + 0xA6870000A688, + 0xA6890000A68A, + 0xA68B0000A68C, + 0xA68D0000A68E, + 0xA68F0000A690, + 0xA6910000A692, + 0xA6930000A694, + 0xA6950000A696, + 0xA6970000A698, + 0xA6990000A69A, + 0xA69B0000A69C, + 0xA69E0000A6E6, + 0xA6F00000A6F2, + 0xA7170000A720, + 0xA7230000A724, + 0xA7250000A726, + 0xA7270000A728, + 0xA7290000A72A, + 0xA72B0000A72C, + 0xA72D0000A72E, + 0xA72F0000A732, + 0xA7330000A734, + 0xA7350000A736, + 0xA7370000A738, + 0xA7390000A73A, + 0xA73B0000A73C, + 0xA73D0000A73E, + 0xA73F0000A740, + 0xA7410000A742, + 0xA7430000A744, + 0xA7450000A746, + 0xA7470000A748, + 0xA7490000A74A, + 0xA74B0000A74C, + 0xA74D0000A74E, + 0xA74F0000A750, + 0xA7510000A752, + 0xA7530000A754, + 0xA7550000A756, + 0xA7570000A758, + 0xA7590000A75A, + 0xA75B0000A75C, + 
0xA75D0000A75E, + 0xA75F0000A760, + 0xA7610000A762, + 0xA7630000A764, + 0xA7650000A766, + 0xA7670000A768, + 0xA7690000A76A, + 0xA76B0000A76C, + 0xA76D0000A76E, + 0xA76F0000A770, + 0xA7710000A779, + 0xA77A0000A77B, + 0xA77C0000A77D, + 0xA77F0000A780, + 0xA7810000A782, + 0xA7830000A784, + 0xA7850000A786, + 0xA7870000A789, + 0xA78C0000A78D, + 0xA78E0000A790, + 0xA7910000A792, + 0xA7930000A796, + 0xA7970000A798, + 0xA7990000A79A, + 0xA79B0000A79C, + 0xA79D0000A79E, + 0xA79F0000A7A0, + 0xA7A10000A7A2, + 0xA7A30000A7A4, + 0xA7A50000A7A6, + 0xA7A70000A7A8, + 0xA7A90000A7AA, + 0xA7AF0000A7B0, + 0xA7B50000A7B6, + 0xA7B70000A7B8, + 0xA7B90000A7BA, + 0xA7BB0000A7BC, + 0xA7BD0000A7BE, + 0xA7BF0000A7C0, + 0xA7C10000A7C2, + 0xA7C30000A7C4, + 0xA7C80000A7C9, + 0xA7CA0000A7CB, + 0xA7D10000A7D2, + 0xA7D30000A7D4, + 0xA7D50000A7D6, + 0xA7D70000A7D8, + 0xA7D90000A7DA, + 0xA7F60000A7F8, + 0xA7FA0000A828, + 0xA82C0000A82D, + 0xA8400000A874, + 0xA8800000A8C6, + 0xA8D00000A8DA, + 0xA8E00000A8F8, + 0xA8FB0000A8FC, + 0xA8FD0000A92E, + 0xA9300000A954, + 0xA9800000A9C1, + 0xA9CF0000A9DA, + 0xA9E00000A9FF, + 0xAA000000AA37, + 0xAA400000AA4E, + 0xAA500000AA5A, + 0xAA600000AA77, + 0xAA7A0000AAC3, + 0xAADB0000AADE, + 0xAAE00000AAF0, + 0xAAF20000AAF7, + 0xAB010000AB07, + 0xAB090000AB0F, + 0xAB110000AB17, + 0xAB200000AB27, + 0xAB280000AB2F, + 0xAB300000AB5B, + 0xAB600000AB69, + 0xABC00000ABEB, + 0xABEC0000ABEE, + 0xABF00000ABFA, + 0xAC000000D7A4, + 0xFA0E0000FA10, + 0xFA110000FA12, + 0xFA130000FA15, + 0xFA1F0000FA20, + 0xFA210000FA22, + 0xFA230000FA25, + 0xFA270000FA2A, + 0xFB1E0000FB1F, + 0xFE200000FE30, + 0xFE730000FE74, + 0x100000001000C, + 0x1000D00010027, + 0x100280001003B, + 0x1003C0001003E, + 0x1003F0001004E, + 0x100500001005E, + 0x10080000100FB, + 0x101FD000101FE, + 0x102800001029D, + 0x102A0000102D1, + 0x102E0000102E1, + 0x1030000010320, + 0x1032D00010341, + 0x103420001034A, + 0x103500001037B, + 0x103800001039E, + 0x103A0000103C4, + 0x103C8000103D0, + 0x104280001049E, + 0x104A0000104AA, + 0x104D8000104FC, + 0x1050000010528, + 0x1053000010564, + 0x10597000105A2, + 0x105A3000105B2, + 0x105B3000105BA, + 0x105BB000105BD, + 0x1060000010737, + 0x1074000010756, + 0x1076000010768, + 0x1078000010781, + 0x1080000010806, + 0x1080800010809, + 0x1080A00010836, + 0x1083700010839, + 0x1083C0001083D, + 0x1083F00010856, + 0x1086000010877, + 0x108800001089F, + 0x108E0000108F3, + 0x108F4000108F6, + 0x1090000010916, + 0x109200001093A, + 0x10980000109B8, + 0x109BE000109C0, + 0x10A0000010A04, + 0x10A0500010A07, + 0x10A0C00010A14, + 0x10A1500010A18, + 0x10A1900010A36, + 0x10A3800010A3B, + 0x10A3F00010A40, + 0x10A6000010A7D, + 0x10A8000010A9D, + 0x10AC000010AC8, + 0x10AC900010AE7, + 0x10B0000010B36, + 0x10B4000010B56, + 0x10B6000010B73, + 0x10B8000010B92, + 0x10C0000010C49, + 0x10CC000010CF3, + 0x10D0000010D28, + 0x10D3000010D3A, + 0x10E8000010EAA, + 0x10EAB00010EAD, + 0x10EB000010EB2, + 0x10EFD00010F1D, + 0x10F2700010F28, + 0x10F3000010F51, + 0x10F7000010F86, + 0x10FB000010FC5, + 0x10FE000010FF7, + 0x1100000011047, + 0x1106600011076, + 0x1107F000110BB, + 0x110C2000110C3, + 0x110D0000110E9, + 0x110F0000110FA, + 0x1110000011135, + 0x1113600011140, + 0x1114400011148, + 0x1115000011174, + 0x1117600011177, + 0x11180000111C5, + 0x111C9000111CD, + 0x111CE000111DB, + 0x111DC000111DD, + 0x1120000011212, + 0x1121300011238, + 0x1123E00011242, + 0x1128000011287, + 0x1128800011289, + 0x1128A0001128E, + 0x1128F0001129E, + 0x1129F000112A9, + 0x112B0000112EB, + 0x112F0000112FA, + 0x1130000011304, + 0x113050001130D, + 0x1130F00011311, + 
0x1131300011329, + 0x1132A00011331, + 0x1133200011334, + 0x113350001133A, + 0x1133B00011345, + 0x1134700011349, + 0x1134B0001134E, + 0x1135000011351, + 0x1135700011358, + 0x1135D00011364, + 0x113660001136D, + 0x1137000011375, + 0x114000001144B, + 0x114500001145A, + 0x1145E00011462, + 0x11480000114C6, + 0x114C7000114C8, + 0x114D0000114DA, + 0x11580000115B6, + 0x115B8000115C1, + 0x115D8000115DE, + 0x1160000011641, + 0x1164400011645, + 0x116500001165A, + 0x11680000116B9, + 0x116C0000116CA, + 0x117000001171B, + 0x1171D0001172C, + 0x117300001173A, + 0x1174000011747, + 0x118000001183B, + 0x118C0000118EA, + 0x118FF00011907, + 0x119090001190A, + 0x1190C00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193B00011944, + 0x119500001195A, + 0x119A0000119A8, + 0x119AA000119D8, + 0x119DA000119E2, + 0x119E3000119E5, + 0x11A0000011A3F, + 0x11A4700011A48, + 0x11A5000011A9A, + 0x11A9D00011A9E, + 0x11AB000011AF9, + 0x11C0000011C09, + 0x11C0A00011C37, + 0x11C3800011C41, + 0x11C5000011C5A, + 0x11C7200011C90, + 0x11C9200011CA8, + 0x11CA900011CB7, + 0x11D0000011D07, + 0x11D0800011D0A, + 0x11D0B00011D37, + 0x11D3A00011D3B, + 0x11D3C00011D3E, + 0x11D3F00011D48, + 0x11D5000011D5A, + 0x11D6000011D66, + 0x11D6700011D69, + 0x11D6A00011D8F, + 0x11D9000011D92, + 0x11D9300011D99, + 0x11DA000011DAA, + 0x11EE000011EF7, + 0x11F0000011F11, + 0x11F1200011F3B, + 0x11F3E00011F43, + 0x11F5000011F5A, + 0x11FB000011FB1, + 0x120000001239A, + 0x1248000012544, + 0x12F9000012FF1, + 0x1300000013430, + 0x1344000013456, + 0x1440000014647, + 0x1680000016A39, + 0x16A4000016A5F, + 0x16A6000016A6A, + 0x16A7000016ABF, + 0x16AC000016ACA, + 0x16AD000016AEE, + 0x16AF000016AF5, + 0x16B0000016B37, + 0x16B4000016B44, + 0x16B5000016B5A, + 0x16B6300016B78, + 0x16B7D00016B90, + 0x16E6000016E80, + 0x16F0000016F4B, + 0x16F4F00016F88, + 0x16F8F00016FA0, + 0x16FE000016FE2, + 0x16FE300016FE5, + 0x16FF000016FF2, + 0x17000000187F8, + 0x1880000018CD6, + 0x18D0000018D09, + 0x1AFF00001AFF4, + 0x1AFF50001AFFC, + 0x1AFFD0001AFFF, + 0x1B0000001B123, + 0x1B1320001B133, + 0x1B1500001B153, + 0x1B1550001B156, + 0x1B1640001B168, + 0x1B1700001B2FC, + 0x1BC000001BC6B, + 0x1BC700001BC7D, + 0x1BC800001BC89, + 0x1BC900001BC9A, + 0x1BC9D0001BC9F, + 0x1CF000001CF2E, + 0x1CF300001CF47, + 0x1DA000001DA37, + 0x1DA3B0001DA6D, + 0x1DA750001DA76, + 0x1DA840001DA85, + 0x1DA9B0001DAA0, + 0x1DAA10001DAB0, + 0x1DF000001DF1F, + 0x1DF250001DF2B, + 0x1E0000001E007, + 0x1E0080001E019, + 0x1E01B0001E022, + 0x1E0230001E025, + 0x1E0260001E02B, + 0x1E08F0001E090, + 0x1E1000001E12D, + 0x1E1300001E13E, + 0x1E1400001E14A, + 0x1E14E0001E14F, + 0x1E2900001E2AF, + 0x1E2C00001E2FA, + 0x1E4D00001E4FA, + 0x1E7E00001E7E7, + 0x1E7E80001E7EC, + 0x1E7ED0001E7EF, + 0x1E7F00001E7FF, + 0x1E8000001E8C5, + 0x1E8D00001E8D7, + 0x1E9220001E94C, + 0x1E9500001E95A, + 0x200000002A6E0, + 0x2A7000002B73A, + 0x2B7400002B81E, + 0x2B8200002CEA2, + 0x2CEB00002EBE1, + 0x2EBF00002EE5E, + 0x300000003134B, + 0x31350000323B0, + ), + "CONTEXTJ": (0x200C0000200E,), + "CONTEXTO": ( + 0xB7000000B8, + 0x37500000376, + 0x5F3000005F5, + 0x6600000066A, + 0x6F0000006FA, + 0x30FB000030FC, + ), +} diff --git a/venv/lib/python3.10/site-packages/idna/intranges.py b/venv/lib/python3.10/site-packages/idna/intranges.py new file mode 100644 index 0000000000000000000000000000000000000000..7bfaa8d80d7dc471d572db0f949460901126e8bd --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/intranges.py @@ -0,0 +1,57 @@ +""" +Given a list of integers, made up of (hopefully) a small number of long runs +of consecutive integers, 
compute a representation of the form +((start1, end1), (start2, end2) ...). Then answer the question "was x present +in the original list?" in time O(log(# runs)). +""" + +import bisect +from typing import List, Tuple + + +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: + """Represent a list of integers as a sequence of ranges: + ((start_0, end_0), (start_1, end_1), ...), such that the original + integers are exactly those x such that start_i <= x < end_i for some i. + + Ranges are encoded as single integers (start << 32 | end), not as tuples. + """ + + sorted_list = sorted(list_) + ranges = [] + last_write = -1 + for i in range(len(sorted_list)): + if i + 1 < len(sorted_list): + if sorted_list[i] == sorted_list[i + 1] - 1: + continue + current_range = sorted_list[last_write + 1 : i + 1] + ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) + last_write = i + + return tuple(ranges) + + +def _encode_range(start: int, end: int) -> int: + return (start << 32) | end + + +def _decode_range(r: int) -> Tuple[int, int]: + return (r >> 32), (r & ((1 << 32) - 1)) + + +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: + """Determine if `int_` falls into one of the ranges in `ranges`.""" + tuple_ = _encode_range(int_, 0) + pos = bisect.bisect_left(ranges, tuple_) + # we could be immediately ahead of a tuple (start, end) + # with start < int_ <= end + if pos > 0: + left, right = _decode_range(ranges[pos - 1]) + if left <= int_ < right: + return True + # or we could be immediately behind a tuple (int_, end) + if pos < len(ranges): + left, _ = _decode_range(ranges[pos]) + if left == int_: + return True + return False diff --git a/venv/lib/python3.10/site-packages/idna/package_data.py b/venv/lib/python3.10/site-packages/idna/package_data.py new file mode 100644 index 0000000000000000000000000000000000000000..514ff7e2e68b65f309d30a0b06e6b290d2c353a8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/package_data.py @@ -0,0 +1 @@ +__version__ = "3.10" diff --git a/venv/lib/python3.10/site-packages/idna/py.typed b/venv/lib/python3.10/site-packages/idna/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/idna/uts46data.py b/venv/lib/python3.10/site-packages/idna/uts46data.py new file mode 100644 index 0000000000000000000000000000000000000000..eb894327410debecb64ddf40eddc3131cf8344de --- /dev/null +++ b/venv/lib/python3.10/site-packages/idna/uts46data.py @@ -0,0 +1,8681 @@ +# This file is automatically generated by tools/idna-data +# vim: set fileencoding=utf-8 : + +from typing import List, Tuple, Union + +"""IDNA Mapping Table from UTS46.""" + + +__version__ = "15.1.0" + + +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x0, "3"), + (0x1, "3"), + (0x2, "3"), + (0x3, "3"), + (0x4, "3"), + (0x5, "3"), + (0x6, "3"), + (0x7, "3"), + (0x8, "3"), + (0x9, "3"), + (0xA, "3"), + (0xB, "3"), + (0xC, "3"), + (0xD, "3"), + (0xE, "3"), + (0xF, "3"), + (0x10, "3"), + (0x11, "3"), + (0x12, "3"), + (0x13, "3"), + (0x14, "3"), + (0x15, "3"), + (0x16, "3"), + (0x17, "3"), + (0x18, "3"), + (0x19, "3"), + (0x1A, "3"), + (0x1B, "3"), + (0x1C, "3"), + (0x1D, "3"), + (0x1E, "3"), + (0x1F, "3"), + (0x20, "3"), + (0x21, "3"), + (0x22, "3"), + (0x23, "3"), + (0x24, "3"), + (0x25, "3"), + (0x26, "3"), + (0x27, "3"), + (0x28, "3"), + (0x29, "3"), + (0x2A, "3"), + (0x2B, "3"), + (0x2C, "3"), + (0x2D, "V"), + (0x2E, "V"), + (0x2F, "3"), 
+ (0x30, "V"), + (0x31, "V"), + (0x32, "V"), + (0x33, "V"), + (0x34, "V"), + (0x35, "V"), + (0x36, "V"), + (0x37, "V"), + (0x38, "V"), + (0x39, "V"), + (0x3A, "3"), + (0x3B, "3"), + (0x3C, "3"), + (0x3D, "3"), + (0x3E, "3"), + (0x3F, "3"), + (0x40, "3"), + (0x41, "M", "a"), + (0x42, "M", "b"), + (0x43, "M", "c"), + (0x44, "M", "d"), + (0x45, "M", "e"), + (0x46, "M", "f"), + (0x47, "M", "g"), + (0x48, "M", "h"), + (0x49, "M", "i"), + (0x4A, "M", "j"), + (0x4B, "M", "k"), + (0x4C, "M", "l"), + (0x4D, "M", "m"), + (0x4E, "M", "n"), + (0x4F, "M", "o"), + (0x50, "M", "p"), + (0x51, "M", "q"), + (0x52, "M", "r"), + (0x53, "M", "s"), + (0x54, "M", "t"), + (0x55, "M", "u"), + (0x56, "M", "v"), + (0x57, "M", "w"), + (0x58, "M", "x"), + (0x59, "M", "y"), + (0x5A, "M", "z"), + (0x5B, "3"), + (0x5C, "3"), + (0x5D, "3"), + (0x5E, "3"), + (0x5F, "3"), + (0x60, "3"), + (0x61, "V"), + (0x62, "V"), + (0x63, "V"), + ] + + +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x64, "V"), + (0x65, "V"), + (0x66, "V"), + (0x67, "V"), + (0x68, "V"), + (0x69, "V"), + (0x6A, "V"), + (0x6B, "V"), + (0x6C, "V"), + (0x6D, "V"), + (0x6E, "V"), + (0x6F, "V"), + (0x70, "V"), + (0x71, "V"), + (0x72, "V"), + (0x73, "V"), + (0x74, "V"), + (0x75, "V"), + (0x76, "V"), + (0x77, "V"), + (0x78, "V"), + (0x79, "V"), + (0x7A, "V"), + (0x7B, "3"), + (0x7C, "3"), + (0x7D, "3"), + (0x7E, "3"), + (0x7F, "3"), + (0x80, "X"), + (0x81, "X"), + (0x82, "X"), + (0x83, "X"), + (0x84, "X"), + (0x85, "X"), + (0x86, "X"), + (0x87, "X"), + (0x88, "X"), + (0x89, "X"), + (0x8A, "X"), + (0x8B, "X"), + (0x8C, "X"), + (0x8D, "X"), + (0x8E, "X"), + (0x8F, "X"), + (0x90, "X"), + (0x91, "X"), + (0x92, "X"), + (0x93, "X"), + (0x94, "X"), + (0x95, "X"), + (0x96, "X"), + (0x97, "X"), + (0x98, "X"), + (0x99, "X"), + (0x9A, "X"), + (0x9B, "X"), + (0x9C, "X"), + (0x9D, "X"), + (0x9E, "X"), + (0x9F, "X"), + (0xA0, "3", " "), + (0xA1, "V"), + (0xA2, "V"), + (0xA3, "V"), + (0xA4, "V"), + (0xA5, "V"), + (0xA6, "V"), + (0xA7, "V"), + (0xA8, "3", " ̈"), + (0xA9, "V"), + (0xAA, "M", "a"), + (0xAB, "V"), + (0xAC, "V"), + (0xAD, "I"), + (0xAE, "V"), + (0xAF, "3", " ̄"), + (0xB0, "V"), + (0xB1, "V"), + (0xB2, "M", "2"), + (0xB3, "M", "3"), + (0xB4, "3", " ́"), + (0xB5, "M", "μ"), + (0xB6, "V"), + (0xB7, "V"), + (0xB8, "3", " ̧"), + (0xB9, "M", "1"), + (0xBA, "M", "o"), + (0xBB, "V"), + (0xBC, "M", "1⁄4"), + (0xBD, "M", "1⁄2"), + (0xBE, "M", "3⁄4"), + (0xBF, "V"), + (0xC0, "M", "à"), + (0xC1, "M", "á"), + (0xC2, "M", "â"), + (0xC3, "M", "ã"), + (0xC4, "M", "ä"), + (0xC5, "M", "å"), + (0xC6, "M", "æ"), + (0xC7, "M", "ç"), + ] + + +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC8, "M", "è"), + (0xC9, "M", "é"), + (0xCA, "M", "ê"), + (0xCB, "M", "ë"), + (0xCC, "M", "ì"), + (0xCD, "M", "í"), + (0xCE, "M", "î"), + (0xCF, "M", "ï"), + (0xD0, "M", "ð"), + (0xD1, "M", "ñ"), + (0xD2, "M", "ò"), + (0xD3, "M", "ó"), + (0xD4, "M", "ô"), + (0xD5, "M", "õ"), + (0xD6, "M", "ö"), + (0xD7, "V"), + (0xD8, "M", "ø"), + (0xD9, "M", "ù"), + (0xDA, "M", "ú"), + (0xDB, "M", "û"), + (0xDC, "M", "ü"), + (0xDD, "M", "ý"), + (0xDE, "M", "þ"), + (0xDF, "D", "ss"), + (0xE0, "V"), + (0xE1, "V"), + (0xE2, "V"), + (0xE3, "V"), + (0xE4, "V"), + (0xE5, "V"), + (0xE6, "V"), + (0xE7, "V"), + (0xE8, "V"), + (0xE9, "V"), + (0xEA, "V"), + (0xEB, "V"), + (0xEC, "V"), + (0xED, "V"), + (0xEE, "V"), + (0xEF, "V"), + (0xF0, "V"), + (0xF1, "V"), + (0xF2, "V"), + (0xF3, "V"), + (0xF4, "V"), + (0xF5, "V"), + (0xF6, "V"), + (0xF7, "V"), + (0xF8, 
"V"), + (0xF9, "V"), + (0xFA, "V"), + (0xFB, "V"), + (0xFC, "V"), + (0xFD, "V"), + (0xFE, "V"), + (0xFF, "V"), + (0x100, "M", "ā"), + (0x101, "V"), + (0x102, "M", "ă"), + (0x103, "V"), + (0x104, "M", "ą"), + (0x105, "V"), + (0x106, "M", "ć"), + (0x107, "V"), + (0x108, "M", "ĉ"), + (0x109, "V"), + (0x10A, "M", "ċ"), + (0x10B, "V"), + (0x10C, "M", "č"), + (0x10D, "V"), + (0x10E, "M", "ď"), + (0x10F, "V"), + (0x110, "M", "đ"), + (0x111, "V"), + (0x112, "M", "ē"), + (0x113, "V"), + (0x114, "M", "ĕ"), + (0x115, "V"), + (0x116, "M", "ė"), + (0x117, "V"), + (0x118, "M", "ę"), + (0x119, "V"), + (0x11A, "M", "ě"), + (0x11B, "V"), + (0x11C, "M", "ĝ"), + (0x11D, "V"), + (0x11E, "M", "ğ"), + (0x11F, "V"), + (0x120, "M", "ġ"), + (0x121, "V"), + (0x122, "M", "ģ"), + (0x123, "V"), + (0x124, "M", "ĥ"), + (0x125, "V"), + (0x126, "M", "ħ"), + (0x127, "V"), + (0x128, "M", "ĩ"), + (0x129, "V"), + (0x12A, "M", "ī"), + (0x12B, "V"), + ] + + +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x12C, "M", "ĭ"), + (0x12D, "V"), + (0x12E, "M", "į"), + (0x12F, "V"), + (0x130, "M", "i̇"), + (0x131, "V"), + (0x132, "M", "ij"), + (0x134, "M", "ĵ"), + (0x135, "V"), + (0x136, "M", "ķ"), + (0x137, "V"), + (0x139, "M", "ĺ"), + (0x13A, "V"), + (0x13B, "M", "ļ"), + (0x13C, "V"), + (0x13D, "M", "ľ"), + (0x13E, "V"), + (0x13F, "M", "l·"), + (0x141, "M", "ł"), + (0x142, "V"), + (0x143, "M", "ń"), + (0x144, "V"), + (0x145, "M", "ņ"), + (0x146, "V"), + (0x147, "M", "ň"), + (0x148, "V"), + (0x149, "M", "ʼn"), + (0x14A, "M", "ŋ"), + (0x14B, "V"), + (0x14C, "M", "ō"), + (0x14D, "V"), + (0x14E, "M", "ŏ"), + (0x14F, "V"), + (0x150, "M", "ő"), + (0x151, "V"), + (0x152, "M", "œ"), + (0x153, "V"), + (0x154, "M", "ŕ"), + (0x155, "V"), + (0x156, "M", "ŗ"), + (0x157, "V"), + (0x158, "M", "ř"), + (0x159, "V"), + (0x15A, "M", "ś"), + (0x15B, "V"), + (0x15C, "M", "ŝ"), + (0x15D, "V"), + (0x15E, "M", "ş"), + (0x15F, "V"), + (0x160, "M", "š"), + (0x161, "V"), + (0x162, "M", "ţ"), + (0x163, "V"), + (0x164, "M", "ť"), + (0x165, "V"), + (0x166, "M", "ŧ"), + (0x167, "V"), + (0x168, "M", "ũ"), + (0x169, "V"), + (0x16A, "M", "ū"), + (0x16B, "V"), + (0x16C, "M", "ŭ"), + (0x16D, "V"), + (0x16E, "M", "ů"), + (0x16F, "V"), + (0x170, "M", "ű"), + (0x171, "V"), + (0x172, "M", "ų"), + (0x173, "V"), + (0x174, "M", "ŵ"), + (0x175, "V"), + (0x176, "M", "ŷ"), + (0x177, "V"), + (0x178, "M", "ÿ"), + (0x179, "M", "ź"), + (0x17A, "V"), + (0x17B, "M", "ż"), + (0x17C, "V"), + (0x17D, "M", "ž"), + (0x17E, "V"), + (0x17F, "M", "s"), + (0x180, "V"), + (0x181, "M", "ɓ"), + (0x182, "M", "ƃ"), + (0x183, "V"), + (0x184, "M", "ƅ"), + (0x185, "V"), + (0x186, "M", "ɔ"), + (0x187, "M", "ƈ"), + (0x188, "V"), + (0x189, "M", "ɖ"), + (0x18A, "M", "ɗ"), + (0x18B, "M", "ƌ"), + (0x18C, "V"), + (0x18E, "M", "ǝ"), + (0x18F, "M", "ə"), + (0x190, "M", "ɛ"), + (0x191, "M", "ƒ"), + (0x192, "V"), + (0x193, "M", "ɠ"), + ] + + +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x194, "M", "ɣ"), + (0x195, "V"), + (0x196, "M", "ɩ"), + (0x197, "M", "ɨ"), + (0x198, "M", "ƙ"), + (0x199, "V"), + (0x19C, "M", "ɯ"), + (0x19D, "M", "ɲ"), + (0x19E, "V"), + (0x19F, "M", "ɵ"), + (0x1A0, "M", "ơ"), + (0x1A1, "V"), + (0x1A2, "M", "ƣ"), + (0x1A3, "V"), + (0x1A4, "M", "ƥ"), + (0x1A5, "V"), + (0x1A6, "M", "ʀ"), + (0x1A7, "M", "ƨ"), + (0x1A8, "V"), + (0x1A9, "M", "ʃ"), + (0x1AA, "V"), + (0x1AC, "M", "ƭ"), + (0x1AD, "V"), + (0x1AE, "M", "ʈ"), + (0x1AF, "M", "ư"), + (0x1B0, "V"), + (0x1B1, "M", "ʊ"), + (0x1B2, "M", "ʋ"), + (0x1B3, "M", "ƴ"), + (0x1B4, 
"V"), + (0x1B5, "M", "ƶ"), + (0x1B6, "V"), + (0x1B7, "M", "ʒ"), + (0x1B8, "M", "ƹ"), + (0x1B9, "V"), + (0x1BC, "M", "ƽ"), + (0x1BD, "V"), + (0x1C4, "M", "dž"), + (0x1C7, "M", "lj"), + (0x1CA, "M", "nj"), + (0x1CD, "M", "ǎ"), + (0x1CE, "V"), + (0x1CF, "M", "ǐ"), + (0x1D0, "V"), + (0x1D1, "M", "ǒ"), + (0x1D2, "V"), + (0x1D3, "M", "ǔ"), + (0x1D4, "V"), + (0x1D5, "M", "ǖ"), + (0x1D6, "V"), + (0x1D7, "M", "ǘ"), + (0x1D8, "V"), + (0x1D9, "M", "ǚ"), + (0x1DA, "V"), + (0x1DB, "M", "ǜ"), + (0x1DC, "V"), + (0x1DE, "M", "ǟ"), + (0x1DF, "V"), + (0x1E0, "M", "ǡ"), + (0x1E1, "V"), + (0x1E2, "M", "ǣ"), + (0x1E3, "V"), + (0x1E4, "M", "ǥ"), + (0x1E5, "V"), + (0x1E6, "M", "ǧ"), + (0x1E7, "V"), + (0x1E8, "M", "ǩ"), + (0x1E9, "V"), + (0x1EA, "M", "ǫ"), + (0x1EB, "V"), + (0x1EC, "M", "ǭ"), + (0x1ED, "V"), + (0x1EE, "M", "ǯ"), + (0x1EF, "V"), + (0x1F1, "M", "dz"), + (0x1F4, "M", "ǵ"), + (0x1F5, "V"), + (0x1F6, "M", "ƕ"), + (0x1F7, "M", "ƿ"), + (0x1F8, "M", "ǹ"), + (0x1F9, "V"), + (0x1FA, "M", "ǻ"), + (0x1FB, "V"), + (0x1FC, "M", "ǽ"), + (0x1FD, "V"), + (0x1FE, "M", "ǿ"), + (0x1FF, "V"), + (0x200, "M", "ȁ"), + (0x201, "V"), + (0x202, "M", "ȃ"), + (0x203, "V"), + (0x204, "M", "ȅ"), + (0x205, "V"), + (0x206, "M", "ȇ"), + (0x207, "V"), + (0x208, "M", "ȉ"), + (0x209, "V"), + (0x20A, "M", "ȋ"), + (0x20B, "V"), + (0x20C, "M", "ȍ"), + ] + + +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x20D, "V"), + (0x20E, "M", "ȏ"), + (0x20F, "V"), + (0x210, "M", "ȑ"), + (0x211, "V"), + (0x212, "M", "ȓ"), + (0x213, "V"), + (0x214, "M", "ȕ"), + (0x215, "V"), + (0x216, "M", "ȗ"), + (0x217, "V"), + (0x218, "M", "ș"), + (0x219, "V"), + (0x21A, "M", "ț"), + (0x21B, "V"), + (0x21C, "M", "ȝ"), + (0x21D, "V"), + (0x21E, "M", "ȟ"), + (0x21F, "V"), + (0x220, "M", "ƞ"), + (0x221, "V"), + (0x222, "M", "ȣ"), + (0x223, "V"), + (0x224, "M", "ȥ"), + (0x225, "V"), + (0x226, "M", "ȧ"), + (0x227, "V"), + (0x228, "M", "ȩ"), + (0x229, "V"), + (0x22A, "M", "ȫ"), + (0x22B, "V"), + (0x22C, "M", "ȭ"), + (0x22D, "V"), + (0x22E, "M", "ȯ"), + (0x22F, "V"), + (0x230, "M", "ȱ"), + (0x231, "V"), + (0x232, "M", "ȳ"), + (0x233, "V"), + (0x23A, "M", "ⱥ"), + (0x23B, "M", "ȼ"), + (0x23C, "V"), + (0x23D, "M", "ƚ"), + (0x23E, "M", "ⱦ"), + (0x23F, "V"), + (0x241, "M", "ɂ"), + (0x242, "V"), + (0x243, "M", "ƀ"), + (0x244, "M", "ʉ"), + (0x245, "M", "ʌ"), + (0x246, "M", "ɇ"), + (0x247, "V"), + (0x248, "M", "ɉ"), + (0x249, "V"), + (0x24A, "M", "ɋ"), + (0x24B, "V"), + (0x24C, "M", "ɍ"), + (0x24D, "V"), + (0x24E, "M", "ɏ"), + (0x24F, "V"), + (0x2B0, "M", "h"), + (0x2B1, "M", "ɦ"), + (0x2B2, "M", "j"), + (0x2B3, "M", "r"), + (0x2B4, "M", "ɹ"), + (0x2B5, "M", "ɻ"), + (0x2B6, "M", "ʁ"), + (0x2B7, "M", "w"), + (0x2B8, "M", "y"), + (0x2B9, "V"), + (0x2D8, "3", " ̆"), + (0x2D9, "3", " ̇"), + (0x2DA, "3", " ̊"), + (0x2DB, "3", " ̨"), + (0x2DC, "3", " ̃"), + (0x2DD, "3", " ̋"), + (0x2DE, "V"), + (0x2E0, "M", "ɣ"), + (0x2E1, "M", "l"), + (0x2E2, "M", "s"), + (0x2E3, "M", "x"), + (0x2E4, "M", "ʕ"), + (0x2E5, "V"), + (0x340, "M", "̀"), + (0x341, "M", "́"), + (0x342, "V"), + (0x343, "M", "̓"), + (0x344, "M", "̈́"), + (0x345, "M", "ι"), + (0x346, "V"), + (0x34F, "I"), + (0x350, "V"), + (0x370, "M", "ͱ"), + (0x371, "V"), + (0x372, "M", "ͳ"), + (0x373, "V"), + (0x374, "M", "ʹ"), + (0x375, "V"), + (0x376, "M", "ͷ"), + (0x377, "V"), + ] + + +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x378, "X"), + (0x37A, "3", " ι"), + (0x37B, "V"), + (0x37E, "3", ";"), + (0x37F, "M", "ϳ"), + (0x380, "X"), + (0x384, "3", " ́"), + 
(0x385, "3", " ̈́"), + (0x386, "M", "ά"), + (0x387, "M", "·"), + (0x388, "M", "έ"), + (0x389, "M", "ή"), + (0x38A, "M", "ί"), + (0x38B, "X"), + (0x38C, "M", "ό"), + (0x38D, "X"), + (0x38E, "M", "ύ"), + (0x38F, "M", "ώ"), + (0x390, "V"), + (0x391, "M", "α"), + (0x392, "M", "β"), + (0x393, "M", "γ"), + (0x394, "M", "δ"), + (0x395, "M", "ε"), + (0x396, "M", "ζ"), + (0x397, "M", "η"), + (0x398, "M", "θ"), + (0x399, "M", "ι"), + (0x39A, "M", "κ"), + (0x39B, "M", "λ"), + (0x39C, "M", "μ"), + (0x39D, "M", "ν"), + (0x39E, "M", "ξ"), + (0x39F, "M", "ο"), + (0x3A0, "M", "π"), + (0x3A1, "M", "ρ"), + (0x3A2, "X"), + (0x3A3, "M", "σ"), + (0x3A4, "M", "τ"), + (0x3A5, "M", "υ"), + (0x3A6, "M", "φ"), + (0x3A7, "M", "χ"), + (0x3A8, "M", "ψ"), + (0x3A9, "M", "ω"), + (0x3AA, "M", "ϊ"), + (0x3AB, "M", "ϋ"), + (0x3AC, "V"), + (0x3C2, "D", "σ"), + (0x3C3, "V"), + (0x3CF, "M", "ϗ"), + (0x3D0, "M", "β"), + (0x3D1, "M", "θ"), + (0x3D2, "M", "υ"), + (0x3D3, "M", "ύ"), + (0x3D4, "M", "ϋ"), + (0x3D5, "M", "φ"), + (0x3D6, "M", "π"), + (0x3D7, "V"), + (0x3D8, "M", "ϙ"), + (0x3D9, "V"), + (0x3DA, "M", "ϛ"), + (0x3DB, "V"), + (0x3DC, "M", "ϝ"), + (0x3DD, "V"), + (0x3DE, "M", "ϟ"), + (0x3DF, "V"), + (0x3E0, "M", "ϡ"), + (0x3E1, "V"), + (0x3E2, "M", "ϣ"), + (0x3E3, "V"), + (0x3E4, "M", "ϥ"), + (0x3E5, "V"), + (0x3E6, "M", "ϧ"), + (0x3E7, "V"), + (0x3E8, "M", "ϩ"), + (0x3E9, "V"), + (0x3EA, "M", "ϫ"), + (0x3EB, "V"), + (0x3EC, "M", "ϭ"), + (0x3ED, "V"), + (0x3EE, "M", "ϯ"), + (0x3EF, "V"), + (0x3F0, "M", "κ"), + (0x3F1, "M", "ρ"), + (0x3F2, "M", "σ"), + (0x3F3, "V"), + (0x3F4, "M", "θ"), + (0x3F5, "M", "ε"), + (0x3F6, "V"), + (0x3F7, "M", "ϸ"), + (0x3F8, "V"), + (0x3F9, "M", "σ"), + (0x3FA, "M", "ϻ"), + (0x3FB, "V"), + (0x3FD, "M", "ͻ"), + (0x3FE, "M", "ͼ"), + (0x3FF, "M", "ͽ"), + (0x400, "M", "ѐ"), + (0x401, "M", "ё"), + (0x402, "M", "ђ"), + ] + + +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x403, "M", "ѓ"), + (0x404, "M", "є"), + (0x405, "M", "ѕ"), + (0x406, "M", "і"), + (0x407, "M", "ї"), + (0x408, "M", "ј"), + (0x409, "M", "љ"), + (0x40A, "M", "њ"), + (0x40B, "M", "ћ"), + (0x40C, "M", "ќ"), + (0x40D, "M", "ѝ"), + (0x40E, "M", "ў"), + (0x40F, "M", "џ"), + (0x410, "M", "а"), + (0x411, "M", "б"), + (0x412, "M", "в"), + (0x413, "M", "г"), + (0x414, "M", "д"), + (0x415, "M", "е"), + (0x416, "M", "ж"), + (0x417, "M", "з"), + (0x418, "M", "и"), + (0x419, "M", "й"), + (0x41A, "M", "к"), + (0x41B, "M", "л"), + (0x41C, "M", "м"), + (0x41D, "M", "н"), + (0x41E, "M", "о"), + (0x41F, "M", "п"), + (0x420, "M", "р"), + (0x421, "M", "с"), + (0x422, "M", "т"), + (0x423, "M", "у"), + (0x424, "M", "ф"), + (0x425, "M", "х"), + (0x426, "M", "ц"), + (0x427, "M", "ч"), + (0x428, "M", "ш"), + (0x429, "M", "щ"), + (0x42A, "M", "ъ"), + (0x42B, "M", "ы"), + (0x42C, "M", "ь"), + (0x42D, "M", "э"), + (0x42E, "M", "ю"), + (0x42F, "M", "я"), + (0x430, "V"), + (0x460, "M", "ѡ"), + (0x461, "V"), + (0x462, "M", "ѣ"), + (0x463, "V"), + (0x464, "M", "ѥ"), + (0x465, "V"), + (0x466, "M", "ѧ"), + (0x467, "V"), + (0x468, "M", "ѩ"), + (0x469, "V"), + (0x46A, "M", "ѫ"), + (0x46B, "V"), + (0x46C, "M", "ѭ"), + (0x46D, "V"), + (0x46E, "M", "ѯ"), + (0x46F, "V"), + (0x470, "M", "ѱ"), + (0x471, "V"), + (0x472, "M", "ѳ"), + (0x473, "V"), + (0x474, "M", "ѵ"), + (0x475, "V"), + (0x476, "M", "ѷ"), + (0x477, "V"), + (0x478, "M", "ѹ"), + (0x479, "V"), + (0x47A, "M", "ѻ"), + (0x47B, "V"), + (0x47C, "M", "ѽ"), + (0x47D, "V"), + (0x47E, "M", "ѿ"), + (0x47F, "V"), + (0x480, "M", "ҁ"), + (0x481, "V"), + (0x48A, "M", "ҋ"), + (0x48B, "V"), + 
(0x48C, "M", "ҍ"), + (0x48D, "V"), + (0x48E, "M", "ҏ"), + (0x48F, "V"), + (0x490, "M", "ґ"), + (0x491, "V"), + (0x492, "M", "ғ"), + (0x493, "V"), + (0x494, "M", "ҕ"), + (0x495, "V"), + (0x496, "M", "җ"), + (0x497, "V"), + (0x498, "M", "ҙ"), + (0x499, "V"), + (0x49A, "M", "қ"), + (0x49B, "V"), + (0x49C, "M", "ҝ"), + (0x49D, "V"), + ] + + +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x49E, "M", "ҟ"), + (0x49F, "V"), + (0x4A0, "M", "ҡ"), + (0x4A1, "V"), + (0x4A2, "M", "ң"), + (0x4A3, "V"), + (0x4A4, "M", "ҥ"), + (0x4A5, "V"), + (0x4A6, "M", "ҧ"), + (0x4A7, "V"), + (0x4A8, "M", "ҩ"), + (0x4A9, "V"), + (0x4AA, "M", "ҫ"), + (0x4AB, "V"), + (0x4AC, "M", "ҭ"), + (0x4AD, "V"), + (0x4AE, "M", "ү"), + (0x4AF, "V"), + (0x4B0, "M", "ұ"), + (0x4B1, "V"), + (0x4B2, "M", "ҳ"), + (0x4B3, "V"), + (0x4B4, "M", "ҵ"), + (0x4B5, "V"), + (0x4B6, "M", "ҷ"), + (0x4B7, "V"), + (0x4B8, "M", "ҹ"), + (0x4B9, "V"), + (0x4BA, "M", "һ"), + (0x4BB, "V"), + (0x4BC, "M", "ҽ"), + (0x4BD, "V"), + (0x4BE, "M", "ҿ"), + (0x4BF, "V"), + (0x4C0, "X"), + (0x4C1, "M", "ӂ"), + (0x4C2, "V"), + (0x4C3, "M", "ӄ"), + (0x4C4, "V"), + (0x4C5, "M", "ӆ"), + (0x4C6, "V"), + (0x4C7, "M", "ӈ"), + (0x4C8, "V"), + (0x4C9, "M", "ӊ"), + (0x4CA, "V"), + (0x4CB, "M", "ӌ"), + (0x4CC, "V"), + (0x4CD, "M", "ӎ"), + (0x4CE, "V"), + (0x4D0, "M", "ӑ"), + (0x4D1, "V"), + (0x4D2, "M", "ӓ"), + (0x4D3, "V"), + (0x4D4, "M", "ӕ"), + (0x4D5, "V"), + (0x4D6, "M", "ӗ"), + (0x4D7, "V"), + (0x4D8, "M", "ә"), + (0x4D9, "V"), + (0x4DA, "M", "ӛ"), + (0x4DB, "V"), + (0x4DC, "M", "ӝ"), + (0x4DD, "V"), + (0x4DE, "M", "ӟ"), + (0x4DF, "V"), + (0x4E0, "M", "ӡ"), + (0x4E1, "V"), + (0x4E2, "M", "ӣ"), + (0x4E3, "V"), + (0x4E4, "M", "ӥ"), + (0x4E5, "V"), + (0x4E6, "M", "ӧ"), + (0x4E7, "V"), + (0x4E8, "M", "ө"), + (0x4E9, "V"), + (0x4EA, "M", "ӫ"), + (0x4EB, "V"), + (0x4EC, "M", "ӭ"), + (0x4ED, "V"), + (0x4EE, "M", "ӯ"), + (0x4EF, "V"), + (0x4F0, "M", "ӱ"), + (0x4F1, "V"), + (0x4F2, "M", "ӳ"), + (0x4F3, "V"), + (0x4F4, "M", "ӵ"), + (0x4F5, "V"), + (0x4F6, "M", "ӷ"), + (0x4F7, "V"), + (0x4F8, "M", "ӹ"), + (0x4F9, "V"), + (0x4FA, "M", "ӻ"), + (0x4FB, "V"), + (0x4FC, "M", "ӽ"), + (0x4FD, "V"), + (0x4FE, "M", "ӿ"), + (0x4FF, "V"), + (0x500, "M", "ԁ"), + (0x501, "V"), + (0x502, "M", "ԃ"), + ] + + +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x503, "V"), + (0x504, "M", "ԅ"), + (0x505, "V"), + (0x506, "M", "ԇ"), + (0x507, "V"), + (0x508, "M", "ԉ"), + (0x509, "V"), + (0x50A, "M", "ԋ"), + (0x50B, "V"), + (0x50C, "M", "ԍ"), + (0x50D, "V"), + (0x50E, "M", "ԏ"), + (0x50F, "V"), + (0x510, "M", "ԑ"), + (0x511, "V"), + (0x512, "M", "ԓ"), + (0x513, "V"), + (0x514, "M", "ԕ"), + (0x515, "V"), + (0x516, "M", "ԗ"), + (0x517, "V"), + (0x518, "M", "ԙ"), + (0x519, "V"), + (0x51A, "M", "ԛ"), + (0x51B, "V"), + (0x51C, "M", "ԝ"), + (0x51D, "V"), + (0x51E, "M", "ԟ"), + (0x51F, "V"), + (0x520, "M", "ԡ"), + (0x521, "V"), + (0x522, "M", "ԣ"), + (0x523, "V"), + (0x524, "M", "ԥ"), + (0x525, "V"), + (0x526, "M", "ԧ"), + (0x527, "V"), + (0x528, "M", "ԩ"), + (0x529, "V"), + (0x52A, "M", "ԫ"), + (0x52B, "V"), + (0x52C, "M", "ԭ"), + (0x52D, "V"), + (0x52E, "M", "ԯ"), + (0x52F, "V"), + (0x530, "X"), + (0x531, "M", "ա"), + (0x532, "M", "բ"), + (0x533, "M", "գ"), + (0x534, "M", "դ"), + (0x535, "M", "ե"), + (0x536, "M", "զ"), + (0x537, "M", "է"), + (0x538, "M", "ը"), + (0x539, "M", "թ"), + (0x53A, "M", "ժ"), + (0x53B, "M", "ի"), + (0x53C, "M", "լ"), + (0x53D, "M", "խ"), + (0x53E, "M", "ծ"), + (0x53F, "M", "կ"), + (0x540, "M", "հ"), + (0x541, "M", 
"ձ"), + (0x542, "M", "ղ"), + (0x543, "M", "ճ"), + (0x544, "M", "մ"), + (0x545, "M", "յ"), + (0x546, "M", "ն"), + (0x547, "M", "շ"), + (0x548, "M", "ո"), + (0x549, "M", "չ"), + (0x54A, "M", "պ"), + (0x54B, "M", "ջ"), + (0x54C, "M", "ռ"), + (0x54D, "M", "ս"), + (0x54E, "M", "վ"), + (0x54F, "M", "տ"), + (0x550, "M", "ր"), + (0x551, "M", "ց"), + (0x552, "M", "ւ"), + (0x553, "M", "փ"), + (0x554, "M", "ք"), + (0x555, "M", "օ"), + (0x556, "M", "ֆ"), + (0x557, "X"), + (0x559, "V"), + (0x587, "M", "եւ"), + (0x588, "V"), + (0x58B, "X"), + (0x58D, "V"), + (0x590, "X"), + (0x591, "V"), + (0x5C8, "X"), + (0x5D0, "V"), + (0x5EB, "X"), + (0x5EF, "V"), + (0x5F5, "X"), + (0x606, "V"), + (0x61C, "X"), + (0x61D, "V"), + ] + + +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x675, "M", "اٴ"), + (0x676, "M", "وٴ"), + (0x677, "M", "ۇٴ"), + (0x678, "M", "يٴ"), + (0x679, "V"), + (0x6DD, "X"), + (0x6DE, "V"), + (0x70E, "X"), + (0x710, "V"), + (0x74B, "X"), + (0x74D, "V"), + (0x7B2, "X"), + (0x7C0, "V"), + (0x7FB, "X"), + (0x7FD, "V"), + (0x82E, "X"), + (0x830, "V"), + (0x83F, "X"), + (0x840, "V"), + (0x85C, "X"), + (0x85E, "V"), + (0x85F, "X"), + (0x860, "V"), + (0x86B, "X"), + (0x870, "V"), + (0x88F, "X"), + (0x898, "V"), + (0x8E2, "X"), + (0x8E3, "V"), + (0x958, "M", "क़"), + (0x959, "M", "ख़"), + (0x95A, "M", "ग़"), + (0x95B, "M", "ज़"), + (0x95C, "M", "ड़"), + (0x95D, "M", "ढ़"), + (0x95E, "M", "फ़"), + (0x95F, "M", "य़"), + (0x960, "V"), + (0x984, "X"), + (0x985, "V"), + (0x98D, "X"), + (0x98F, "V"), + (0x991, "X"), + (0x993, "V"), + (0x9A9, "X"), + (0x9AA, "V"), + (0x9B1, "X"), + (0x9B2, "V"), + (0x9B3, "X"), + (0x9B6, "V"), + (0x9BA, "X"), + (0x9BC, "V"), + (0x9C5, "X"), + (0x9C7, "V"), + (0x9C9, "X"), + (0x9CB, "V"), + (0x9CF, "X"), + (0x9D7, "V"), + (0x9D8, "X"), + (0x9DC, "M", "ড়"), + (0x9DD, "M", "ঢ়"), + (0x9DE, "X"), + (0x9DF, "M", "য়"), + (0x9E0, "V"), + (0x9E4, "X"), + (0x9E6, "V"), + (0x9FF, "X"), + (0xA01, "V"), + (0xA04, "X"), + (0xA05, "V"), + (0xA0B, "X"), + (0xA0F, "V"), + (0xA11, "X"), + (0xA13, "V"), + (0xA29, "X"), + (0xA2A, "V"), + (0xA31, "X"), + (0xA32, "V"), + (0xA33, "M", "ਲ਼"), + (0xA34, "X"), + (0xA35, "V"), + (0xA36, "M", "ਸ਼"), + (0xA37, "X"), + (0xA38, "V"), + (0xA3A, "X"), + (0xA3C, "V"), + (0xA3D, "X"), + (0xA3E, "V"), + (0xA43, "X"), + (0xA47, "V"), + (0xA49, "X"), + (0xA4B, "V"), + (0xA4E, "X"), + (0xA51, "V"), + (0xA52, "X"), + (0xA59, "M", "ਖ਼"), + (0xA5A, "M", "ਗ਼"), + (0xA5B, "M", "ਜ਼"), + (0xA5C, "V"), + (0xA5D, "X"), + ] + + +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA5E, "M", "ਫ਼"), + (0xA5F, "X"), + (0xA66, "V"), + (0xA77, "X"), + (0xA81, "V"), + (0xA84, "X"), + (0xA85, "V"), + (0xA8E, "X"), + (0xA8F, "V"), + (0xA92, "X"), + (0xA93, "V"), + (0xAA9, "X"), + (0xAAA, "V"), + (0xAB1, "X"), + (0xAB2, "V"), + (0xAB4, "X"), + (0xAB5, "V"), + (0xABA, "X"), + (0xABC, "V"), + (0xAC6, "X"), + (0xAC7, "V"), + (0xACA, "X"), + (0xACB, "V"), + (0xACE, "X"), + (0xAD0, "V"), + (0xAD1, "X"), + (0xAE0, "V"), + (0xAE4, "X"), + (0xAE6, "V"), + (0xAF2, "X"), + (0xAF9, "V"), + (0xB00, "X"), + (0xB01, "V"), + (0xB04, "X"), + (0xB05, "V"), + (0xB0D, "X"), + (0xB0F, "V"), + (0xB11, "X"), + (0xB13, "V"), + (0xB29, "X"), + (0xB2A, "V"), + (0xB31, "X"), + (0xB32, "V"), + (0xB34, "X"), + (0xB35, "V"), + (0xB3A, "X"), + (0xB3C, "V"), + (0xB45, "X"), + (0xB47, "V"), + (0xB49, "X"), + (0xB4B, "V"), + (0xB4E, "X"), + (0xB55, "V"), + (0xB58, "X"), + (0xB5C, "M", "ଡ଼"), + (0xB5D, "M", "ଢ଼"), + (0xB5E, "X"), + (0xB5F, "V"), + 
(0xB64, "X"), + (0xB66, "V"), + (0xB78, "X"), + (0xB82, "V"), + (0xB84, "X"), + (0xB85, "V"), + (0xB8B, "X"), + (0xB8E, "V"), + (0xB91, "X"), + (0xB92, "V"), + (0xB96, "X"), + (0xB99, "V"), + (0xB9B, "X"), + (0xB9C, "V"), + (0xB9D, "X"), + (0xB9E, "V"), + (0xBA0, "X"), + (0xBA3, "V"), + (0xBA5, "X"), + (0xBA8, "V"), + (0xBAB, "X"), + (0xBAE, "V"), + (0xBBA, "X"), + (0xBBE, "V"), + (0xBC3, "X"), + (0xBC6, "V"), + (0xBC9, "X"), + (0xBCA, "V"), + (0xBCE, "X"), + (0xBD0, "V"), + (0xBD1, "X"), + (0xBD7, "V"), + (0xBD8, "X"), + (0xBE6, "V"), + (0xBFB, "X"), + (0xC00, "V"), + (0xC0D, "X"), + (0xC0E, "V"), + (0xC11, "X"), + (0xC12, "V"), + (0xC29, "X"), + (0xC2A, "V"), + ] + + +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xC3A, "X"), + (0xC3C, "V"), + (0xC45, "X"), + (0xC46, "V"), + (0xC49, "X"), + (0xC4A, "V"), + (0xC4E, "X"), + (0xC55, "V"), + (0xC57, "X"), + (0xC58, "V"), + (0xC5B, "X"), + (0xC5D, "V"), + (0xC5E, "X"), + (0xC60, "V"), + (0xC64, "X"), + (0xC66, "V"), + (0xC70, "X"), + (0xC77, "V"), + (0xC8D, "X"), + (0xC8E, "V"), + (0xC91, "X"), + (0xC92, "V"), + (0xCA9, "X"), + (0xCAA, "V"), + (0xCB4, "X"), + (0xCB5, "V"), + (0xCBA, "X"), + (0xCBC, "V"), + (0xCC5, "X"), + (0xCC6, "V"), + (0xCC9, "X"), + (0xCCA, "V"), + (0xCCE, "X"), + (0xCD5, "V"), + (0xCD7, "X"), + (0xCDD, "V"), + (0xCDF, "X"), + (0xCE0, "V"), + (0xCE4, "X"), + (0xCE6, "V"), + (0xCF0, "X"), + (0xCF1, "V"), + (0xCF4, "X"), + (0xD00, "V"), + (0xD0D, "X"), + (0xD0E, "V"), + (0xD11, "X"), + (0xD12, "V"), + (0xD45, "X"), + (0xD46, "V"), + (0xD49, "X"), + (0xD4A, "V"), + (0xD50, "X"), + (0xD54, "V"), + (0xD64, "X"), + (0xD66, "V"), + (0xD80, "X"), + (0xD81, "V"), + (0xD84, "X"), + (0xD85, "V"), + (0xD97, "X"), + (0xD9A, "V"), + (0xDB2, "X"), + (0xDB3, "V"), + (0xDBC, "X"), + (0xDBD, "V"), + (0xDBE, "X"), + (0xDC0, "V"), + (0xDC7, "X"), + (0xDCA, "V"), + (0xDCB, "X"), + (0xDCF, "V"), + (0xDD5, "X"), + (0xDD6, "V"), + (0xDD7, "X"), + (0xDD8, "V"), + (0xDE0, "X"), + (0xDE6, "V"), + (0xDF0, "X"), + (0xDF2, "V"), + (0xDF5, "X"), + (0xE01, "V"), + (0xE33, "M", "ํา"), + (0xE34, "V"), + (0xE3B, "X"), + (0xE3F, "V"), + (0xE5C, "X"), + (0xE81, "V"), + (0xE83, "X"), + (0xE84, "V"), + (0xE85, "X"), + (0xE86, "V"), + (0xE8B, "X"), + (0xE8C, "V"), + (0xEA4, "X"), + (0xEA5, "V"), + (0xEA6, "X"), + (0xEA7, "V"), + (0xEB3, "M", "ໍາ"), + (0xEB4, "V"), + ] + + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, "X"), + (0xEC0, "V"), + (0xEC5, "X"), + (0xEC6, "V"), + (0xEC7, "X"), + (0xEC8, "V"), + (0xECF, "X"), + (0xED0, "V"), + (0xEDA, "X"), + (0xEDC, "M", "ຫນ"), + (0xEDD, "M", "ຫມ"), + (0xEDE, "V"), + (0xEE0, "X"), + (0xF00, "V"), + (0xF0C, "M", "་"), + (0xF0D, "V"), + (0xF43, "M", "གྷ"), + (0xF44, "V"), + (0xF48, "X"), + (0xF49, "V"), + (0xF4D, "M", "ཌྷ"), + (0xF4E, "V"), + (0xF52, "M", "དྷ"), + (0xF53, "V"), + (0xF57, "M", "བྷ"), + (0xF58, "V"), + (0xF5C, "M", "ཛྷ"), + (0xF5D, "V"), + (0xF69, "M", "ཀྵ"), + (0xF6A, "V"), + (0xF6D, "X"), + (0xF71, "V"), + (0xF73, "M", "ཱི"), + (0xF74, "V"), + (0xF75, "M", "ཱུ"), + (0xF76, "M", "ྲྀ"), + (0xF77, "M", "ྲཱྀ"), + (0xF78, "M", "ླྀ"), + (0xF79, "M", "ླཱྀ"), + (0xF7A, "V"), + (0xF81, "M", "ཱྀ"), + (0xF82, "V"), + (0xF93, "M", "ྒྷ"), + (0xF94, "V"), + (0xF98, "X"), + (0xF99, "V"), + (0xF9D, "M", "ྜྷ"), + (0xF9E, "V"), + (0xFA2, "M", "ྡྷ"), + (0xFA3, "V"), + (0xFA7, "M", "ྦྷ"), + (0xFA8, "V"), + (0xFAC, "M", "ྫྷ"), + (0xFAD, "V"), + (0xFB9, "M", "ྐྵ"), + (0xFBA, "V"), + (0xFBD, "X"), + (0xFBE, "V"), + (0xFCD, "X"), + (0xFCE, "V"), + 
(0xFDB, "X"), + (0x1000, "V"), + (0x10A0, "X"), + (0x10C7, "M", "ⴧ"), + (0x10C8, "X"), + (0x10CD, "M", "ⴭ"), + (0x10CE, "X"), + (0x10D0, "V"), + (0x10FC, "M", "ნ"), + (0x10FD, "V"), + (0x115F, "X"), + (0x1161, "V"), + (0x1249, "X"), + (0x124A, "V"), + (0x124E, "X"), + (0x1250, "V"), + (0x1257, "X"), + (0x1258, "V"), + (0x1259, "X"), + (0x125A, "V"), + (0x125E, "X"), + (0x1260, "V"), + (0x1289, "X"), + (0x128A, "V"), + (0x128E, "X"), + (0x1290, "V"), + (0x12B1, "X"), + (0x12B2, "V"), + (0x12B6, "X"), + (0x12B8, "V"), + (0x12BF, "X"), + (0x12C0, "V"), + (0x12C1, "X"), + (0x12C2, "V"), + (0x12C6, "X"), + (0x12C8, "V"), + (0x12D7, "X"), + (0x12D8, "V"), + (0x1311, "X"), + (0x1312, "V"), + ] + + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1316, "X"), + (0x1318, "V"), + (0x135B, "X"), + (0x135D, "V"), + (0x137D, "X"), + (0x1380, "V"), + (0x139A, "X"), + (0x13A0, "V"), + (0x13F6, "X"), + (0x13F8, "M", "Ᏸ"), + (0x13F9, "M", "Ᏹ"), + (0x13FA, "M", "Ᏺ"), + (0x13FB, "M", "Ᏻ"), + (0x13FC, "M", "Ᏼ"), + (0x13FD, "M", "Ᏽ"), + (0x13FE, "X"), + (0x1400, "V"), + (0x1680, "X"), + (0x1681, "V"), + (0x169D, "X"), + (0x16A0, "V"), + (0x16F9, "X"), + (0x1700, "V"), + (0x1716, "X"), + (0x171F, "V"), + (0x1737, "X"), + (0x1740, "V"), + (0x1754, "X"), + (0x1760, "V"), + (0x176D, "X"), + (0x176E, "V"), + (0x1771, "X"), + (0x1772, "V"), + (0x1774, "X"), + (0x1780, "V"), + (0x17B4, "X"), + (0x17B6, "V"), + (0x17DE, "X"), + (0x17E0, "V"), + (0x17EA, "X"), + (0x17F0, "V"), + (0x17FA, "X"), + (0x1800, "V"), + (0x1806, "X"), + (0x1807, "V"), + (0x180B, "I"), + (0x180E, "X"), + (0x180F, "I"), + (0x1810, "V"), + (0x181A, "X"), + (0x1820, "V"), + (0x1879, "X"), + (0x1880, "V"), + (0x18AB, "X"), + (0x18B0, "V"), + (0x18F6, "X"), + (0x1900, "V"), + (0x191F, "X"), + (0x1920, "V"), + (0x192C, "X"), + (0x1930, "V"), + (0x193C, "X"), + (0x1940, "V"), + (0x1941, "X"), + (0x1944, "V"), + (0x196E, "X"), + (0x1970, "V"), + (0x1975, "X"), + (0x1980, "V"), + (0x19AC, "X"), + (0x19B0, "V"), + (0x19CA, "X"), + (0x19D0, "V"), + (0x19DB, "X"), + (0x19DE, "V"), + (0x1A1C, "X"), + (0x1A1E, "V"), + (0x1A5F, "X"), + (0x1A60, "V"), + (0x1A7D, "X"), + (0x1A7F, "V"), + (0x1A8A, "X"), + (0x1A90, "V"), + (0x1A9A, "X"), + (0x1AA0, "V"), + (0x1AAE, "X"), + (0x1AB0, "V"), + (0x1ACF, "X"), + (0x1B00, "V"), + (0x1B4D, "X"), + (0x1B50, "V"), + (0x1B7F, "X"), + (0x1B80, "V"), + (0x1BF4, "X"), + (0x1BFC, "V"), + (0x1C38, "X"), + (0x1C3B, "V"), + (0x1C4A, "X"), + (0x1C4D, "V"), + (0x1C80, "M", "в"), + ] + + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, "M", "д"), + (0x1C82, "M", "о"), + (0x1C83, "M", "с"), + (0x1C84, "M", "т"), + (0x1C86, "M", "ъ"), + (0x1C87, "M", "ѣ"), + (0x1C88, "M", "ꙋ"), + (0x1C89, "X"), + (0x1C90, "M", "ა"), + (0x1C91, "M", "ბ"), + (0x1C92, "M", "გ"), + (0x1C93, "M", "დ"), + (0x1C94, "M", "ე"), + (0x1C95, "M", "ვ"), + (0x1C96, "M", "ზ"), + (0x1C97, "M", "თ"), + (0x1C98, "M", "ი"), + (0x1C99, "M", "კ"), + (0x1C9A, "M", "ლ"), + (0x1C9B, "M", "მ"), + (0x1C9C, "M", "ნ"), + (0x1C9D, "M", "ო"), + (0x1C9E, "M", "პ"), + (0x1C9F, "M", "ჟ"), + (0x1CA0, "M", "რ"), + (0x1CA1, "M", "ს"), + (0x1CA2, "M", "ტ"), + (0x1CA3, "M", "უ"), + (0x1CA4, "M", "ფ"), + (0x1CA5, "M", "ქ"), + (0x1CA6, "M", "ღ"), + (0x1CA7, "M", "ყ"), + (0x1CA8, "M", "შ"), + (0x1CA9, "M", "ჩ"), + (0x1CAA, "M", "ც"), + (0x1CAB, "M", "ძ"), + (0x1CAC, "M", "წ"), + (0x1CAD, "M", "ჭ"), + (0x1CAE, "M", "ხ"), + (0x1CAF, "M", "ჯ"), + (0x1CB0, "M", "ჰ"), + (0x1CB1, "M", "ჱ"), + (0x1CB2, "M", "ჲ"), + 
(0x1CB3, "M", "ჳ"), + (0x1CB4, "M", "ჴ"), + (0x1CB5, "M", "ჵ"), + (0x1CB6, "M", "ჶ"), + (0x1CB7, "M", "ჷ"), + (0x1CB8, "M", "ჸ"), + (0x1CB9, "M", "ჹ"), + (0x1CBA, "M", "ჺ"), + (0x1CBB, "X"), + (0x1CBD, "M", "ჽ"), + (0x1CBE, "M", "ჾ"), + (0x1CBF, "M", "ჿ"), + (0x1CC0, "V"), + (0x1CC8, "X"), + (0x1CD0, "V"), + (0x1CFB, "X"), + (0x1D00, "V"), + (0x1D2C, "M", "a"), + (0x1D2D, "M", "æ"), + (0x1D2E, "M", "b"), + (0x1D2F, "V"), + (0x1D30, "M", "d"), + (0x1D31, "M", "e"), + (0x1D32, "M", "ǝ"), + (0x1D33, "M", "g"), + (0x1D34, "M", "h"), + (0x1D35, "M", "i"), + (0x1D36, "M", "j"), + (0x1D37, "M", "k"), + (0x1D38, "M", "l"), + (0x1D39, "M", "m"), + (0x1D3A, "M", "n"), + (0x1D3B, "V"), + (0x1D3C, "M", "o"), + (0x1D3D, "M", "ȣ"), + (0x1D3E, "M", "p"), + (0x1D3F, "M", "r"), + (0x1D40, "M", "t"), + (0x1D41, "M", "u"), + (0x1D42, "M", "w"), + (0x1D43, "M", "a"), + (0x1D44, "M", "ɐ"), + (0x1D45, "M", "ɑ"), + (0x1D46, "M", "ᴂ"), + (0x1D47, "M", "b"), + (0x1D48, "M", "d"), + (0x1D49, "M", "e"), + (0x1D4A, "M", "ə"), + (0x1D4B, "M", "ɛ"), + (0x1D4C, "M", "ɜ"), + (0x1D4D, "M", "g"), + (0x1D4E, "V"), + (0x1D4F, "M", "k"), + (0x1D50, "M", "m"), + (0x1D51, "M", "ŋ"), + (0x1D52, "M", "o"), + (0x1D53, "M", "ɔ"), + ] + + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, "M", "ᴖ"), + (0x1D55, "M", "ᴗ"), + (0x1D56, "M", "p"), + (0x1D57, "M", "t"), + (0x1D58, "M", "u"), + (0x1D59, "M", "ᴝ"), + (0x1D5A, "M", "ɯ"), + (0x1D5B, "M", "v"), + (0x1D5C, "M", "ᴥ"), + (0x1D5D, "M", "β"), + (0x1D5E, "M", "γ"), + (0x1D5F, "M", "δ"), + (0x1D60, "M", "φ"), + (0x1D61, "M", "χ"), + (0x1D62, "M", "i"), + (0x1D63, "M", "r"), + (0x1D64, "M", "u"), + (0x1D65, "M", "v"), + (0x1D66, "M", "β"), + (0x1D67, "M", "γ"), + (0x1D68, "M", "ρ"), + (0x1D69, "M", "φ"), + (0x1D6A, "M", "χ"), + (0x1D6B, "V"), + (0x1D78, "M", "н"), + (0x1D79, "V"), + (0x1D9B, "M", "ɒ"), + (0x1D9C, "M", "c"), + (0x1D9D, "M", "ɕ"), + (0x1D9E, "M", "ð"), + (0x1D9F, "M", "ɜ"), + (0x1DA0, "M", "f"), + (0x1DA1, "M", "ɟ"), + (0x1DA2, "M", "ɡ"), + (0x1DA3, "M", "ɥ"), + (0x1DA4, "M", "ɨ"), + (0x1DA5, "M", "ɩ"), + (0x1DA6, "M", "ɪ"), + (0x1DA7, "M", "ᵻ"), + (0x1DA8, "M", "ʝ"), + (0x1DA9, "M", "ɭ"), + (0x1DAA, "M", "ᶅ"), + (0x1DAB, "M", "ʟ"), + (0x1DAC, "M", "ɱ"), + (0x1DAD, "M", "ɰ"), + (0x1DAE, "M", "ɲ"), + (0x1DAF, "M", "ɳ"), + (0x1DB0, "M", "ɴ"), + (0x1DB1, "M", "ɵ"), + (0x1DB2, "M", "ɸ"), + (0x1DB3, "M", "ʂ"), + (0x1DB4, "M", "ʃ"), + (0x1DB5, "M", "ƫ"), + (0x1DB6, "M", "ʉ"), + (0x1DB7, "M", "ʊ"), + (0x1DB8, "M", "ᴜ"), + (0x1DB9, "M", "ʋ"), + (0x1DBA, "M", "ʌ"), + (0x1DBB, "M", "z"), + (0x1DBC, "M", "ʐ"), + (0x1DBD, "M", "ʑ"), + (0x1DBE, "M", "ʒ"), + (0x1DBF, "M", "θ"), + (0x1DC0, "V"), + (0x1E00, "M", "ḁ"), + (0x1E01, "V"), + (0x1E02, "M", "ḃ"), + (0x1E03, "V"), + (0x1E04, "M", "ḅ"), + (0x1E05, "V"), + (0x1E06, "M", "ḇ"), + (0x1E07, "V"), + (0x1E08, "M", "ḉ"), + (0x1E09, "V"), + (0x1E0A, "M", "ḋ"), + (0x1E0B, "V"), + (0x1E0C, "M", "ḍ"), + (0x1E0D, "V"), + (0x1E0E, "M", "ḏ"), + (0x1E0F, "V"), + (0x1E10, "M", "ḑ"), + (0x1E11, "V"), + (0x1E12, "M", "ḓ"), + (0x1E13, "V"), + (0x1E14, "M", "ḕ"), + (0x1E15, "V"), + (0x1E16, "M", "ḗ"), + (0x1E17, "V"), + (0x1E18, "M", "ḙ"), + (0x1E19, "V"), + (0x1E1A, "M", "ḛ"), + (0x1E1B, "V"), + (0x1E1C, "M", "ḝ"), + (0x1E1D, "V"), + (0x1E1E, "M", "ḟ"), + (0x1E1F, "V"), + (0x1E20, "M", "ḡ"), + (0x1E21, "V"), + (0x1E22, "M", "ḣ"), + (0x1E23, "V"), + ] + + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, "M", "ḥ"), + (0x1E25, "V"), + (0x1E26, "M", "ḧ"), + (0x1E27, 
"V"), + (0x1E28, "M", "ḩ"), + (0x1E29, "V"), + (0x1E2A, "M", "ḫ"), + (0x1E2B, "V"), + (0x1E2C, "M", "ḭ"), + (0x1E2D, "V"), + (0x1E2E, "M", "ḯ"), + (0x1E2F, "V"), + (0x1E30, "M", "ḱ"), + (0x1E31, "V"), + (0x1E32, "M", "ḳ"), + (0x1E33, "V"), + (0x1E34, "M", "ḵ"), + (0x1E35, "V"), + (0x1E36, "M", "ḷ"), + (0x1E37, "V"), + (0x1E38, "M", "ḹ"), + (0x1E39, "V"), + (0x1E3A, "M", "ḻ"), + (0x1E3B, "V"), + (0x1E3C, "M", "ḽ"), + (0x1E3D, "V"), + (0x1E3E, "M", "ḿ"), + (0x1E3F, "V"), + (0x1E40, "M", "ṁ"), + (0x1E41, "V"), + (0x1E42, "M", "ṃ"), + (0x1E43, "V"), + (0x1E44, "M", "ṅ"), + (0x1E45, "V"), + (0x1E46, "M", "ṇ"), + (0x1E47, "V"), + (0x1E48, "M", "ṉ"), + (0x1E49, "V"), + (0x1E4A, "M", "ṋ"), + (0x1E4B, "V"), + (0x1E4C, "M", "ṍ"), + (0x1E4D, "V"), + (0x1E4E, "M", "ṏ"), + (0x1E4F, "V"), + (0x1E50, "M", "ṑ"), + (0x1E51, "V"), + (0x1E52, "M", "ṓ"), + (0x1E53, "V"), + (0x1E54, "M", "ṕ"), + (0x1E55, "V"), + (0x1E56, "M", "ṗ"), + (0x1E57, "V"), + (0x1E58, "M", "ṙ"), + (0x1E59, "V"), + (0x1E5A, "M", "ṛ"), + (0x1E5B, "V"), + (0x1E5C, "M", "ṝ"), + (0x1E5D, "V"), + (0x1E5E, "M", "ṟ"), + (0x1E5F, "V"), + (0x1E60, "M", "ṡ"), + (0x1E61, "V"), + (0x1E62, "M", "ṣ"), + (0x1E63, "V"), + (0x1E64, "M", "ṥ"), + (0x1E65, "V"), + (0x1E66, "M", "ṧ"), + (0x1E67, "V"), + (0x1E68, "M", "ṩ"), + (0x1E69, "V"), + (0x1E6A, "M", "ṫ"), + (0x1E6B, "V"), + (0x1E6C, "M", "ṭ"), + (0x1E6D, "V"), + (0x1E6E, "M", "ṯ"), + (0x1E6F, "V"), + (0x1E70, "M", "ṱ"), + (0x1E71, "V"), + (0x1E72, "M", "ṳ"), + (0x1E73, "V"), + (0x1E74, "M", "ṵ"), + (0x1E75, "V"), + (0x1E76, "M", "ṷ"), + (0x1E77, "V"), + (0x1E78, "M", "ṹ"), + (0x1E79, "V"), + (0x1E7A, "M", "ṻ"), + (0x1E7B, "V"), + (0x1E7C, "M", "ṽ"), + (0x1E7D, "V"), + (0x1E7E, "M", "ṿ"), + (0x1E7F, "V"), + (0x1E80, "M", "ẁ"), + (0x1E81, "V"), + (0x1E82, "M", "ẃ"), + (0x1E83, "V"), + (0x1E84, "M", "ẅ"), + (0x1E85, "V"), + (0x1E86, "M", "ẇ"), + (0x1E87, "V"), + ] + + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, "M", "ẉ"), + (0x1E89, "V"), + (0x1E8A, "M", "ẋ"), + (0x1E8B, "V"), + (0x1E8C, "M", "ẍ"), + (0x1E8D, "V"), + (0x1E8E, "M", "ẏ"), + (0x1E8F, "V"), + (0x1E90, "M", "ẑ"), + (0x1E91, "V"), + (0x1E92, "M", "ẓ"), + (0x1E93, "V"), + (0x1E94, "M", "ẕ"), + (0x1E95, "V"), + (0x1E9A, "M", "aʾ"), + (0x1E9B, "M", "ṡ"), + (0x1E9C, "V"), + (0x1E9E, "M", "ß"), + (0x1E9F, "V"), + (0x1EA0, "M", "ạ"), + (0x1EA1, "V"), + (0x1EA2, "M", "ả"), + (0x1EA3, "V"), + (0x1EA4, "M", "ấ"), + (0x1EA5, "V"), + (0x1EA6, "M", "ầ"), + (0x1EA7, "V"), + (0x1EA8, "M", "ẩ"), + (0x1EA9, "V"), + (0x1EAA, "M", "ẫ"), + (0x1EAB, "V"), + (0x1EAC, "M", "ậ"), + (0x1EAD, "V"), + (0x1EAE, "M", "ắ"), + (0x1EAF, "V"), + (0x1EB0, "M", "ằ"), + (0x1EB1, "V"), + (0x1EB2, "M", "ẳ"), + (0x1EB3, "V"), + (0x1EB4, "M", "ẵ"), + (0x1EB5, "V"), + (0x1EB6, "M", "ặ"), + (0x1EB7, "V"), + (0x1EB8, "M", "ẹ"), + (0x1EB9, "V"), + (0x1EBA, "M", "ẻ"), + (0x1EBB, "V"), + (0x1EBC, "M", "ẽ"), + (0x1EBD, "V"), + (0x1EBE, "M", "ế"), + (0x1EBF, "V"), + (0x1EC0, "M", "ề"), + (0x1EC1, "V"), + (0x1EC2, "M", "ể"), + (0x1EC3, "V"), + (0x1EC4, "M", "ễ"), + (0x1EC5, "V"), + (0x1EC6, "M", "ệ"), + (0x1EC7, "V"), + (0x1EC8, "M", "ỉ"), + (0x1EC9, "V"), + (0x1ECA, "M", "ị"), + (0x1ECB, "V"), + (0x1ECC, "M", "ọ"), + (0x1ECD, "V"), + (0x1ECE, "M", "ỏ"), + (0x1ECF, "V"), + (0x1ED0, "M", "ố"), + (0x1ED1, "V"), + (0x1ED2, "M", "ồ"), + (0x1ED3, "V"), + (0x1ED4, "M", "ổ"), + (0x1ED5, "V"), + (0x1ED6, "M", "ỗ"), + (0x1ED7, "V"), + (0x1ED8, "M", "ộ"), + (0x1ED9, "V"), + (0x1EDA, "M", "ớ"), + (0x1EDB, "V"), + (0x1EDC, "M", "ờ"), + (0x1EDD, "V"), + 
(0x1EDE, "M", "ở"), + (0x1EDF, "V"), + (0x1EE0, "M", "ỡ"), + (0x1EE1, "V"), + (0x1EE2, "M", "ợ"), + (0x1EE3, "V"), + (0x1EE4, "M", "ụ"), + (0x1EE5, "V"), + (0x1EE6, "M", "ủ"), + (0x1EE7, "V"), + (0x1EE8, "M", "ứ"), + (0x1EE9, "V"), + (0x1EEA, "M", "ừ"), + (0x1EEB, "V"), + (0x1EEC, "M", "ử"), + (0x1EED, "V"), + (0x1EEE, "M", "ữ"), + (0x1EEF, "V"), + (0x1EF0, "M", "ự"), + ] + + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, "V"), + (0x1EF2, "M", "ỳ"), + (0x1EF3, "V"), + (0x1EF4, "M", "ỵ"), + (0x1EF5, "V"), + (0x1EF6, "M", "ỷ"), + (0x1EF7, "V"), + (0x1EF8, "M", "ỹ"), + (0x1EF9, "V"), + (0x1EFA, "M", "ỻ"), + (0x1EFB, "V"), + (0x1EFC, "M", "ỽ"), + (0x1EFD, "V"), + (0x1EFE, "M", "ỿ"), + (0x1EFF, "V"), + (0x1F08, "M", "ἀ"), + (0x1F09, "M", "ἁ"), + (0x1F0A, "M", "ἂ"), + (0x1F0B, "M", "ἃ"), + (0x1F0C, "M", "ἄ"), + (0x1F0D, "M", "ἅ"), + (0x1F0E, "M", "ἆ"), + (0x1F0F, "M", "ἇ"), + (0x1F10, "V"), + (0x1F16, "X"), + (0x1F18, "M", "ἐ"), + (0x1F19, "M", "ἑ"), + (0x1F1A, "M", "ἒ"), + (0x1F1B, "M", "ἓ"), + (0x1F1C, "M", "ἔ"), + (0x1F1D, "M", "ἕ"), + (0x1F1E, "X"), + (0x1F20, "V"), + (0x1F28, "M", "ἠ"), + (0x1F29, "M", "ἡ"), + (0x1F2A, "M", "ἢ"), + (0x1F2B, "M", "ἣ"), + (0x1F2C, "M", "ἤ"), + (0x1F2D, "M", "ἥ"), + (0x1F2E, "M", "ἦ"), + (0x1F2F, "M", "ἧ"), + (0x1F30, "V"), + (0x1F38, "M", "ἰ"), + (0x1F39, "M", "ἱ"), + (0x1F3A, "M", "ἲ"), + (0x1F3B, "M", "ἳ"), + (0x1F3C, "M", "ἴ"), + (0x1F3D, "M", "ἵ"), + (0x1F3E, "M", "ἶ"), + (0x1F3F, "M", "ἷ"), + (0x1F40, "V"), + (0x1F46, "X"), + (0x1F48, "M", "ὀ"), + (0x1F49, "M", "ὁ"), + (0x1F4A, "M", "ὂ"), + (0x1F4B, "M", "ὃ"), + (0x1F4C, "M", "ὄ"), + (0x1F4D, "M", "ὅ"), + (0x1F4E, "X"), + (0x1F50, "V"), + (0x1F58, "X"), + (0x1F59, "M", "ὑ"), + (0x1F5A, "X"), + (0x1F5B, "M", "ὓ"), + (0x1F5C, "X"), + (0x1F5D, "M", "ὕ"), + (0x1F5E, "X"), + (0x1F5F, "M", "ὗ"), + (0x1F60, "V"), + (0x1F68, "M", "ὠ"), + (0x1F69, "M", "ὡ"), + (0x1F6A, "M", "ὢ"), + (0x1F6B, "M", "ὣ"), + (0x1F6C, "M", "ὤ"), + (0x1F6D, "M", "ὥ"), + (0x1F6E, "M", "ὦ"), + (0x1F6F, "M", "ὧ"), + (0x1F70, "V"), + (0x1F71, "M", "ά"), + (0x1F72, "V"), + (0x1F73, "M", "έ"), + (0x1F74, "V"), + (0x1F75, "M", "ή"), + (0x1F76, "V"), + (0x1F77, "M", "ί"), + (0x1F78, "V"), + (0x1F79, "M", "ό"), + (0x1F7A, "V"), + (0x1F7B, "M", "ύ"), + (0x1F7C, "V"), + (0x1F7D, "M", "ώ"), + (0x1F7E, "X"), + (0x1F80, "M", "ἀι"), + (0x1F81, "M", "ἁι"), + (0x1F82, "M", "ἂι"), + (0x1F83, "M", "ἃι"), + (0x1F84, "M", "ἄι"), + (0x1F85, "M", "ἅι"), + (0x1F86, "M", "ἆι"), + (0x1F87, "M", "ἇι"), + ] + + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, "M", "ἀι"), + (0x1F89, "M", "ἁι"), + (0x1F8A, "M", "ἂι"), + (0x1F8B, "M", "ἃι"), + (0x1F8C, "M", "ἄι"), + (0x1F8D, "M", "ἅι"), + (0x1F8E, "M", "ἆι"), + (0x1F8F, "M", "ἇι"), + (0x1F90, "M", "ἠι"), + (0x1F91, "M", "ἡι"), + (0x1F92, "M", "ἢι"), + (0x1F93, "M", "ἣι"), + (0x1F94, "M", "ἤι"), + (0x1F95, "M", "ἥι"), + (0x1F96, "M", "ἦι"), + (0x1F97, "M", "ἧι"), + (0x1F98, "M", "ἠι"), + (0x1F99, "M", "ἡι"), + (0x1F9A, "M", "ἢι"), + (0x1F9B, "M", "ἣι"), + (0x1F9C, "M", "ἤι"), + (0x1F9D, "M", "ἥι"), + (0x1F9E, "M", "ἦι"), + (0x1F9F, "M", "ἧι"), + (0x1FA0, "M", "ὠι"), + (0x1FA1, "M", "ὡι"), + (0x1FA2, "M", "ὢι"), + (0x1FA3, "M", "ὣι"), + (0x1FA4, "M", "ὤι"), + (0x1FA5, "M", "ὥι"), + (0x1FA6, "M", "ὦι"), + (0x1FA7, "M", "ὧι"), + (0x1FA8, "M", "ὠι"), + (0x1FA9, "M", "ὡι"), + (0x1FAA, "M", "ὢι"), + (0x1FAB, "M", "ὣι"), + (0x1FAC, "M", "ὤι"), + (0x1FAD, "M", "ὥι"), + (0x1FAE, "M", "ὦι"), + (0x1FAF, "M", "ὧι"), + (0x1FB0, "V"), + (0x1FB2, 
"M", "ὰι"), + (0x1FB3, "M", "αι"), + (0x1FB4, "M", "άι"), + (0x1FB5, "X"), + (0x1FB6, "V"), + (0x1FB7, "M", "ᾶι"), + (0x1FB8, "M", "ᾰ"), + (0x1FB9, "M", "ᾱ"), + (0x1FBA, "M", "ὰ"), + (0x1FBB, "M", "ά"), + (0x1FBC, "M", "αι"), + (0x1FBD, "3", " ̓"), + (0x1FBE, "M", "ι"), + (0x1FBF, "3", " ̓"), + (0x1FC0, "3", " ͂"), + (0x1FC1, "3", " ̈͂"), + (0x1FC2, "M", "ὴι"), + (0x1FC3, "M", "ηι"), + (0x1FC4, "M", "ήι"), + (0x1FC5, "X"), + (0x1FC6, "V"), + (0x1FC7, "M", "ῆι"), + (0x1FC8, "M", "ὲ"), + (0x1FC9, "M", "έ"), + (0x1FCA, "M", "ὴ"), + (0x1FCB, "M", "ή"), + (0x1FCC, "M", "ηι"), + (0x1FCD, "3", " ̓̀"), + (0x1FCE, "3", " ̓́"), + (0x1FCF, "3", " ̓͂"), + (0x1FD0, "V"), + (0x1FD3, "M", "ΐ"), + (0x1FD4, "X"), + (0x1FD6, "V"), + (0x1FD8, "M", "ῐ"), + (0x1FD9, "M", "ῑ"), + (0x1FDA, "M", "ὶ"), + (0x1FDB, "M", "ί"), + (0x1FDC, "X"), + (0x1FDD, "3", " ̔̀"), + (0x1FDE, "3", " ̔́"), + (0x1FDF, "3", " ̔͂"), + (0x1FE0, "V"), + (0x1FE3, "M", "ΰ"), + (0x1FE4, "V"), + (0x1FE8, "M", "ῠ"), + (0x1FE9, "M", "ῡ"), + (0x1FEA, "M", "ὺ"), + (0x1FEB, "M", "ύ"), + (0x1FEC, "M", "ῥ"), + (0x1FED, "3", " ̈̀"), + (0x1FEE, "3", " ̈́"), + (0x1FEF, "3", "`"), + (0x1FF0, "X"), + (0x1FF2, "M", "ὼι"), + (0x1FF3, "M", "ωι"), + (0x1FF4, "M", "ώι"), + (0x1FF5, "X"), + (0x1FF6, "V"), + ] + + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, "M", "ῶι"), + (0x1FF8, "M", "ὸ"), + (0x1FF9, "M", "ό"), + (0x1FFA, "M", "ὼ"), + (0x1FFB, "M", "ώ"), + (0x1FFC, "M", "ωι"), + (0x1FFD, "3", " ́"), + (0x1FFE, "3", " ̔"), + (0x1FFF, "X"), + (0x2000, "3", " "), + (0x200B, "I"), + (0x200C, "D", ""), + (0x200E, "X"), + (0x2010, "V"), + (0x2011, "M", "‐"), + (0x2012, "V"), + (0x2017, "3", " ̳"), + (0x2018, "V"), + (0x2024, "X"), + (0x2027, "V"), + (0x2028, "X"), + (0x202F, "3", " "), + (0x2030, "V"), + (0x2033, "M", "′′"), + (0x2034, "M", "′′′"), + (0x2035, "V"), + (0x2036, "M", "‵‵"), + (0x2037, "M", "‵‵‵"), + (0x2038, "V"), + (0x203C, "3", "!!"), + (0x203D, "V"), + (0x203E, "3", " ̅"), + (0x203F, "V"), + (0x2047, "3", "??"), + (0x2048, "3", "?!"), + (0x2049, "3", "!?"), + (0x204A, "V"), + (0x2057, "M", "′′′′"), + (0x2058, "V"), + (0x205F, "3", " "), + (0x2060, "I"), + (0x2061, "X"), + (0x2064, "I"), + (0x2065, "X"), + (0x2070, "M", "0"), + (0x2071, "M", "i"), + (0x2072, "X"), + (0x2074, "M", "4"), + (0x2075, "M", "5"), + (0x2076, "M", "6"), + (0x2077, "M", "7"), + (0x2078, "M", "8"), + (0x2079, "M", "9"), + (0x207A, "3", "+"), + (0x207B, "M", "−"), + (0x207C, "3", "="), + (0x207D, "3", "("), + (0x207E, "3", ")"), + (0x207F, "M", "n"), + (0x2080, "M", "0"), + (0x2081, "M", "1"), + (0x2082, "M", "2"), + (0x2083, "M", "3"), + (0x2084, "M", "4"), + (0x2085, "M", "5"), + (0x2086, "M", "6"), + (0x2087, "M", "7"), + (0x2088, "M", "8"), + (0x2089, "M", "9"), + (0x208A, "3", "+"), + (0x208B, "M", "−"), + (0x208C, "3", "="), + (0x208D, "3", "("), + (0x208E, "3", ")"), + (0x208F, "X"), + (0x2090, "M", "a"), + (0x2091, "M", "e"), + (0x2092, "M", "o"), + (0x2093, "M", "x"), + (0x2094, "M", "ə"), + (0x2095, "M", "h"), + (0x2096, "M", "k"), + (0x2097, "M", "l"), + (0x2098, "M", "m"), + (0x2099, "M", "n"), + (0x209A, "M", "p"), + (0x209B, "M", "s"), + (0x209C, "M", "t"), + (0x209D, "X"), + (0x20A0, "V"), + (0x20A8, "M", "rs"), + (0x20A9, "V"), + (0x20C1, "X"), + (0x20D0, "V"), + (0x20F1, "X"), + (0x2100, "3", "a/c"), + (0x2101, "3", "a/s"), + (0x2102, "M", "c"), + (0x2103, "M", "°c"), + (0x2104, "V"), + ] + + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, "3", "c/o"), + (0x2106, "3", 
"c/u"), + (0x2107, "M", "ɛ"), + (0x2108, "V"), + (0x2109, "M", "°f"), + (0x210A, "M", "g"), + (0x210B, "M", "h"), + (0x210F, "M", "ħ"), + (0x2110, "M", "i"), + (0x2112, "M", "l"), + (0x2114, "V"), + (0x2115, "M", "n"), + (0x2116, "M", "no"), + (0x2117, "V"), + (0x2119, "M", "p"), + (0x211A, "M", "q"), + (0x211B, "M", "r"), + (0x211E, "V"), + (0x2120, "M", "sm"), + (0x2121, "M", "tel"), + (0x2122, "M", "tm"), + (0x2123, "V"), + (0x2124, "M", "z"), + (0x2125, "V"), + (0x2126, "M", "ω"), + (0x2127, "V"), + (0x2128, "M", "z"), + (0x2129, "V"), + (0x212A, "M", "k"), + (0x212B, "M", "å"), + (0x212C, "M", "b"), + (0x212D, "M", "c"), + (0x212E, "V"), + (0x212F, "M", "e"), + (0x2131, "M", "f"), + (0x2132, "X"), + (0x2133, "M", "m"), + (0x2134, "M", "o"), + (0x2135, "M", "א"), + (0x2136, "M", "ב"), + (0x2137, "M", "ג"), + (0x2138, "M", "ד"), + (0x2139, "M", "i"), + (0x213A, "V"), + (0x213B, "M", "fax"), + (0x213C, "M", "π"), + (0x213D, "M", "γ"), + (0x213F, "M", "π"), + (0x2140, "M", "∑"), + (0x2141, "V"), + (0x2145, "M", "d"), + (0x2147, "M", "e"), + (0x2148, "M", "i"), + (0x2149, "M", "j"), + (0x214A, "V"), + (0x2150, "M", "1⁄7"), + (0x2151, "M", "1⁄9"), + (0x2152, "M", "1⁄10"), + (0x2153, "M", "1⁄3"), + (0x2154, "M", "2⁄3"), + (0x2155, "M", "1⁄5"), + (0x2156, "M", "2⁄5"), + (0x2157, "M", "3⁄5"), + (0x2158, "M", "4⁄5"), + (0x2159, "M", "1⁄6"), + (0x215A, "M", "5⁄6"), + (0x215B, "M", "1⁄8"), + (0x215C, "M", "3⁄8"), + (0x215D, "M", "5⁄8"), + (0x215E, "M", "7⁄8"), + (0x215F, "M", "1⁄"), + (0x2160, "M", "i"), + (0x2161, "M", "ii"), + (0x2162, "M", "iii"), + (0x2163, "M", "iv"), + (0x2164, "M", "v"), + (0x2165, "M", "vi"), + (0x2166, "M", "vii"), + (0x2167, "M", "viii"), + (0x2168, "M", "ix"), + (0x2169, "M", "x"), + (0x216A, "M", "xi"), + (0x216B, "M", "xii"), + (0x216C, "M", "l"), + (0x216D, "M", "c"), + (0x216E, "M", "d"), + (0x216F, "M", "m"), + (0x2170, "M", "i"), + (0x2171, "M", "ii"), + (0x2172, "M", "iii"), + (0x2173, "M", "iv"), + (0x2174, "M", "v"), + (0x2175, "M", "vi"), + (0x2176, "M", "vii"), + (0x2177, "M", "viii"), + (0x2178, "M", "ix"), + (0x2179, "M", "x"), + (0x217A, "M", "xi"), + (0x217B, "M", "xii"), + (0x217C, "M", "l"), + ] + + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, "M", "c"), + (0x217E, "M", "d"), + (0x217F, "M", "m"), + (0x2180, "V"), + (0x2183, "X"), + (0x2184, "V"), + (0x2189, "M", "0⁄3"), + (0x218A, "V"), + (0x218C, "X"), + (0x2190, "V"), + (0x222C, "M", "∫∫"), + (0x222D, "M", "∫∫∫"), + (0x222E, "V"), + (0x222F, "M", "∮∮"), + (0x2230, "M", "∮∮∮"), + (0x2231, "V"), + (0x2329, "M", "〈"), + (0x232A, "M", "〉"), + (0x232B, "V"), + (0x2427, "X"), + (0x2440, "V"), + (0x244B, "X"), + (0x2460, "M", "1"), + (0x2461, "M", "2"), + (0x2462, "M", "3"), + (0x2463, "M", "4"), + (0x2464, "M", "5"), + (0x2465, "M", "6"), + (0x2466, "M", "7"), + (0x2467, "M", "8"), + (0x2468, "M", "9"), + (0x2469, "M", "10"), + (0x246A, "M", "11"), + (0x246B, "M", "12"), + (0x246C, "M", "13"), + (0x246D, "M", "14"), + (0x246E, "M", "15"), + (0x246F, "M", "16"), + (0x2470, "M", "17"), + (0x2471, "M", "18"), + (0x2472, "M", "19"), + (0x2473, "M", "20"), + (0x2474, "3", "(1)"), + (0x2475, "3", "(2)"), + (0x2476, "3", "(3)"), + (0x2477, "3", "(4)"), + (0x2478, "3", "(5)"), + (0x2479, "3", "(6)"), + (0x247A, "3", "(7)"), + (0x247B, "3", "(8)"), + (0x247C, "3", "(9)"), + (0x247D, "3", "(10)"), + (0x247E, "3", "(11)"), + (0x247F, "3", "(12)"), + (0x2480, "3", "(13)"), + (0x2481, "3", "(14)"), + (0x2482, "3", "(15)"), + (0x2483, "3", "(16)"), + (0x2484, "3", "(17)"), 
+ (0x2485, "3", "(18)"), + (0x2486, "3", "(19)"), + (0x2487, "3", "(20)"), + (0x2488, "X"), + (0x249C, "3", "(a)"), + (0x249D, "3", "(b)"), + (0x249E, "3", "(c)"), + (0x249F, "3", "(d)"), + (0x24A0, "3", "(e)"), + (0x24A1, "3", "(f)"), + (0x24A2, "3", "(g)"), + (0x24A3, "3", "(h)"), + (0x24A4, "3", "(i)"), + (0x24A5, "3", "(j)"), + (0x24A6, "3", "(k)"), + (0x24A7, "3", "(l)"), + (0x24A8, "3", "(m)"), + (0x24A9, "3", "(n)"), + (0x24AA, "3", "(o)"), + (0x24AB, "3", "(p)"), + (0x24AC, "3", "(q)"), + (0x24AD, "3", "(r)"), + (0x24AE, "3", "(s)"), + (0x24AF, "3", "(t)"), + (0x24B0, "3", "(u)"), + (0x24B1, "3", "(v)"), + (0x24B2, "3", "(w)"), + (0x24B3, "3", "(x)"), + (0x24B4, "3", "(y)"), + (0x24B5, "3", "(z)"), + (0x24B6, "M", "a"), + (0x24B7, "M", "b"), + (0x24B8, "M", "c"), + (0x24B9, "M", "d"), + (0x24BA, "M", "e"), + (0x24BB, "M", "f"), + (0x24BC, "M", "g"), + (0x24BD, "M", "h"), + (0x24BE, "M", "i"), + (0x24BF, "M", "j"), + (0x24C0, "M", "k"), + ] + + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24C1, "M", "l"), + (0x24C2, "M", "m"), + (0x24C3, "M", "n"), + (0x24C4, "M", "o"), + (0x24C5, "M", "p"), + (0x24C6, "M", "q"), + (0x24C7, "M", "r"), + (0x24C8, "M", "s"), + (0x24C9, "M", "t"), + (0x24CA, "M", "u"), + (0x24CB, "M", "v"), + (0x24CC, "M", "w"), + (0x24CD, "M", "x"), + (0x24CE, "M", "y"), + (0x24CF, "M", "z"), + (0x24D0, "M", "a"), + (0x24D1, "M", "b"), + (0x24D2, "M", "c"), + (0x24D3, "M", "d"), + (0x24D4, "M", "e"), + (0x24D5, "M", "f"), + (0x24D6, "M", "g"), + (0x24D7, "M", "h"), + (0x24D8, "M", "i"), + (0x24D9, "M", "j"), + (0x24DA, "M", "k"), + (0x24DB, "M", "l"), + (0x24DC, "M", "m"), + (0x24DD, "M", "n"), + (0x24DE, "M", "o"), + (0x24DF, "M", "p"), + (0x24E0, "M", "q"), + (0x24E1, "M", "r"), + (0x24E2, "M", "s"), + (0x24E3, "M", "t"), + (0x24E4, "M", "u"), + (0x24E5, "M", "v"), + (0x24E6, "M", "w"), + (0x24E7, "M", "x"), + (0x24E8, "M", "y"), + (0x24E9, "M", "z"), + (0x24EA, "M", "0"), + (0x24EB, "V"), + (0x2A0C, "M", "∫∫∫∫"), + (0x2A0D, "V"), + (0x2A74, "3", "::="), + (0x2A75, "3", "=="), + (0x2A76, "3", "==="), + (0x2A77, "V"), + (0x2ADC, "M", "⫝̸"), + (0x2ADD, "V"), + (0x2B74, "X"), + (0x2B76, "V"), + (0x2B96, "X"), + (0x2B97, "V"), + (0x2C00, "M", "ⰰ"), + (0x2C01, "M", "ⰱ"), + (0x2C02, "M", "ⰲ"), + (0x2C03, "M", "ⰳ"), + (0x2C04, "M", "ⰴ"), + (0x2C05, "M", "ⰵ"), + (0x2C06, "M", "ⰶ"), + (0x2C07, "M", "ⰷ"), + (0x2C08, "M", "ⰸ"), + (0x2C09, "M", "ⰹ"), + (0x2C0A, "M", "ⰺ"), + (0x2C0B, "M", "ⰻ"), + (0x2C0C, "M", "ⰼ"), + (0x2C0D, "M", "ⰽ"), + (0x2C0E, "M", "ⰾ"), + (0x2C0F, "M", "ⰿ"), + (0x2C10, "M", "ⱀ"), + (0x2C11, "M", "ⱁ"), + (0x2C12, "M", "ⱂ"), + (0x2C13, "M", "ⱃ"), + (0x2C14, "M", "ⱄ"), + (0x2C15, "M", "ⱅ"), + (0x2C16, "M", "ⱆ"), + (0x2C17, "M", "ⱇ"), + (0x2C18, "M", "ⱈ"), + (0x2C19, "M", "ⱉ"), + (0x2C1A, "M", "ⱊ"), + (0x2C1B, "M", "ⱋ"), + (0x2C1C, "M", "ⱌ"), + (0x2C1D, "M", "ⱍ"), + (0x2C1E, "M", "ⱎ"), + (0x2C1F, "M", "ⱏ"), + (0x2C20, "M", "ⱐ"), + (0x2C21, "M", "ⱑ"), + (0x2C22, "M", "ⱒ"), + (0x2C23, "M", "ⱓ"), + (0x2C24, "M", "ⱔ"), + (0x2C25, "M", "ⱕ"), + (0x2C26, "M", "ⱖ"), + (0x2C27, "M", "ⱗ"), + (0x2C28, "M", "ⱘ"), + (0x2C29, "M", "ⱙ"), + (0x2C2A, "M", "ⱚ"), + (0x2C2B, "M", "ⱛ"), + (0x2C2C, "M", "ⱜ"), + ] + + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C2D, "M", "ⱝ"), + (0x2C2E, "M", "ⱞ"), + (0x2C2F, "M", "ⱟ"), + (0x2C30, "V"), + (0x2C60, "M", "ⱡ"), + (0x2C61, "V"), + (0x2C62, "M", "ɫ"), + (0x2C63, "M", "ᵽ"), + (0x2C64, "M", "ɽ"), + (0x2C65, "V"), + (0x2C67, "M", "ⱨ"), + (0x2C68, "V"), + 
(0x2C69, "M", "ⱪ"), + (0x2C6A, "V"), + (0x2C6B, "M", "ⱬ"), + (0x2C6C, "V"), + (0x2C6D, "M", "ɑ"), + (0x2C6E, "M", "ɱ"), + (0x2C6F, "M", "ɐ"), + (0x2C70, "M", "ɒ"), + (0x2C71, "V"), + (0x2C72, "M", "ⱳ"), + (0x2C73, "V"), + (0x2C75, "M", "ⱶ"), + (0x2C76, "V"), + (0x2C7C, "M", "j"), + (0x2C7D, "M", "v"), + (0x2C7E, "M", "ȿ"), + (0x2C7F, "M", "ɀ"), + (0x2C80, "M", "ⲁ"), + (0x2C81, "V"), + (0x2C82, "M", "ⲃ"), + (0x2C83, "V"), + (0x2C84, "M", "ⲅ"), + (0x2C85, "V"), + (0x2C86, "M", "ⲇ"), + (0x2C87, "V"), + (0x2C88, "M", "ⲉ"), + (0x2C89, "V"), + (0x2C8A, "M", "ⲋ"), + (0x2C8B, "V"), + (0x2C8C, "M", "ⲍ"), + (0x2C8D, "V"), + (0x2C8E, "M", "ⲏ"), + (0x2C8F, "V"), + (0x2C90, "M", "ⲑ"), + (0x2C91, "V"), + (0x2C92, "M", "ⲓ"), + (0x2C93, "V"), + (0x2C94, "M", "ⲕ"), + (0x2C95, "V"), + (0x2C96, "M", "ⲗ"), + (0x2C97, "V"), + (0x2C98, "M", "ⲙ"), + (0x2C99, "V"), + (0x2C9A, "M", "ⲛ"), + (0x2C9B, "V"), + (0x2C9C, "M", "ⲝ"), + (0x2C9D, "V"), + (0x2C9E, "M", "ⲟ"), + (0x2C9F, "V"), + (0x2CA0, "M", "ⲡ"), + (0x2CA1, "V"), + (0x2CA2, "M", "ⲣ"), + (0x2CA3, "V"), + (0x2CA4, "M", "ⲥ"), + (0x2CA5, "V"), + (0x2CA6, "M", "ⲧ"), + (0x2CA7, "V"), + (0x2CA8, "M", "ⲩ"), + (0x2CA9, "V"), + (0x2CAA, "M", "ⲫ"), + (0x2CAB, "V"), + (0x2CAC, "M", "ⲭ"), + (0x2CAD, "V"), + (0x2CAE, "M", "ⲯ"), + (0x2CAF, "V"), + (0x2CB0, "M", "ⲱ"), + (0x2CB1, "V"), + (0x2CB2, "M", "ⲳ"), + (0x2CB3, "V"), + (0x2CB4, "M", "ⲵ"), + (0x2CB5, "V"), + (0x2CB6, "M", "ⲷ"), + (0x2CB7, "V"), + (0x2CB8, "M", "ⲹ"), + (0x2CB9, "V"), + (0x2CBA, "M", "ⲻ"), + (0x2CBB, "V"), + (0x2CBC, "M", "ⲽ"), + (0x2CBD, "V"), + (0x2CBE, "M", "ⲿ"), + (0x2CBF, "V"), + (0x2CC0, "M", "ⳁ"), + (0x2CC1, "V"), + (0x2CC2, "M", "ⳃ"), + (0x2CC3, "V"), + (0x2CC4, "M", "ⳅ"), + (0x2CC5, "V"), + (0x2CC6, "M", "ⳇ"), + ] + + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC7, "V"), + (0x2CC8, "M", "ⳉ"), + (0x2CC9, "V"), + (0x2CCA, "M", "ⳋ"), + (0x2CCB, "V"), + (0x2CCC, "M", "ⳍ"), + (0x2CCD, "V"), + (0x2CCE, "M", "ⳏ"), + (0x2CCF, "V"), + (0x2CD0, "M", "ⳑ"), + (0x2CD1, "V"), + (0x2CD2, "M", "ⳓ"), + (0x2CD3, "V"), + (0x2CD4, "M", "ⳕ"), + (0x2CD5, "V"), + (0x2CD6, "M", "ⳗ"), + (0x2CD7, "V"), + (0x2CD8, "M", "ⳙ"), + (0x2CD9, "V"), + (0x2CDA, "M", "ⳛ"), + (0x2CDB, "V"), + (0x2CDC, "M", "ⳝ"), + (0x2CDD, "V"), + (0x2CDE, "M", "ⳟ"), + (0x2CDF, "V"), + (0x2CE0, "M", "ⳡ"), + (0x2CE1, "V"), + (0x2CE2, "M", "ⳣ"), + (0x2CE3, "V"), + (0x2CEB, "M", "ⳬ"), + (0x2CEC, "V"), + (0x2CED, "M", "ⳮ"), + (0x2CEE, "V"), + (0x2CF2, "M", "ⳳ"), + (0x2CF3, "V"), + (0x2CF4, "X"), + (0x2CF9, "V"), + (0x2D26, "X"), + (0x2D27, "V"), + (0x2D28, "X"), + (0x2D2D, "V"), + (0x2D2E, "X"), + (0x2D30, "V"), + (0x2D68, "X"), + (0x2D6F, "M", "ⵡ"), + (0x2D70, "V"), + (0x2D71, "X"), + (0x2D7F, "V"), + (0x2D97, "X"), + (0x2DA0, "V"), + (0x2DA7, "X"), + (0x2DA8, "V"), + (0x2DAF, "X"), + (0x2DB0, "V"), + (0x2DB7, "X"), + (0x2DB8, "V"), + (0x2DBF, "X"), + (0x2DC0, "V"), + (0x2DC7, "X"), + (0x2DC8, "V"), + (0x2DCF, "X"), + (0x2DD0, "V"), + (0x2DD7, "X"), + (0x2DD8, "V"), + (0x2DDF, "X"), + (0x2DE0, "V"), + (0x2E5E, "X"), + (0x2E80, "V"), + (0x2E9A, "X"), + (0x2E9B, "V"), + (0x2E9F, "M", "母"), + (0x2EA0, "V"), + (0x2EF3, "M", "龟"), + (0x2EF4, "X"), + (0x2F00, "M", "一"), + (0x2F01, "M", "丨"), + (0x2F02, "M", "丶"), + (0x2F03, "M", "丿"), + (0x2F04, "M", "乙"), + (0x2F05, "M", "亅"), + (0x2F06, "M", "二"), + (0x2F07, "M", "亠"), + (0x2F08, "M", "人"), + (0x2F09, "M", "儿"), + (0x2F0A, "M", "入"), + (0x2F0B, "M", "八"), + (0x2F0C, "M", "冂"), + (0x2F0D, "M", "冖"), + (0x2F0E, "M", "冫"), + (0x2F0F, "M", "几"), + (0x2F10, "M", "凵"), 
+ (0x2F11, "M", "刀"), + (0x2F12, "M", "力"), + (0x2F13, "M", "勹"), + (0x2F14, "M", "匕"), + (0x2F15, "M", "匚"), + (0x2F16, "M", "匸"), + (0x2F17, "M", "十"), + (0x2F18, "M", "卜"), + (0x2F19, "M", "卩"), + ] + + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F1A, "M", "厂"), + (0x2F1B, "M", "厶"), + (0x2F1C, "M", "又"), + (0x2F1D, "M", "口"), + (0x2F1E, "M", "囗"), + (0x2F1F, "M", "土"), + (0x2F20, "M", "士"), + (0x2F21, "M", "夂"), + (0x2F22, "M", "夊"), + (0x2F23, "M", "夕"), + (0x2F24, "M", "大"), + (0x2F25, "M", "女"), + (0x2F26, "M", "子"), + (0x2F27, "M", "宀"), + (0x2F28, "M", "寸"), + (0x2F29, "M", "小"), + (0x2F2A, "M", "尢"), + (0x2F2B, "M", "尸"), + (0x2F2C, "M", "屮"), + (0x2F2D, "M", "山"), + (0x2F2E, "M", "巛"), + (0x2F2F, "M", "工"), + (0x2F30, "M", "己"), + (0x2F31, "M", "巾"), + (0x2F32, "M", "干"), + (0x2F33, "M", "幺"), + (0x2F34, "M", "广"), + (0x2F35, "M", "廴"), + (0x2F36, "M", "廾"), + (0x2F37, "M", "弋"), + (0x2F38, "M", "弓"), + (0x2F39, "M", "彐"), + (0x2F3A, "M", "彡"), + (0x2F3B, "M", "彳"), + (0x2F3C, "M", "心"), + (0x2F3D, "M", "戈"), + (0x2F3E, "M", "戶"), + (0x2F3F, "M", "手"), + (0x2F40, "M", "支"), + (0x2F41, "M", "攴"), + (0x2F42, "M", "文"), + (0x2F43, "M", "斗"), + (0x2F44, "M", "斤"), + (0x2F45, "M", "方"), + (0x2F46, "M", "无"), + (0x2F47, "M", "日"), + (0x2F48, "M", "曰"), + (0x2F49, "M", "月"), + (0x2F4A, "M", "木"), + (0x2F4B, "M", "欠"), + (0x2F4C, "M", "止"), + (0x2F4D, "M", "歹"), + (0x2F4E, "M", "殳"), + (0x2F4F, "M", "毋"), + (0x2F50, "M", "比"), + (0x2F51, "M", "毛"), + (0x2F52, "M", "氏"), + (0x2F53, "M", "气"), + (0x2F54, "M", "水"), + (0x2F55, "M", "火"), + (0x2F56, "M", "爪"), + (0x2F57, "M", "父"), + (0x2F58, "M", "爻"), + (0x2F59, "M", "爿"), + (0x2F5A, "M", "片"), + (0x2F5B, "M", "牙"), + (0x2F5C, "M", "牛"), + (0x2F5D, "M", "犬"), + (0x2F5E, "M", "玄"), + (0x2F5F, "M", "玉"), + (0x2F60, "M", "瓜"), + (0x2F61, "M", "瓦"), + (0x2F62, "M", "甘"), + (0x2F63, "M", "生"), + (0x2F64, "M", "用"), + (0x2F65, "M", "田"), + (0x2F66, "M", "疋"), + (0x2F67, "M", "疒"), + (0x2F68, "M", "癶"), + (0x2F69, "M", "白"), + (0x2F6A, "M", "皮"), + (0x2F6B, "M", "皿"), + (0x2F6C, "M", "目"), + (0x2F6D, "M", "矛"), + (0x2F6E, "M", "矢"), + (0x2F6F, "M", "石"), + (0x2F70, "M", "示"), + (0x2F71, "M", "禸"), + (0x2F72, "M", "禾"), + (0x2F73, "M", "穴"), + (0x2F74, "M", "立"), + (0x2F75, "M", "竹"), + (0x2F76, "M", "米"), + (0x2F77, "M", "糸"), + (0x2F78, "M", "缶"), + (0x2F79, "M", "网"), + (0x2F7A, "M", "羊"), + (0x2F7B, "M", "羽"), + (0x2F7C, "M", "老"), + (0x2F7D, "M", "而"), + ] + + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7E, "M", "耒"), + (0x2F7F, "M", "耳"), + (0x2F80, "M", "聿"), + (0x2F81, "M", "肉"), + (0x2F82, "M", "臣"), + (0x2F83, "M", "自"), + (0x2F84, "M", "至"), + (0x2F85, "M", "臼"), + (0x2F86, "M", "舌"), + (0x2F87, "M", "舛"), + (0x2F88, "M", "舟"), + (0x2F89, "M", "艮"), + (0x2F8A, "M", "色"), + (0x2F8B, "M", "艸"), + (0x2F8C, "M", "虍"), + (0x2F8D, "M", "虫"), + (0x2F8E, "M", "血"), + (0x2F8F, "M", "行"), + (0x2F90, "M", "衣"), + (0x2F91, "M", "襾"), + (0x2F92, "M", "見"), + (0x2F93, "M", "角"), + (0x2F94, "M", "言"), + (0x2F95, "M", "谷"), + (0x2F96, "M", "豆"), + (0x2F97, "M", "豕"), + (0x2F98, "M", "豸"), + (0x2F99, "M", "貝"), + (0x2F9A, "M", "赤"), + (0x2F9B, "M", "走"), + (0x2F9C, "M", "足"), + (0x2F9D, "M", "身"), + (0x2F9E, "M", "車"), + (0x2F9F, "M", "辛"), + (0x2FA0, "M", "辰"), + (0x2FA1, "M", "辵"), + (0x2FA2, "M", "邑"), + (0x2FA3, "M", "酉"), + (0x2FA4, "M", "釆"), + (0x2FA5, "M", "里"), + (0x2FA6, "M", "金"), + (0x2FA7, "M", "長"), + (0x2FA8, "M", "門"), + (0x2FA9, "M", "阜"), + (0x2FAA, 
"M", "隶"), + (0x2FAB, "M", "隹"), + (0x2FAC, "M", "雨"), + (0x2FAD, "M", "靑"), + (0x2FAE, "M", "非"), + (0x2FAF, "M", "面"), + (0x2FB0, "M", "革"), + (0x2FB1, "M", "韋"), + (0x2FB2, "M", "韭"), + (0x2FB3, "M", "音"), + (0x2FB4, "M", "頁"), + (0x2FB5, "M", "風"), + (0x2FB6, "M", "飛"), + (0x2FB7, "M", "食"), + (0x2FB8, "M", "首"), + (0x2FB9, "M", "香"), + (0x2FBA, "M", "馬"), + (0x2FBB, "M", "骨"), + (0x2FBC, "M", "高"), + (0x2FBD, "M", "髟"), + (0x2FBE, "M", "鬥"), + (0x2FBF, "M", "鬯"), + (0x2FC0, "M", "鬲"), + (0x2FC1, "M", "鬼"), + (0x2FC2, "M", "魚"), + (0x2FC3, "M", "鳥"), + (0x2FC4, "M", "鹵"), + (0x2FC5, "M", "鹿"), + (0x2FC6, "M", "麥"), + (0x2FC7, "M", "麻"), + (0x2FC8, "M", "黃"), + (0x2FC9, "M", "黍"), + (0x2FCA, "M", "黑"), + (0x2FCB, "M", "黹"), + (0x2FCC, "M", "黽"), + (0x2FCD, "M", "鼎"), + (0x2FCE, "M", "鼓"), + (0x2FCF, "M", "鼠"), + (0x2FD0, "M", "鼻"), + (0x2FD1, "M", "齊"), + (0x2FD2, "M", "齒"), + (0x2FD3, "M", "龍"), + (0x2FD4, "M", "龜"), + (0x2FD5, "M", "龠"), + (0x2FD6, "X"), + (0x3000, "3", " "), + (0x3001, "V"), + (0x3002, "M", "."), + (0x3003, "V"), + (0x3036, "M", "〒"), + (0x3037, "V"), + (0x3038, "M", "十"), + (0x3039, "M", "卄"), + (0x303A, "M", "卅"), + (0x303B, "V"), + (0x3040, "X"), + ] + + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3041, "V"), + (0x3097, "X"), + (0x3099, "V"), + (0x309B, "3", " ゙"), + (0x309C, "3", " ゚"), + (0x309D, "V"), + (0x309F, "M", "より"), + (0x30A0, "V"), + (0x30FF, "M", "コト"), + (0x3100, "X"), + (0x3105, "V"), + (0x3130, "X"), + (0x3131, "M", "ᄀ"), + (0x3132, "M", "ᄁ"), + (0x3133, "M", "ᆪ"), + (0x3134, "M", "ᄂ"), + (0x3135, "M", "ᆬ"), + (0x3136, "M", "ᆭ"), + (0x3137, "M", "ᄃ"), + (0x3138, "M", "ᄄ"), + (0x3139, "M", "ᄅ"), + (0x313A, "M", "ᆰ"), + (0x313B, "M", "ᆱ"), + (0x313C, "M", "ᆲ"), + (0x313D, "M", "ᆳ"), + (0x313E, "M", "ᆴ"), + (0x313F, "M", "ᆵ"), + (0x3140, "M", "ᄚ"), + (0x3141, "M", "ᄆ"), + (0x3142, "M", "ᄇ"), + (0x3143, "M", "ᄈ"), + (0x3144, "M", "ᄡ"), + (0x3145, "M", "ᄉ"), + (0x3146, "M", "ᄊ"), + (0x3147, "M", "ᄋ"), + (0x3148, "M", "ᄌ"), + (0x3149, "M", "ᄍ"), + (0x314A, "M", "ᄎ"), + (0x314B, "M", "ᄏ"), + (0x314C, "M", "ᄐ"), + (0x314D, "M", "ᄑ"), + (0x314E, "M", "ᄒ"), + (0x314F, "M", "ᅡ"), + (0x3150, "M", "ᅢ"), + (0x3151, "M", "ᅣ"), + (0x3152, "M", "ᅤ"), + (0x3153, "M", "ᅥ"), + (0x3154, "M", "ᅦ"), + (0x3155, "M", "ᅧ"), + (0x3156, "M", "ᅨ"), + (0x3157, "M", "ᅩ"), + (0x3158, "M", "ᅪ"), + (0x3159, "M", "ᅫ"), + (0x315A, "M", "ᅬ"), + (0x315B, "M", "ᅭ"), + (0x315C, "M", "ᅮ"), + (0x315D, "M", "ᅯ"), + (0x315E, "M", "ᅰ"), + (0x315F, "M", "ᅱ"), + (0x3160, "M", "ᅲ"), + (0x3161, "M", "ᅳ"), + (0x3162, "M", "ᅴ"), + (0x3163, "M", "ᅵ"), + (0x3164, "X"), + (0x3165, "M", "ᄔ"), + (0x3166, "M", "ᄕ"), + (0x3167, "M", "ᇇ"), + (0x3168, "M", "ᇈ"), + (0x3169, "M", "ᇌ"), + (0x316A, "M", "ᇎ"), + (0x316B, "M", "ᇓ"), + (0x316C, "M", "ᇗ"), + (0x316D, "M", "ᇙ"), + (0x316E, "M", "ᄜ"), + (0x316F, "M", "ᇝ"), + (0x3170, "M", "ᇟ"), + (0x3171, "M", "ᄝ"), + (0x3172, "M", "ᄞ"), + (0x3173, "M", "ᄠ"), + (0x3174, "M", "ᄢ"), + (0x3175, "M", "ᄣ"), + (0x3176, "M", "ᄧ"), + (0x3177, "M", "ᄩ"), + (0x3178, "M", "ᄫ"), + (0x3179, "M", "ᄬ"), + (0x317A, "M", "ᄭ"), + (0x317B, "M", "ᄮ"), + (0x317C, "M", "ᄯ"), + (0x317D, "M", "ᄲ"), + (0x317E, "M", "ᄶ"), + (0x317F, "M", "ᅀ"), + (0x3180, "M", "ᅇ"), + (0x3181, "M", "ᅌ"), + (0x3182, "M", "ᇱ"), + (0x3183, "M", "ᇲ"), + (0x3184, "M", "ᅗ"), + (0x3185, "M", "ᅘ"), + (0x3186, "M", "ᅙ"), + (0x3187, "M", "ᆄ"), + (0x3188, "M", "ᆅ"), + ] + + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3189, "M", "ᆈ"), + 
(0x318A, "M", "ᆑ"), + (0x318B, "M", "ᆒ"), + (0x318C, "M", "ᆔ"), + (0x318D, "M", "ᆞ"), + (0x318E, "M", "ᆡ"), + (0x318F, "X"), + (0x3190, "V"), + (0x3192, "M", "一"), + (0x3193, "M", "二"), + (0x3194, "M", "三"), + (0x3195, "M", "四"), + (0x3196, "M", "上"), + (0x3197, "M", "中"), + (0x3198, "M", "下"), + (0x3199, "M", "甲"), + (0x319A, "M", "乙"), + (0x319B, "M", "丙"), + (0x319C, "M", "丁"), + (0x319D, "M", "天"), + (0x319E, "M", "地"), + (0x319F, "M", "人"), + (0x31A0, "V"), + (0x31E4, "X"), + (0x31F0, "V"), + (0x3200, "3", "(ᄀ)"), + (0x3201, "3", "(ᄂ)"), + (0x3202, "3", "(ᄃ)"), + (0x3203, "3", "(ᄅ)"), + (0x3204, "3", "(ᄆ)"), + (0x3205, "3", "(ᄇ)"), + (0x3206, "3", "(ᄉ)"), + (0x3207, "3", "(ᄋ)"), + (0x3208, "3", "(ᄌ)"), + (0x3209, "3", "(ᄎ)"), + (0x320A, "3", "(ᄏ)"), + (0x320B, "3", "(ᄐ)"), + (0x320C, "3", "(ᄑ)"), + (0x320D, "3", "(ᄒ)"), + (0x320E, "3", "(가)"), + (0x320F, "3", "(나)"), + (0x3210, "3", "(다)"), + (0x3211, "3", "(라)"), + (0x3212, "3", "(마)"), + (0x3213, "3", "(바)"), + (0x3214, "3", "(사)"), + (0x3215, "3", "(아)"), + (0x3216, "3", "(자)"), + (0x3217, "3", "(차)"), + (0x3218, "3", "(카)"), + (0x3219, "3", "(타)"), + (0x321A, "3", "(파)"), + (0x321B, "3", "(하)"), + (0x321C, "3", "(주)"), + (0x321D, "3", "(오전)"), + (0x321E, "3", "(오후)"), + (0x321F, "X"), + (0x3220, "3", "(一)"), + (0x3221, "3", "(二)"), + (0x3222, "3", "(三)"), + (0x3223, "3", "(四)"), + (0x3224, "3", "(五)"), + (0x3225, "3", "(六)"), + (0x3226, "3", "(七)"), + (0x3227, "3", "(八)"), + (0x3228, "3", "(九)"), + (0x3229, "3", "(十)"), + (0x322A, "3", "(月)"), + (0x322B, "3", "(火)"), + (0x322C, "3", "(水)"), + (0x322D, "3", "(木)"), + (0x322E, "3", "(金)"), + (0x322F, "3", "(土)"), + (0x3230, "3", "(日)"), + (0x3231, "3", "(株)"), + (0x3232, "3", "(有)"), + (0x3233, "3", "(社)"), + (0x3234, "3", "(名)"), + (0x3235, "3", "(特)"), + (0x3236, "3", "(財)"), + (0x3237, "3", "(祝)"), + (0x3238, "3", "(労)"), + (0x3239, "3", "(代)"), + (0x323A, "3", "(呼)"), + (0x323B, "3", "(学)"), + (0x323C, "3", "(監)"), + (0x323D, "3", "(企)"), + (0x323E, "3", "(資)"), + (0x323F, "3", "(協)"), + (0x3240, "3", "(祭)"), + (0x3241, "3", "(休)"), + (0x3242, "3", "(自)"), + (0x3243, "3", "(至)"), + (0x3244, "M", "問"), + (0x3245, "M", "幼"), + (0x3246, "M", "文"), + (0x3247, "M", "箏"), + (0x3248, "V"), + (0x3250, "M", "pte"), + (0x3251, "M", "21"), + ] + + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3252, "M", "22"), + (0x3253, "M", "23"), + (0x3254, "M", "24"), + (0x3255, "M", "25"), + (0x3256, "M", "26"), + (0x3257, "M", "27"), + (0x3258, "M", "28"), + (0x3259, "M", "29"), + (0x325A, "M", "30"), + (0x325B, "M", "31"), + (0x325C, "M", "32"), + (0x325D, "M", "33"), + (0x325E, "M", "34"), + (0x325F, "M", "35"), + (0x3260, "M", "ᄀ"), + (0x3261, "M", "ᄂ"), + (0x3262, "M", "ᄃ"), + (0x3263, "M", "ᄅ"), + (0x3264, "M", "ᄆ"), + (0x3265, "M", "ᄇ"), + (0x3266, "M", "ᄉ"), + (0x3267, "M", "ᄋ"), + (0x3268, "M", "ᄌ"), + (0x3269, "M", "ᄎ"), + (0x326A, "M", "ᄏ"), + (0x326B, "M", "ᄐ"), + (0x326C, "M", "ᄑ"), + (0x326D, "M", "ᄒ"), + (0x326E, "M", "가"), + (0x326F, "M", "나"), + (0x3270, "M", "다"), + (0x3271, "M", "라"), + (0x3272, "M", "마"), + (0x3273, "M", "바"), + (0x3274, "M", "사"), + (0x3275, "M", "아"), + (0x3276, "M", "자"), + (0x3277, "M", "차"), + (0x3278, "M", "카"), + (0x3279, "M", "타"), + (0x327A, "M", "파"), + (0x327B, "M", "하"), + (0x327C, "M", "참고"), + (0x327D, "M", "주의"), + (0x327E, "M", "우"), + (0x327F, "V"), + (0x3280, "M", "一"), + (0x3281, "M", "二"), + (0x3282, "M", "三"), + (0x3283, "M", "四"), + (0x3284, "M", "五"), + (0x3285, "M", "六"), + (0x3286, "M", "七"), + 
(0x3287, "M", "八"), + (0x3288, "M", "九"), + (0x3289, "M", "十"), + (0x328A, "M", "月"), + (0x328B, "M", "火"), + (0x328C, "M", "水"), + (0x328D, "M", "木"), + (0x328E, "M", "金"), + (0x328F, "M", "土"), + (0x3290, "M", "日"), + (0x3291, "M", "株"), + (0x3292, "M", "有"), + (0x3293, "M", "社"), + (0x3294, "M", "名"), + (0x3295, "M", "特"), + (0x3296, "M", "財"), + (0x3297, "M", "祝"), + (0x3298, "M", "労"), + (0x3299, "M", "秘"), + (0x329A, "M", "男"), + (0x329B, "M", "女"), + (0x329C, "M", "適"), + (0x329D, "M", "優"), + (0x329E, "M", "印"), + (0x329F, "M", "注"), + (0x32A0, "M", "項"), + (0x32A1, "M", "休"), + (0x32A2, "M", "写"), + (0x32A3, "M", "正"), + (0x32A4, "M", "上"), + (0x32A5, "M", "中"), + (0x32A6, "M", "下"), + (0x32A7, "M", "左"), + (0x32A8, "M", "右"), + (0x32A9, "M", "医"), + (0x32AA, "M", "宗"), + (0x32AB, "M", "学"), + (0x32AC, "M", "監"), + (0x32AD, "M", "企"), + (0x32AE, "M", "資"), + (0x32AF, "M", "協"), + (0x32B0, "M", "夜"), + (0x32B1, "M", "36"), + (0x32B2, "M", "37"), + (0x32B3, "M", "38"), + (0x32B4, "M", "39"), + (0x32B5, "M", "40"), + ] + + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B6, "M", "41"), + (0x32B7, "M", "42"), + (0x32B8, "M", "43"), + (0x32B9, "M", "44"), + (0x32BA, "M", "45"), + (0x32BB, "M", "46"), + (0x32BC, "M", "47"), + (0x32BD, "M", "48"), + (0x32BE, "M", "49"), + (0x32BF, "M", "50"), + (0x32C0, "M", "1月"), + (0x32C1, "M", "2月"), + (0x32C2, "M", "3月"), + (0x32C3, "M", "4月"), + (0x32C4, "M", "5月"), + (0x32C5, "M", "6月"), + (0x32C6, "M", "7月"), + (0x32C7, "M", "8月"), + (0x32C8, "M", "9月"), + (0x32C9, "M", "10月"), + (0x32CA, "M", "11月"), + (0x32CB, "M", "12月"), + (0x32CC, "M", "hg"), + (0x32CD, "M", "erg"), + (0x32CE, "M", "ev"), + (0x32CF, "M", "ltd"), + (0x32D0, "M", "ア"), + (0x32D1, "M", "イ"), + (0x32D2, "M", "ウ"), + (0x32D3, "M", "エ"), + (0x32D4, "M", "オ"), + (0x32D5, "M", "カ"), + (0x32D6, "M", "キ"), + (0x32D7, "M", "ク"), + (0x32D8, "M", "ケ"), + (0x32D9, "M", "コ"), + (0x32DA, "M", "サ"), + (0x32DB, "M", "シ"), + (0x32DC, "M", "ス"), + (0x32DD, "M", "セ"), + (0x32DE, "M", "ソ"), + (0x32DF, "M", "タ"), + (0x32E0, "M", "チ"), + (0x32E1, "M", "ツ"), + (0x32E2, "M", "テ"), + (0x32E3, "M", "ト"), + (0x32E4, "M", "ナ"), + (0x32E5, "M", "ニ"), + (0x32E6, "M", "ヌ"), + (0x32E7, "M", "ネ"), + (0x32E8, "M", "ノ"), + (0x32E9, "M", "ハ"), + (0x32EA, "M", "ヒ"), + (0x32EB, "M", "フ"), + (0x32EC, "M", "ヘ"), + (0x32ED, "M", "ホ"), + (0x32EE, "M", "マ"), + (0x32EF, "M", "ミ"), + (0x32F0, "M", "ム"), + (0x32F1, "M", "メ"), + (0x32F2, "M", "モ"), + (0x32F3, "M", "ヤ"), + (0x32F4, "M", "ユ"), + (0x32F5, "M", "ヨ"), + (0x32F6, "M", "ラ"), + (0x32F7, "M", "リ"), + (0x32F8, "M", "ル"), + (0x32F9, "M", "レ"), + (0x32FA, "M", "ロ"), + (0x32FB, "M", "ワ"), + (0x32FC, "M", "ヰ"), + (0x32FD, "M", "ヱ"), + (0x32FE, "M", "ヲ"), + (0x32FF, "M", "令和"), + (0x3300, "M", "アパート"), + (0x3301, "M", "アルファ"), + (0x3302, "M", "アンペア"), + (0x3303, "M", "アール"), + (0x3304, "M", "イニング"), + (0x3305, "M", "インチ"), + (0x3306, "M", "ウォン"), + (0x3307, "M", "エスクード"), + (0x3308, "M", "エーカー"), + (0x3309, "M", "オンス"), + (0x330A, "M", "オーム"), + (0x330B, "M", "カイリ"), + (0x330C, "M", "カラット"), + (0x330D, "M", "カロリー"), + (0x330E, "M", "ガロン"), + (0x330F, "M", "ガンマ"), + (0x3310, "M", "ギガ"), + (0x3311, "M", "ギニー"), + (0x3312, "M", "キュリー"), + (0x3313, "M", "ギルダー"), + (0x3314, "M", "キロ"), + (0x3315, "M", "キログラム"), + (0x3316, "M", "キロメートル"), + (0x3317, "M", "キロワット"), + (0x3318, "M", "グラム"), + (0x3319, "M", "グラムトン"), + ] + + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x331A, "M", "クルゼイロ"), + (0x331B, 
"M", "クローネ"), + (0x331C, "M", "ケース"), + (0x331D, "M", "コルナ"), + (0x331E, "M", "コーポ"), + (0x331F, "M", "サイクル"), + (0x3320, "M", "サンチーム"), + (0x3321, "M", "シリング"), + (0x3322, "M", "センチ"), + (0x3323, "M", "セント"), + (0x3324, "M", "ダース"), + (0x3325, "M", "デシ"), + (0x3326, "M", "ドル"), + (0x3327, "M", "トン"), + (0x3328, "M", "ナノ"), + (0x3329, "M", "ノット"), + (0x332A, "M", "ハイツ"), + (0x332B, "M", "パーセント"), + (0x332C, "M", "パーツ"), + (0x332D, "M", "バーレル"), + (0x332E, "M", "ピアストル"), + (0x332F, "M", "ピクル"), + (0x3330, "M", "ピコ"), + (0x3331, "M", "ビル"), + (0x3332, "M", "ファラッド"), + (0x3333, "M", "フィート"), + (0x3334, "M", "ブッシェル"), + (0x3335, "M", "フラン"), + (0x3336, "M", "ヘクタール"), + (0x3337, "M", "ペソ"), + (0x3338, "M", "ペニヒ"), + (0x3339, "M", "ヘルツ"), + (0x333A, "M", "ペンス"), + (0x333B, "M", "ページ"), + (0x333C, "M", "ベータ"), + (0x333D, "M", "ポイント"), + (0x333E, "M", "ボルト"), + (0x333F, "M", "ホン"), + (0x3340, "M", "ポンド"), + (0x3341, "M", "ホール"), + (0x3342, "M", "ホーン"), + (0x3343, "M", "マイクロ"), + (0x3344, "M", "マイル"), + (0x3345, "M", "マッハ"), + (0x3346, "M", "マルク"), + (0x3347, "M", "マンション"), + (0x3348, "M", "ミクロン"), + (0x3349, "M", "ミリ"), + (0x334A, "M", "ミリバール"), + (0x334B, "M", "メガ"), + (0x334C, "M", "メガトン"), + (0x334D, "M", "メートル"), + (0x334E, "M", "ヤード"), + (0x334F, "M", "ヤール"), + (0x3350, "M", "ユアン"), + (0x3351, "M", "リットル"), + (0x3352, "M", "リラ"), + (0x3353, "M", "ルピー"), + (0x3354, "M", "ルーブル"), + (0x3355, "M", "レム"), + (0x3356, "M", "レントゲン"), + (0x3357, "M", "ワット"), + (0x3358, "M", "0点"), + (0x3359, "M", "1点"), + (0x335A, "M", "2点"), + (0x335B, "M", "3点"), + (0x335C, "M", "4点"), + (0x335D, "M", "5点"), + (0x335E, "M", "6点"), + (0x335F, "M", "7点"), + (0x3360, "M", "8点"), + (0x3361, "M", "9点"), + (0x3362, "M", "10点"), + (0x3363, "M", "11点"), + (0x3364, "M", "12点"), + (0x3365, "M", "13点"), + (0x3366, "M", "14点"), + (0x3367, "M", "15点"), + (0x3368, "M", "16点"), + (0x3369, "M", "17点"), + (0x336A, "M", "18点"), + (0x336B, "M", "19点"), + (0x336C, "M", "20点"), + (0x336D, "M", "21点"), + (0x336E, "M", "22点"), + (0x336F, "M", "23点"), + (0x3370, "M", "24点"), + (0x3371, "M", "hpa"), + (0x3372, "M", "da"), + (0x3373, "M", "au"), + (0x3374, "M", "bar"), + (0x3375, "M", "ov"), + (0x3376, "M", "pc"), + (0x3377, "M", "dm"), + (0x3378, "M", "dm2"), + (0x3379, "M", "dm3"), + (0x337A, "M", "iu"), + (0x337B, "M", "平成"), + (0x337C, "M", "昭和"), + (0x337D, "M", "大正"), + ] + + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337E, "M", "明治"), + (0x337F, "M", "株式会社"), + (0x3380, "M", "pa"), + (0x3381, "M", "na"), + (0x3382, "M", "μa"), + (0x3383, "M", "ma"), + (0x3384, "M", "ka"), + (0x3385, "M", "kb"), + (0x3386, "M", "mb"), + (0x3387, "M", "gb"), + (0x3388, "M", "cal"), + (0x3389, "M", "kcal"), + (0x338A, "M", "pf"), + (0x338B, "M", "nf"), + (0x338C, "M", "μf"), + (0x338D, "M", "μg"), + (0x338E, "M", "mg"), + (0x338F, "M", "kg"), + (0x3390, "M", "hz"), + (0x3391, "M", "khz"), + (0x3392, "M", "mhz"), + (0x3393, "M", "ghz"), + (0x3394, "M", "thz"), + (0x3395, "M", "μl"), + (0x3396, "M", "ml"), + (0x3397, "M", "dl"), + (0x3398, "M", "kl"), + (0x3399, "M", "fm"), + (0x339A, "M", "nm"), + (0x339B, "M", "μm"), + (0x339C, "M", "mm"), + (0x339D, "M", "cm"), + (0x339E, "M", "km"), + (0x339F, "M", "mm2"), + (0x33A0, "M", "cm2"), + (0x33A1, "M", "m2"), + (0x33A2, "M", "km2"), + (0x33A3, "M", "mm3"), + (0x33A4, "M", "cm3"), + (0x33A5, "M", "m3"), + (0x33A6, "M", "km3"), + (0x33A7, "M", "m∕s"), + (0x33A8, "M", "m∕s2"), + (0x33A9, "M", "pa"), + (0x33AA, "M", "kpa"), + (0x33AB, "M", "mpa"), + (0x33AC, "M", "gpa"), 
+ (0x33AD, "M", "rad"), + (0x33AE, "M", "rad∕s"), + (0x33AF, "M", "rad∕s2"), + (0x33B0, "M", "ps"), + (0x33B1, "M", "ns"), + (0x33B2, "M", "μs"), + (0x33B3, "M", "ms"), + (0x33B4, "M", "pv"), + (0x33B5, "M", "nv"), + (0x33B6, "M", "μv"), + (0x33B7, "M", "mv"), + (0x33B8, "M", "kv"), + (0x33B9, "M", "mv"), + (0x33BA, "M", "pw"), + (0x33BB, "M", "nw"), + (0x33BC, "M", "μw"), + (0x33BD, "M", "mw"), + (0x33BE, "M", "kw"), + (0x33BF, "M", "mw"), + (0x33C0, "M", "kω"), + (0x33C1, "M", "mω"), + (0x33C2, "X"), + (0x33C3, "M", "bq"), + (0x33C4, "M", "cc"), + (0x33C5, "M", "cd"), + (0x33C6, "M", "c∕kg"), + (0x33C7, "X"), + (0x33C8, "M", "db"), + (0x33C9, "M", "gy"), + (0x33CA, "M", "ha"), + (0x33CB, "M", "hp"), + (0x33CC, "M", "in"), + (0x33CD, "M", "kk"), + (0x33CE, "M", "km"), + (0x33CF, "M", "kt"), + (0x33D0, "M", "lm"), + (0x33D1, "M", "ln"), + (0x33D2, "M", "log"), + (0x33D3, "M", "lx"), + (0x33D4, "M", "mb"), + (0x33D5, "M", "mil"), + (0x33D6, "M", "mol"), + (0x33D7, "M", "ph"), + (0x33D8, "X"), + (0x33D9, "M", "ppm"), + (0x33DA, "M", "pr"), + (0x33DB, "M", "sr"), + (0x33DC, "M", "sv"), + (0x33DD, "M", "wb"), + (0x33DE, "M", "v∕m"), + (0x33DF, "M", "a∕m"), + (0x33E0, "M", "1日"), + (0x33E1, "M", "2日"), + ] + + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33E2, "M", "3日"), + (0x33E3, "M", "4日"), + (0x33E4, "M", "5日"), + (0x33E5, "M", "6日"), + (0x33E6, "M", "7日"), + (0x33E7, "M", "8日"), + (0x33E8, "M", "9日"), + (0x33E9, "M", "10日"), + (0x33EA, "M", "11日"), + (0x33EB, "M", "12日"), + (0x33EC, "M", "13日"), + (0x33ED, "M", "14日"), + (0x33EE, "M", "15日"), + (0x33EF, "M", "16日"), + (0x33F0, "M", "17日"), + (0x33F1, "M", "18日"), + (0x33F2, "M", "19日"), + (0x33F3, "M", "20日"), + (0x33F4, "M", "21日"), + (0x33F5, "M", "22日"), + (0x33F6, "M", "23日"), + (0x33F7, "M", "24日"), + (0x33F8, "M", "25日"), + (0x33F9, "M", "26日"), + (0x33FA, "M", "27日"), + (0x33FB, "M", "28日"), + (0x33FC, "M", "29日"), + (0x33FD, "M", "30日"), + (0x33FE, "M", "31日"), + (0x33FF, "M", "gal"), + (0x3400, "V"), + (0xA48D, "X"), + (0xA490, "V"), + (0xA4C7, "X"), + (0xA4D0, "V"), + (0xA62C, "X"), + (0xA640, "M", "ꙁ"), + (0xA641, "V"), + (0xA642, "M", "ꙃ"), + (0xA643, "V"), + (0xA644, "M", "ꙅ"), + (0xA645, "V"), + (0xA646, "M", "ꙇ"), + (0xA647, "V"), + (0xA648, "M", "ꙉ"), + (0xA649, "V"), + (0xA64A, "M", "ꙋ"), + (0xA64B, "V"), + (0xA64C, "M", "ꙍ"), + (0xA64D, "V"), + (0xA64E, "M", "ꙏ"), + (0xA64F, "V"), + (0xA650, "M", "ꙑ"), + (0xA651, "V"), + (0xA652, "M", "ꙓ"), + (0xA653, "V"), + (0xA654, "M", "ꙕ"), + (0xA655, "V"), + (0xA656, "M", "ꙗ"), + (0xA657, "V"), + (0xA658, "M", "ꙙ"), + (0xA659, "V"), + (0xA65A, "M", "ꙛ"), + (0xA65B, "V"), + (0xA65C, "M", "ꙝ"), + (0xA65D, "V"), + (0xA65E, "M", "ꙟ"), + (0xA65F, "V"), + (0xA660, "M", "ꙡ"), + (0xA661, "V"), + (0xA662, "M", "ꙣ"), + (0xA663, "V"), + (0xA664, "M", "ꙥ"), + (0xA665, "V"), + (0xA666, "M", "ꙧ"), + (0xA667, "V"), + (0xA668, "M", "ꙩ"), + (0xA669, "V"), + (0xA66A, "M", "ꙫ"), + (0xA66B, "V"), + (0xA66C, "M", "ꙭ"), + (0xA66D, "V"), + (0xA680, "M", "ꚁ"), + (0xA681, "V"), + (0xA682, "M", "ꚃ"), + (0xA683, "V"), + (0xA684, "M", "ꚅ"), + (0xA685, "V"), + (0xA686, "M", "ꚇ"), + (0xA687, "V"), + (0xA688, "M", "ꚉ"), + (0xA689, "V"), + (0xA68A, "M", "ꚋ"), + (0xA68B, "V"), + (0xA68C, "M", "ꚍ"), + (0xA68D, "V"), + (0xA68E, "M", "ꚏ"), + (0xA68F, "V"), + (0xA690, "M", "ꚑ"), + (0xA691, "V"), + ] + + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA692, "M", "ꚓ"), + (0xA693, "V"), + (0xA694, "M", "ꚕ"), + (0xA695, "V"), + (0xA696, "M", 
"ꚗ"), + (0xA697, "V"), + (0xA698, "M", "ꚙ"), + (0xA699, "V"), + (0xA69A, "M", "ꚛ"), + (0xA69B, "V"), + (0xA69C, "M", "ъ"), + (0xA69D, "M", "ь"), + (0xA69E, "V"), + (0xA6F8, "X"), + (0xA700, "V"), + (0xA722, "M", "ꜣ"), + (0xA723, "V"), + (0xA724, "M", "ꜥ"), + (0xA725, "V"), + (0xA726, "M", "ꜧ"), + (0xA727, "V"), + (0xA728, "M", "ꜩ"), + (0xA729, "V"), + (0xA72A, "M", "ꜫ"), + (0xA72B, "V"), + (0xA72C, "M", "ꜭ"), + (0xA72D, "V"), + (0xA72E, "M", "ꜯ"), + (0xA72F, "V"), + (0xA732, "M", "ꜳ"), + (0xA733, "V"), + (0xA734, "M", "ꜵ"), + (0xA735, "V"), + (0xA736, "M", "ꜷ"), + (0xA737, "V"), + (0xA738, "M", "ꜹ"), + (0xA739, "V"), + (0xA73A, "M", "ꜻ"), + (0xA73B, "V"), + (0xA73C, "M", "ꜽ"), + (0xA73D, "V"), + (0xA73E, "M", "ꜿ"), + (0xA73F, "V"), + (0xA740, "M", "ꝁ"), + (0xA741, "V"), + (0xA742, "M", "ꝃ"), + (0xA743, "V"), + (0xA744, "M", "ꝅ"), + (0xA745, "V"), + (0xA746, "M", "ꝇ"), + (0xA747, "V"), + (0xA748, "M", "ꝉ"), + (0xA749, "V"), + (0xA74A, "M", "ꝋ"), + (0xA74B, "V"), + (0xA74C, "M", "ꝍ"), + (0xA74D, "V"), + (0xA74E, "M", "ꝏ"), + (0xA74F, "V"), + (0xA750, "M", "ꝑ"), + (0xA751, "V"), + (0xA752, "M", "ꝓ"), + (0xA753, "V"), + (0xA754, "M", "ꝕ"), + (0xA755, "V"), + (0xA756, "M", "ꝗ"), + (0xA757, "V"), + (0xA758, "M", "ꝙ"), + (0xA759, "V"), + (0xA75A, "M", "ꝛ"), + (0xA75B, "V"), + (0xA75C, "M", "ꝝ"), + (0xA75D, "V"), + (0xA75E, "M", "ꝟ"), + (0xA75F, "V"), + (0xA760, "M", "ꝡ"), + (0xA761, "V"), + (0xA762, "M", "ꝣ"), + (0xA763, "V"), + (0xA764, "M", "ꝥ"), + (0xA765, "V"), + (0xA766, "M", "ꝧ"), + (0xA767, "V"), + (0xA768, "M", "ꝩ"), + (0xA769, "V"), + (0xA76A, "M", "ꝫ"), + (0xA76B, "V"), + (0xA76C, "M", "ꝭ"), + (0xA76D, "V"), + (0xA76E, "M", "ꝯ"), + (0xA76F, "V"), + (0xA770, "M", "ꝯ"), + (0xA771, "V"), + (0xA779, "M", "ꝺ"), + (0xA77A, "V"), + (0xA77B, "M", "ꝼ"), + (0xA77C, "V"), + (0xA77D, "M", "ᵹ"), + (0xA77E, "M", "ꝿ"), + (0xA77F, "V"), + ] + + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA780, "M", "ꞁ"), + (0xA781, "V"), + (0xA782, "M", "ꞃ"), + (0xA783, "V"), + (0xA784, "M", "ꞅ"), + (0xA785, "V"), + (0xA786, "M", "ꞇ"), + (0xA787, "V"), + (0xA78B, "M", "ꞌ"), + (0xA78C, "V"), + (0xA78D, "M", "ɥ"), + (0xA78E, "V"), + (0xA790, "M", "ꞑ"), + (0xA791, "V"), + (0xA792, "M", "ꞓ"), + (0xA793, "V"), + (0xA796, "M", "ꞗ"), + (0xA797, "V"), + (0xA798, "M", "ꞙ"), + (0xA799, "V"), + (0xA79A, "M", "ꞛ"), + (0xA79B, "V"), + (0xA79C, "M", "ꞝ"), + (0xA79D, "V"), + (0xA79E, "M", "ꞟ"), + (0xA79F, "V"), + (0xA7A0, "M", "ꞡ"), + (0xA7A1, "V"), + (0xA7A2, "M", "ꞣ"), + (0xA7A3, "V"), + (0xA7A4, "M", "ꞥ"), + (0xA7A5, "V"), + (0xA7A6, "M", "ꞧ"), + (0xA7A7, "V"), + (0xA7A8, "M", "ꞩ"), + (0xA7A9, "V"), + (0xA7AA, "M", "ɦ"), + (0xA7AB, "M", "ɜ"), + (0xA7AC, "M", "ɡ"), + (0xA7AD, "M", "ɬ"), + (0xA7AE, "M", "ɪ"), + (0xA7AF, "V"), + (0xA7B0, "M", "ʞ"), + (0xA7B1, "M", "ʇ"), + (0xA7B2, "M", "ʝ"), + (0xA7B3, "M", "ꭓ"), + (0xA7B4, "M", "ꞵ"), + (0xA7B5, "V"), + (0xA7B6, "M", "ꞷ"), + (0xA7B7, "V"), + (0xA7B8, "M", "ꞹ"), + (0xA7B9, "V"), + (0xA7BA, "M", "ꞻ"), + (0xA7BB, "V"), + (0xA7BC, "M", "ꞽ"), + (0xA7BD, "V"), + (0xA7BE, "M", "ꞿ"), + (0xA7BF, "V"), + (0xA7C0, "M", "ꟁ"), + (0xA7C1, "V"), + (0xA7C2, "M", "ꟃ"), + (0xA7C3, "V"), + (0xA7C4, "M", "ꞔ"), + (0xA7C5, "M", "ʂ"), + (0xA7C6, "M", "ᶎ"), + (0xA7C7, "M", "ꟈ"), + (0xA7C8, "V"), + (0xA7C9, "M", "ꟊ"), + (0xA7CA, "V"), + (0xA7CB, "X"), + (0xA7D0, "M", "ꟑ"), + (0xA7D1, "V"), + (0xA7D2, "X"), + (0xA7D3, "V"), + (0xA7D4, "X"), + (0xA7D5, "V"), + (0xA7D6, "M", "ꟗ"), + (0xA7D7, "V"), + (0xA7D8, "M", "ꟙ"), + (0xA7D9, "V"), + (0xA7DA, "X"), + (0xA7F2, "M", 
"c"), + (0xA7F3, "M", "f"), + (0xA7F4, "M", "q"), + (0xA7F5, "M", "ꟶ"), + (0xA7F6, "V"), + (0xA7F8, "M", "ħ"), + (0xA7F9, "M", "œ"), + (0xA7FA, "V"), + (0xA82D, "X"), + (0xA830, "V"), + (0xA83A, "X"), + (0xA840, "V"), + (0xA878, "X"), + (0xA880, "V"), + (0xA8C6, "X"), + (0xA8CE, "V"), + (0xA8DA, "X"), + (0xA8E0, "V"), + (0xA954, "X"), + ] + + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA95F, "V"), + (0xA97D, "X"), + (0xA980, "V"), + (0xA9CE, "X"), + (0xA9CF, "V"), + (0xA9DA, "X"), + (0xA9DE, "V"), + (0xA9FF, "X"), + (0xAA00, "V"), + (0xAA37, "X"), + (0xAA40, "V"), + (0xAA4E, "X"), + (0xAA50, "V"), + (0xAA5A, "X"), + (0xAA5C, "V"), + (0xAAC3, "X"), + (0xAADB, "V"), + (0xAAF7, "X"), + (0xAB01, "V"), + (0xAB07, "X"), + (0xAB09, "V"), + (0xAB0F, "X"), + (0xAB11, "V"), + (0xAB17, "X"), + (0xAB20, "V"), + (0xAB27, "X"), + (0xAB28, "V"), + (0xAB2F, "X"), + (0xAB30, "V"), + (0xAB5C, "M", "ꜧ"), + (0xAB5D, "M", "ꬷ"), + (0xAB5E, "M", "ɫ"), + (0xAB5F, "M", "ꭒ"), + (0xAB60, "V"), + (0xAB69, "M", "ʍ"), + (0xAB6A, "V"), + (0xAB6C, "X"), + (0xAB70, "M", "Ꭰ"), + (0xAB71, "M", "Ꭱ"), + (0xAB72, "M", "Ꭲ"), + (0xAB73, "M", "Ꭳ"), + (0xAB74, "M", "Ꭴ"), + (0xAB75, "M", "Ꭵ"), + (0xAB76, "M", "Ꭶ"), + (0xAB77, "M", "Ꭷ"), + (0xAB78, "M", "Ꭸ"), + (0xAB79, "M", "Ꭹ"), + (0xAB7A, "M", "Ꭺ"), + (0xAB7B, "M", "Ꭻ"), + (0xAB7C, "M", "Ꭼ"), + (0xAB7D, "M", "Ꭽ"), + (0xAB7E, "M", "Ꭾ"), + (0xAB7F, "M", "Ꭿ"), + (0xAB80, "M", "Ꮀ"), + (0xAB81, "M", "Ꮁ"), + (0xAB82, "M", "Ꮂ"), + (0xAB83, "M", "Ꮃ"), + (0xAB84, "M", "Ꮄ"), + (0xAB85, "M", "Ꮅ"), + (0xAB86, "M", "Ꮆ"), + (0xAB87, "M", "Ꮇ"), + (0xAB88, "M", "Ꮈ"), + (0xAB89, "M", "Ꮉ"), + (0xAB8A, "M", "Ꮊ"), + (0xAB8B, "M", "Ꮋ"), + (0xAB8C, "M", "Ꮌ"), + (0xAB8D, "M", "Ꮍ"), + (0xAB8E, "M", "Ꮎ"), + (0xAB8F, "M", "Ꮏ"), + (0xAB90, "M", "Ꮐ"), + (0xAB91, "M", "Ꮑ"), + (0xAB92, "M", "Ꮒ"), + (0xAB93, "M", "Ꮓ"), + (0xAB94, "M", "Ꮔ"), + (0xAB95, "M", "Ꮕ"), + (0xAB96, "M", "Ꮖ"), + (0xAB97, "M", "Ꮗ"), + (0xAB98, "M", "Ꮘ"), + (0xAB99, "M", "Ꮙ"), + (0xAB9A, "M", "Ꮚ"), + (0xAB9B, "M", "Ꮛ"), + (0xAB9C, "M", "Ꮜ"), + (0xAB9D, "M", "Ꮝ"), + (0xAB9E, "M", "Ꮞ"), + (0xAB9F, "M", "Ꮟ"), + (0xABA0, "M", "Ꮠ"), + (0xABA1, "M", "Ꮡ"), + (0xABA2, "M", "Ꮢ"), + (0xABA3, "M", "Ꮣ"), + (0xABA4, "M", "Ꮤ"), + (0xABA5, "M", "Ꮥ"), + (0xABA6, "M", "Ꮦ"), + (0xABA7, "M", "Ꮧ"), + (0xABA8, "M", "Ꮨ"), + (0xABA9, "M", "Ꮩ"), + (0xABAA, "M", "Ꮪ"), + (0xABAB, "M", "Ꮫ"), + (0xABAC, "M", "Ꮬ"), + (0xABAD, "M", "Ꮭ"), + (0xABAE, "M", "Ꮮ"), + ] + + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAF, "M", "Ꮯ"), + (0xABB0, "M", "Ꮰ"), + (0xABB1, "M", "Ꮱ"), + (0xABB2, "M", "Ꮲ"), + (0xABB3, "M", "Ꮳ"), + (0xABB4, "M", "Ꮴ"), + (0xABB5, "M", "Ꮵ"), + (0xABB6, "M", "Ꮶ"), + (0xABB7, "M", "Ꮷ"), + (0xABB8, "M", "Ꮸ"), + (0xABB9, "M", "Ꮹ"), + (0xABBA, "M", "Ꮺ"), + (0xABBB, "M", "Ꮻ"), + (0xABBC, "M", "Ꮼ"), + (0xABBD, "M", "Ꮽ"), + (0xABBE, "M", "Ꮾ"), + (0xABBF, "M", "Ꮿ"), + (0xABC0, "V"), + (0xABEE, "X"), + (0xABF0, "V"), + (0xABFA, "X"), + (0xAC00, "V"), + (0xD7A4, "X"), + (0xD7B0, "V"), + (0xD7C7, "X"), + (0xD7CB, "V"), + (0xD7FC, "X"), + (0xF900, "M", "豈"), + (0xF901, "M", "更"), + (0xF902, "M", "車"), + (0xF903, "M", "賈"), + (0xF904, "M", "滑"), + (0xF905, "M", "串"), + (0xF906, "M", "句"), + (0xF907, "M", "龜"), + (0xF909, "M", "契"), + (0xF90A, "M", "金"), + (0xF90B, "M", "喇"), + (0xF90C, "M", "奈"), + (0xF90D, "M", "懶"), + (0xF90E, "M", "癩"), + (0xF90F, "M", "羅"), + (0xF910, "M", "蘿"), + (0xF911, "M", "螺"), + (0xF912, "M", "裸"), + (0xF913, "M", "邏"), + (0xF914, "M", "樂"), + (0xF915, "M", 
"洛"), + (0xF916, "M", "烙"), + (0xF917, "M", "珞"), + (0xF918, "M", "落"), + (0xF919, "M", "酪"), + (0xF91A, "M", "駱"), + (0xF91B, "M", "亂"), + (0xF91C, "M", "卵"), + (0xF91D, "M", "欄"), + (0xF91E, "M", "爛"), + (0xF91F, "M", "蘭"), + (0xF920, "M", "鸞"), + (0xF921, "M", "嵐"), + (0xF922, "M", "濫"), + (0xF923, "M", "藍"), + (0xF924, "M", "襤"), + (0xF925, "M", "拉"), + (0xF926, "M", "臘"), + (0xF927, "M", "蠟"), + (0xF928, "M", "廊"), + (0xF929, "M", "朗"), + (0xF92A, "M", "浪"), + (0xF92B, "M", "狼"), + (0xF92C, "M", "郎"), + (0xF92D, "M", "來"), + (0xF92E, "M", "冷"), + (0xF92F, "M", "勞"), + (0xF930, "M", "擄"), + (0xF931, "M", "櫓"), + (0xF932, "M", "爐"), + (0xF933, "M", "盧"), + (0xF934, "M", "老"), + (0xF935, "M", "蘆"), + (0xF936, "M", "虜"), + (0xF937, "M", "路"), + (0xF938, "M", "露"), + (0xF939, "M", "魯"), + (0xF93A, "M", "鷺"), + (0xF93B, "M", "碌"), + (0xF93C, "M", "祿"), + (0xF93D, "M", "綠"), + (0xF93E, "M", "菉"), + (0xF93F, "M", "錄"), + (0xF940, "M", "鹿"), + (0xF941, "M", "論"), + (0xF942, "M", "壟"), + (0xF943, "M", "弄"), + (0xF944, "M", "籠"), + (0xF945, "M", "聾"), + (0xF946, "M", "牢"), + (0xF947, "M", "磊"), + (0xF948, "M", "賂"), + (0xF949, "M", "雷"), + ] + + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF94A, "M", "壘"), + (0xF94B, "M", "屢"), + (0xF94C, "M", "樓"), + (0xF94D, "M", "淚"), + (0xF94E, "M", "漏"), + (0xF94F, "M", "累"), + (0xF950, "M", "縷"), + (0xF951, "M", "陋"), + (0xF952, "M", "勒"), + (0xF953, "M", "肋"), + (0xF954, "M", "凜"), + (0xF955, "M", "凌"), + (0xF956, "M", "稜"), + (0xF957, "M", "綾"), + (0xF958, "M", "菱"), + (0xF959, "M", "陵"), + (0xF95A, "M", "讀"), + (0xF95B, "M", "拏"), + (0xF95C, "M", "樂"), + (0xF95D, "M", "諾"), + (0xF95E, "M", "丹"), + (0xF95F, "M", "寧"), + (0xF960, "M", "怒"), + (0xF961, "M", "率"), + (0xF962, "M", "異"), + (0xF963, "M", "北"), + (0xF964, "M", "磻"), + (0xF965, "M", "便"), + (0xF966, "M", "復"), + (0xF967, "M", "不"), + (0xF968, "M", "泌"), + (0xF969, "M", "數"), + (0xF96A, "M", "索"), + (0xF96B, "M", "參"), + (0xF96C, "M", "塞"), + (0xF96D, "M", "省"), + (0xF96E, "M", "葉"), + (0xF96F, "M", "說"), + (0xF970, "M", "殺"), + (0xF971, "M", "辰"), + (0xF972, "M", "沈"), + (0xF973, "M", "拾"), + (0xF974, "M", "若"), + (0xF975, "M", "掠"), + (0xF976, "M", "略"), + (0xF977, "M", "亮"), + (0xF978, "M", "兩"), + (0xF979, "M", "凉"), + (0xF97A, "M", "梁"), + (0xF97B, "M", "糧"), + (0xF97C, "M", "良"), + (0xF97D, "M", "諒"), + (0xF97E, "M", "量"), + (0xF97F, "M", "勵"), + (0xF980, "M", "呂"), + (0xF981, "M", "女"), + (0xF982, "M", "廬"), + (0xF983, "M", "旅"), + (0xF984, "M", "濾"), + (0xF985, "M", "礪"), + (0xF986, "M", "閭"), + (0xF987, "M", "驪"), + (0xF988, "M", "麗"), + (0xF989, "M", "黎"), + (0xF98A, "M", "力"), + (0xF98B, "M", "曆"), + (0xF98C, "M", "歷"), + (0xF98D, "M", "轢"), + (0xF98E, "M", "年"), + (0xF98F, "M", "憐"), + (0xF990, "M", "戀"), + (0xF991, "M", "撚"), + (0xF992, "M", "漣"), + (0xF993, "M", "煉"), + (0xF994, "M", "璉"), + (0xF995, "M", "秊"), + (0xF996, "M", "練"), + (0xF997, "M", "聯"), + (0xF998, "M", "輦"), + (0xF999, "M", "蓮"), + (0xF99A, "M", "連"), + (0xF99B, "M", "鍊"), + (0xF99C, "M", "列"), + (0xF99D, "M", "劣"), + (0xF99E, "M", "咽"), + (0xF99F, "M", "烈"), + (0xF9A0, "M", "裂"), + (0xF9A1, "M", "說"), + (0xF9A2, "M", "廉"), + (0xF9A3, "M", "念"), + (0xF9A4, "M", "捻"), + (0xF9A5, "M", "殮"), + (0xF9A6, "M", "簾"), + (0xF9A7, "M", "獵"), + (0xF9A8, "M", "令"), + (0xF9A9, "M", "囹"), + (0xF9AA, "M", "寧"), + (0xF9AB, "M", "嶺"), + (0xF9AC, "M", "怜"), + (0xF9AD, "M", "玲"), + ] + + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AE, "M", "瑩"), + 
(0xF9AF, "M", "羚"), + (0xF9B0, "M", "聆"), + (0xF9B1, "M", "鈴"), + (0xF9B2, "M", "零"), + (0xF9B3, "M", "靈"), + (0xF9B4, "M", "領"), + (0xF9B5, "M", "例"), + (0xF9B6, "M", "禮"), + (0xF9B7, "M", "醴"), + (0xF9B8, "M", "隸"), + (0xF9B9, "M", "惡"), + (0xF9BA, "M", "了"), + (0xF9BB, "M", "僚"), + (0xF9BC, "M", "寮"), + (0xF9BD, "M", "尿"), + (0xF9BE, "M", "料"), + (0xF9BF, "M", "樂"), + (0xF9C0, "M", "燎"), + (0xF9C1, "M", "療"), + (0xF9C2, "M", "蓼"), + (0xF9C3, "M", "遼"), + (0xF9C4, "M", "龍"), + (0xF9C5, "M", "暈"), + (0xF9C6, "M", "阮"), + (0xF9C7, "M", "劉"), + (0xF9C8, "M", "杻"), + (0xF9C9, "M", "柳"), + (0xF9CA, "M", "流"), + (0xF9CB, "M", "溜"), + (0xF9CC, "M", "琉"), + (0xF9CD, "M", "留"), + (0xF9CE, "M", "硫"), + (0xF9CF, "M", "紐"), + (0xF9D0, "M", "類"), + (0xF9D1, "M", "六"), + (0xF9D2, "M", "戮"), + (0xF9D3, "M", "陸"), + (0xF9D4, "M", "倫"), + (0xF9D5, "M", "崙"), + (0xF9D6, "M", "淪"), + (0xF9D7, "M", "輪"), + (0xF9D8, "M", "律"), + (0xF9D9, "M", "慄"), + (0xF9DA, "M", "栗"), + (0xF9DB, "M", "率"), + (0xF9DC, "M", "隆"), + (0xF9DD, "M", "利"), + (0xF9DE, "M", "吏"), + (0xF9DF, "M", "履"), + (0xF9E0, "M", "易"), + (0xF9E1, "M", "李"), + (0xF9E2, "M", "梨"), + (0xF9E3, "M", "泥"), + (0xF9E4, "M", "理"), + (0xF9E5, "M", "痢"), + (0xF9E6, "M", "罹"), + (0xF9E7, "M", "裏"), + (0xF9E8, "M", "裡"), + (0xF9E9, "M", "里"), + (0xF9EA, "M", "離"), + (0xF9EB, "M", "匿"), + (0xF9EC, "M", "溺"), + (0xF9ED, "M", "吝"), + (0xF9EE, "M", "燐"), + (0xF9EF, "M", "璘"), + (0xF9F0, "M", "藺"), + (0xF9F1, "M", "隣"), + (0xF9F2, "M", "鱗"), + (0xF9F3, "M", "麟"), + (0xF9F4, "M", "林"), + (0xF9F5, "M", "淋"), + (0xF9F6, "M", "臨"), + (0xF9F7, "M", "立"), + (0xF9F8, "M", "笠"), + (0xF9F9, "M", "粒"), + (0xF9FA, "M", "狀"), + (0xF9FB, "M", "炙"), + (0xF9FC, "M", "識"), + (0xF9FD, "M", "什"), + (0xF9FE, "M", "茶"), + (0xF9FF, "M", "刺"), + (0xFA00, "M", "切"), + (0xFA01, "M", "度"), + (0xFA02, "M", "拓"), + (0xFA03, "M", "糖"), + (0xFA04, "M", "宅"), + (0xFA05, "M", "洞"), + (0xFA06, "M", "暴"), + (0xFA07, "M", "輻"), + (0xFA08, "M", "行"), + (0xFA09, "M", "降"), + (0xFA0A, "M", "見"), + (0xFA0B, "M", "廓"), + (0xFA0C, "M", "兀"), + (0xFA0D, "M", "嗀"), + (0xFA0E, "V"), + (0xFA10, "M", "塚"), + (0xFA11, "V"), + (0xFA12, "M", "晴"), + ] + + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA13, "V"), + (0xFA15, "M", "凞"), + (0xFA16, "M", "猪"), + (0xFA17, "M", "益"), + (0xFA18, "M", "礼"), + (0xFA19, "M", "神"), + (0xFA1A, "M", "祥"), + (0xFA1B, "M", "福"), + (0xFA1C, "M", "靖"), + (0xFA1D, "M", "精"), + (0xFA1E, "M", "羽"), + (0xFA1F, "V"), + (0xFA20, "M", "蘒"), + (0xFA21, "V"), + (0xFA22, "M", "諸"), + (0xFA23, "V"), + (0xFA25, "M", "逸"), + (0xFA26, "M", "都"), + (0xFA27, "V"), + (0xFA2A, "M", "飯"), + (0xFA2B, "M", "飼"), + (0xFA2C, "M", "館"), + (0xFA2D, "M", "鶴"), + (0xFA2E, "M", "郞"), + (0xFA2F, "M", "隷"), + (0xFA30, "M", "侮"), + (0xFA31, "M", "僧"), + (0xFA32, "M", "免"), + (0xFA33, "M", "勉"), + (0xFA34, "M", "勤"), + (0xFA35, "M", "卑"), + (0xFA36, "M", "喝"), + (0xFA37, "M", "嘆"), + (0xFA38, "M", "器"), + (0xFA39, "M", "塀"), + (0xFA3A, "M", "墨"), + (0xFA3B, "M", "層"), + (0xFA3C, "M", "屮"), + (0xFA3D, "M", "悔"), + (0xFA3E, "M", "慨"), + (0xFA3F, "M", "憎"), + (0xFA40, "M", "懲"), + (0xFA41, "M", "敏"), + (0xFA42, "M", "既"), + (0xFA43, "M", "暑"), + (0xFA44, "M", "梅"), + (0xFA45, "M", "海"), + (0xFA46, "M", "渚"), + (0xFA47, "M", "漢"), + (0xFA48, "M", "煮"), + (0xFA49, "M", "爫"), + (0xFA4A, "M", "琢"), + (0xFA4B, "M", "碑"), + (0xFA4C, "M", "社"), + (0xFA4D, "M", "祉"), + (0xFA4E, "M", "祈"), + (0xFA4F, "M", "祐"), + (0xFA50, "M", "祖"), + (0xFA51, "M", "祝"), + (0xFA52, "M", "禍"), + 
(0xFA53, "M", "禎"), + (0xFA54, "M", "穀"), + (0xFA55, "M", "突"), + (0xFA56, "M", "節"), + (0xFA57, "M", "練"), + (0xFA58, "M", "縉"), + (0xFA59, "M", "繁"), + (0xFA5A, "M", "署"), + (0xFA5B, "M", "者"), + (0xFA5C, "M", "臭"), + (0xFA5D, "M", "艹"), + (0xFA5F, "M", "著"), + (0xFA60, "M", "褐"), + (0xFA61, "M", "視"), + (0xFA62, "M", "謁"), + (0xFA63, "M", "謹"), + (0xFA64, "M", "賓"), + (0xFA65, "M", "贈"), + (0xFA66, "M", "辶"), + (0xFA67, "M", "逸"), + (0xFA68, "M", "難"), + (0xFA69, "M", "響"), + (0xFA6A, "M", "頻"), + (0xFA6B, "M", "恵"), + (0xFA6C, "M", "𤋮"), + (0xFA6D, "M", "舘"), + (0xFA6E, "X"), + (0xFA70, "M", "並"), + (0xFA71, "M", "况"), + (0xFA72, "M", "全"), + (0xFA73, "M", "侀"), + (0xFA74, "M", "充"), + (0xFA75, "M", "冀"), + (0xFA76, "M", "勇"), + (0xFA77, "M", "勺"), + (0xFA78, "M", "喝"), + (0xFA79, "M", "啕"), + (0xFA7A, "M", "喙"), + (0xFA7B, "M", "嗢"), + (0xFA7C, "M", "塚"), + ] + + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA7D, "M", "墳"), + (0xFA7E, "M", "奄"), + (0xFA7F, "M", "奔"), + (0xFA80, "M", "婢"), + (0xFA81, "M", "嬨"), + (0xFA82, "M", "廒"), + (0xFA83, "M", "廙"), + (0xFA84, "M", "彩"), + (0xFA85, "M", "徭"), + (0xFA86, "M", "惘"), + (0xFA87, "M", "慎"), + (0xFA88, "M", "愈"), + (0xFA89, "M", "憎"), + (0xFA8A, "M", "慠"), + (0xFA8B, "M", "懲"), + (0xFA8C, "M", "戴"), + (0xFA8D, "M", "揄"), + (0xFA8E, "M", "搜"), + (0xFA8F, "M", "摒"), + (0xFA90, "M", "敖"), + (0xFA91, "M", "晴"), + (0xFA92, "M", "朗"), + (0xFA93, "M", "望"), + (0xFA94, "M", "杖"), + (0xFA95, "M", "歹"), + (0xFA96, "M", "殺"), + (0xFA97, "M", "流"), + (0xFA98, "M", "滛"), + (0xFA99, "M", "滋"), + (0xFA9A, "M", "漢"), + (0xFA9B, "M", "瀞"), + (0xFA9C, "M", "煮"), + (0xFA9D, "M", "瞧"), + (0xFA9E, "M", "爵"), + (0xFA9F, "M", "犯"), + (0xFAA0, "M", "猪"), + (0xFAA1, "M", "瑱"), + (0xFAA2, "M", "甆"), + (0xFAA3, "M", "画"), + (0xFAA4, "M", "瘝"), + (0xFAA5, "M", "瘟"), + (0xFAA6, "M", "益"), + (0xFAA7, "M", "盛"), + (0xFAA8, "M", "直"), + (0xFAA9, "M", "睊"), + (0xFAAA, "M", "着"), + (0xFAAB, "M", "磌"), + (0xFAAC, "M", "窱"), + (0xFAAD, "M", "節"), + (0xFAAE, "M", "类"), + (0xFAAF, "M", "絛"), + (0xFAB0, "M", "練"), + (0xFAB1, "M", "缾"), + (0xFAB2, "M", "者"), + (0xFAB3, "M", "荒"), + (0xFAB4, "M", "華"), + (0xFAB5, "M", "蝹"), + (0xFAB6, "M", "襁"), + (0xFAB7, "M", "覆"), + (0xFAB8, "M", "視"), + (0xFAB9, "M", "調"), + (0xFABA, "M", "諸"), + (0xFABB, "M", "請"), + (0xFABC, "M", "謁"), + (0xFABD, "M", "諾"), + (0xFABE, "M", "諭"), + (0xFABF, "M", "謹"), + (0xFAC0, "M", "變"), + (0xFAC1, "M", "贈"), + (0xFAC2, "M", "輸"), + (0xFAC3, "M", "遲"), + (0xFAC4, "M", "醙"), + (0xFAC5, "M", "鉶"), + (0xFAC6, "M", "陼"), + (0xFAC7, "M", "難"), + (0xFAC8, "M", "靖"), + (0xFAC9, "M", "韛"), + (0xFACA, "M", "響"), + (0xFACB, "M", "頋"), + (0xFACC, "M", "頻"), + (0xFACD, "M", "鬒"), + (0xFACE, "M", "龜"), + (0xFACF, "M", "𢡊"), + (0xFAD0, "M", "𢡄"), + (0xFAD1, "M", "𣏕"), + (0xFAD2, "M", "㮝"), + (0xFAD3, "M", "䀘"), + (0xFAD4, "M", "䀹"), + (0xFAD5, "M", "𥉉"), + (0xFAD6, "M", "𥳐"), + (0xFAD7, "M", "𧻓"), + (0xFAD8, "M", "齃"), + (0xFAD9, "M", "龎"), + (0xFADA, "X"), + (0xFB00, "M", "ff"), + (0xFB01, "M", "fi"), + (0xFB02, "M", "fl"), + (0xFB03, "M", "ffi"), + (0xFB04, "M", "ffl"), + (0xFB05, "M", "st"), + ] + + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB07, "X"), + (0xFB13, "M", "մն"), + (0xFB14, "M", "մե"), + (0xFB15, "M", "մի"), + (0xFB16, "M", "վն"), + (0xFB17, "M", "մխ"), + (0xFB18, "X"), + (0xFB1D, "M", "יִ"), + (0xFB1E, "V"), + (0xFB1F, "M", "ײַ"), + (0xFB20, "M", "ע"), + (0xFB21, "M", "א"), + (0xFB22, "M", "ד"), + (0xFB23, "M", "ה"), 
+ (0xFB24, "M", "כ"), + (0xFB25, "M", "ל"), + (0xFB26, "M", "ם"), + (0xFB27, "M", "ר"), + (0xFB28, "M", "ת"), + (0xFB29, "3", "+"), + (0xFB2A, "M", "שׁ"), + (0xFB2B, "M", "שׂ"), + (0xFB2C, "M", "שּׁ"), + (0xFB2D, "M", "שּׂ"), + (0xFB2E, "M", "אַ"), + (0xFB2F, "M", "אָ"), + (0xFB30, "M", "אּ"), + (0xFB31, "M", "בּ"), + (0xFB32, "M", "גּ"), + (0xFB33, "M", "דּ"), + (0xFB34, "M", "הּ"), + (0xFB35, "M", "וּ"), + (0xFB36, "M", "זּ"), + (0xFB37, "X"), + (0xFB38, "M", "טּ"), + (0xFB39, "M", "יּ"), + (0xFB3A, "M", "ךּ"), + (0xFB3B, "M", "כּ"), + (0xFB3C, "M", "לּ"), + (0xFB3D, "X"), + (0xFB3E, "M", "מּ"), + (0xFB3F, "X"), + (0xFB40, "M", "נּ"), + (0xFB41, "M", "סּ"), + (0xFB42, "X"), + (0xFB43, "M", "ףּ"), + (0xFB44, "M", "פּ"), + (0xFB45, "X"), + (0xFB46, "M", "צּ"), + (0xFB47, "M", "קּ"), + (0xFB48, "M", "רּ"), + (0xFB49, "M", "שּ"), + (0xFB4A, "M", "תּ"), + (0xFB4B, "M", "וֹ"), + (0xFB4C, "M", "בֿ"), + (0xFB4D, "M", "כֿ"), + (0xFB4E, "M", "פֿ"), + (0xFB4F, "M", "אל"), + (0xFB50, "M", "ٱ"), + (0xFB52, "M", "ٻ"), + (0xFB56, "M", "پ"), + (0xFB5A, "M", "ڀ"), + (0xFB5E, "M", "ٺ"), + (0xFB62, "M", "ٿ"), + (0xFB66, "M", "ٹ"), + (0xFB6A, "M", "ڤ"), + (0xFB6E, "M", "ڦ"), + (0xFB72, "M", "ڄ"), + (0xFB76, "M", "ڃ"), + (0xFB7A, "M", "چ"), + (0xFB7E, "M", "ڇ"), + (0xFB82, "M", "ڍ"), + (0xFB84, "M", "ڌ"), + (0xFB86, "M", "ڎ"), + (0xFB88, "M", "ڈ"), + (0xFB8A, "M", "ژ"), + (0xFB8C, "M", "ڑ"), + (0xFB8E, "M", "ک"), + (0xFB92, "M", "گ"), + (0xFB96, "M", "ڳ"), + (0xFB9A, "M", "ڱ"), + (0xFB9E, "M", "ں"), + (0xFBA0, "M", "ڻ"), + (0xFBA4, "M", "ۀ"), + (0xFBA6, "M", "ہ"), + (0xFBAA, "M", "ھ"), + (0xFBAE, "M", "ے"), + (0xFBB0, "M", "ۓ"), + (0xFBB2, "V"), + (0xFBC3, "X"), + (0xFBD3, "M", "ڭ"), + (0xFBD7, "M", "ۇ"), + (0xFBD9, "M", "ۆ"), + (0xFBDB, "M", "ۈ"), + (0xFBDD, "M", "ۇٴ"), + (0xFBDE, "M", "ۋ"), + (0xFBE0, "M", "ۅ"), + (0xFBE2, "M", "ۉ"), + (0xFBE4, "M", "ې"), + (0xFBE8, "M", "ى"), + ] + + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBEA, "M", "ئا"), + (0xFBEC, "M", "ئە"), + (0xFBEE, "M", "ئو"), + (0xFBF0, "M", "ئۇ"), + (0xFBF2, "M", "ئۆ"), + (0xFBF4, "M", "ئۈ"), + (0xFBF6, "M", "ئې"), + (0xFBF9, "M", "ئى"), + (0xFBFC, "M", "ی"), + (0xFC00, "M", "ئج"), + (0xFC01, "M", "ئح"), + (0xFC02, "M", "ئم"), + (0xFC03, "M", "ئى"), + (0xFC04, "M", "ئي"), + (0xFC05, "M", "بج"), + (0xFC06, "M", "بح"), + (0xFC07, "M", "بخ"), + (0xFC08, "M", "بم"), + (0xFC09, "M", "بى"), + (0xFC0A, "M", "بي"), + (0xFC0B, "M", "تج"), + (0xFC0C, "M", "تح"), + (0xFC0D, "M", "تخ"), + (0xFC0E, "M", "تم"), + (0xFC0F, "M", "تى"), + (0xFC10, "M", "تي"), + (0xFC11, "M", "ثج"), + (0xFC12, "M", "ثم"), + (0xFC13, "M", "ثى"), + (0xFC14, "M", "ثي"), + (0xFC15, "M", "جح"), + (0xFC16, "M", "جم"), + (0xFC17, "M", "حج"), + (0xFC18, "M", "حم"), + (0xFC19, "M", "خج"), + (0xFC1A, "M", "خح"), + (0xFC1B, "M", "خم"), + (0xFC1C, "M", "سج"), + (0xFC1D, "M", "سح"), + (0xFC1E, "M", "سخ"), + (0xFC1F, "M", "سم"), + (0xFC20, "M", "صح"), + (0xFC21, "M", "صم"), + (0xFC22, "M", "ضج"), + (0xFC23, "M", "ضح"), + (0xFC24, "M", "ضخ"), + (0xFC25, "M", "ضم"), + (0xFC26, "M", "طح"), + (0xFC27, "M", "طم"), + (0xFC28, "M", "ظم"), + (0xFC29, "M", "عج"), + (0xFC2A, "M", "عم"), + (0xFC2B, "M", "غج"), + (0xFC2C, "M", "غم"), + (0xFC2D, "M", "فج"), + (0xFC2E, "M", "فح"), + (0xFC2F, "M", "فخ"), + (0xFC30, "M", "فم"), + (0xFC31, "M", "فى"), + (0xFC32, "M", "في"), + (0xFC33, "M", "قح"), + (0xFC34, "M", "قم"), + (0xFC35, "M", "قى"), + (0xFC36, "M", "قي"), + (0xFC37, "M", "كا"), + (0xFC38, "M", "كج"), + (0xFC39, "M", "كح"), + (0xFC3A, "M", "كخ"), + 
(0xFC3B, "M", "كل"), + (0xFC3C, "M", "كم"), + (0xFC3D, "M", "كى"), + (0xFC3E, "M", "كي"), + (0xFC3F, "M", "لج"), + (0xFC40, "M", "لح"), + (0xFC41, "M", "لخ"), + (0xFC42, "M", "لم"), + (0xFC43, "M", "لى"), + (0xFC44, "M", "لي"), + (0xFC45, "M", "مج"), + (0xFC46, "M", "مح"), + (0xFC47, "M", "مخ"), + (0xFC48, "M", "مم"), + (0xFC49, "M", "مى"), + (0xFC4A, "M", "مي"), + (0xFC4B, "M", "نج"), + (0xFC4C, "M", "نح"), + (0xFC4D, "M", "نخ"), + (0xFC4E, "M", "نم"), + (0xFC4F, "M", "نى"), + (0xFC50, "M", "ني"), + (0xFC51, "M", "هج"), + (0xFC52, "M", "هم"), + (0xFC53, "M", "هى"), + (0xFC54, "M", "هي"), + (0xFC55, "M", "يج"), + (0xFC56, "M", "يح"), + (0xFC57, "M", "يخ"), + (0xFC58, "M", "يم"), + (0xFC59, "M", "يى"), + (0xFC5A, "M", "يي"), + ] + + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFC5B, "M", "ذٰ"), + (0xFC5C, "M", "رٰ"), + (0xFC5D, "M", "ىٰ"), + (0xFC5E, "3", " ٌّ"), + (0xFC5F, "3", " ٍّ"), + (0xFC60, "3", " َّ"), + (0xFC61, "3", " ُّ"), + (0xFC62, "3", " ِّ"), + (0xFC63, "3", " ّٰ"), + (0xFC64, "M", "ئر"), + (0xFC65, "M", "ئز"), + (0xFC66, "M", "ئم"), + (0xFC67, "M", "ئن"), + (0xFC68, "M", "ئى"), + (0xFC69, "M", "ئي"), + (0xFC6A, "M", "بر"), + (0xFC6B, "M", "بز"), + (0xFC6C, "M", "بم"), + (0xFC6D, "M", "بن"), + (0xFC6E, "M", "بى"), + (0xFC6F, "M", "بي"), + (0xFC70, "M", "تر"), + (0xFC71, "M", "تز"), + (0xFC72, "M", "تم"), + (0xFC73, "M", "تن"), + (0xFC74, "M", "تى"), + (0xFC75, "M", "تي"), + (0xFC76, "M", "ثر"), + (0xFC77, "M", "ثز"), + (0xFC78, "M", "ثم"), + (0xFC79, "M", "ثن"), + (0xFC7A, "M", "ثى"), + (0xFC7B, "M", "ثي"), + (0xFC7C, "M", "فى"), + (0xFC7D, "M", "في"), + (0xFC7E, "M", "قى"), + (0xFC7F, "M", "قي"), + (0xFC80, "M", "كا"), + (0xFC81, "M", "كل"), + (0xFC82, "M", "كم"), + (0xFC83, "M", "كى"), + (0xFC84, "M", "كي"), + (0xFC85, "M", "لم"), + (0xFC86, "M", "لى"), + (0xFC87, "M", "لي"), + (0xFC88, "M", "ما"), + (0xFC89, "M", "مم"), + (0xFC8A, "M", "نر"), + (0xFC8B, "M", "نز"), + (0xFC8C, "M", "نم"), + (0xFC8D, "M", "نن"), + (0xFC8E, "M", "نى"), + (0xFC8F, "M", "ني"), + (0xFC90, "M", "ىٰ"), + (0xFC91, "M", "ير"), + (0xFC92, "M", "يز"), + (0xFC93, "M", "يم"), + (0xFC94, "M", "ين"), + (0xFC95, "M", "يى"), + (0xFC96, "M", "يي"), + (0xFC97, "M", "ئج"), + (0xFC98, "M", "ئح"), + (0xFC99, "M", "ئخ"), + (0xFC9A, "M", "ئم"), + (0xFC9B, "M", "ئه"), + (0xFC9C, "M", "بج"), + (0xFC9D, "M", "بح"), + (0xFC9E, "M", "بخ"), + (0xFC9F, "M", "بم"), + (0xFCA0, "M", "به"), + (0xFCA1, "M", "تج"), + (0xFCA2, "M", "تح"), + (0xFCA3, "M", "تخ"), + (0xFCA4, "M", "تم"), + (0xFCA5, "M", "ته"), + (0xFCA6, "M", "ثم"), + (0xFCA7, "M", "جح"), + (0xFCA8, "M", "جم"), + (0xFCA9, "M", "حج"), + (0xFCAA, "M", "حم"), + (0xFCAB, "M", "خج"), + (0xFCAC, "M", "خم"), + (0xFCAD, "M", "سج"), + (0xFCAE, "M", "سح"), + (0xFCAF, "M", "سخ"), + (0xFCB0, "M", "سم"), + (0xFCB1, "M", "صح"), + (0xFCB2, "M", "صخ"), + (0xFCB3, "M", "صم"), + (0xFCB4, "M", "ضج"), + (0xFCB5, "M", "ضح"), + (0xFCB6, "M", "ضخ"), + (0xFCB7, "M", "ضم"), + (0xFCB8, "M", "طح"), + (0xFCB9, "M", "ظم"), + (0xFCBA, "M", "عج"), + (0xFCBB, "M", "عم"), + (0xFCBC, "M", "غج"), + (0xFCBD, "M", "غم"), + (0xFCBE, "M", "فج"), + ] + + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBF, "M", "فح"), + (0xFCC0, "M", "فخ"), + (0xFCC1, "M", "فم"), + (0xFCC2, "M", "قح"), + (0xFCC3, "M", "قم"), + (0xFCC4, "M", "كج"), + (0xFCC5, "M", "كح"), + (0xFCC6, "M", "كخ"), + (0xFCC7, "M", "كل"), + (0xFCC8, "M", "كم"), + (0xFCC9, "M", "لج"), + (0xFCCA, "M", "لح"), + (0xFCCB, "M", "لخ"), + (0xFCCC, "M", "لم"), + (0xFCCD, 
"M", "له"), + (0xFCCE, "M", "مج"), + (0xFCCF, "M", "مح"), + (0xFCD0, "M", "مخ"), + (0xFCD1, "M", "مم"), + (0xFCD2, "M", "نج"), + (0xFCD3, "M", "نح"), + (0xFCD4, "M", "نخ"), + (0xFCD5, "M", "نم"), + (0xFCD6, "M", "نه"), + (0xFCD7, "M", "هج"), + (0xFCD8, "M", "هم"), + (0xFCD9, "M", "هٰ"), + (0xFCDA, "M", "يج"), + (0xFCDB, "M", "يح"), + (0xFCDC, "M", "يخ"), + (0xFCDD, "M", "يم"), + (0xFCDE, "M", "يه"), + (0xFCDF, "M", "ئم"), + (0xFCE0, "M", "ئه"), + (0xFCE1, "M", "بم"), + (0xFCE2, "M", "به"), + (0xFCE3, "M", "تم"), + (0xFCE4, "M", "ته"), + (0xFCE5, "M", "ثم"), + (0xFCE6, "M", "ثه"), + (0xFCE7, "M", "سم"), + (0xFCE8, "M", "سه"), + (0xFCE9, "M", "شم"), + (0xFCEA, "M", "شه"), + (0xFCEB, "M", "كل"), + (0xFCEC, "M", "كم"), + (0xFCED, "M", "لم"), + (0xFCEE, "M", "نم"), + (0xFCEF, "M", "نه"), + (0xFCF0, "M", "يم"), + (0xFCF1, "M", "يه"), + (0xFCF2, "M", "ـَّ"), + (0xFCF3, "M", "ـُّ"), + (0xFCF4, "M", "ـِّ"), + (0xFCF5, "M", "طى"), + (0xFCF6, "M", "طي"), + (0xFCF7, "M", "عى"), + (0xFCF8, "M", "عي"), + (0xFCF9, "M", "غى"), + (0xFCFA, "M", "غي"), + (0xFCFB, "M", "سى"), + (0xFCFC, "M", "سي"), + (0xFCFD, "M", "شى"), + (0xFCFE, "M", "شي"), + (0xFCFF, "M", "حى"), + (0xFD00, "M", "حي"), + (0xFD01, "M", "جى"), + (0xFD02, "M", "جي"), + (0xFD03, "M", "خى"), + (0xFD04, "M", "خي"), + (0xFD05, "M", "صى"), + (0xFD06, "M", "صي"), + (0xFD07, "M", "ضى"), + (0xFD08, "M", "ضي"), + (0xFD09, "M", "شج"), + (0xFD0A, "M", "شح"), + (0xFD0B, "M", "شخ"), + (0xFD0C, "M", "شم"), + (0xFD0D, "M", "شر"), + (0xFD0E, "M", "سر"), + (0xFD0F, "M", "صر"), + (0xFD10, "M", "ضر"), + (0xFD11, "M", "طى"), + (0xFD12, "M", "طي"), + (0xFD13, "M", "عى"), + (0xFD14, "M", "عي"), + (0xFD15, "M", "غى"), + (0xFD16, "M", "غي"), + (0xFD17, "M", "سى"), + (0xFD18, "M", "سي"), + (0xFD19, "M", "شى"), + (0xFD1A, "M", "شي"), + (0xFD1B, "M", "حى"), + (0xFD1C, "M", "حي"), + (0xFD1D, "M", "جى"), + (0xFD1E, "M", "جي"), + (0xFD1F, "M", "خى"), + (0xFD20, "M", "خي"), + (0xFD21, "M", "صى"), + (0xFD22, "M", "صي"), + ] + + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD23, "M", "ضى"), + (0xFD24, "M", "ضي"), + (0xFD25, "M", "شج"), + (0xFD26, "M", "شح"), + (0xFD27, "M", "شخ"), + (0xFD28, "M", "شم"), + (0xFD29, "M", "شر"), + (0xFD2A, "M", "سر"), + (0xFD2B, "M", "صر"), + (0xFD2C, "M", "ضر"), + (0xFD2D, "M", "شج"), + (0xFD2E, "M", "شح"), + (0xFD2F, "M", "شخ"), + (0xFD30, "M", "شم"), + (0xFD31, "M", "سه"), + (0xFD32, "M", "شه"), + (0xFD33, "M", "طم"), + (0xFD34, "M", "سج"), + (0xFD35, "M", "سح"), + (0xFD36, "M", "سخ"), + (0xFD37, "M", "شج"), + (0xFD38, "M", "شح"), + (0xFD39, "M", "شخ"), + (0xFD3A, "M", "طم"), + (0xFD3B, "M", "ظم"), + (0xFD3C, "M", "اً"), + (0xFD3E, "V"), + (0xFD50, "M", "تجم"), + (0xFD51, "M", "تحج"), + (0xFD53, "M", "تحم"), + (0xFD54, "M", "تخم"), + (0xFD55, "M", "تمج"), + (0xFD56, "M", "تمح"), + (0xFD57, "M", "تمخ"), + (0xFD58, "M", "جمح"), + (0xFD5A, "M", "حمي"), + (0xFD5B, "M", "حمى"), + (0xFD5C, "M", "سحج"), + (0xFD5D, "M", "سجح"), + (0xFD5E, "M", "سجى"), + (0xFD5F, "M", "سمح"), + (0xFD61, "M", "سمج"), + (0xFD62, "M", "سمم"), + (0xFD64, "M", "صحح"), + (0xFD66, "M", "صمم"), + (0xFD67, "M", "شحم"), + (0xFD69, "M", "شجي"), + (0xFD6A, "M", "شمخ"), + (0xFD6C, "M", "شمم"), + (0xFD6E, "M", "ضحى"), + (0xFD6F, "M", "ضخم"), + (0xFD71, "M", "طمح"), + (0xFD73, "M", "طمم"), + (0xFD74, "M", "طمي"), + (0xFD75, "M", "عجم"), + (0xFD76, "M", "عمم"), + (0xFD78, "M", "عمى"), + (0xFD79, "M", "غمم"), + (0xFD7A, "M", "غمي"), + (0xFD7B, "M", "غمى"), + (0xFD7C, "M", "فخم"), + (0xFD7E, "M", "قمح"), + (0xFD7F, "M", "قمم"), + (0xFD80, "M", 
"لحم"), + (0xFD81, "M", "لحي"), + (0xFD82, "M", "لحى"), + (0xFD83, "M", "لجج"), + (0xFD85, "M", "لخم"), + (0xFD87, "M", "لمح"), + (0xFD89, "M", "محج"), + (0xFD8A, "M", "محم"), + (0xFD8B, "M", "محي"), + (0xFD8C, "M", "مجح"), + (0xFD8D, "M", "مجم"), + (0xFD8E, "M", "مخج"), + (0xFD8F, "M", "مخم"), + (0xFD90, "X"), + (0xFD92, "M", "مجخ"), + (0xFD93, "M", "همج"), + (0xFD94, "M", "همم"), + (0xFD95, "M", "نحم"), + (0xFD96, "M", "نحى"), + (0xFD97, "M", "نجم"), + (0xFD99, "M", "نجى"), + (0xFD9A, "M", "نمي"), + (0xFD9B, "M", "نمى"), + (0xFD9C, "M", "يمم"), + (0xFD9E, "M", "بخي"), + (0xFD9F, "M", "تجي"), + (0xFDA0, "M", "تجى"), + (0xFDA1, "M", "تخي"), + (0xFDA2, "M", "تخى"), + (0xFDA3, "M", "تمي"), + (0xFDA4, "M", "تمى"), + (0xFDA5, "M", "جمي"), + (0xFDA6, "M", "جحى"), + (0xFDA7, "M", "جمى"), + (0xFDA8, "M", "سخى"), + (0xFDA9, "M", "صحي"), + (0xFDAA, "M", "شحي"), + ] + + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDAB, "M", "ضحي"), + (0xFDAC, "M", "لجي"), + (0xFDAD, "M", "لمي"), + (0xFDAE, "M", "يحي"), + (0xFDAF, "M", "يجي"), + (0xFDB0, "M", "يمي"), + (0xFDB1, "M", "ممي"), + (0xFDB2, "M", "قمي"), + (0xFDB3, "M", "نحي"), + (0xFDB4, "M", "قمح"), + (0xFDB5, "M", "لحم"), + (0xFDB6, "M", "عمي"), + (0xFDB7, "M", "كمي"), + (0xFDB8, "M", "نجح"), + (0xFDB9, "M", "مخي"), + (0xFDBA, "M", "لجم"), + (0xFDBB, "M", "كمم"), + (0xFDBC, "M", "لجم"), + (0xFDBD, "M", "نجح"), + (0xFDBE, "M", "جحي"), + (0xFDBF, "M", "حجي"), + (0xFDC0, "M", "مجي"), + (0xFDC1, "M", "فمي"), + (0xFDC2, "M", "بحي"), + (0xFDC3, "M", "كمم"), + (0xFDC4, "M", "عجم"), + (0xFDC5, "M", "صمم"), + (0xFDC6, "M", "سخي"), + (0xFDC7, "M", "نجي"), + (0xFDC8, "X"), + (0xFDCF, "V"), + (0xFDD0, "X"), + (0xFDF0, "M", "صلے"), + (0xFDF1, "M", "قلے"), + (0xFDF2, "M", "الله"), + (0xFDF3, "M", "اكبر"), + (0xFDF4, "M", "محمد"), + (0xFDF5, "M", "صلعم"), + (0xFDF6, "M", "رسول"), + (0xFDF7, "M", "عليه"), + (0xFDF8, "M", "وسلم"), + (0xFDF9, "M", "صلى"), + (0xFDFA, "3", "صلى الله عليه وسلم"), + (0xFDFB, "3", "جل جلاله"), + (0xFDFC, "M", "ریال"), + (0xFDFD, "V"), + (0xFE00, "I"), + (0xFE10, "3", ","), + (0xFE11, "M", "、"), + (0xFE12, "X"), + (0xFE13, "3", ":"), + (0xFE14, "3", ";"), + (0xFE15, "3", "!"), + (0xFE16, "3", "?"), + (0xFE17, "M", "〖"), + (0xFE18, "M", "〗"), + (0xFE19, "X"), + (0xFE20, "V"), + (0xFE30, "X"), + (0xFE31, "M", "—"), + (0xFE32, "M", "–"), + (0xFE33, "3", "_"), + (0xFE35, "3", "("), + (0xFE36, "3", ")"), + (0xFE37, "3", "{"), + (0xFE38, "3", "}"), + (0xFE39, "M", "〔"), + (0xFE3A, "M", "〕"), + (0xFE3B, "M", "【"), + (0xFE3C, "M", "】"), + (0xFE3D, "M", "《"), + (0xFE3E, "M", "》"), + (0xFE3F, "M", "〈"), + (0xFE40, "M", "〉"), + (0xFE41, "M", "「"), + (0xFE42, "M", "」"), + (0xFE43, "M", "『"), + (0xFE44, "M", "』"), + (0xFE45, "V"), + (0xFE47, "3", "["), + (0xFE48, "3", "]"), + (0xFE49, "3", " ̅"), + (0xFE4D, "3", "_"), + (0xFE50, "3", ","), + (0xFE51, "M", "、"), + (0xFE52, "X"), + (0xFE54, "3", ";"), + (0xFE55, "3", ":"), + (0xFE56, "3", "?"), + (0xFE57, "3", "!"), + (0xFE58, "M", "—"), + (0xFE59, "3", "("), + (0xFE5A, "3", ")"), + (0xFE5B, "3", "{"), + (0xFE5C, "3", "}"), + (0xFE5D, "M", "〔"), + (0xFE5E, "M", "〕"), + (0xFE5F, "3", "#"), + (0xFE60, "3", "&"), + (0xFE61, "3", "*"), + ] + + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE62, "3", "+"), + (0xFE63, "M", "-"), + (0xFE64, "3", "<"), + (0xFE65, "3", ">"), + (0xFE66, "3", "="), + (0xFE67, "X"), + (0xFE68, "3", "\\"), + (0xFE69, "3", "$"), + (0xFE6A, "3", "%"), + (0xFE6B, "3", "@"), + (0xFE6C, "X"), + (0xFE70, "3", " 
ً"), + (0xFE71, "M", "ـً"), + (0xFE72, "3", " ٌ"), + (0xFE73, "V"), + (0xFE74, "3", " ٍ"), + (0xFE75, "X"), + (0xFE76, "3", " َ"), + (0xFE77, "M", "ـَ"), + (0xFE78, "3", " ُ"), + (0xFE79, "M", "ـُ"), + (0xFE7A, "3", " ِ"), + (0xFE7B, "M", "ـِ"), + (0xFE7C, "3", " ّ"), + (0xFE7D, "M", "ـّ"), + (0xFE7E, "3", " ْ"), + (0xFE7F, "M", "ـْ"), + (0xFE80, "M", "ء"), + (0xFE81, "M", "آ"), + (0xFE83, "M", "أ"), + (0xFE85, "M", "ؤ"), + (0xFE87, "M", "إ"), + (0xFE89, "M", "ئ"), + (0xFE8D, "M", "ا"), + (0xFE8F, "M", "ب"), + (0xFE93, "M", "ة"), + (0xFE95, "M", "ت"), + (0xFE99, "M", "ث"), + (0xFE9D, "M", "ج"), + (0xFEA1, "M", "ح"), + (0xFEA5, "M", "خ"), + (0xFEA9, "M", "د"), + (0xFEAB, "M", "ذ"), + (0xFEAD, "M", "ر"), + (0xFEAF, "M", "ز"), + (0xFEB1, "M", "س"), + (0xFEB5, "M", "ش"), + (0xFEB9, "M", "ص"), + (0xFEBD, "M", "ض"), + (0xFEC1, "M", "ط"), + (0xFEC5, "M", "ظ"), + (0xFEC9, "M", "ع"), + (0xFECD, "M", "غ"), + (0xFED1, "M", "ف"), + (0xFED5, "M", "ق"), + (0xFED9, "M", "ك"), + (0xFEDD, "M", "ل"), + (0xFEE1, "M", "م"), + (0xFEE5, "M", "ن"), + (0xFEE9, "M", "ه"), + (0xFEED, "M", "و"), + (0xFEEF, "M", "ى"), + (0xFEF1, "M", "ي"), + (0xFEF5, "M", "لآ"), + (0xFEF7, "M", "لأ"), + (0xFEF9, "M", "لإ"), + (0xFEFB, "M", "لا"), + (0xFEFD, "X"), + (0xFEFF, "I"), + (0xFF00, "X"), + (0xFF01, "3", "!"), + (0xFF02, "3", '"'), + (0xFF03, "3", "#"), + (0xFF04, "3", "$"), + (0xFF05, "3", "%"), + (0xFF06, "3", "&"), + (0xFF07, "3", "'"), + (0xFF08, "3", "("), + (0xFF09, "3", ")"), + (0xFF0A, "3", "*"), + (0xFF0B, "3", "+"), + (0xFF0C, "3", ","), + (0xFF0D, "M", "-"), + (0xFF0E, "M", "."), + (0xFF0F, "3", "/"), + (0xFF10, "M", "0"), + (0xFF11, "M", "1"), + (0xFF12, "M", "2"), + (0xFF13, "M", "3"), + (0xFF14, "M", "4"), + (0xFF15, "M", "5"), + (0xFF16, "M", "6"), + (0xFF17, "M", "7"), + (0xFF18, "M", "8"), + (0xFF19, "M", "9"), + (0xFF1A, "3", ":"), + (0xFF1B, "3", ";"), + (0xFF1C, "3", "<"), + (0xFF1D, "3", "="), + (0xFF1E, "3", ">"), + ] + + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1F, "3", "?"), + (0xFF20, "3", "@"), + (0xFF21, "M", "a"), + (0xFF22, "M", "b"), + (0xFF23, "M", "c"), + (0xFF24, "M", "d"), + (0xFF25, "M", "e"), + (0xFF26, "M", "f"), + (0xFF27, "M", "g"), + (0xFF28, "M", "h"), + (0xFF29, "M", "i"), + (0xFF2A, "M", "j"), + (0xFF2B, "M", "k"), + (0xFF2C, "M", "l"), + (0xFF2D, "M", "m"), + (0xFF2E, "M", "n"), + (0xFF2F, "M", "o"), + (0xFF30, "M", "p"), + (0xFF31, "M", "q"), + (0xFF32, "M", "r"), + (0xFF33, "M", "s"), + (0xFF34, "M", "t"), + (0xFF35, "M", "u"), + (0xFF36, "M", "v"), + (0xFF37, "M", "w"), + (0xFF38, "M", "x"), + (0xFF39, "M", "y"), + (0xFF3A, "M", "z"), + (0xFF3B, "3", "["), + (0xFF3C, "3", "\\"), + (0xFF3D, "3", "]"), + (0xFF3E, "3", "^"), + (0xFF3F, "3", "_"), + (0xFF40, "3", "`"), + (0xFF41, "M", "a"), + (0xFF42, "M", "b"), + (0xFF43, "M", "c"), + (0xFF44, "M", "d"), + (0xFF45, "M", "e"), + (0xFF46, "M", "f"), + (0xFF47, "M", "g"), + (0xFF48, "M", "h"), + (0xFF49, "M", "i"), + (0xFF4A, "M", "j"), + (0xFF4B, "M", "k"), + (0xFF4C, "M", "l"), + (0xFF4D, "M", "m"), + (0xFF4E, "M", "n"), + (0xFF4F, "M", "o"), + (0xFF50, "M", "p"), + (0xFF51, "M", "q"), + (0xFF52, "M", "r"), + (0xFF53, "M", "s"), + (0xFF54, "M", "t"), + (0xFF55, "M", "u"), + (0xFF56, "M", "v"), + (0xFF57, "M", "w"), + (0xFF58, "M", "x"), + (0xFF59, "M", "y"), + (0xFF5A, "M", "z"), + (0xFF5B, "3", "{"), + (0xFF5C, "3", "|"), + (0xFF5D, "3", "}"), + (0xFF5E, "3", "~"), + (0xFF5F, "M", "⦅"), + (0xFF60, "M", "⦆"), + (0xFF61, "M", "."), + (0xFF62, "M", "「"), + (0xFF63, "M", "」"), + (0xFF64, 
"M", "、"), + (0xFF65, "M", "・"), + (0xFF66, "M", "ヲ"), + (0xFF67, "M", "ァ"), + (0xFF68, "M", "ィ"), + (0xFF69, "M", "ゥ"), + (0xFF6A, "M", "ェ"), + (0xFF6B, "M", "ォ"), + (0xFF6C, "M", "ャ"), + (0xFF6D, "M", "ュ"), + (0xFF6E, "M", "ョ"), + (0xFF6F, "M", "ッ"), + (0xFF70, "M", "ー"), + (0xFF71, "M", "ア"), + (0xFF72, "M", "イ"), + (0xFF73, "M", "ウ"), + (0xFF74, "M", "エ"), + (0xFF75, "M", "オ"), + (0xFF76, "M", "カ"), + (0xFF77, "M", "キ"), + (0xFF78, "M", "ク"), + (0xFF79, "M", "ケ"), + (0xFF7A, "M", "コ"), + (0xFF7B, "M", "サ"), + (0xFF7C, "M", "シ"), + (0xFF7D, "M", "ス"), + (0xFF7E, "M", "セ"), + (0xFF7F, "M", "ソ"), + (0xFF80, "M", "タ"), + (0xFF81, "M", "チ"), + (0xFF82, "M", "ツ"), + ] + + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF83, "M", "テ"), + (0xFF84, "M", "ト"), + (0xFF85, "M", "ナ"), + (0xFF86, "M", "ニ"), + (0xFF87, "M", "ヌ"), + (0xFF88, "M", "ネ"), + (0xFF89, "M", "ノ"), + (0xFF8A, "M", "ハ"), + (0xFF8B, "M", "ヒ"), + (0xFF8C, "M", "フ"), + (0xFF8D, "M", "ヘ"), + (0xFF8E, "M", "ホ"), + (0xFF8F, "M", "マ"), + (0xFF90, "M", "ミ"), + (0xFF91, "M", "ム"), + (0xFF92, "M", "メ"), + (0xFF93, "M", "モ"), + (0xFF94, "M", "ヤ"), + (0xFF95, "M", "ユ"), + (0xFF96, "M", "ヨ"), + (0xFF97, "M", "ラ"), + (0xFF98, "M", "リ"), + (0xFF99, "M", "ル"), + (0xFF9A, "M", "レ"), + (0xFF9B, "M", "ロ"), + (0xFF9C, "M", "ワ"), + (0xFF9D, "M", "ン"), + (0xFF9E, "M", "゙"), + (0xFF9F, "M", "゚"), + (0xFFA0, "X"), + (0xFFA1, "M", "ᄀ"), + (0xFFA2, "M", "ᄁ"), + (0xFFA3, "M", "ᆪ"), + (0xFFA4, "M", "ᄂ"), + (0xFFA5, "M", "ᆬ"), + (0xFFA6, "M", "ᆭ"), + (0xFFA7, "M", "ᄃ"), + (0xFFA8, "M", "ᄄ"), + (0xFFA9, "M", "ᄅ"), + (0xFFAA, "M", "ᆰ"), + (0xFFAB, "M", "ᆱ"), + (0xFFAC, "M", "ᆲ"), + (0xFFAD, "M", "ᆳ"), + (0xFFAE, "M", "ᆴ"), + (0xFFAF, "M", "ᆵ"), + (0xFFB0, "M", "ᄚ"), + (0xFFB1, "M", "ᄆ"), + (0xFFB2, "M", "ᄇ"), + (0xFFB3, "M", "ᄈ"), + (0xFFB4, "M", "ᄡ"), + (0xFFB5, "M", "ᄉ"), + (0xFFB6, "M", "ᄊ"), + (0xFFB7, "M", "ᄋ"), + (0xFFB8, "M", "ᄌ"), + (0xFFB9, "M", "ᄍ"), + (0xFFBA, "M", "ᄎ"), + (0xFFBB, "M", "ᄏ"), + (0xFFBC, "M", "ᄐ"), + (0xFFBD, "M", "ᄑ"), + (0xFFBE, "M", "ᄒ"), + (0xFFBF, "X"), + (0xFFC2, "M", "ᅡ"), + (0xFFC3, "M", "ᅢ"), + (0xFFC4, "M", "ᅣ"), + (0xFFC5, "M", "ᅤ"), + (0xFFC6, "M", "ᅥ"), + (0xFFC7, "M", "ᅦ"), + (0xFFC8, "X"), + (0xFFCA, "M", "ᅧ"), + (0xFFCB, "M", "ᅨ"), + (0xFFCC, "M", "ᅩ"), + (0xFFCD, "M", "ᅪ"), + (0xFFCE, "M", "ᅫ"), + (0xFFCF, "M", "ᅬ"), + (0xFFD0, "X"), + (0xFFD2, "M", "ᅭ"), + (0xFFD3, "M", "ᅮ"), + (0xFFD4, "M", "ᅯ"), + (0xFFD5, "M", "ᅰ"), + (0xFFD6, "M", "ᅱ"), + (0xFFD7, "M", "ᅲ"), + (0xFFD8, "X"), + (0xFFDA, "M", "ᅳ"), + (0xFFDB, "M", "ᅴ"), + (0xFFDC, "M", "ᅵ"), + (0xFFDD, "X"), + (0xFFE0, "M", "¢"), + (0xFFE1, "M", "£"), + (0xFFE2, "M", "¬"), + (0xFFE3, "3", " ̄"), + (0xFFE4, "M", "¦"), + (0xFFE5, "M", "¥"), + (0xFFE6, "M", "₩"), + (0xFFE7, "X"), + (0xFFE8, "M", "│"), + (0xFFE9, "M", "←"), + (0xFFEA, "M", "↑"), + (0xFFEB, "M", "→"), + (0xFFEC, "M", "↓"), + (0xFFED, "M", "■"), + ] + + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEE, "M", "○"), + (0xFFEF, "X"), + (0x10000, "V"), + (0x1000C, "X"), + (0x1000D, "V"), + (0x10027, "X"), + (0x10028, "V"), + (0x1003B, "X"), + (0x1003C, "V"), + (0x1003E, "X"), + (0x1003F, "V"), + (0x1004E, "X"), + (0x10050, "V"), + (0x1005E, "X"), + (0x10080, "V"), + (0x100FB, "X"), + (0x10100, "V"), + (0x10103, "X"), + (0x10107, "V"), + (0x10134, "X"), + (0x10137, "V"), + (0x1018F, "X"), + (0x10190, "V"), + (0x1019D, "X"), + (0x101A0, "V"), + (0x101A1, "X"), + (0x101D0, "V"), + (0x101FE, "X"), + (0x10280, "V"), + (0x1029D, 
"X"), + (0x102A0, "V"), + (0x102D1, "X"), + (0x102E0, "V"), + (0x102FC, "X"), + (0x10300, "V"), + (0x10324, "X"), + (0x1032D, "V"), + (0x1034B, "X"), + (0x10350, "V"), + (0x1037B, "X"), + (0x10380, "V"), + (0x1039E, "X"), + (0x1039F, "V"), + (0x103C4, "X"), + (0x103C8, "V"), + (0x103D6, "X"), + (0x10400, "M", "𐐨"), + (0x10401, "M", "𐐩"), + (0x10402, "M", "𐐪"), + (0x10403, "M", "𐐫"), + (0x10404, "M", "𐐬"), + (0x10405, "M", "𐐭"), + (0x10406, "M", "𐐮"), + (0x10407, "M", "𐐯"), + (0x10408, "M", "𐐰"), + (0x10409, "M", "𐐱"), + (0x1040A, "M", "𐐲"), + (0x1040B, "M", "𐐳"), + (0x1040C, "M", "𐐴"), + (0x1040D, "M", "𐐵"), + (0x1040E, "M", "𐐶"), + (0x1040F, "M", "𐐷"), + (0x10410, "M", "𐐸"), + (0x10411, "M", "𐐹"), + (0x10412, "M", "𐐺"), + (0x10413, "M", "𐐻"), + (0x10414, "M", "𐐼"), + (0x10415, "M", "𐐽"), + (0x10416, "M", "𐐾"), + (0x10417, "M", "𐐿"), + (0x10418, "M", "𐑀"), + (0x10419, "M", "𐑁"), + (0x1041A, "M", "𐑂"), + (0x1041B, "M", "𐑃"), + (0x1041C, "M", "𐑄"), + (0x1041D, "M", "𐑅"), + (0x1041E, "M", "𐑆"), + (0x1041F, "M", "𐑇"), + (0x10420, "M", "𐑈"), + (0x10421, "M", "𐑉"), + (0x10422, "M", "𐑊"), + (0x10423, "M", "𐑋"), + (0x10424, "M", "𐑌"), + (0x10425, "M", "𐑍"), + (0x10426, "M", "𐑎"), + (0x10427, "M", "𐑏"), + (0x10428, "V"), + (0x1049E, "X"), + (0x104A0, "V"), + (0x104AA, "X"), + (0x104B0, "M", "𐓘"), + (0x104B1, "M", "𐓙"), + (0x104B2, "M", "𐓚"), + (0x104B3, "M", "𐓛"), + (0x104B4, "M", "𐓜"), + (0x104B5, "M", "𐓝"), + (0x104B6, "M", "𐓞"), + (0x104B7, "M", "𐓟"), + (0x104B8, "M", "𐓠"), + (0x104B9, "M", "𐓡"), + ] + + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104BA, "M", "𐓢"), + (0x104BB, "M", "𐓣"), + (0x104BC, "M", "𐓤"), + (0x104BD, "M", "𐓥"), + (0x104BE, "M", "𐓦"), + (0x104BF, "M", "𐓧"), + (0x104C0, "M", "𐓨"), + (0x104C1, "M", "𐓩"), + (0x104C2, "M", "𐓪"), + (0x104C3, "M", "𐓫"), + (0x104C4, "M", "𐓬"), + (0x104C5, "M", "𐓭"), + (0x104C6, "M", "𐓮"), + (0x104C7, "M", "𐓯"), + (0x104C8, "M", "𐓰"), + (0x104C9, "M", "𐓱"), + (0x104CA, "M", "𐓲"), + (0x104CB, "M", "𐓳"), + (0x104CC, "M", "𐓴"), + (0x104CD, "M", "𐓵"), + (0x104CE, "M", "𐓶"), + (0x104CF, "M", "𐓷"), + (0x104D0, "M", "𐓸"), + (0x104D1, "M", "𐓹"), + (0x104D2, "M", "𐓺"), + (0x104D3, "M", "𐓻"), + (0x104D4, "X"), + (0x104D8, "V"), + (0x104FC, "X"), + (0x10500, "V"), + (0x10528, "X"), + (0x10530, "V"), + (0x10564, "X"), + (0x1056F, "V"), + (0x10570, "M", "𐖗"), + (0x10571, "M", "𐖘"), + (0x10572, "M", "𐖙"), + (0x10573, "M", "𐖚"), + (0x10574, "M", "𐖛"), + (0x10575, "M", "𐖜"), + (0x10576, "M", "𐖝"), + (0x10577, "M", "𐖞"), + (0x10578, "M", "𐖟"), + (0x10579, "M", "𐖠"), + (0x1057A, "M", "𐖡"), + (0x1057B, "X"), + (0x1057C, "M", "𐖣"), + (0x1057D, "M", "𐖤"), + (0x1057E, "M", "𐖥"), + (0x1057F, "M", "𐖦"), + (0x10580, "M", "𐖧"), + (0x10581, "M", "𐖨"), + (0x10582, "M", "𐖩"), + (0x10583, "M", "𐖪"), + (0x10584, "M", "𐖫"), + (0x10585, "M", "𐖬"), + (0x10586, "M", "𐖭"), + (0x10587, "M", "𐖮"), + (0x10588, "M", "𐖯"), + (0x10589, "M", "𐖰"), + (0x1058A, "M", "𐖱"), + (0x1058B, "X"), + (0x1058C, "M", "𐖳"), + (0x1058D, "M", "𐖴"), + (0x1058E, "M", "𐖵"), + (0x1058F, "M", "𐖶"), + (0x10590, "M", "𐖷"), + (0x10591, "M", "𐖸"), + (0x10592, "M", "𐖹"), + (0x10593, "X"), + (0x10594, "M", "𐖻"), + (0x10595, "M", "𐖼"), + (0x10596, "X"), + (0x10597, "V"), + (0x105A2, "X"), + (0x105A3, "V"), + (0x105B2, "X"), + (0x105B3, "V"), + (0x105BA, "X"), + (0x105BB, "V"), + (0x105BD, "X"), + (0x10600, "V"), + (0x10737, "X"), + (0x10740, "V"), + (0x10756, "X"), + (0x10760, "V"), + (0x10768, "X"), + (0x10780, "V"), + (0x10781, "M", "ː"), + (0x10782, "M", "ˑ"), + (0x10783, 
"M", "æ"), + (0x10784, "M", "ʙ"), + (0x10785, "M", "ɓ"), + (0x10786, "X"), + (0x10787, "M", "ʣ"), + (0x10788, "M", "ꭦ"), + (0x10789, "M", "ʥ"), + (0x1078A, "M", "ʤ"), + (0x1078B, "M", "ɖ"), + (0x1078C, "M", "ɗ"), + ] + + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1078D, "M", "ᶑ"), + (0x1078E, "M", "ɘ"), + (0x1078F, "M", "ɞ"), + (0x10790, "M", "ʩ"), + (0x10791, "M", "ɤ"), + (0x10792, "M", "ɢ"), + (0x10793, "M", "ɠ"), + (0x10794, "M", "ʛ"), + (0x10795, "M", "ħ"), + (0x10796, "M", "ʜ"), + (0x10797, "M", "ɧ"), + (0x10798, "M", "ʄ"), + (0x10799, "M", "ʪ"), + (0x1079A, "M", "ʫ"), + (0x1079B, "M", "ɬ"), + (0x1079C, "M", "𝼄"), + (0x1079D, "M", "ꞎ"), + (0x1079E, "M", "ɮ"), + (0x1079F, "M", "𝼅"), + (0x107A0, "M", "ʎ"), + (0x107A1, "M", "𝼆"), + (0x107A2, "M", "ø"), + (0x107A3, "M", "ɶ"), + (0x107A4, "M", "ɷ"), + (0x107A5, "M", "q"), + (0x107A6, "M", "ɺ"), + (0x107A7, "M", "𝼈"), + (0x107A8, "M", "ɽ"), + (0x107A9, "M", "ɾ"), + (0x107AA, "M", "ʀ"), + (0x107AB, "M", "ʨ"), + (0x107AC, "M", "ʦ"), + (0x107AD, "M", "ꭧ"), + (0x107AE, "M", "ʧ"), + (0x107AF, "M", "ʈ"), + (0x107B0, "M", "ⱱ"), + (0x107B1, "X"), + (0x107B2, "M", "ʏ"), + (0x107B3, "M", "ʡ"), + (0x107B4, "M", "ʢ"), + (0x107B5, "M", "ʘ"), + (0x107B6, "M", "ǀ"), + (0x107B7, "M", "ǁ"), + (0x107B8, "M", "ǂ"), + (0x107B9, "M", "𝼊"), + (0x107BA, "M", "𝼞"), + (0x107BB, "X"), + (0x10800, "V"), + (0x10806, "X"), + (0x10808, "V"), + (0x10809, "X"), + (0x1080A, "V"), + (0x10836, "X"), + (0x10837, "V"), + (0x10839, "X"), + (0x1083C, "V"), + (0x1083D, "X"), + (0x1083F, "V"), + (0x10856, "X"), + (0x10857, "V"), + (0x1089F, "X"), + (0x108A7, "V"), + (0x108B0, "X"), + (0x108E0, "V"), + (0x108F3, "X"), + (0x108F4, "V"), + (0x108F6, "X"), + (0x108FB, "V"), + (0x1091C, "X"), + (0x1091F, "V"), + (0x1093A, "X"), + (0x1093F, "V"), + (0x10940, "X"), + (0x10980, "V"), + (0x109B8, "X"), + (0x109BC, "V"), + (0x109D0, "X"), + (0x109D2, "V"), + (0x10A04, "X"), + (0x10A05, "V"), + (0x10A07, "X"), + (0x10A0C, "V"), + (0x10A14, "X"), + (0x10A15, "V"), + (0x10A18, "X"), + (0x10A19, "V"), + (0x10A36, "X"), + (0x10A38, "V"), + (0x10A3B, "X"), + (0x10A3F, "V"), + (0x10A49, "X"), + (0x10A50, "V"), + (0x10A59, "X"), + (0x10A60, "V"), + (0x10AA0, "X"), + (0x10AC0, "V"), + (0x10AE7, "X"), + (0x10AEB, "V"), + (0x10AF7, "X"), + (0x10B00, "V"), + ] + + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10B36, "X"), + (0x10B39, "V"), + (0x10B56, "X"), + (0x10B58, "V"), + (0x10B73, "X"), + (0x10B78, "V"), + (0x10B92, "X"), + (0x10B99, "V"), + (0x10B9D, "X"), + (0x10BA9, "V"), + (0x10BB0, "X"), + (0x10C00, "V"), + (0x10C49, "X"), + (0x10C80, "M", "𐳀"), + (0x10C81, "M", "𐳁"), + (0x10C82, "M", "𐳂"), + (0x10C83, "M", "𐳃"), + (0x10C84, "M", "𐳄"), + (0x10C85, "M", "𐳅"), + (0x10C86, "M", "𐳆"), + (0x10C87, "M", "𐳇"), + (0x10C88, "M", "𐳈"), + (0x10C89, "M", "𐳉"), + (0x10C8A, "M", "𐳊"), + (0x10C8B, "M", "𐳋"), + (0x10C8C, "M", "𐳌"), + (0x10C8D, "M", "𐳍"), + (0x10C8E, "M", "𐳎"), + (0x10C8F, "M", "𐳏"), + (0x10C90, "M", "𐳐"), + (0x10C91, "M", "𐳑"), + (0x10C92, "M", "𐳒"), + (0x10C93, "M", "𐳓"), + (0x10C94, "M", "𐳔"), + (0x10C95, "M", "𐳕"), + (0x10C96, "M", "𐳖"), + (0x10C97, "M", "𐳗"), + (0x10C98, "M", "𐳘"), + (0x10C99, "M", "𐳙"), + (0x10C9A, "M", "𐳚"), + (0x10C9B, "M", "𐳛"), + (0x10C9C, "M", "𐳜"), + (0x10C9D, "M", "𐳝"), + (0x10C9E, "M", "𐳞"), + (0x10C9F, "M", "𐳟"), + (0x10CA0, "M", "𐳠"), + (0x10CA1, "M", "𐳡"), + (0x10CA2, "M", "𐳢"), + (0x10CA3, "M", "𐳣"), + (0x10CA4, "M", "𐳤"), + (0x10CA5, "M", "𐳥"), + (0x10CA6, "M", "𐳦"), + 
(0x10CA7, "M", "𐳧"), + (0x10CA8, "M", "𐳨"), + (0x10CA9, "M", "𐳩"), + (0x10CAA, "M", "𐳪"), + (0x10CAB, "M", "𐳫"), + (0x10CAC, "M", "𐳬"), + (0x10CAD, "M", "𐳭"), + (0x10CAE, "M", "𐳮"), + (0x10CAF, "M", "𐳯"), + (0x10CB0, "M", "𐳰"), + (0x10CB1, "M", "𐳱"), + (0x10CB2, "M", "𐳲"), + (0x10CB3, "X"), + (0x10CC0, "V"), + (0x10CF3, "X"), + (0x10CFA, "V"), + (0x10D28, "X"), + (0x10D30, "V"), + (0x10D3A, "X"), + (0x10E60, "V"), + (0x10E7F, "X"), + (0x10E80, "V"), + (0x10EAA, "X"), + (0x10EAB, "V"), + (0x10EAE, "X"), + (0x10EB0, "V"), + (0x10EB2, "X"), + (0x10EFD, "V"), + (0x10F28, "X"), + (0x10F30, "V"), + (0x10F5A, "X"), + (0x10F70, "V"), + (0x10F8A, "X"), + (0x10FB0, "V"), + (0x10FCC, "X"), + (0x10FE0, "V"), + (0x10FF7, "X"), + (0x11000, "V"), + (0x1104E, "X"), + (0x11052, "V"), + (0x11076, "X"), + (0x1107F, "V"), + (0x110BD, "X"), + (0x110BE, "V"), + (0x110C3, "X"), + (0x110D0, "V"), + (0x110E9, "X"), + (0x110F0, "V"), + ] + + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110FA, "X"), + (0x11100, "V"), + (0x11135, "X"), + (0x11136, "V"), + (0x11148, "X"), + (0x11150, "V"), + (0x11177, "X"), + (0x11180, "V"), + (0x111E0, "X"), + (0x111E1, "V"), + (0x111F5, "X"), + (0x11200, "V"), + (0x11212, "X"), + (0x11213, "V"), + (0x11242, "X"), + (0x11280, "V"), + (0x11287, "X"), + (0x11288, "V"), + (0x11289, "X"), + (0x1128A, "V"), + (0x1128E, "X"), + (0x1128F, "V"), + (0x1129E, "X"), + (0x1129F, "V"), + (0x112AA, "X"), + (0x112B0, "V"), + (0x112EB, "X"), + (0x112F0, "V"), + (0x112FA, "X"), + (0x11300, "V"), + (0x11304, "X"), + (0x11305, "V"), + (0x1130D, "X"), + (0x1130F, "V"), + (0x11311, "X"), + (0x11313, "V"), + (0x11329, "X"), + (0x1132A, "V"), + (0x11331, "X"), + (0x11332, "V"), + (0x11334, "X"), + (0x11335, "V"), + (0x1133A, "X"), + (0x1133B, "V"), + (0x11345, "X"), + (0x11347, "V"), + (0x11349, "X"), + (0x1134B, "V"), + (0x1134E, "X"), + (0x11350, "V"), + (0x11351, "X"), + (0x11357, "V"), + (0x11358, "X"), + (0x1135D, "V"), + (0x11364, "X"), + (0x11366, "V"), + (0x1136D, "X"), + (0x11370, "V"), + (0x11375, "X"), + (0x11400, "V"), + (0x1145C, "X"), + (0x1145D, "V"), + (0x11462, "X"), + (0x11480, "V"), + (0x114C8, "X"), + (0x114D0, "V"), + (0x114DA, "X"), + (0x11580, "V"), + (0x115B6, "X"), + (0x115B8, "V"), + (0x115DE, "X"), + (0x11600, "V"), + (0x11645, "X"), + (0x11650, "V"), + (0x1165A, "X"), + (0x11660, "V"), + (0x1166D, "X"), + (0x11680, "V"), + (0x116BA, "X"), + (0x116C0, "V"), + (0x116CA, "X"), + (0x11700, "V"), + (0x1171B, "X"), + (0x1171D, "V"), + (0x1172C, "X"), + (0x11730, "V"), + (0x11747, "X"), + (0x11800, "V"), + (0x1183C, "X"), + (0x118A0, "M", "𑣀"), + (0x118A1, "M", "𑣁"), + (0x118A2, "M", "𑣂"), + (0x118A3, "M", "𑣃"), + (0x118A4, "M", "𑣄"), + (0x118A5, "M", "𑣅"), + (0x118A6, "M", "𑣆"), + (0x118A7, "M", "𑣇"), + (0x118A8, "M", "𑣈"), + (0x118A9, "M", "𑣉"), + (0x118AA, "M", "𑣊"), + ] + + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118AB, "M", "𑣋"), + (0x118AC, "M", "𑣌"), + (0x118AD, "M", "𑣍"), + (0x118AE, "M", "𑣎"), + (0x118AF, "M", "𑣏"), + (0x118B0, "M", "𑣐"), + (0x118B1, "M", "𑣑"), + (0x118B2, "M", "𑣒"), + (0x118B3, "M", "𑣓"), + (0x118B4, "M", "𑣔"), + (0x118B5, "M", "𑣕"), + (0x118B6, "M", "𑣖"), + (0x118B7, "M", "𑣗"), + (0x118B8, "M", "𑣘"), + (0x118B9, "M", "𑣙"), + (0x118BA, "M", "𑣚"), + (0x118BB, "M", "𑣛"), + (0x118BC, "M", "𑣜"), + (0x118BD, "M", "𑣝"), + (0x118BE, "M", "𑣞"), + (0x118BF, "M", "𑣟"), + (0x118C0, "V"), + (0x118F3, "X"), + (0x118FF, "V"), + (0x11907, "X"), + (0x11909, "V"), + (0x1190A, "X"), + 
(0x1190C, "V"), + (0x11914, "X"), + (0x11915, "V"), + (0x11917, "X"), + (0x11918, "V"), + (0x11936, "X"), + (0x11937, "V"), + (0x11939, "X"), + (0x1193B, "V"), + (0x11947, "X"), + (0x11950, "V"), + (0x1195A, "X"), + (0x119A0, "V"), + (0x119A8, "X"), + (0x119AA, "V"), + (0x119D8, "X"), + (0x119DA, "V"), + (0x119E5, "X"), + (0x11A00, "V"), + (0x11A48, "X"), + (0x11A50, "V"), + (0x11AA3, "X"), + (0x11AB0, "V"), + (0x11AF9, "X"), + (0x11B00, "V"), + (0x11B0A, "X"), + (0x11C00, "V"), + (0x11C09, "X"), + (0x11C0A, "V"), + (0x11C37, "X"), + (0x11C38, "V"), + (0x11C46, "X"), + (0x11C50, "V"), + (0x11C6D, "X"), + (0x11C70, "V"), + (0x11C90, "X"), + (0x11C92, "V"), + (0x11CA8, "X"), + (0x11CA9, "V"), + (0x11CB7, "X"), + (0x11D00, "V"), + (0x11D07, "X"), + (0x11D08, "V"), + (0x11D0A, "X"), + (0x11D0B, "V"), + (0x11D37, "X"), + (0x11D3A, "V"), + (0x11D3B, "X"), + (0x11D3C, "V"), + (0x11D3E, "X"), + (0x11D3F, "V"), + (0x11D48, "X"), + (0x11D50, "V"), + (0x11D5A, "X"), + (0x11D60, "V"), + (0x11D66, "X"), + (0x11D67, "V"), + (0x11D69, "X"), + (0x11D6A, "V"), + (0x11D8F, "X"), + (0x11D90, "V"), + (0x11D92, "X"), + (0x11D93, "V"), + (0x11D99, "X"), + (0x11DA0, "V"), + (0x11DAA, "X"), + (0x11EE0, "V"), + (0x11EF9, "X"), + (0x11F00, "V"), + (0x11F11, "X"), + (0x11F12, "V"), + (0x11F3B, "X"), + (0x11F3E, "V"), + ] + + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F5A, "X"), + (0x11FB0, "V"), + (0x11FB1, "X"), + (0x11FC0, "V"), + (0x11FF2, "X"), + (0x11FFF, "V"), + (0x1239A, "X"), + (0x12400, "V"), + (0x1246F, "X"), + (0x12470, "V"), + (0x12475, "X"), + (0x12480, "V"), + (0x12544, "X"), + (0x12F90, "V"), + (0x12FF3, "X"), + (0x13000, "V"), + (0x13430, "X"), + (0x13440, "V"), + (0x13456, "X"), + (0x14400, "V"), + (0x14647, "X"), + (0x16800, "V"), + (0x16A39, "X"), + (0x16A40, "V"), + (0x16A5F, "X"), + (0x16A60, "V"), + (0x16A6A, "X"), + (0x16A6E, "V"), + (0x16ABF, "X"), + (0x16AC0, "V"), + (0x16ACA, "X"), + (0x16AD0, "V"), + (0x16AEE, "X"), + (0x16AF0, "V"), + (0x16AF6, "X"), + (0x16B00, "V"), + (0x16B46, "X"), + (0x16B50, "V"), + (0x16B5A, "X"), + (0x16B5B, "V"), + (0x16B62, "X"), + (0x16B63, "V"), + (0x16B78, "X"), + (0x16B7D, "V"), + (0x16B90, "X"), + (0x16E40, "M", "𖹠"), + (0x16E41, "M", "𖹡"), + (0x16E42, "M", "𖹢"), + (0x16E43, "M", "𖹣"), + (0x16E44, "M", "𖹤"), + (0x16E45, "M", "𖹥"), + (0x16E46, "M", "𖹦"), + (0x16E47, "M", "𖹧"), + (0x16E48, "M", "𖹨"), + (0x16E49, "M", "𖹩"), + (0x16E4A, "M", "𖹪"), + (0x16E4B, "M", "𖹫"), + (0x16E4C, "M", "𖹬"), + (0x16E4D, "M", "𖹭"), + (0x16E4E, "M", "𖹮"), + (0x16E4F, "M", "𖹯"), + (0x16E50, "M", "𖹰"), + (0x16E51, "M", "𖹱"), + (0x16E52, "M", "𖹲"), + (0x16E53, "M", "𖹳"), + (0x16E54, "M", "𖹴"), + (0x16E55, "M", "𖹵"), + (0x16E56, "M", "𖹶"), + (0x16E57, "M", "𖹷"), + (0x16E58, "M", "𖹸"), + (0x16E59, "M", "𖹹"), + (0x16E5A, "M", "𖹺"), + (0x16E5B, "M", "𖹻"), + (0x16E5C, "M", "𖹼"), + (0x16E5D, "M", "𖹽"), + (0x16E5E, "M", "𖹾"), + (0x16E5F, "M", "𖹿"), + (0x16E60, "V"), + (0x16E9B, "X"), + (0x16F00, "V"), + (0x16F4B, "X"), + (0x16F4F, "V"), + (0x16F88, "X"), + (0x16F8F, "V"), + (0x16FA0, "X"), + (0x16FE0, "V"), + (0x16FE5, "X"), + (0x16FF0, "V"), + (0x16FF2, "X"), + (0x17000, "V"), + (0x187F8, "X"), + (0x18800, "V"), + (0x18CD6, "X"), + (0x18D00, "V"), + (0x18D09, "X"), + (0x1AFF0, "V"), + (0x1AFF4, "X"), + (0x1AFF5, "V"), + (0x1AFFC, "X"), + (0x1AFFD, "V"), + ] + + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFFF, "X"), + (0x1B000, "V"), + (0x1B123, "X"), + (0x1B132, "V"), + (0x1B133, "X"), + (0x1B150, 
"V"), + (0x1B153, "X"), + (0x1B155, "V"), + (0x1B156, "X"), + (0x1B164, "V"), + (0x1B168, "X"), + (0x1B170, "V"), + (0x1B2FC, "X"), + (0x1BC00, "V"), + (0x1BC6B, "X"), + (0x1BC70, "V"), + (0x1BC7D, "X"), + (0x1BC80, "V"), + (0x1BC89, "X"), + (0x1BC90, "V"), + (0x1BC9A, "X"), + (0x1BC9C, "V"), + (0x1BCA0, "I"), + (0x1BCA4, "X"), + (0x1CF00, "V"), + (0x1CF2E, "X"), + (0x1CF30, "V"), + (0x1CF47, "X"), + (0x1CF50, "V"), + (0x1CFC4, "X"), + (0x1D000, "V"), + (0x1D0F6, "X"), + (0x1D100, "V"), + (0x1D127, "X"), + (0x1D129, "V"), + (0x1D15E, "M", "𝅗𝅥"), + (0x1D15F, "M", "𝅘𝅥"), + (0x1D160, "M", "𝅘𝅥𝅮"), + (0x1D161, "M", "𝅘𝅥𝅯"), + (0x1D162, "M", "𝅘𝅥𝅰"), + (0x1D163, "M", "𝅘𝅥𝅱"), + (0x1D164, "M", "𝅘𝅥𝅲"), + (0x1D165, "V"), + (0x1D173, "X"), + (0x1D17B, "V"), + (0x1D1BB, "M", "𝆹𝅥"), + (0x1D1BC, "M", "𝆺𝅥"), + (0x1D1BD, "M", "𝆹𝅥𝅮"), + (0x1D1BE, "M", "𝆺𝅥𝅮"), + (0x1D1BF, "M", "𝆹𝅥𝅯"), + (0x1D1C0, "M", "𝆺𝅥𝅯"), + (0x1D1C1, "V"), + (0x1D1EB, "X"), + (0x1D200, "V"), + (0x1D246, "X"), + (0x1D2C0, "V"), + (0x1D2D4, "X"), + (0x1D2E0, "V"), + (0x1D2F4, "X"), + (0x1D300, "V"), + (0x1D357, "X"), + (0x1D360, "V"), + (0x1D379, "X"), + (0x1D400, "M", "a"), + (0x1D401, "M", "b"), + (0x1D402, "M", "c"), + (0x1D403, "M", "d"), + (0x1D404, "M", "e"), + (0x1D405, "M", "f"), + (0x1D406, "M", "g"), + (0x1D407, "M", "h"), + (0x1D408, "M", "i"), + (0x1D409, "M", "j"), + (0x1D40A, "M", "k"), + (0x1D40B, "M", "l"), + (0x1D40C, "M", "m"), + (0x1D40D, "M", "n"), + (0x1D40E, "M", "o"), + (0x1D40F, "M", "p"), + (0x1D410, "M", "q"), + (0x1D411, "M", "r"), + (0x1D412, "M", "s"), + (0x1D413, "M", "t"), + (0x1D414, "M", "u"), + (0x1D415, "M", "v"), + (0x1D416, "M", "w"), + (0x1D417, "M", "x"), + (0x1D418, "M", "y"), + (0x1D419, "M", "z"), + (0x1D41A, "M", "a"), + (0x1D41B, "M", "b"), + (0x1D41C, "M", "c"), + (0x1D41D, "M", "d"), + (0x1D41E, "M", "e"), + (0x1D41F, "M", "f"), + (0x1D420, "M", "g"), + (0x1D421, "M", "h"), + (0x1D422, "M", "i"), + (0x1D423, "M", "j"), + (0x1D424, "M", "k"), + ] + + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D425, "M", "l"), + (0x1D426, "M", "m"), + (0x1D427, "M", "n"), + (0x1D428, "M", "o"), + (0x1D429, "M", "p"), + (0x1D42A, "M", "q"), + (0x1D42B, "M", "r"), + (0x1D42C, "M", "s"), + (0x1D42D, "M", "t"), + (0x1D42E, "M", "u"), + (0x1D42F, "M", "v"), + (0x1D430, "M", "w"), + (0x1D431, "M", "x"), + (0x1D432, "M", "y"), + (0x1D433, "M", "z"), + (0x1D434, "M", "a"), + (0x1D435, "M", "b"), + (0x1D436, "M", "c"), + (0x1D437, "M", "d"), + (0x1D438, "M", "e"), + (0x1D439, "M", "f"), + (0x1D43A, "M", "g"), + (0x1D43B, "M", "h"), + (0x1D43C, "M", "i"), + (0x1D43D, "M", "j"), + (0x1D43E, "M", "k"), + (0x1D43F, "M", "l"), + (0x1D440, "M", "m"), + (0x1D441, "M", "n"), + (0x1D442, "M", "o"), + (0x1D443, "M", "p"), + (0x1D444, "M", "q"), + (0x1D445, "M", "r"), + (0x1D446, "M", "s"), + (0x1D447, "M", "t"), + (0x1D448, "M", "u"), + (0x1D449, "M", "v"), + (0x1D44A, "M", "w"), + (0x1D44B, "M", "x"), + (0x1D44C, "M", "y"), + (0x1D44D, "M", "z"), + (0x1D44E, "M", "a"), + (0x1D44F, "M", "b"), + (0x1D450, "M", "c"), + (0x1D451, "M", "d"), + (0x1D452, "M", "e"), + (0x1D453, "M", "f"), + (0x1D454, "M", "g"), + (0x1D455, "X"), + (0x1D456, "M", "i"), + (0x1D457, "M", "j"), + (0x1D458, "M", "k"), + (0x1D459, "M", "l"), + (0x1D45A, "M", "m"), + (0x1D45B, "M", "n"), + (0x1D45C, "M", "o"), + (0x1D45D, "M", "p"), + (0x1D45E, "M", "q"), + (0x1D45F, "M", "r"), + (0x1D460, "M", "s"), + (0x1D461, "M", "t"), + (0x1D462, "M", "u"), + (0x1D463, "M", "v"), + (0x1D464, "M", "w"), + (0x1D465, "M", "x"), + 
(0x1D466, "M", "y"), + (0x1D467, "M", "z"), + (0x1D468, "M", "a"), + (0x1D469, "M", "b"), + (0x1D46A, "M", "c"), + (0x1D46B, "M", "d"), + (0x1D46C, "M", "e"), + (0x1D46D, "M", "f"), + (0x1D46E, "M", "g"), + (0x1D46F, "M", "h"), + (0x1D470, "M", "i"), + (0x1D471, "M", "j"), + (0x1D472, "M", "k"), + (0x1D473, "M", "l"), + (0x1D474, "M", "m"), + (0x1D475, "M", "n"), + (0x1D476, "M", "o"), + (0x1D477, "M", "p"), + (0x1D478, "M", "q"), + (0x1D479, "M", "r"), + (0x1D47A, "M", "s"), + (0x1D47B, "M", "t"), + (0x1D47C, "M", "u"), + (0x1D47D, "M", "v"), + (0x1D47E, "M", "w"), + (0x1D47F, "M", "x"), + (0x1D480, "M", "y"), + (0x1D481, "M", "z"), + (0x1D482, "M", "a"), + (0x1D483, "M", "b"), + (0x1D484, "M", "c"), + (0x1D485, "M", "d"), + (0x1D486, "M", "e"), + (0x1D487, "M", "f"), + (0x1D488, "M", "g"), + ] + + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D489, "M", "h"), + (0x1D48A, "M", "i"), + (0x1D48B, "M", "j"), + (0x1D48C, "M", "k"), + (0x1D48D, "M", "l"), + (0x1D48E, "M", "m"), + (0x1D48F, "M", "n"), + (0x1D490, "M", "o"), + (0x1D491, "M", "p"), + (0x1D492, "M", "q"), + (0x1D493, "M", "r"), + (0x1D494, "M", "s"), + (0x1D495, "M", "t"), + (0x1D496, "M", "u"), + (0x1D497, "M", "v"), + (0x1D498, "M", "w"), + (0x1D499, "M", "x"), + (0x1D49A, "M", "y"), + (0x1D49B, "M", "z"), + (0x1D49C, "M", "a"), + (0x1D49D, "X"), + (0x1D49E, "M", "c"), + (0x1D49F, "M", "d"), + (0x1D4A0, "X"), + (0x1D4A2, "M", "g"), + (0x1D4A3, "X"), + (0x1D4A5, "M", "j"), + (0x1D4A6, "M", "k"), + (0x1D4A7, "X"), + (0x1D4A9, "M", "n"), + (0x1D4AA, "M", "o"), + (0x1D4AB, "M", "p"), + (0x1D4AC, "M", "q"), + (0x1D4AD, "X"), + (0x1D4AE, "M", "s"), + (0x1D4AF, "M", "t"), + (0x1D4B0, "M", "u"), + (0x1D4B1, "M", "v"), + (0x1D4B2, "M", "w"), + (0x1D4B3, "M", "x"), + (0x1D4B4, "M", "y"), + (0x1D4B5, "M", "z"), + (0x1D4B6, "M", "a"), + (0x1D4B7, "M", "b"), + (0x1D4B8, "M", "c"), + (0x1D4B9, "M", "d"), + (0x1D4BA, "X"), + (0x1D4BB, "M", "f"), + (0x1D4BC, "X"), + (0x1D4BD, "M", "h"), + (0x1D4BE, "M", "i"), + (0x1D4BF, "M", "j"), + (0x1D4C0, "M", "k"), + (0x1D4C1, "M", "l"), + (0x1D4C2, "M", "m"), + (0x1D4C3, "M", "n"), + (0x1D4C4, "X"), + (0x1D4C5, "M", "p"), + (0x1D4C6, "M", "q"), + (0x1D4C7, "M", "r"), + (0x1D4C8, "M", "s"), + (0x1D4C9, "M", "t"), + (0x1D4CA, "M", "u"), + (0x1D4CB, "M", "v"), + (0x1D4CC, "M", "w"), + (0x1D4CD, "M", "x"), + (0x1D4CE, "M", "y"), + (0x1D4CF, "M", "z"), + (0x1D4D0, "M", "a"), + (0x1D4D1, "M", "b"), + (0x1D4D2, "M", "c"), + (0x1D4D3, "M", "d"), + (0x1D4D4, "M", "e"), + (0x1D4D5, "M", "f"), + (0x1D4D6, "M", "g"), + (0x1D4D7, "M", "h"), + (0x1D4D8, "M", "i"), + (0x1D4D9, "M", "j"), + (0x1D4DA, "M", "k"), + (0x1D4DB, "M", "l"), + (0x1D4DC, "M", "m"), + (0x1D4DD, "M", "n"), + (0x1D4DE, "M", "o"), + (0x1D4DF, "M", "p"), + (0x1D4E0, "M", "q"), + (0x1D4E1, "M", "r"), + (0x1D4E2, "M", "s"), + (0x1D4E3, "M", "t"), + (0x1D4E4, "M", "u"), + (0x1D4E5, "M", "v"), + (0x1D4E6, "M", "w"), + (0x1D4E7, "M", "x"), + (0x1D4E8, "M", "y"), + (0x1D4E9, "M", "z"), + (0x1D4EA, "M", "a"), + (0x1D4EB, "M", "b"), + (0x1D4EC, "M", "c"), + (0x1D4ED, "M", "d"), + (0x1D4EE, "M", "e"), + (0x1D4EF, "M", "f"), + ] + + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4F0, "M", "g"), + (0x1D4F1, "M", "h"), + (0x1D4F2, "M", "i"), + (0x1D4F3, "M", "j"), + (0x1D4F4, "M", "k"), + (0x1D4F5, "M", "l"), + (0x1D4F6, "M", "m"), + (0x1D4F7, "M", "n"), + (0x1D4F8, "M", "o"), + (0x1D4F9, "M", "p"), + (0x1D4FA, "M", "q"), + (0x1D4FB, "M", "r"), + (0x1D4FC, "M", "s"), + (0x1D4FD, 
"M", "t"), + (0x1D4FE, "M", "u"), + (0x1D4FF, "M", "v"), + (0x1D500, "M", "w"), + (0x1D501, "M", "x"), + (0x1D502, "M", "y"), + (0x1D503, "M", "z"), + (0x1D504, "M", "a"), + (0x1D505, "M", "b"), + (0x1D506, "X"), + (0x1D507, "M", "d"), + (0x1D508, "M", "e"), + (0x1D509, "M", "f"), + (0x1D50A, "M", "g"), + (0x1D50B, "X"), + (0x1D50D, "M", "j"), + (0x1D50E, "M", "k"), + (0x1D50F, "M", "l"), + (0x1D510, "M", "m"), + (0x1D511, "M", "n"), + (0x1D512, "M", "o"), + (0x1D513, "M", "p"), + (0x1D514, "M", "q"), + (0x1D515, "X"), + (0x1D516, "M", "s"), + (0x1D517, "M", "t"), + (0x1D518, "M", "u"), + (0x1D519, "M", "v"), + (0x1D51A, "M", "w"), + (0x1D51B, "M", "x"), + (0x1D51C, "M", "y"), + (0x1D51D, "X"), + (0x1D51E, "M", "a"), + (0x1D51F, "M", "b"), + (0x1D520, "M", "c"), + (0x1D521, "M", "d"), + (0x1D522, "M", "e"), + (0x1D523, "M", "f"), + (0x1D524, "M", "g"), + (0x1D525, "M", "h"), + (0x1D526, "M", "i"), + (0x1D527, "M", "j"), + (0x1D528, "M", "k"), + (0x1D529, "M", "l"), + (0x1D52A, "M", "m"), + (0x1D52B, "M", "n"), + (0x1D52C, "M", "o"), + (0x1D52D, "M", "p"), + (0x1D52E, "M", "q"), + (0x1D52F, "M", "r"), + (0x1D530, "M", "s"), + (0x1D531, "M", "t"), + (0x1D532, "M", "u"), + (0x1D533, "M", "v"), + (0x1D534, "M", "w"), + (0x1D535, "M", "x"), + (0x1D536, "M", "y"), + (0x1D537, "M", "z"), + (0x1D538, "M", "a"), + (0x1D539, "M", "b"), + (0x1D53A, "X"), + (0x1D53B, "M", "d"), + (0x1D53C, "M", "e"), + (0x1D53D, "M", "f"), + (0x1D53E, "M", "g"), + (0x1D53F, "X"), + (0x1D540, "M", "i"), + (0x1D541, "M", "j"), + (0x1D542, "M", "k"), + (0x1D543, "M", "l"), + (0x1D544, "M", "m"), + (0x1D545, "X"), + (0x1D546, "M", "o"), + (0x1D547, "X"), + (0x1D54A, "M", "s"), + (0x1D54B, "M", "t"), + (0x1D54C, "M", "u"), + (0x1D54D, "M", "v"), + (0x1D54E, "M", "w"), + (0x1D54F, "M", "x"), + (0x1D550, "M", "y"), + (0x1D551, "X"), + (0x1D552, "M", "a"), + (0x1D553, "M", "b"), + (0x1D554, "M", "c"), + (0x1D555, "M", "d"), + (0x1D556, "M", "e"), + ] + + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D557, "M", "f"), + (0x1D558, "M", "g"), + (0x1D559, "M", "h"), + (0x1D55A, "M", "i"), + (0x1D55B, "M", "j"), + (0x1D55C, "M", "k"), + (0x1D55D, "M", "l"), + (0x1D55E, "M", "m"), + (0x1D55F, "M", "n"), + (0x1D560, "M", "o"), + (0x1D561, "M", "p"), + (0x1D562, "M", "q"), + (0x1D563, "M", "r"), + (0x1D564, "M", "s"), + (0x1D565, "M", "t"), + (0x1D566, "M", "u"), + (0x1D567, "M", "v"), + (0x1D568, "M", "w"), + (0x1D569, "M", "x"), + (0x1D56A, "M", "y"), + (0x1D56B, "M", "z"), + (0x1D56C, "M", "a"), + (0x1D56D, "M", "b"), + (0x1D56E, "M", "c"), + (0x1D56F, "M", "d"), + (0x1D570, "M", "e"), + (0x1D571, "M", "f"), + (0x1D572, "M", "g"), + (0x1D573, "M", "h"), + (0x1D574, "M", "i"), + (0x1D575, "M", "j"), + (0x1D576, "M", "k"), + (0x1D577, "M", "l"), + (0x1D578, "M", "m"), + (0x1D579, "M", "n"), + (0x1D57A, "M", "o"), + (0x1D57B, "M", "p"), + (0x1D57C, "M", "q"), + (0x1D57D, "M", "r"), + (0x1D57E, "M", "s"), + (0x1D57F, "M", "t"), + (0x1D580, "M", "u"), + (0x1D581, "M", "v"), + (0x1D582, "M", "w"), + (0x1D583, "M", "x"), + (0x1D584, "M", "y"), + (0x1D585, "M", "z"), + (0x1D586, "M", "a"), + (0x1D587, "M", "b"), + (0x1D588, "M", "c"), + (0x1D589, "M", "d"), + (0x1D58A, "M", "e"), + (0x1D58B, "M", "f"), + (0x1D58C, "M", "g"), + (0x1D58D, "M", "h"), + (0x1D58E, "M", "i"), + (0x1D58F, "M", "j"), + (0x1D590, "M", "k"), + (0x1D591, "M", "l"), + (0x1D592, "M", "m"), + (0x1D593, "M", "n"), + (0x1D594, "M", "o"), + (0x1D595, "M", "p"), + (0x1D596, "M", "q"), + (0x1D597, "M", "r"), + (0x1D598, "M", "s"), + 
(0x1D599, "M", "t"), + (0x1D59A, "M", "u"), + (0x1D59B, "M", "v"), + (0x1D59C, "M", "w"), + (0x1D59D, "M", "x"), + (0x1D59E, "M", "y"), + (0x1D59F, "M", "z"), + (0x1D5A0, "M", "a"), + (0x1D5A1, "M", "b"), + (0x1D5A2, "M", "c"), + (0x1D5A3, "M", "d"), + (0x1D5A4, "M", "e"), + (0x1D5A5, "M", "f"), + (0x1D5A6, "M", "g"), + (0x1D5A7, "M", "h"), + (0x1D5A8, "M", "i"), + (0x1D5A9, "M", "j"), + (0x1D5AA, "M", "k"), + (0x1D5AB, "M", "l"), + (0x1D5AC, "M", "m"), + (0x1D5AD, "M", "n"), + (0x1D5AE, "M", "o"), + (0x1D5AF, "M", "p"), + (0x1D5B0, "M", "q"), + (0x1D5B1, "M", "r"), + (0x1D5B2, "M", "s"), + (0x1D5B3, "M", "t"), + (0x1D5B4, "M", "u"), + (0x1D5B5, "M", "v"), + (0x1D5B6, "M", "w"), + (0x1D5B7, "M", "x"), + (0x1D5B8, "M", "y"), + (0x1D5B9, "M", "z"), + (0x1D5BA, "M", "a"), + ] + + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D5BB, "M", "b"), + (0x1D5BC, "M", "c"), + (0x1D5BD, "M", "d"), + (0x1D5BE, "M", "e"), + (0x1D5BF, "M", "f"), + (0x1D5C0, "M", "g"), + (0x1D5C1, "M", "h"), + (0x1D5C2, "M", "i"), + (0x1D5C3, "M", "j"), + (0x1D5C4, "M", "k"), + (0x1D5C5, "M", "l"), + (0x1D5C6, "M", "m"), + (0x1D5C7, "M", "n"), + (0x1D5C8, "M", "o"), + (0x1D5C9, "M", "p"), + (0x1D5CA, "M", "q"), + (0x1D5CB, "M", "r"), + (0x1D5CC, "M", "s"), + (0x1D5CD, "M", "t"), + (0x1D5CE, "M", "u"), + (0x1D5CF, "M", "v"), + (0x1D5D0, "M", "w"), + (0x1D5D1, "M", "x"), + (0x1D5D2, "M", "y"), + (0x1D5D3, "M", "z"), + (0x1D5D4, "M", "a"), + (0x1D5D5, "M", "b"), + (0x1D5D6, "M", "c"), + (0x1D5D7, "M", "d"), + (0x1D5D8, "M", "e"), + (0x1D5D9, "M", "f"), + (0x1D5DA, "M", "g"), + (0x1D5DB, "M", "h"), + (0x1D5DC, "M", "i"), + (0x1D5DD, "M", "j"), + (0x1D5DE, "M", "k"), + (0x1D5DF, "M", "l"), + (0x1D5E0, "M", "m"), + (0x1D5E1, "M", "n"), + (0x1D5E2, "M", "o"), + (0x1D5E3, "M", "p"), + (0x1D5E4, "M", "q"), + (0x1D5E5, "M", "r"), + (0x1D5E6, "M", "s"), + (0x1D5E7, "M", "t"), + (0x1D5E8, "M", "u"), + (0x1D5E9, "M", "v"), + (0x1D5EA, "M", "w"), + (0x1D5EB, "M", "x"), + (0x1D5EC, "M", "y"), + (0x1D5ED, "M", "z"), + (0x1D5EE, "M", "a"), + (0x1D5EF, "M", "b"), + (0x1D5F0, "M", "c"), + (0x1D5F1, "M", "d"), + (0x1D5F2, "M", "e"), + (0x1D5F3, "M", "f"), + (0x1D5F4, "M", "g"), + (0x1D5F5, "M", "h"), + (0x1D5F6, "M", "i"), + (0x1D5F7, "M", "j"), + (0x1D5F8, "M", "k"), + (0x1D5F9, "M", "l"), + (0x1D5FA, "M", "m"), + (0x1D5FB, "M", "n"), + (0x1D5FC, "M", "o"), + (0x1D5FD, "M", "p"), + (0x1D5FE, "M", "q"), + (0x1D5FF, "M", "r"), + (0x1D600, "M", "s"), + (0x1D601, "M", "t"), + (0x1D602, "M", "u"), + (0x1D603, "M", "v"), + (0x1D604, "M", "w"), + (0x1D605, "M", "x"), + (0x1D606, "M", "y"), + (0x1D607, "M", "z"), + (0x1D608, "M", "a"), + (0x1D609, "M", "b"), + (0x1D60A, "M", "c"), + (0x1D60B, "M", "d"), + (0x1D60C, "M", "e"), + (0x1D60D, "M", "f"), + (0x1D60E, "M", "g"), + (0x1D60F, "M", "h"), + (0x1D610, "M", "i"), + (0x1D611, "M", "j"), + (0x1D612, "M", "k"), + (0x1D613, "M", "l"), + (0x1D614, "M", "m"), + (0x1D615, "M", "n"), + (0x1D616, "M", "o"), + (0x1D617, "M", "p"), + (0x1D618, "M", "q"), + (0x1D619, "M", "r"), + (0x1D61A, "M", "s"), + (0x1D61B, "M", "t"), + (0x1D61C, "M", "u"), + (0x1D61D, "M", "v"), + (0x1D61E, "M", "w"), + ] + + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61F, "M", "x"), + (0x1D620, "M", "y"), + (0x1D621, "M", "z"), + (0x1D622, "M", "a"), + (0x1D623, "M", "b"), + (0x1D624, "M", "c"), + (0x1D625, "M", "d"), + (0x1D626, "M", "e"), + (0x1D627, "M", "f"), + (0x1D628, "M", "g"), + (0x1D629, "M", "h"), + (0x1D62A, "M", "i"), + (0x1D62B, "M", 
"j"), + (0x1D62C, "M", "k"), + (0x1D62D, "M", "l"), + (0x1D62E, "M", "m"), + (0x1D62F, "M", "n"), + (0x1D630, "M", "o"), + (0x1D631, "M", "p"), + (0x1D632, "M", "q"), + (0x1D633, "M", "r"), + (0x1D634, "M", "s"), + (0x1D635, "M", "t"), + (0x1D636, "M", "u"), + (0x1D637, "M", "v"), + (0x1D638, "M", "w"), + (0x1D639, "M", "x"), + (0x1D63A, "M", "y"), + (0x1D63B, "M", "z"), + (0x1D63C, "M", "a"), + (0x1D63D, "M", "b"), + (0x1D63E, "M", "c"), + (0x1D63F, "M", "d"), + (0x1D640, "M", "e"), + (0x1D641, "M", "f"), + (0x1D642, "M", "g"), + (0x1D643, "M", "h"), + (0x1D644, "M", "i"), + (0x1D645, "M", "j"), + (0x1D646, "M", "k"), + (0x1D647, "M", "l"), + (0x1D648, "M", "m"), + (0x1D649, "M", "n"), + (0x1D64A, "M", "o"), + (0x1D64B, "M", "p"), + (0x1D64C, "M", "q"), + (0x1D64D, "M", "r"), + (0x1D64E, "M", "s"), + (0x1D64F, "M", "t"), + (0x1D650, "M", "u"), + (0x1D651, "M", "v"), + (0x1D652, "M", "w"), + (0x1D653, "M", "x"), + (0x1D654, "M", "y"), + (0x1D655, "M", "z"), + (0x1D656, "M", "a"), + (0x1D657, "M", "b"), + (0x1D658, "M", "c"), + (0x1D659, "M", "d"), + (0x1D65A, "M", "e"), + (0x1D65B, "M", "f"), + (0x1D65C, "M", "g"), + (0x1D65D, "M", "h"), + (0x1D65E, "M", "i"), + (0x1D65F, "M", "j"), + (0x1D660, "M", "k"), + (0x1D661, "M", "l"), + (0x1D662, "M", "m"), + (0x1D663, "M", "n"), + (0x1D664, "M", "o"), + (0x1D665, "M", "p"), + (0x1D666, "M", "q"), + (0x1D667, "M", "r"), + (0x1D668, "M", "s"), + (0x1D669, "M", "t"), + (0x1D66A, "M", "u"), + (0x1D66B, "M", "v"), + (0x1D66C, "M", "w"), + (0x1D66D, "M", "x"), + (0x1D66E, "M", "y"), + (0x1D66F, "M", "z"), + (0x1D670, "M", "a"), + (0x1D671, "M", "b"), + (0x1D672, "M", "c"), + (0x1D673, "M", "d"), + (0x1D674, "M", "e"), + (0x1D675, "M", "f"), + (0x1D676, "M", "g"), + (0x1D677, "M", "h"), + (0x1D678, "M", "i"), + (0x1D679, "M", "j"), + (0x1D67A, "M", "k"), + (0x1D67B, "M", "l"), + (0x1D67C, "M", "m"), + (0x1D67D, "M", "n"), + (0x1D67E, "M", "o"), + (0x1D67F, "M", "p"), + (0x1D680, "M", "q"), + (0x1D681, "M", "r"), + (0x1D682, "M", "s"), + ] + + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D683, "M", "t"), + (0x1D684, "M", "u"), + (0x1D685, "M", "v"), + (0x1D686, "M", "w"), + (0x1D687, "M", "x"), + (0x1D688, "M", "y"), + (0x1D689, "M", "z"), + (0x1D68A, "M", "a"), + (0x1D68B, "M", "b"), + (0x1D68C, "M", "c"), + (0x1D68D, "M", "d"), + (0x1D68E, "M", "e"), + (0x1D68F, "M", "f"), + (0x1D690, "M", "g"), + (0x1D691, "M", "h"), + (0x1D692, "M", "i"), + (0x1D693, "M", "j"), + (0x1D694, "M", "k"), + (0x1D695, "M", "l"), + (0x1D696, "M", "m"), + (0x1D697, "M", "n"), + (0x1D698, "M", "o"), + (0x1D699, "M", "p"), + (0x1D69A, "M", "q"), + (0x1D69B, "M", "r"), + (0x1D69C, "M", "s"), + (0x1D69D, "M", "t"), + (0x1D69E, "M", "u"), + (0x1D69F, "M", "v"), + (0x1D6A0, "M", "w"), + (0x1D6A1, "M", "x"), + (0x1D6A2, "M", "y"), + (0x1D6A3, "M", "z"), + (0x1D6A4, "M", "ı"), + (0x1D6A5, "M", "ȷ"), + (0x1D6A6, "X"), + (0x1D6A8, "M", "α"), + (0x1D6A9, "M", "β"), + (0x1D6AA, "M", "γ"), + (0x1D6AB, "M", "δ"), + (0x1D6AC, "M", "ε"), + (0x1D6AD, "M", "ζ"), + (0x1D6AE, "M", "η"), + (0x1D6AF, "M", "θ"), + (0x1D6B0, "M", "ι"), + (0x1D6B1, "M", "κ"), + (0x1D6B2, "M", "λ"), + (0x1D6B3, "M", "μ"), + (0x1D6B4, "M", "ν"), + (0x1D6B5, "M", "ξ"), + (0x1D6B6, "M", "ο"), + (0x1D6B7, "M", "π"), + (0x1D6B8, "M", "ρ"), + (0x1D6B9, "M", "θ"), + (0x1D6BA, "M", "σ"), + (0x1D6BB, "M", "τ"), + (0x1D6BC, "M", "υ"), + (0x1D6BD, "M", "φ"), + (0x1D6BE, "M", "χ"), + (0x1D6BF, "M", "ψ"), + (0x1D6C0, "M", "ω"), + (0x1D6C1, "M", "∇"), + (0x1D6C2, "M", "α"), + (0x1D6C3, 
"M", "β"), + (0x1D6C4, "M", "γ"), + (0x1D6C5, "M", "δ"), + (0x1D6C6, "M", "ε"), + (0x1D6C7, "M", "ζ"), + (0x1D6C8, "M", "η"), + (0x1D6C9, "M", "θ"), + (0x1D6CA, "M", "ι"), + (0x1D6CB, "M", "κ"), + (0x1D6CC, "M", "λ"), + (0x1D6CD, "M", "μ"), + (0x1D6CE, "M", "ν"), + (0x1D6CF, "M", "ξ"), + (0x1D6D0, "M", "ο"), + (0x1D6D1, "M", "π"), + (0x1D6D2, "M", "ρ"), + (0x1D6D3, "M", "σ"), + (0x1D6D5, "M", "τ"), + (0x1D6D6, "M", "υ"), + (0x1D6D7, "M", "φ"), + (0x1D6D8, "M", "χ"), + (0x1D6D9, "M", "ψ"), + (0x1D6DA, "M", "ω"), + (0x1D6DB, "M", "∂"), + (0x1D6DC, "M", "ε"), + (0x1D6DD, "M", "θ"), + (0x1D6DE, "M", "κ"), + (0x1D6DF, "M", "φ"), + (0x1D6E0, "M", "ρ"), + (0x1D6E1, "M", "π"), + (0x1D6E2, "M", "α"), + (0x1D6E3, "M", "β"), + (0x1D6E4, "M", "γ"), + (0x1D6E5, "M", "δ"), + (0x1D6E6, "M", "ε"), + (0x1D6E7, "M", "ζ"), + (0x1D6E8, "M", "η"), + ] + + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6E9, "M", "θ"), + (0x1D6EA, "M", "ι"), + (0x1D6EB, "M", "κ"), + (0x1D6EC, "M", "λ"), + (0x1D6ED, "M", "μ"), + (0x1D6EE, "M", "ν"), + (0x1D6EF, "M", "ξ"), + (0x1D6F0, "M", "ο"), + (0x1D6F1, "M", "π"), + (0x1D6F2, "M", "ρ"), + (0x1D6F3, "M", "θ"), + (0x1D6F4, "M", "σ"), + (0x1D6F5, "M", "τ"), + (0x1D6F6, "M", "υ"), + (0x1D6F7, "M", "φ"), + (0x1D6F8, "M", "χ"), + (0x1D6F9, "M", "ψ"), + (0x1D6FA, "M", "ω"), + (0x1D6FB, "M", "∇"), + (0x1D6FC, "M", "α"), + (0x1D6FD, "M", "β"), + (0x1D6FE, "M", "γ"), + (0x1D6FF, "M", "δ"), + (0x1D700, "M", "ε"), + (0x1D701, "M", "ζ"), + (0x1D702, "M", "η"), + (0x1D703, "M", "θ"), + (0x1D704, "M", "ι"), + (0x1D705, "M", "κ"), + (0x1D706, "M", "λ"), + (0x1D707, "M", "μ"), + (0x1D708, "M", "ν"), + (0x1D709, "M", "ξ"), + (0x1D70A, "M", "ο"), + (0x1D70B, "M", "π"), + (0x1D70C, "M", "ρ"), + (0x1D70D, "M", "σ"), + (0x1D70F, "M", "τ"), + (0x1D710, "M", "υ"), + (0x1D711, "M", "φ"), + (0x1D712, "M", "χ"), + (0x1D713, "M", "ψ"), + (0x1D714, "M", "ω"), + (0x1D715, "M", "∂"), + (0x1D716, "M", "ε"), + (0x1D717, "M", "θ"), + (0x1D718, "M", "κ"), + (0x1D719, "M", "φ"), + (0x1D71A, "M", "ρ"), + (0x1D71B, "M", "π"), + (0x1D71C, "M", "α"), + (0x1D71D, "M", "β"), + (0x1D71E, "M", "γ"), + (0x1D71F, "M", "δ"), + (0x1D720, "M", "ε"), + (0x1D721, "M", "ζ"), + (0x1D722, "M", "η"), + (0x1D723, "M", "θ"), + (0x1D724, "M", "ι"), + (0x1D725, "M", "κ"), + (0x1D726, "M", "λ"), + (0x1D727, "M", "μ"), + (0x1D728, "M", "ν"), + (0x1D729, "M", "ξ"), + (0x1D72A, "M", "ο"), + (0x1D72B, "M", "π"), + (0x1D72C, "M", "ρ"), + (0x1D72D, "M", "θ"), + (0x1D72E, "M", "σ"), + (0x1D72F, "M", "τ"), + (0x1D730, "M", "υ"), + (0x1D731, "M", "φ"), + (0x1D732, "M", "χ"), + (0x1D733, "M", "ψ"), + (0x1D734, "M", "ω"), + (0x1D735, "M", "∇"), + (0x1D736, "M", "α"), + (0x1D737, "M", "β"), + (0x1D738, "M", "γ"), + (0x1D739, "M", "δ"), + (0x1D73A, "M", "ε"), + (0x1D73B, "M", "ζ"), + (0x1D73C, "M", "η"), + (0x1D73D, "M", "θ"), + (0x1D73E, "M", "ι"), + (0x1D73F, "M", "κ"), + (0x1D740, "M", "λ"), + (0x1D741, "M", "μ"), + (0x1D742, "M", "ν"), + (0x1D743, "M", "ξ"), + (0x1D744, "M", "ο"), + (0x1D745, "M", "π"), + (0x1D746, "M", "ρ"), + (0x1D747, "M", "σ"), + (0x1D749, "M", "τ"), + (0x1D74A, "M", "υ"), + (0x1D74B, "M", "φ"), + (0x1D74C, "M", "χ"), + (0x1D74D, "M", "ψ"), + (0x1D74E, "M", "ω"), + ] + + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74F, "M", "∂"), + (0x1D750, "M", "ε"), + (0x1D751, "M", "θ"), + (0x1D752, "M", "κ"), + (0x1D753, "M", "φ"), + (0x1D754, "M", "ρ"), + (0x1D755, "M", "π"), + (0x1D756, "M", "α"), + (0x1D757, "M", "β"), + (0x1D758, "M", "γ"), + 
(0x1D759, "M", "δ"), + (0x1D75A, "M", "ε"), + (0x1D75B, "M", "ζ"), + (0x1D75C, "M", "η"), + (0x1D75D, "M", "θ"), + (0x1D75E, "M", "ι"), + (0x1D75F, "M", "κ"), + (0x1D760, "M", "λ"), + (0x1D761, "M", "μ"), + (0x1D762, "M", "ν"), + (0x1D763, "M", "ξ"), + (0x1D764, "M", "ο"), + (0x1D765, "M", "π"), + (0x1D766, "M", "ρ"), + (0x1D767, "M", "θ"), + (0x1D768, "M", "σ"), + (0x1D769, "M", "τ"), + (0x1D76A, "M", "υ"), + (0x1D76B, "M", "φ"), + (0x1D76C, "M", "χ"), + (0x1D76D, "M", "ψ"), + (0x1D76E, "M", "ω"), + (0x1D76F, "M", "∇"), + (0x1D770, "M", "α"), + (0x1D771, "M", "β"), + (0x1D772, "M", "γ"), + (0x1D773, "M", "δ"), + (0x1D774, "M", "ε"), + (0x1D775, "M", "ζ"), + (0x1D776, "M", "η"), + (0x1D777, "M", "θ"), + (0x1D778, "M", "ι"), + (0x1D779, "M", "κ"), + (0x1D77A, "M", "λ"), + (0x1D77B, "M", "μ"), + (0x1D77C, "M", "ν"), + (0x1D77D, "M", "ξ"), + (0x1D77E, "M", "ο"), + (0x1D77F, "M", "π"), + (0x1D780, "M", "ρ"), + (0x1D781, "M", "σ"), + (0x1D783, "M", "τ"), + (0x1D784, "M", "υ"), + (0x1D785, "M", "φ"), + (0x1D786, "M", "χ"), + (0x1D787, "M", "ψ"), + (0x1D788, "M", "ω"), + (0x1D789, "M", "∂"), + (0x1D78A, "M", "ε"), + (0x1D78B, "M", "θ"), + (0x1D78C, "M", "κ"), + (0x1D78D, "M", "φ"), + (0x1D78E, "M", "ρ"), + (0x1D78F, "M", "π"), + (0x1D790, "M", "α"), + (0x1D791, "M", "β"), + (0x1D792, "M", "γ"), + (0x1D793, "M", "δ"), + (0x1D794, "M", "ε"), + (0x1D795, "M", "ζ"), + (0x1D796, "M", "η"), + (0x1D797, "M", "θ"), + (0x1D798, "M", "ι"), + (0x1D799, "M", "κ"), + (0x1D79A, "M", "λ"), + (0x1D79B, "M", "μ"), + (0x1D79C, "M", "ν"), + (0x1D79D, "M", "ξ"), + (0x1D79E, "M", "ο"), + (0x1D79F, "M", "π"), + (0x1D7A0, "M", "ρ"), + (0x1D7A1, "M", "θ"), + (0x1D7A2, "M", "σ"), + (0x1D7A3, "M", "τ"), + (0x1D7A4, "M", "υ"), + (0x1D7A5, "M", "φ"), + (0x1D7A6, "M", "χ"), + (0x1D7A7, "M", "ψ"), + (0x1D7A8, "M", "ω"), + (0x1D7A9, "M", "∇"), + (0x1D7AA, "M", "α"), + (0x1D7AB, "M", "β"), + (0x1D7AC, "M", "γ"), + (0x1D7AD, "M", "δ"), + (0x1D7AE, "M", "ε"), + (0x1D7AF, "M", "ζ"), + (0x1D7B0, "M", "η"), + (0x1D7B1, "M", "θ"), + (0x1D7B2, "M", "ι"), + (0x1D7B3, "M", "κ"), + ] + + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B4, "M", "λ"), + (0x1D7B5, "M", "μ"), + (0x1D7B6, "M", "ν"), + (0x1D7B7, "M", "ξ"), + (0x1D7B8, "M", "ο"), + (0x1D7B9, "M", "π"), + (0x1D7BA, "M", "ρ"), + (0x1D7BB, "M", "σ"), + (0x1D7BD, "M", "τ"), + (0x1D7BE, "M", "υ"), + (0x1D7BF, "M", "φ"), + (0x1D7C0, "M", "χ"), + (0x1D7C1, "M", "ψ"), + (0x1D7C2, "M", "ω"), + (0x1D7C3, "M", "∂"), + (0x1D7C4, "M", "ε"), + (0x1D7C5, "M", "θ"), + (0x1D7C6, "M", "κ"), + (0x1D7C7, "M", "φ"), + (0x1D7C8, "M", "ρ"), + (0x1D7C9, "M", "π"), + (0x1D7CA, "M", "ϝ"), + (0x1D7CC, "X"), + (0x1D7CE, "M", "0"), + (0x1D7CF, "M", "1"), + (0x1D7D0, "M", "2"), + (0x1D7D1, "M", "3"), + (0x1D7D2, "M", "4"), + (0x1D7D3, "M", "5"), + (0x1D7D4, "M", "6"), + (0x1D7D5, "M", "7"), + (0x1D7D6, "M", "8"), + (0x1D7D7, "M", "9"), + (0x1D7D8, "M", "0"), + (0x1D7D9, "M", "1"), + (0x1D7DA, "M", "2"), + (0x1D7DB, "M", "3"), + (0x1D7DC, "M", "4"), + (0x1D7DD, "M", "5"), + (0x1D7DE, "M", "6"), + (0x1D7DF, "M", "7"), + (0x1D7E0, "M", "8"), + (0x1D7E1, "M", "9"), + (0x1D7E2, "M", "0"), + (0x1D7E3, "M", "1"), + (0x1D7E4, "M", "2"), + (0x1D7E5, "M", "3"), + (0x1D7E6, "M", "4"), + (0x1D7E7, "M", "5"), + (0x1D7E8, "M", "6"), + (0x1D7E9, "M", "7"), + (0x1D7EA, "M", "8"), + (0x1D7EB, "M", "9"), + (0x1D7EC, "M", "0"), + (0x1D7ED, "M", "1"), + (0x1D7EE, "M", "2"), + (0x1D7EF, "M", "3"), + (0x1D7F0, "M", "4"), + (0x1D7F1, "M", "5"), + (0x1D7F2, "M", "6"), + (0x1D7F3, "M", "7"), 
+ (0x1D7F4, "M", "8"), + (0x1D7F5, "M", "9"), + (0x1D7F6, "M", "0"), + (0x1D7F7, "M", "1"), + (0x1D7F8, "M", "2"), + (0x1D7F9, "M", "3"), + (0x1D7FA, "M", "4"), + (0x1D7FB, "M", "5"), + (0x1D7FC, "M", "6"), + (0x1D7FD, "M", "7"), + (0x1D7FE, "M", "8"), + (0x1D7FF, "M", "9"), + (0x1D800, "V"), + (0x1DA8C, "X"), + (0x1DA9B, "V"), + (0x1DAA0, "X"), + (0x1DAA1, "V"), + (0x1DAB0, "X"), + (0x1DF00, "V"), + (0x1DF1F, "X"), + (0x1DF25, "V"), + (0x1DF2B, "X"), + (0x1E000, "V"), + (0x1E007, "X"), + (0x1E008, "V"), + (0x1E019, "X"), + (0x1E01B, "V"), + (0x1E022, "X"), + (0x1E023, "V"), + (0x1E025, "X"), + (0x1E026, "V"), + (0x1E02B, "X"), + (0x1E030, "M", "а"), + (0x1E031, "M", "б"), + (0x1E032, "M", "в"), + (0x1E033, "M", "г"), + (0x1E034, "M", "д"), + (0x1E035, "M", "е"), + (0x1E036, "M", "ж"), + ] + + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E037, "M", "з"), + (0x1E038, "M", "и"), + (0x1E039, "M", "к"), + (0x1E03A, "M", "л"), + (0x1E03B, "M", "м"), + (0x1E03C, "M", "о"), + (0x1E03D, "M", "п"), + (0x1E03E, "M", "р"), + (0x1E03F, "M", "с"), + (0x1E040, "M", "т"), + (0x1E041, "M", "у"), + (0x1E042, "M", "ф"), + (0x1E043, "M", "х"), + (0x1E044, "M", "ц"), + (0x1E045, "M", "ч"), + (0x1E046, "M", "ш"), + (0x1E047, "M", "ы"), + (0x1E048, "M", "э"), + (0x1E049, "M", "ю"), + (0x1E04A, "M", "ꚉ"), + (0x1E04B, "M", "ә"), + (0x1E04C, "M", "і"), + (0x1E04D, "M", "ј"), + (0x1E04E, "M", "ө"), + (0x1E04F, "M", "ү"), + (0x1E050, "M", "ӏ"), + (0x1E051, "M", "а"), + (0x1E052, "M", "б"), + (0x1E053, "M", "в"), + (0x1E054, "M", "г"), + (0x1E055, "M", "д"), + (0x1E056, "M", "е"), + (0x1E057, "M", "ж"), + (0x1E058, "M", "з"), + (0x1E059, "M", "и"), + (0x1E05A, "M", "к"), + (0x1E05B, "M", "л"), + (0x1E05C, "M", "о"), + (0x1E05D, "M", "п"), + (0x1E05E, "M", "с"), + (0x1E05F, "M", "у"), + (0x1E060, "M", "ф"), + (0x1E061, "M", "х"), + (0x1E062, "M", "ц"), + (0x1E063, "M", "ч"), + (0x1E064, "M", "ш"), + (0x1E065, "M", "ъ"), + (0x1E066, "M", "ы"), + (0x1E067, "M", "ґ"), + (0x1E068, "M", "і"), + (0x1E069, "M", "ѕ"), + (0x1E06A, "M", "џ"), + (0x1E06B, "M", "ҫ"), + (0x1E06C, "M", "ꙑ"), + (0x1E06D, "M", "ұ"), + (0x1E06E, "X"), + (0x1E08F, "V"), + (0x1E090, "X"), + (0x1E100, "V"), + (0x1E12D, "X"), + (0x1E130, "V"), + (0x1E13E, "X"), + (0x1E140, "V"), + (0x1E14A, "X"), + (0x1E14E, "V"), + (0x1E150, "X"), + (0x1E290, "V"), + (0x1E2AF, "X"), + (0x1E2C0, "V"), + (0x1E2FA, "X"), + (0x1E2FF, "V"), + (0x1E300, "X"), + (0x1E4D0, "V"), + (0x1E4FA, "X"), + (0x1E7E0, "V"), + (0x1E7E7, "X"), + (0x1E7E8, "V"), + (0x1E7EC, "X"), + (0x1E7ED, "V"), + (0x1E7EF, "X"), + (0x1E7F0, "V"), + (0x1E7FF, "X"), + (0x1E800, "V"), + (0x1E8C5, "X"), + (0x1E8C7, "V"), + (0x1E8D7, "X"), + (0x1E900, "M", "𞤢"), + (0x1E901, "M", "𞤣"), + (0x1E902, "M", "𞤤"), + (0x1E903, "M", "𞤥"), + (0x1E904, "M", "𞤦"), + (0x1E905, "M", "𞤧"), + (0x1E906, "M", "𞤨"), + (0x1E907, "M", "𞤩"), + (0x1E908, "M", "𞤪"), + (0x1E909, "M", "𞤫"), + (0x1E90A, "M", "𞤬"), + (0x1E90B, "M", "𞤭"), + (0x1E90C, "M", "𞤮"), + (0x1E90D, "M", "𞤯"), + ] + + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90E, "M", "𞤰"), + (0x1E90F, "M", "𞤱"), + (0x1E910, "M", "𞤲"), + (0x1E911, "M", "𞤳"), + (0x1E912, "M", "𞤴"), + (0x1E913, "M", "𞤵"), + (0x1E914, "M", "𞤶"), + (0x1E915, "M", "𞤷"), + (0x1E916, "M", "𞤸"), + (0x1E917, "M", "𞤹"), + (0x1E918, "M", "𞤺"), + (0x1E919, "M", "𞤻"), + (0x1E91A, "M", "𞤼"), + (0x1E91B, "M", "𞤽"), + (0x1E91C, "M", "𞤾"), + (0x1E91D, "M", "𞤿"), + (0x1E91E, "M", "𞥀"), + (0x1E91F, "M", "𞥁"), + (0x1E920, "M", 
"𞥂"), + (0x1E921, "M", "𞥃"), + (0x1E922, "V"), + (0x1E94C, "X"), + (0x1E950, "V"), + (0x1E95A, "X"), + (0x1E95E, "V"), + (0x1E960, "X"), + (0x1EC71, "V"), + (0x1ECB5, "X"), + (0x1ED01, "V"), + (0x1ED3E, "X"), + (0x1EE00, "M", "ا"), + (0x1EE01, "M", "ب"), + (0x1EE02, "M", "ج"), + (0x1EE03, "M", "د"), + (0x1EE04, "X"), + (0x1EE05, "M", "و"), + (0x1EE06, "M", "ز"), + (0x1EE07, "M", "ح"), + (0x1EE08, "M", "ط"), + (0x1EE09, "M", "ي"), + (0x1EE0A, "M", "ك"), + (0x1EE0B, "M", "ل"), + (0x1EE0C, "M", "م"), + (0x1EE0D, "M", "ن"), + (0x1EE0E, "M", "س"), + (0x1EE0F, "M", "ع"), + (0x1EE10, "M", "ف"), + (0x1EE11, "M", "ص"), + (0x1EE12, "M", "ق"), + (0x1EE13, "M", "ر"), + (0x1EE14, "M", "ش"), + (0x1EE15, "M", "ت"), + (0x1EE16, "M", "ث"), + (0x1EE17, "M", "خ"), + (0x1EE18, "M", "ذ"), + (0x1EE19, "M", "ض"), + (0x1EE1A, "M", "ظ"), + (0x1EE1B, "M", "غ"), + (0x1EE1C, "M", "ٮ"), + (0x1EE1D, "M", "ں"), + (0x1EE1E, "M", "ڡ"), + (0x1EE1F, "M", "ٯ"), + (0x1EE20, "X"), + (0x1EE21, "M", "ب"), + (0x1EE22, "M", "ج"), + (0x1EE23, "X"), + (0x1EE24, "M", "ه"), + (0x1EE25, "X"), + (0x1EE27, "M", "ح"), + (0x1EE28, "X"), + (0x1EE29, "M", "ي"), + (0x1EE2A, "M", "ك"), + (0x1EE2B, "M", "ل"), + (0x1EE2C, "M", "م"), + (0x1EE2D, "M", "ن"), + (0x1EE2E, "M", "س"), + (0x1EE2F, "M", "ع"), + (0x1EE30, "M", "ف"), + (0x1EE31, "M", "ص"), + (0x1EE32, "M", "ق"), + (0x1EE33, "X"), + (0x1EE34, "M", "ش"), + (0x1EE35, "M", "ت"), + (0x1EE36, "M", "ث"), + (0x1EE37, "M", "خ"), + (0x1EE38, "X"), + (0x1EE39, "M", "ض"), + (0x1EE3A, "X"), + (0x1EE3B, "M", "غ"), + (0x1EE3C, "X"), + (0x1EE42, "M", "ج"), + (0x1EE43, "X"), + (0x1EE47, "M", "ح"), + (0x1EE48, "X"), + (0x1EE49, "M", "ي"), + (0x1EE4A, "X"), + (0x1EE4B, "M", "ل"), + (0x1EE4C, "X"), + (0x1EE4D, "M", "ن"), + (0x1EE4E, "M", "س"), + ] + + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4F, "M", "ع"), + (0x1EE50, "X"), + (0x1EE51, "M", "ص"), + (0x1EE52, "M", "ق"), + (0x1EE53, "X"), + (0x1EE54, "M", "ش"), + (0x1EE55, "X"), + (0x1EE57, "M", "خ"), + (0x1EE58, "X"), + (0x1EE59, "M", "ض"), + (0x1EE5A, "X"), + (0x1EE5B, "M", "غ"), + (0x1EE5C, "X"), + (0x1EE5D, "M", "ں"), + (0x1EE5E, "X"), + (0x1EE5F, "M", "ٯ"), + (0x1EE60, "X"), + (0x1EE61, "M", "ب"), + (0x1EE62, "M", "ج"), + (0x1EE63, "X"), + (0x1EE64, "M", "ه"), + (0x1EE65, "X"), + (0x1EE67, "M", "ح"), + (0x1EE68, "M", "ط"), + (0x1EE69, "M", "ي"), + (0x1EE6A, "M", "ك"), + (0x1EE6B, "X"), + (0x1EE6C, "M", "م"), + (0x1EE6D, "M", "ن"), + (0x1EE6E, "M", "س"), + (0x1EE6F, "M", "ع"), + (0x1EE70, "M", "ف"), + (0x1EE71, "M", "ص"), + (0x1EE72, "M", "ق"), + (0x1EE73, "X"), + (0x1EE74, "M", "ش"), + (0x1EE75, "M", "ت"), + (0x1EE76, "M", "ث"), + (0x1EE77, "M", "خ"), + (0x1EE78, "X"), + (0x1EE79, "M", "ض"), + (0x1EE7A, "M", "ظ"), + (0x1EE7B, "M", "غ"), + (0x1EE7C, "M", "ٮ"), + (0x1EE7D, "X"), + (0x1EE7E, "M", "ڡ"), + (0x1EE7F, "X"), + (0x1EE80, "M", "ا"), + (0x1EE81, "M", "ب"), + (0x1EE82, "M", "ج"), + (0x1EE83, "M", "د"), + (0x1EE84, "M", "ه"), + (0x1EE85, "M", "و"), + (0x1EE86, "M", "ز"), + (0x1EE87, "M", "ح"), + (0x1EE88, "M", "ط"), + (0x1EE89, "M", "ي"), + (0x1EE8A, "X"), + (0x1EE8B, "M", "ل"), + (0x1EE8C, "M", "م"), + (0x1EE8D, "M", "ن"), + (0x1EE8E, "M", "س"), + (0x1EE8F, "M", "ع"), + (0x1EE90, "M", "ف"), + (0x1EE91, "M", "ص"), + (0x1EE92, "M", "ق"), + (0x1EE93, "M", "ر"), + (0x1EE94, "M", "ش"), + (0x1EE95, "M", "ت"), + (0x1EE96, "M", "ث"), + (0x1EE97, "M", "خ"), + (0x1EE98, "M", "ذ"), + (0x1EE99, "M", "ض"), + (0x1EE9A, "M", "ظ"), + (0x1EE9B, "M", "غ"), + (0x1EE9C, "X"), + (0x1EEA1, "M", "ب"), + (0x1EEA2, "M", "ج"), + 
(0x1EEA3, "M", "د"), + (0x1EEA4, "X"), + (0x1EEA5, "M", "و"), + (0x1EEA6, "M", "ز"), + (0x1EEA7, "M", "ح"), + (0x1EEA8, "M", "ط"), + (0x1EEA9, "M", "ي"), + (0x1EEAA, "X"), + (0x1EEAB, "M", "ل"), + (0x1EEAC, "M", "م"), + (0x1EEAD, "M", "ن"), + (0x1EEAE, "M", "س"), + (0x1EEAF, "M", "ع"), + (0x1EEB0, "M", "ف"), + (0x1EEB1, "M", "ص"), + (0x1EEB2, "M", "ق"), + (0x1EEB3, "M", "ر"), + (0x1EEB4, "M", "ش"), + (0x1EEB5, "M", "ت"), + (0x1EEB6, "M", "ث"), + (0x1EEB7, "M", "خ"), + (0x1EEB8, "M", "ذ"), + ] + + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB9, "M", "ض"), + (0x1EEBA, "M", "ظ"), + (0x1EEBB, "M", "غ"), + (0x1EEBC, "X"), + (0x1EEF0, "V"), + (0x1EEF2, "X"), + (0x1F000, "V"), + (0x1F02C, "X"), + (0x1F030, "V"), + (0x1F094, "X"), + (0x1F0A0, "V"), + (0x1F0AF, "X"), + (0x1F0B1, "V"), + (0x1F0C0, "X"), + (0x1F0C1, "V"), + (0x1F0D0, "X"), + (0x1F0D1, "V"), + (0x1F0F6, "X"), + (0x1F101, "3", "0,"), + (0x1F102, "3", "1,"), + (0x1F103, "3", "2,"), + (0x1F104, "3", "3,"), + (0x1F105, "3", "4,"), + (0x1F106, "3", "5,"), + (0x1F107, "3", "6,"), + (0x1F108, "3", "7,"), + (0x1F109, "3", "8,"), + (0x1F10A, "3", "9,"), + (0x1F10B, "V"), + (0x1F110, "3", "(a)"), + (0x1F111, "3", "(b)"), + (0x1F112, "3", "(c)"), + (0x1F113, "3", "(d)"), + (0x1F114, "3", "(e)"), + (0x1F115, "3", "(f)"), + (0x1F116, "3", "(g)"), + (0x1F117, "3", "(h)"), + (0x1F118, "3", "(i)"), + (0x1F119, "3", "(j)"), + (0x1F11A, "3", "(k)"), + (0x1F11B, "3", "(l)"), + (0x1F11C, "3", "(m)"), + (0x1F11D, "3", "(n)"), + (0x1F11E, "3", "(o)"), + (0x1F11F, "3", "(p)"), + (0x1F120, "3", "(q)"), + (0x1F121, "3", "(r)"), + (0x1F122, "3", "(s)"), + (0x1F123, "3", "(t)"), + (0x1F124, "3", "(u)"), + (0x1F125, "3", "(v)"), + (0x1F126, "3", "(w)"), + (0x1F127, "3", "(x)"), + (0x1F128, "3", "(y)"), + (0x1F129, "3", "(z)"), + (0x1F12A, "M", "〔s〕"), + (0x1F12B, "M", "c"), + (0x1F12C, "M", "r"), + (0x1F12D, "M", "cd"), + (0x1F12E, "M", "wz"), + (0x1F12F, "V"), + (0x1F130, "M", "a"), + (0x1F131, "M", "b"), + (0x1F132, "M", "c"), + (0x1F133, "M", "d"), + (0x1F134, "M", "e"), + (0x1F135, "M", "f"), + (0x1F136, "M", "g"), + (0x1F137, "M", "h"), + (0x1F138, "M", "i"), + (0x1F139, "M", "j"), + (0x1F13A, "M", "k"), + (0x1F13B, "M", "l"), + (0x1F13C, "M", "m"), + (0x1F13D, "M", "n"), + (0x1F13E, "M", "o"), + (0x1F13F, "M", "p"), + (0x1F140, "M", "q"), + (0x1F141, "M", "r"), + (0x1F142, "M", "s"), + (0x1F143, "M", "t"), + (0x1F144, "M", "u"), + (0x1F145, "M", "v"), + (0x1F146, "M", "w"), + (0x1F147, "M", "x"), + (0x1F148, "M", "y"), + (0x1F149, "M", "z"), + (0x1F14A, "M", "hv"), + (0x1F14B, "M", "mv"), + (0x1F14C, "M", "sd"), + (0x1F14D, "M", "ss"), + (0x1F14E, "M", "ppv"), + (0x1F14F, "M", "wc"), + (0x1F150, "V"), + (0x1F16A, "M", "mc"), + (0x1F16B, "M", "md"), + (0x1F16C, "M", "mr"), + (0x1F16D, "V"), + (0x1F190, "M", "dj"), + (0x1F191, "V"), + ] + + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F1AE, "X"), + (0x1F1E6, "V"), + (0x1F200, "M", "ほか"), + (0x1F201, "M", "ココ"), + (0x1F202, "M", "サ"), + (0x1F203, "X"), + (0x1F210, "M", "手"), + (0x1F211, "M", "字"), + (0x1F212, "M", "双"), + (0x1F213, "M", "デ"), + (0x1F214, "M", "二"), + (0x1F215, "M", "多"), + (0x1F216, "M", "解"), + (0x1F217, "M", "天"), + (0x1F218, "M", "交"), + (0x1F219, "M", "映"), + (0x1F21A, "M", "無"), + (0x1F21B, "M", "料"), + (0x1F21C, "M", "前"), + (0x1F21D, "M", "後"), + (0x1F21E, "M", "再"), + (0x1F21F, "M", "新"), + (0x1F220, "M", "初"), + (0x1F221, "M", "終"), + (0x1F222, "M", "生"), + (0x1F223, "M", "販"), + (0x1F224, "M", 
"声"), + (0x1F225, "M", "吹"), + (0x1F226, "M", "演"), + (0x1F227, "M", "投"), + (0x1F228, "M", "捕"), + (0x1F229, "M", "一"), + (0x1F22A, "M", "三"), + (0x1F22B, "M", "遊"), + (0x1F22C, "M", "左"), + (0x1F22D, "M", "中"), + (0x1F22E, "M", "右"), + (0x1F22F, "M", "指"), + (0x1F230, "M", "走"), + (0x1F231, "M", "打"), + (0x1F232, "M", "禁"), + (0x1F233, "M", "空"), + (0x1F234, "M", "合"), + (0x1F235, "M", "満"), + (0x1F236, "M", "有"), + (0x1F237, "M", "月"), + (0x1F238, "M", "申"), + (0x1F239, "M", "割"), + (0x1F23A, "M", "営"), + (0x1F23B, "M", "配"), + (0x1F23C, "X"), + (0x1F240, "M", "〔本〕"), + (0x1F241, "M", "〔三〕"), + (0x1F242, "M", "〔二〕"), + (0x1F243, "M", "〔安〕"), + (0x1F244, "M", "〔点〕"), + (0x1F245, "M", "〔打〕"), + (0x1F246, "M", "〔盗〕"), + (0x1F247, "M", "〔勝〕"), + (0x1F248, "M", "〔敗〕"), + (0x1F249, "X"), + (0x1F250, "M", "得"), + (0x1F251, "M", "可"), + (0x1F252, "X"), + (0x1F260, "V"), + (0x1F266, "X"), + (0x1F300, "V"), + (0x1F6D8, "X"), + (0x1F6DC, "V"), + (0x1F6ED, "X"), + (0x1F6F0, "V"), + (0x1F6FD, "X"), + (0x1F700, "V"), + (0x1F777, "X"), + (0x1F77B, "V"), + (0x1F7DA, "X"), + (0x1F7E0, "V"), + (0x1F7EC, "X"), + (0x1F7F0, "V"), + (0x1F7F1, "X"), + (0x1F800, "V"), + (0x1F80C, "X"), + (0x1F810, "V"), + (0x1F848, "X"), + (0x1F850, "V"), + (0x1F85A, "X"), + (0x1F860, "V"), + (0x1F888, "X"), + (0x1F890, "V"), + (0x1F8AE, "X"), + (0x1F8B0, "V"), + (0x1F8B2, "X"), + (0x1F900, "V"), + (0x1FA54, "X"), + (0x1FA60, "V"), + (0x1FA6E, "X"), + (0x1FA70, "V"), + (0x1FA7D, "X"), + (0x1FA80, "V"), + (0x1FA89, "X"), + ] + + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA90, "V"), + (0x1FABE, "X"), + (0x1FABF, "V"), + (0x1FAC6, "X"), + (0x1FACE, "V"), + (0x1FADC, "X"), + (0x1FAE0, "V"), + (0x1FAE9, "X"), + (0x1FAF0, "V"), + (0x1FAF9, "X"), + (0x1FB00, "V"), + (0x1FB93, "X"), + (0x1FB94, "V"), + (0x1FBCB, "X"), + (0x1FBF0, "M", "0"), + (0x1FBF1, "M", "1"), + (0x1FBF2, "M", "2"), + (0x1FBF3, "M", "3"), + (0x1FBF4, "M", "4"), + (0x1FBF5, "M", "5"), + (0x1FBF6, "M", "6"), + (0x1FBF7, "M", "7"), + (0x1FBF8, "M", "8"), + (0x1FBF9, "M", "9"), + (0x1FBFA, "X"), + (0x20000, "V"), + (0x2A6E0, "X"), + (0x2A700, "V"), + (0x2B73A, "X"), + (0x2B740, "V"), + (0x2B81E, "X"), + (0x2B820, "V"), + (0x2CEA2, "X"), + (0x2CEB0, "V"), + (0x2EBE1, "X"), + (0x2EBF0, "V"), + (0x2EE5E, "X"), + (0x2F800, "M", "丽"), + (0x2F801, "M", "丸"), + (0x2F802, "M", "乁"), + (0x2F803, "M", "𠄢"), + (0x2F804, "M", "你"), + (0x2F805, "M", "侮"), + (0x2F806, "M", "侻"), + (0x2F807, "M", "倂"), + (0x2F808, "M", "偺"), + (0x2F809, "M", "備"), + (0x2F80A, "M", "僧"), + (0x2F80B, "M", "像"), + (0x2F80C, "M", "㒞"), + (0x2F80D, "M", "𠘺"), + (0x2F80E, "M", "免"), + (0x2F80F, "M", "兔"), + (0x2F810, "M", "兤"), + (0x2F811, "M", "具"), + (0x2F812, "M", "𠔜"), + (0x2F813, "M", "㒹"), + (0x2F814, "M", "內"), + (0x2F815, "M", "再"), + (0x2F816, "M", "𠕋"), + (0x2F817, "M", "冗"), + (0x2F818, "M", "冤"), + (0x2F819, "M", "仌"), + (0x2F81A, "M", "冬"), + (0x2F81B, "M", "况"), + (0x2F81C, "M", "𩇟"), + (0x2F81D, "M", "凵"), + (0x2F81E, "M", "刃"), + (0x2F81F, "M", "㓟"), + (0x2F820, "M", "刻"), + (0x2F821, "M", "剆"), + (0x2F822, "M", "割"), + (0x2F823, "M", "剷"), + (0x2F824, "M", "㔕"), + (0x2F825, "M", "勇"), + (0x2F826, "M", "勉"), + (0x2F827, "M", "勤"), + (0x2F828, "M", "勺"), + (0x2F829, "M", "包"), + (0x2F82A, "M", "匆"), + (0x2F82B, "M", "北"), + (0x2F82C, "M", "卉"), + (0x2F82D, "M", "卑"), + (0x2F82E, "M", "博"), + (0x2F82F, "M", "即"), + (0x2F830, "M", "卽"), + (0x2F831, "M", "卿"), + (0x2F834, "M", "𠨬"), + (0x2F835, "M", "灰"), + (0x2F836, "M", "及"), + (0x2F837, "M", "叟"), 
+ (0x2F838, "M", "𠭣"), + (0x2F839, "M", "叫"), + (0x2F83A, "M", "叱"), + (0x2F83B, "M", "吆"), + (0x2F83C, "M", "咞"), + (0x2F83D, "M", "吸"), + (0x2F83E, "M", "呈"), + (0x2F83F, "M", "周"), + (0x2F840, "M", "咢"), + ] + + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F841, "M", "哶"), + (0x2F842, "M", "唐"), + (0x2F843, "M", "啓"), + (0x2F844, "M", "啣"), + (0x2F845, "M", "善"), + (0x2F847, "M", "喙"), + (0x2F848, "M", "喫"), + (0x2F849, "M", "喳"), + (0x2F84A, "M", "嗂"), + (0x2F84B, "M", "圖"), + (0x2F84C, "M", "嘆"), + (0x2F84D, "M", "圗"), + (0x2F84E, "M", "噑"), + (0x2F84F, "M", "噴"), + (0x2F850, "M", "切"), + (0x2F851, "M", "壮"), + (0x2F852, "M", "城"), + (0x2F853, "M", "埴"), + (0x2F854, "M", "堍"), + (0x2F855, "M", "型"), + (0x2F856, "M", "堲"), + (0x2F857, "M", "報"), + (0x2F858, "M", "墬"), + (0x2F859, "M", "𡓤"), + (0x2F85A, "M", "売"), + (0x2F85B, "M", "壷"), + (0x2F85C, "M", "夆"), + (0x2F85D, "M", "多"), + (0x2F85E, "M", "夢"), + (0x2F85F, "M", "奢"), + (0x2F860, "M", "𡚨"), + (0x2F861, "M", "𡛪"), + (0x2F862, "M", "姬"), + (0x2F863, "M", "娛"), + (0x2F864, "M", "娧"), + (0x2F865, "M", "姘"), + (0x2F866, "M", "婦"), + (0x2F867, "M", "㛮"), + (0x2F868, "X"), + (0x2F869, "M", "嬈"), + (0x2F86A, "M", "嬾"), + (0x2F86C, "M", "𡧈"), + (0x2F86D, "M", "寃"), + (0x2F86E, "M", "寘"), + (0x2F86F, "M", "寧"), + (0x2F870, "M", "寳"), + (0x2F871, "M", "𡬘"), + (0x2F872, "M", "寿"), + (0x2F873, "M", "将"), + (0x2F874, "X"), + (0x2F875, "M", "尢"), + (0x2F876, "M", "㞁"), + (0x2F877, "M", "屠"), + (0x2F878, "M", "屮"), + (0x2F879, "M", "峀"), + (0x2F87A, "M", "岍"), + (0x2F87B, "M", "𡷤"), + (0x2F87C, "M", "嵃"), + (0x2F87D, "M", "𡷦"), + (0x2F87E, "M", "嵮"), + (0x2F87F, "M", "嵫"), + (0x2F880, "M", "嵼"), + (0x2F881, "M", "巡"), + (0x2F882, "M", "巢"), + (0x2F883, "M", "㠯"), + (0x2F884, "M", "巽"), + (0x2F885, "M", "帨"), + (0x2F886, "M", "帽"), + (0x2F887, "M", "幩"), + (0x2F888, "M", "㡢"), + (0x2F889, "M", "𢆃"), + (0x2F88A, "M", "㡼"), + (0x2F88B, "M", "庰"), + (0x2F88C, "M", "庳"), + (0x2F88D, "M", "庶"), + (0x2F88E, "M", "廊"), + (0x2F88F, "M", "𪎒"), + (0x2F890, "M", "廾"), + (0x2F891, "M", "𢌱"), + (0x2F893, "M", "舁"), + (0x2F894, "M", "弢"), + (0x2F896, "M", "㣇"), + (0x2F897, "M", "𣊸"), + (0x2F898, "M", "𦇚"), + (0x2F899, "M", "形"), + (0x2F89A, "M", "彫"), + (0x2F89B, "M", "㣣"), + (0x2F89C, "M", "徚"), + (0x2F89D, "M", "忍"), + (0x2F89E, "M", "志"), + (0x2F89F, "M", "忹"), + (0x2F8A0, "M", "悁"), + (0x2F8A1, "M", "㤺"), + (0x2F8A2, "M", "㤜"), + (0x2F8A3, "M", "悔"), + (0x2F8A4, "M", "𢛔"), + (0x2F8A5, "M", "惇"), + (0x2F8A6, "M", "慈"), + (0x2F8A7, "M", "慌"), + (0x2F8A8, "M", "慎"), + ] + + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A9, "M", "慌"), + (0x2F8AA, "M", "慺"), + (0x2F8AB, "M", "憎"), + (0x2F8AC, "M", "憲"), + (0x2F8AD, "M", "憤"), + (0x2F8AE, "M", "憯"), + (0x2F8AF, "M", "懞"), + (0x2F8B0, "M", "懲"), + (0x2F8B1, "M", "懶"), + (0x2F8B2, "M", "成"), + (0x2F8B3, "M", "戛"), + (0x2F8B4, "M", "扝"), + (0x2F8B5, "M", "抱"), + (0x2F8B6, "M", "拔"), + (0x2F8B7, "M", "捐"), + (0x2F8B8, "M", "𢬌"), + (0x2F8B9, "M", "挽"), + (0x2F8BA, "M", "拼"), + (0x2F8BB, "M", "捨"), + (0x2F8BC, "M", "掃"), + (0x2F8BD, "M", "揤"), + (0x2F8BE, "M", "𢯱"), + (0x2F8BF, "M", "搢"), + (0x2F8C0, "M", "揅"), + (0x2F8C1, "M", "掩"), + (0x2F8C2, "M", "㨮"), + (0x2F8C3, "M", "摩"), + (0x2F8C4, "M", "摾"), + (0x2F8C5, "M", "撝"), + (0x2F8C6, "M", "摷"), + (0x2F8C7, "M", "㩬"), + (0x2F8C8, "M", "敏"), + (0x2F8C9, "M", "敬"), + (0x2F8CA, "M", "𣀊"), + (0x2F8CB, "M", "旣"), + (0x2F8CC, "M", "書"), + (0x2F8CD, "M", "晉"), + (0x2F8CE, "M", "㬙"), + 
(0x2F8CF, "M", "暑"), + (0x2F8D0, "M", "㬈"), + (0x2F8D1, "M", "㫤"), + (0x2F8D2, "M", "冒"), + (0x2F8D3, "M", "冕"), + (0x2F8D4, "M", "最"), + (0x2F8D5, "M", "暜"), + (0x2F8D6, "M", "肭"), + (0x2F8D7, "M", "䏙"), + (0x2F8D8, "M", "朗"), + (0x2F8D9, "M", "望"), + (0x2F8DA, "M", "朡"), + (0x2F8DB, "M", "杞"), + (0x2F8DC, "M", "杓"), + (0x2F8DD, "M", "𣏃"), + (0x2F8DE, "M", "㭉"), + (0x2F8DF, "M", "柺"), + (0x2F8E0, "M", "枅"), + (0x2F8E1, "M", "桒"), + (0x2F8E2, "M", "梅"), + (0x2F8E3, "M", "𣑭"), + (0x2F8E4, "M", "梎"), + (0x2F8E5, "M", "栟"), + (0x2F8E6, "M", "椔"), + (0x2F8E7, "M", "㮝"), + (0x2F8E8, "M", "楂"), + (0x2F8E9, "M", "榣"), + (0x2F8EA, "M", "槪"), + (0x2F8EB, "M", "檨"), + (0x2F8EC, "M", "𣚣"), + (0x2F8ED, "M", "櫛"), + (0x2F8EE, "M", "㰘"), + (0x2F8EF, "M", "次"), + (0x2F8F0, "M", "𣢧"), + (0x2F8F1, "M", "歔"), + (0x2F8F2, "M", "㱎"), + (0x2F8F3, "M", "歲"), + (0x2F8F4, "M", "殟"), + (0x2F8F5, "M", "殺"), + (0x2F8F6, "M", "殻"), + (0x2F8F7, "M", "𣪍"), + (0x2F8F8, "M", "𡴋"), + (0x2F8F9, "M", "𣫺"), + (0x2F8FA, "M", "汎"), + (0x2F8FB, "M", "𣲼"), + (0x2F8FC, "M", "沿"), + (0x2F8FD, "M", "泍"), + (0x2F8FE, "M", "汧"), + (0x2F8FF, "M", "洖"), + (0x2F900, "M", "派"), + (0x2F901, "M", "海"), + (0x2F902, "M", "流"), + (0x2F903, "M", "浩"), + (0x2F904, "M", "浸"), + (0x2F905, "M", "涅"), + (0x2F906, "M", "𣴞"), + (0x2F907, "M", "洴"), + (0x2F908, "M", "港"), + (0x2F909, "M", "湮"), + (0x2F90A, "M", "㴳"), + (0x2F90B, "M", "滋"), + (0x2F90C, "M", "滇"), + ] + + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90D, "M", "𣻑"), + (0x2F90E, "M", "淹"), + (0x2F90F, "M", "潮"), + (0x2F910, "M", "𣽞"), + (0x2F911, "M", "𣾎"), + (0x2F912, "M", "濆"), + (0x2F913, "M", "瀹"), + (0x2F914, "M", "瀞"), + (0x2F915, "M", "瀛"), + (0x2F916, "M", "㶖"), + (0x2F917, "M", "灊"), + (0x2F918, "M", "災"), + (0x2F919, "M", "灷"), + (0x2F91A, "M", "炭"), + (0x2F91B, "M", "𠔥"), + (0x2F91C, "M", "煅"), + (0x2F91D, "M", "𤉣"), + (0x2F91E, "M", "熜"), + (0x2F91F, "X"), + (0x2F920, "M", "爨"), + (0x2F921, "M", "爵"), + (0x2F922, "M", "牐"), + (0x2F923, "M", "𤘈"), + (0x2F924, "M", "犀"), + (0x2F925, "M", "犕"), + (0x2F926, "M", "𤜵"), + (0x2F927, "M", "𤠔"), + (0x2F928, "M", "獺"), + (0x2F929, "M", "王"), + (0x2F92A, "M", "㺬"), + (0x2F92B, "M", "玥"), + (0x2F92C, "M", "㺸"), + (0x2F92E, "M", "瑇"), + (0x2F92F, "M", "瑜"), + (0x2F930, "M", "瑱"), + (0x2F931, "M", "璅"), + (0x2F932, "M", "瓊"), + (0x2F933, "M", "㼛"), + (0x2F934, "M", "甤"), + (0x2F935, "M", "𤰶"), + (0x2F936, "M", "甾"), + (0x2F937, "M", "𤲒"), + (0x2F938, "M", "異"), + (0x2F939, "M", "𢆟"), + (0x2F93A, "M", "瘐"), + (0x2F93B, "M", "𤾡"), + (0x2F93C, "M", "𤾸"), + (0x2F93D, "M", "𥁄"), + (0x2F93E, "M", "㿼"), + (0x2F93F, "M", "䀈"), + (0x2F940, "M", "直"), + (0x2F941, "M", "𥃳"), + (0x2F942, "M", "𥃲"), + (0x2F943, "M", "𥄙"), + (0x2F944, "M", "𥄳"), + (0x2F945, "M", "眞"), + (0x2F946, "M", "真"), + (0x2F948, "M", "睊"), + (0x2F949, "M", "䀹"), + (0x2F94A, "M", "瞋"), + (0x2F94B, "M", "䁆"), + (0x2F94C, "M", "䂖"), + (0x2F94D, "M", "𥐝"), + (0x2F94E, "M", "硎"), + (0x2F94F, "M", "碌"), + (0x2F950, "M", "磌"), + (0x2F951, "M", "䃣"), + (0x2F952, "M", "𥘦"), + (0x2F953, "M", "祖"), + (0x2F954, "M", "𥚚"), + (0x2F955, "M", "𥛅"), + (0x2F956, "M", "福"), + (0x2F957, "M", "秫"), + (0x2F958, "M", "䄯"), + (0x2F959, "M", "穀"), + (0x2F95A, "M", "穊"), + (0x2F95B, "M", "穏"), + (0x2F95C, "M", "𥥼"), + (0x2F95D, "M", "𥪧"), + (0x2F95F, "X"), + (0x2F960, "M", "䈂"), + (0x2F961, "M", "𥮫"), + (0x2F962, "M", "篆"), + (0x2F963, "M", "築"), + (0x2F964, "M", "䈧"), + (0x2F965, "M", "𥲀"), + (0x2F966, "M", "糒"), + (0x2F967, "M", "䊠"), + (0x2F968, "M", "糨"), + 
(0x2F969, "M", "糣"), + (0x2F96A, "M", "紀"), + (0x2F96B, "M", "𥾆"), + (0x2F96C, "M", "絣"), + (0x2F96D, "M", "䌁"), + (0x2F96E, "M", "緇"), + (0x2F96F, "M", "縂"), + (0x2F970, "M", "繅"), + (0x2F971, "M", "䌴"), + (0x2F972, "M", "𦈨"), + (0x2F973, "M", "𦉇"), + ] + + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F974, "M", "䍙"), + (0x2F975, "M", "𦋙"), + (0x2F976, "M", "罺"), + (0x2F977, "M", "𦌾"), + (0x2F978, "M", "羕"), + (0x2F979, "M", "翺"), + (0x2F97A, "M", "者"), + (0x2F97B, "M", "𦓚"), + (0x2F97C, "M", "𦔣"), + (0x2F97D, "M", "聠"), + (0x2F97E, "M", "𦖨"), + (0x2F97F, "M", "聰"), + (0x2F980, "M", "𣍟"), + (0x2F981, "M", "䏕"), + (0x2F982, "M", "育"), + (0x2F983, "M", "脃"), + (0x2F984, "M", "䐋"), + (0x2F985, "M", "脾"), + (0x2F986, "M", "媵"), + (0x2F987, "M", "𦞧"), + (0x2F988, "M", "𦞵"), + (0x2F989, "M", "𣎓"), + (0x2F98A, "M", "𣎜"), + (0x2F98B, "M", "舁"), + (0x2F98C, "M", "舄"), + (0x2F98D, "M", "辞"), + (0x2F98E, "M", "䑫"), + (0x2F98F, "M", "芑"), + (0x2F990, "M", "芋"), + (0x2F991, "M", "芝"), + (0x2F992, "M", "劳"), + (0x2F993, "M", "花"), + (0x2F994, "M", "芳"), + (0x2F995, "M", "芽"), + (0x2F996, "M", "苦"), + (0x2F997, "M", "𦬼"), + (0x2F998, "M", "若"), + (0x2F999, "M", "茝"), + (0x2F99A, "M", "荣"), + (0x2F99B, "M", "莭"), + (0x2F99C, "M", "茣"), + (0x2F99D, "M", "莽"), + (0x2F99E, "M", "菧"), + (0x2F99F, "M", "著"), + (0x2F9A0, "M", "荓"), + (0x2F9A1, "M", "菊"), + (0x2F9A2, "M", "菌"), + (0x2F9A3, "M", "菜"), + (0x2F9A4, "M", "𦰶"), + (0x2F9A5, "M", "𦵫"), + (0x2F9A6, "M", "𦳕"), + (0x2F9A7, "M", "䔫"), + (0x2F9A8, "M", "蓱"), + (0x2F9A9, "M", "蓳"), + (0x2F9AA, "M", "蔖"), + (0x2F9AB, "M", "𧏊"), + (0x2F9AC, "M", "蕤"), + (0x2F9AD, "M", "𦼬"), + (0x2F9AE, "M", "䕝"), + (0x2F9AF, "M", "䕡"), + (0x2F9B0, "M", "𦾱"), + (0x2F9B1, "M", "𧃒"), + (0x2F9B2, "M", "䕫"), + (0x2F9B3, "M", "虐"), + (0x2F9B4, "M", "虜"), + (0x2F9B5, "M", "虧"), + (0x2F9B6, "M", "虩"), + (0x2F9B7, "M", "蚩"), + (0x2F9B8, "M", "蚈"), + (0x2F9B9, "M", "蜎"), + (0x2F9BA, "M", "蛢"), + (0x2F9BB, "M", "蝹"), + (0x2F9BC, "M", "蜨"), + (0x2F9BD, "M", "蝫"), + (0x2F9BE, "M", "螆"), + (0x2F9BF, "X"), + (0x2F9C0, "M", "蟡"), + (0x2F9C1, "M", "蠁"), + (0x2F9C2, "M", "䗹"), + (0x2F9C3, "M", "衠"), + (0x2F9C4, "M", "衣"), + (0x2F9C5, "M", "𧙧"), + (0x2F9C6, "M", "裗"), + (0x2F9C7, "M", "裞"), + (0x2F9C8, "M", "䘵"), + (0x2F9C9, "M", "裺"), + (0x2F9CA, "M", "㒻"), + (0x2F9CB, "M", "𧢮"), + (0x2F9CC, "M", "𧥦"), + (0x2F9CD, "M", "䚾"), + (0x2F9CE, "M", "䛇"), + (0x2F9CF, "M", "誠"), + (0x2F9D0, "M", "諭"), + (0x2F9D1, "M", "變"), + (0x2F9D2, "M", "豕"), + (0x2F9D3, "M", "𧲨"), + (0x2F9D4, "M", "貫"), + (0x2F9D5, "M", "賁"), + (0x2F9D6, "M", "贛"), + (0x2F9D7, "M", "起"), + ] + + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D8, "M", "𧼯"), + (0x2F9D9, "M", "𠠄"), + (0x2F9DA, "M", "跋"), + (0x2F9DB, "M", "趼"), + (0x2F9DC, "M", "跰"), + (0x2F9DD, "M", "𠣞"), + (0x2F9DE, "M", "軔"), + (0x2F9DF, "M", "輸"), + (0x2F9E0, "M", "𨗒"), + (0x2F9E1, "M", "𨗭"), + (0x2F9E2, "M", "邔"), + (0x2F9E3, "M", "郱"), + (0x2F9E4, "M", "鄑"), + (0x2F9E5, "M", "𨜮"), + (0x2F9E6, "M", "鄛"), + (0x2F9E7, "M", "鈸"), + (0x2F9E8, "M", "鋗"), + (0x2F9E9, "M", "鋘"), + (0x2F9EA, "M", "鉼"), + (0x2F9EB, "M", "鏹"), + (0x2F9EC, "M", "鐕"), + (0x2F9ED, "M", "𨯺"), + (0x2F9EE, "M", "開"), + (0x2F9EF, "M", "䦕"), + (0x2F9F0, "M", "閷"), + (0x2F9F1, "M", "𨵷"), + (0x2F9F2, "M", "䧦"), + (0x2F9F3, "M", "雃"), + (0x2F9F4, "M", "嶲"), + (0x2F9F5, "M", "霣"), + (0x2F9F6, "M", "𩅅"), + (0x2F9F7, "M", "𩈚"), + (0x2F9F8, "M", "䩮"), + (0x2F9F9, "M", "䩶"), + (0x2F9FA, "M", "韠"), + (0x2F9FB, "M", "𩐊"), + 
(0x2F9FC, "M", "䪲"), + (0x2F9FD, "M", "𩒖"), + (0x2F9FE, "M", "頋"), + (0x2FA00, "M", "頩"), + (0x2FA01, "M", "𩖶"), + (0x2FA02, "M", "飢"), + (0x2FA03, "M", "䬳"), + (0x2FA04, "M", "餩"), + (0x2FA05, "M", "馧"), + (0x2FA06, "M", "駂"), + (0x2FA07, "M", "駾"), + (0x2FA08, "M", "䯎"), + (0x2FA09, "M", "𩬰"), + (0x2FA0A, "M", "鬒"), + (0x2FA0B, "M", "鱀"), + (0x2FA0C, "M", "鳽"), + (0x2FA0D, "M", "䳎"), + (0x2FA0E, "M", "䳭"), + (0x2FA0F, "M", "鵧"), + (0x2FA10, "M", "𪃎"), + (0x2FA11, "M", "䳸"), + (0x2FA12, "M", "𪄅"), + (0x2FA13, "M", "𪈎"), + (0x2FA14, "M", "𪊑"), + (0x2FA15, "M", "麻"), + (0x2FA16, "M", "䵖"), + (0x2FA17, "M", "黹"), + (0x2FA18, "M", "黾"), + (0x2FA19, "M", "鼅"), + (0x2FA1A, "M", "鼏"), + (0x2FA1B, "M", "鼖"), + (0x2FA1C, "M", "鼻"), + (0x2FA1D, "M", "𪘀"), + (0x2FA1E, "X"), + (0x30000, "V"), + (0x3134B, "X"), + (0x31350, "V"), + (0x323B0, "X"), + (0xE0100, "I"), + (0xE01F0, "X"), + ] + + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/venv/lib/python3.10/site-packages/iniconfig/__init__.py b/venv/lib/python3.10/site-packages/iniconfig/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b84809f8e9dc0db6fba3505fcc29ca26c3383cce --- /dev/null +++ b/venv/lib/python3.10/site-packages/iniconfig/__init__.py @@ -0,0 +1,249 @@ +"""brain-dead simple parser for ini-style files. +(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" + +import os +from collections.abc import Callable +from collections.abc import Iterator +from collections.abc import Mapping +from typing import Final +from typing import TypeVar +from typing import overload + +__all__ = ["IniConfig", "ParseError", "COMMENTCHARS", "iscommentline"] + +from . import _parse +from ._parse import COMMENTCHARS +from ._parse import iscommentline +from .exceptions import ParseError + +_D = TypeVar("_D") +_T = TypeVar("_T") + + +class SectionWrapper: + config: Final["IniConfig"] + name: Final[str] + + def __init__(self, config: "IniConfig", name: str) -> None: + self.config = config + self.name = name + + def lineof(self, name: str) -> int | None: + return self.config.lineof(self.name, name) + + @overload + def get(self, key: str) -> str | None: ... + + @overload + def get( + self, + key: str, + convert: Callable[[str], _T], + ) -> _T | None: ... 
+ + @overload + def get( + self, + key: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get(self, key: str, default: _D, convert: None = None) -> str | _D: ... + + @overload + def get( + self, + key: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... + + # TODO: investigate possible mypy bug wrt matching the passed over data + def get( # type: ignore [misc] + self, + key: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key: str) -> str: + return self.config.sections[self.name][key] + + def __iter__(self) -> Iterator[str]: + section: Mapping[str, str] = self.config.sections.get(self.name, {}) + + def lineof(key: str) -> int: + return self.config.lineof(self.name, key) # type: ignore[return-value] + + yield from sorted(section, key=lineof) + + def items(self) -> Iterator[tuple[str, str]]: + for name in self: + yield name, self[name] + + +class IniConfig: + path: Final[str] + sections: Final[Mapping[str, Mapping[str, str]]] + _sources: Final[Mapping[tuple[str, str | None], int]] + + def __init__( + self, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + _sections: Mapping[str, Mapping[str, str]] | None = None, + _sources: Mapping[tuple[str, str | None], int] | None = None, + ) -> None: + self.path = os.fspath(path) + + # Determine sections and sources + if _sections is not None and _sources is not None: + # Use provided pre-parsed data (called from parse()) + sections_data = _sections + sources = _sources + else: + # Parse the data (backward compatible path) + if data is None: + with open(self.path, encoding=encoding) as fp: + data = fp.read() + + # Use old behavior (no stripping) for backward compatibility + sections_data, sources = _parse.parse_ini_data( + self.path, data, strip_inline_comments=False + ) + + # Assign once to Final attributes + self._sources = sources + self.sections = sections_data + + @classmethod + def parse( + cls, + path: str | os.PathLike[str], + data: str | None = None, + encoding: str = "utf-8", + *, + strip_inline_comments: bool = True, + strip_section_whitespace: bool = False, + ) -> "IniConfig": + """Parse an INI file. + + Args: + path: Path to the INI file (used for error messages) + data: Optional INI content as string. If None, reads from path. + encoding: Encoding to use when reading the file (default: utf-8) + strip_inline_comments: Whether to strip inline comments from values + (default: True). When True, comments starting with # or ; are + removed from values, matching the behavior for section comments. + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, strips Unicode whitespace from section and key names, + addressing issue #4. When False, preserves existing behavior for backward compatibility. 
+ + Returns: + IniConfig instance with parsed configuration + + Example: + # With comment stripping (default): + config = IniConfig.parse("setup.cfg") + # value = "foo" instead of "foo # comment" + + # Without comment stripping (old behavior): + config = IniConfig.parse("setup.cfg", strip_inline_comments=False) + # value = "foo # comment" + + # With section name stripping (opt-in for issue #4): + config = IniConfig.parse("setup.cfg", strip_section_whitespace=True) + # section names and keys have Unicode whitespace stripped + """ + fspath = os.fspath(path) + + if data is None: + with open(fspath, encoding=encoding) as fp: + data = fp.read() + + sections_data, sources = _parse.parse_ini_data( + fspath, + data, + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + # Call constructor with pre-parsed sections and sources + return cls(path=fspath, _sections=sections_data, _sources=sources) + + def lineof(self, section: str, name: str | None = None) -> int | None: + lineno = self._sources.get((section, name)) + return None if lineno is None else lineno + 1 + + @overload + def get( + self, + section: str, + name: str, + ) -> str | None: ... + + @overload + def get( + self, + section: str, + name: str, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, + section: str, + name: str, + default: None, + convert: Callable[[str], _T], + ) -> _T | None: ... + + @overload + def get( + self, section: str, name: str, default: _D, convert: None = None + ) -> str | _D: ... + + @overload + def get( + self, + section: str, + name: str, + default: _D, + convert: Callable[[str], _T], + ) -> _T | _D: ... + + def get( # type: ignore + self, + section: str, + name: str, + default: _D | None = None, + convert: Callable[[str], _T] | None = None, + ) -> _D | _T | str | None: + try: + value: str = self.sections[section][name] + except KeyError: + return default + else: + if convert is not None: + return convert(value) + else: + return value + + def __getitem__(self, name: str) -> SectionWrapper: + if name not in self.sections: + raise KeyError(name) + return SectionWrapper(self, name) + + def __iter__(self) -> Iterator[SectionWrapper]: + for name in sorted(self.sections, key=self.lineof): # type: ignore + yield SectionWrapper(self, name) + + def __contains__(self, arg: str) -> bool: + return arg in self.sections diff --git a/venv/lib/python3.10/site-packages/iniconfig/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..480ab0b0553112081ebbd5fdc19cd2887c84f1a0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_parse.cpython-310.pyc b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_parse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6e93036a7e350a3d5e65acbaa87cd766fb301b9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_parse.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..58a885b41531fea8326efd3323c0fe99498d91fd Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/iniconfig/__pycache__/exceptions.cpython-310.pyc b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47a7b78265fb04014191f34d13bf27349f99ae5a Binary files /dev/null and b/venv/lib/python3.10/site-packages/iniconfig/__pycache__/exceptions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/iniconfig/_parse.py b/venv/lib/python3.10/site-packages/iniconfig/_parse.py new file mode 100644 index 0000000000000000000000000000000000000000..57b9b44e4cccca635651b8d4380aed1e2ab020a8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/iniconfig/_parse.py @@ -0,0 +1,163 @@ +from collections.abc import Mapping +from typing import NamedTuple + +from .exceptions import ParseError + +COMMENTCHARS = "#;" + + +class ParsedLine(NamedTuple): + lineno: int + section: str | None + name: str | None + value: str | None + + +def parse_ini_data( + path: str, + data: str, + *, + strip_inline_comments: bool, + strip_section_whitespace: bool = False, +) -> tuple[Mapping[str, Mapping[str, str]], Mapping[tuple[str, str | None], int]]: + """Parse INI data and return sections and sources mappings. + + Args: + path: Path for error messages + data: INI content as string + strip_inline_comments: Whether to strip inline comments from values + strip_section_whitespace: Whether to strip whitespace from section and key names + (default: False). When True, addresses issue #4 by stripping Unicode whitespace. + + Returns: + Tuple of (sections_data, sources) where: + - sections_data: mapping of section -> {name -> value} + - sources: mapping of (section, name) -> line number + """ + tokens = parse_lines( + path, + data.splitlines(True), + strip_inline_comments=strip_inline_comments, + strip_section_whitespace=strip_section_whitespace, + ) + + sources: dict[tuple[str, str | None], int] = {} + sections_data: dict[str, dict[str, str]] = {} + + for lineno, section, name, value in tokens: + if section is None: + raise ParseError(path, lineno, "no section header defined") + sources[section, name] = lineno + if name is None: + if section in sections_data: + raise ParseError(path, lineno, f"duplicate section {section!r}") + sections_data[section] = {} + else: + if name in sections_data[section]: + raise ParseError(path, lineno, f"duplicate name {name!r}") + assert value is not None + sections_data[section][name] = value + + return sections_data, sources + + +def parse_lines( + path: str, + line_iter: list[str], + *, + strip_inline_comments: bool = False, + strip_section_whitespace: bool = False, +) -> list[ParsedLine]: + result: list[ParsedLine] = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = _parseline( + path, line, lineno, strip_inline_comments, strip_section_whitespace + ) + # new value + if name is not None and data is not None: + result.append(ParsedLine(lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + raise ParseError(path, lineno, "empty section name") + section = name + result.append(ParsedLine(lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + raise ParseError(path, lineno, "unexpected value continuation") + last = result.pop() + if last.name is None: + raise ParseError(path, lineno, "unexpected value continuation") + + if last.value: 
+ last = last._replace(value=f"{last.value}\n{data}") + else: + last = last._replace(value=data) + result.append(last) + return result + + +def _parseline( + path: str, + line: str, + lineno: int, + strip_inline_comments: bool, + strip_section_whitespace: bool, +) -> tuple[str | None, str | None]: + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == "[": + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + section_name = line[1:-1] + # Optionally strip whitespace from section name (issue #4) + if strip_section_whitespace: + section_name = section_name.strip() + return section_name, None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split("=", 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + raise ParseError(path, lineno, f"unexpected line: {line!r}") from None + + # Strip key name (always for backward compatibility, optionally with unicode awareness) + key_name = name.strip() + + # Strip value + value = value.strip() + # Strip inline comments from values if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + value = value.split(c)[0].rstrip() + + return key_name, value + # continuation + else: + line = line.strip() + # Strip inline comments from continuations if requested (issue #55) + if strip_inline_comments: + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + return None, line + + +def iscommentline(line: str) -> bool: + c = line.lstrip()[:1] + return c in COMMENTCHARS diff --git a/venv/lib/python3.10/site-packages/iniconfig/_version.py b/venv/lib/python3.10/site-packages/iniconfig/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..b982b024d8649c68b58ba29198fab080774ddf5f --- /dev/null +++ b/venv/lib/python3.10/site-packages/iniconfig/_version.py @@ -0,0 +1,34 @@ +# file generated by setuptools-scm +# don't change, don't track in version control + +__all__ = [ + "__version__", + "__version_tuple__", + "version", + "version_tuple", + "__commit_id__", + "commit_id", +] + +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Tuple + from typing import Union + + VERSION_TUPLE = Tuple[Union[int, str], ...] 
+ COMMIT_ID = Union[str, None] +else: + VERSION_TUPLE = object + COMMIT_ID = object + +version: str +__version__: str +__version_tuple__: VERSION_TUPLE +version_tuple: VERSION_TUPLE +commit_id: COMMIT_ID +__commit_id__: COMMIT_ID + +__version__ = version = '2.3.0' +__version_tuple__ = version_tuple = (2, 3, 0) + +__commit_id__ = commit_id = None diff --git a/venv/lib/python3.10/site-packages/iniconfig/exceptions.py b/venv/lib/python3.10/site-packages/iniconfig/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..d078bc659504cf79d3cedefe66dd828af9a9d9e0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/iniconfig/exceptions.py @@ -0,0 +1,16 @@ +from typing import Final + + +class ParseError(Exception): + path: Final[str] + lineno: Final[int] + msg: Final[str] + + def __init__(self, path: str, lineno: int, msg: str) -> None: + super().__init__(path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self) -> str: + return f"{self.path}:{self.lineno + 1}: {self.msg}" diff --git a/venv/lib/python3.10/site-packages/iniconfig/py.typed b/venv/lib/python3.10/site-packages/iniconfig/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/markupsafe/__init__.py b/venv/lib/python3.10/site-packages/markupsafe/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fee8dc7acca04f4d3e8ff002a4440d24b59dacac --- /dev/null +++ b/venv/lib/python3.10/site-packages/markupsafe/__init__.py @@ -0,0 +1,395 @@ +from __future__ import annotations + +import collections.abc as cabc +import string +import typing as t + +try: + from ._speedups import _escape_inner +except ImportError: + from ._native import _escape_inner + +if t.TYPE_CHECKING: + import typing_extensions as te + + +class _HasHTML(t.Protocol): + def __html__(self, /) -> str: ... + + +class _TPEscape(t.Protocol): + def __call__(self, s: t.Any, /) -> Markup: ... + + +def escape(s: t.Any, /) -> Markup: + """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in + the string with HTML-safe sequences. Use this if you need to display + text that might contain such characters in HTML. + + If the object has an ``__html__`` method, it is called and the + return value is assumed to already be safe for HTML. + + :param s: An object to be converted to a string and escaped. + :return: A :class:`Markup` string with the escaped text. + """ + # If the object is already a plain string, skip __html__ check and string + # conversion. This is the most common use case. + # Use type(s) instead of s.__class__ because a proxy object may be reporting + # the __class__ of the proxied value. + if type(s) is str: + return Markup(_escape_inner(s)) + + if hasattr(s, "__html__"): + return Markup(s.__html__()) + + return Markup(_escape_inner(str(s))) + + +def escape_silent(s: t.Any | None, /) -> Markup: + """Like :func:`escape` but treats ``None`` as the empty string. + Useful with optional values, as otherwise you get the string + ``'None'`` when the value is ``None``. + + >>> escape(None) + Markup('None') + >>> escape_silent(None) + Markup('') + """ + if s is None: + return Markup() + + return escape(s) + + +def soft_str(s: t.Any, /) -> str: + """Convert an object to a string if it isn't already. This preserves + a :class:`Markup` string rather than converting it back to a basic + string, so it will still be marked as safe and won't be escaped + again. 
+ + >>> value = escape("<User 1>") + >>> value + Markup('&lt;User 1&gt;') + >>> escape(str(value)) + Markup('&amp;lt;User 1&amp;gt;') + >>> escape(soft_str(value)) + Markup('&lt;User 1&gt;') + """ + if not isinstance(s, str): + return str(s) + + return s + + +class Markup(str): + """A string that is ready to be safely inserted into an HTML or XML + document, either because it was escaped or because it was marked + safe. + + Passing an object to the constructor converts it to text and wraps + it to mark it safe without escaping. To escape the text, use the + :meth:`escape` class method instead. + + >>> Markup("Hello, <em>World</em>!") + Markup('Hello, <em>World</em>!') + >>> Markup(42) + Markup('42') + >>> Markup.escape("Hello, <em>World</em>!") + Markup('Hello &lt;em&gt;World&lt;/em&gt;!') + + This implements the ``__html__()`` interface that some frameworks + use. Passing an object that implements ``__html__()`` will wrap the + output of that method, marking it safe. + + >>> class Foo: + ... def __html__(self): + ... return '<a href="/foo">foo</a>' + ... + >>> Markup(Foo()) + Markup('<a href="/foo">foo</a>') + + This is a subclass of :class:`str`. It has the same methods, but + escapes their arguments and returns a ``Markup`` instance. + + >>> Markup("<em>%s</em>") % ("foo & bar",) + Markup('<em>foo &amp; bar</em>') + >>> Markup("<em>Hello</em> ") + "<foo>" + Markup('<em>Hello</em> &lt;foo&gt;') + """ + + __slots__ = () + + def __new__( + cls, object: t.Any = "", encoding: str | None = None, errors: str = "strict" + ) -> te.Self: + if hasattr(object, "__html__"): + object = object.__html__() + + if encoding is None: + return super().__new__(cls, object) + + return super().__new__(cls, object, encoding, errors) + + def __html__(self, /) -> te.Self: + return self + + def __add__(self, value: str | _HasHTML, /) -> te.Self: + if isinstance(value, str) or hasattr(value, "__html__"): + return self.__class__(super().__add__(self.escape(value))) + + return NotImplemented + + def __radd__(self, value: str | _HasHTML, /) -> te.Self: + if isinstance(value, str) or hasattr(value, "__html__"): + return self.escape(value).__add__(self) + + return NotImplemented + + def __mul__(self, value: t.SupportsIndex, /) -> te.Self: + return self.__class__(super().__mul__(value)) + + def __rmul__(self, value: t.SupportsIndex, /) -> te.Self: + return self.__class__(super().__mul__(value)) + + def __mod__(self, value: t.Any, /) -> te.Self: + if isinstance(value, tuple): + # a tuple of arguments, each wrapped + value = tuple(_MarkupEscapeHelper(x, self.escape) for x in value) + elif hasattr(type(value), "__getitem__") and not isinstance(value, str): + # a mapping of arguments, wrapped + value = _MarkupEscapeHelper(value, self.escape) + else: + # a single argument, wrapped with the helper and a tuple + value = (_MarkupEscapeHelper(value, self.escape),) + + return self.__class__(super().__mod__(value)) + + def __repr__(self, /) -> str: + return f"{self.__class__.__name__}({super().__repr__()})" + + def join(self, iterable: cabc.Iterable[str | _HasHTML], /) -> te.Self: + return self.__class__(super().join(map(self.escape, iterable))) + + def split( # type: ignore[override] + self, /, sep: str | None = None, maxsplit: t.SupportsIndex = -1 + ) -> list[te.Self]: + return [self.__class__(v) for v in super().split(sep, maxsplit)] + + def rsplit( # type: ignore[override] + self, /, sep: str | None = None, maxsplit: t.SupportsIndex = -1 + ) -> list[te.Self]: + return [self.__class__(v) for v in super().rsplit(sep, maxsplit)] + + def splitlines( # type: ignore[override] + self, /, keepends: bool = False + ) -> list[te.Self]: + return [self.__class__(v) for v in
super().splitlines(keepends)] + + def unescape(self, /) -> str: + """Convert escaped markup back into a text string. This replaces + HTML entities with the characters they represent. + + >>> Markup("Main &raquo; <em>About</em>").unescape() + 'Main » <em>About</em>' + """ + from html import unescape + + return unescape(str(self)) + + def striptags(self, /) -> str: + """:meth:`unescape` the markup, remove tags, and normalize + whitespace to single spaces. + + >>> Markup("Main &raquo;\t<em>About</em>").striptags() + 'Main » About' + """ + value = str(self) + + # Look for comments then tags separately. Otherwise, a comment that + # contains a tag would end early, leaving some of the comment behind. + + # keep finding comment start marks + while (start := value.find("<!--")) != -1: + # find a comment end mark beyond the start, otherwise stop + if (end := value.find("-->", start)) == -1: + break + + value = f"{value[:start]}{value[end + 3:]}" + + # remove tags using the same method + while (start := value.find("<")) != -1: + if (end := value.find(">", start)) == -1: + break + + value = f"{value[:start]}{value[end + 1:]}" + + # collapse spaces + value = " ".join(value.split()) + return self.__class__(value).unescape() + + @classmethod + def escape(cls, s: t.Any, /) -> te.Self: + """Escape a string. Calls :func:`escape` and ensures that for + subclasses the correct type is returned. + """ + rv = escape(s) + + if rv.__class__ is not cls: + return cls(rv) + + return rv # type: ignore[return-value] + + def __getitem__(self, key: t.SupportsIndex | slice, /) -> te.Self: + return self.__class__(super().__getitem__(key)) + + def capitalize(self, /) -> te.Self: + return self.__class__(super().capitalize()) + + def title(self, /) -> te.Self: + return self.__class__(super().title()) + + def lower(self, /) -> te.Self: + return self.__class__(super().lower()) + + def upper(self, /) -> te.Self: + return self.__class__(super().upper()) + + def replace(self, old: str, new: str, count: t.SupportsIndex = -1, /) -> te.Self: + return self.__class__(super().replace(old, self.escape(new), count)) + + def ljust(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().ljust(width, self.escape(fillchar))) + + def rjust(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().rjust(width, self.escape(fillchar))) + + def lstrip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().lstrip(chars)) + + def rstrip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().rstrip(chars)) + + def center(self, width: t.SupportsIndex, fillchar: str = " ", /) -> te.Self: + return self.__class__(super().center(width, self.escape(fillchar))) + + def strip(self, chars: str | None = None, /) -> te.Self: + return self.__class__(super().strip(chars)) + + def translate( + self, + table: cabc.Mapping[int, str | int | None], # type: ignore[override] + /, + ) -> str: + return self.__class__(super().translate(table)) + + def expandtabs(self, /, tabsize: t.SupportsIndex = 8) -> te.Self: + return self.__class__(super().expandtabs(tabsize)) + + def swapcase(self, /) -> te.Self: + return self.__class__(super().swapcase()) + + def zfill(self, width: t.SupportsIndex, /) -> te.Self: + return self.__class__(super().zfill(width)) + + def casefold(self, /) -> te.Self: + return self.__class__(super().casefold()) + + def removeprefix(self, prefix: str, /) -> te.Self: + return self.__class__(super().removeprefix(prefix)) + + def removesuffix(self, suffix: str) -> te.Self: + return self.__class__(super().removesuffix(suffix)) + + def partition(self, sep: str, /) ->
tuple[te.Self, te.Self, te.Self]: + left, sep, right = super().partition(sep) + cls = self.__class__ + return cls(left), cls(sep), cls(right) + + def rpartition(self, sep: str, /) -> tuple[te.Self, te.Self, te.Self]: + left, sep, right = super().rpartition(sep) + cls = self.__class__ + return cls(left), cls(sep), cls(right) + + def format(self, *args: t.Any, **kwargs: t.Any) -> te.Self: + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, args, kwargs)) + + def format_map( + self, + mapping: cabc.Mapping[str, t.Any], # type: ignore[override] + /, + ) -> te.Self: + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, (), mapping)) + + def __html_format__(self, format_spec: str, /) -> te.Self: + if format_spec: + raise ValueError("Unsupported format specification for Markup.") + + return self + + +class EscapeFormatter(string.Formatter): + __slots__ = ("escape",) + + def __init__(self, escape: _TPEscape) -> None: + self.escape: _TPEscape = escape + super().__init__() + + def format_field(self, value: t.Any, format_spec: str) -> str: + if hasattr(value, "__html_format__"): + rv = value.__html_format__(format_spec) + elif hasattr(value, "__html__"): + if format_spec: + raise ValueError( + f"Format specifier {format_spec} given, but {type(value)} does not" + " define __html_format__. A class that defines __html__ must define" + " __html_format__ to work with format specifiers." + ) + rv = value.__html__() + else: + # We need to make sure the format spec is str here as + # otherwise the wrong callback methods are invoked. + rv = super().format_field(value, str(format_spec)) + return str(self.escape(rv)) + + +class _MarkupEscapeHelper: + """Helper for :meth:`Markup.__mod__`.""" + + __slots__ = ("obj", "escape") + + def __init__(self, obj: t.Any, escape: _TPEscape) -> None: + self.obj: t.Any = obj + self.escape: _TPEscape = escape + + def __getitem__(self, key: t.Any, /) -> te.Self: + return self.__class__(self.obj[key], self.escape) + + def __str__(self, /) -> str: + return str(self.escape(self.obj)) + + def __repr__(self, /) -> str: + return str(self.escape(repr(self.obj))) + + def __int__(self, /) -> int: + return int(self.obj) + + def __float__(self, /) -> float: + return float(self.obj) + + +def __getattr__(name: str) -> t.Any: + if name == "__version__": + import importlib.metadata + import warnings + + warnings.warn( + "The '__version__' attribute is deprecated and will be removed in" + " MarkupSafe 3.1. 
Use feature detection, or" + ' `importlib.metadata.version("markupsafe")`, instead.', + stacklevel=2, + ) + return importlib.metadata.version("markupsafe") + + raise AttributeError(name) diff --git a/venv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b3bc18f6359110c922e09c79eb763b0631a372e5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/markupsafe/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc b/venv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70d026aa3ffaaf2a17cd1a2c2f502a1a0cf7a723 Binary files /dev/null and b/venv/lib/python3.10/site-packages/markupsafe/__pycache__/_native.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/markupsafe/_native.py b/venv/lib/python3.10/site-packages/markupsafe/_native.py new file mode 100644 index 0000000000000000000000000000000000000000..088b3bca9839ee489eefa546a0773a465b8cd0ca --- /dev/null +++ b/venv/lib/python3.10/site-packages/markupsafe/_native.py @@ -0,0 +1,8 @@ +def _escape_inner(s: str, /) -> str: + return ( + s.replace("&", "&") + .replace(">", ">") + .replace("<", "<") + .replace("'", "'") + .replace('"', """) + ) diff --git a/venv/lib/python3.10/site-packages/markupsafe/_speedups.c b/venv/lib/python3.10/site-packages/markupsafe/_speedups.c new file mode 100644 index 0000000000000000000000000000000000000000..09dd57caa8c364f431b4fe6cbf37d4cc3172687e --- /dev/null +++ b/venv/lib/python3.10/site-packages/markupsafe/_speedups.c @@ -0,0 +1,204 @@ +#include + +#define GET_DELTA(inp, inp_end, delta) \ + while (inp < inp_end) { \ + switch (*inp++) { \ + case '"': \ + case '\'': \ + case '&': \ + delta += 4; \ + break; \ + case '<': \ + case '>': \ + delta += 3; \ + break; \ + } \ + } + +#define DO_ESCAPE(inp, inp_end, outp) \ + { \ + Py_ssize_t ncopy = 0; \ + while (inp < inp_end) { \ + switch (*inp) { \ + case '"': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '4'; \ + *outp++ = ';'; \ + break; \ + case '\'': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '9'; \ + *outp++ = ';'; \ + break; \ + case '&': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'a'; \ + *outp++ = 'm'; \ + *outp++ = 'p'; \ + *outp++ = ';'; \ + break; \ + case '<': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'l'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + case '>': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'g'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + default: \ + ncopy++; \ + } \ + inp++; \ + } \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + } + +static PyObject* +escape_unicode_kind1(PyUnicodeObject *in) +{ + Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in); + Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS1 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return 
(PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, + PyUnicode_IS_ASCII(in) ? 127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return NULL; + + // This check is no longer needed in Python 3.12. + if (PyUnicode_READY(s)) + return NULL; + + switch (PyUnicode_KIND(s)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1((PyUnicodeObject*) s); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2((PyUnicodeObject*) s); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4((PyUnicodeObject*) s); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyMethodDef module_methods[] = { + {"_escape_inner", (PyCFunction)escape_unicode, METH_O, NULL}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef module_definition = { + PyModuleDef_HEAD_INIT, + "markupsafe._speedups", + NULL, + -1, + module_methods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__speedups(void) +{ + PyObject *m = PyModule_Create(&module_definition); + + if (m == NULL) { + return NULL; + } + + #ifdef Py_GIL_DISABLED + PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); + #endif + + return m; +} diff --git a/venv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so b/venv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..59d567f5e40f98be1d8948caa6734614789ee74e Binary files /dev/null and b/venv/lib/python3.10/site-packages/markupsafe/_speedups.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/markupsafe/_speedups.pyi b/venv/lib/python3.10/site-packages/markupsafe/_speedups.pyi new file mode 100644 index 0000000000000000000000000000000000000000..8c8885852a26eba90d3ca1783beca535d4d43bb0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/markupsafe/_speedups.pyi @@ -0,0 +1 @@ +def _escape_inner(s: str, /) -> str: ... 
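For orientation, the markupsafe modules added above can be exercised as follows. This is an illustrative sketch, not part of the vendored files; it assumes the `markupsafe` package from this venv is importable, and it mirrors the behavior the docstrings describe, including why `soft_str` avoids double-escaping:

```python
# Illustrative sketch, not part of the vendored diff above.
from markupsafe import Markup, escape, soft_str

# Plain strings get escaped; the result is a Markup instance marked safe.
value = escape("<User 1>")
assert value == Markup("&lt;User 1&gt;")

# str() drops the "safe" marker, so escaping again double-escapes the entities...
assert escape(str(value)) == Markup("&amp;lt;User 1&amp;gt;")

# ...while soft_str preserves the Markup wrapper, so escape() leaves it alone.
assert escape(soft_str(value)) == value

# Markup's str operations escape their arguments automatically.
assert Markup("<em>%s</em>") % "foo & bar" == Markup("<em>foo &amp; bar</em>")
```

Note that `_native.py` and `_speedups.c` implement the same `_escape_inner` contract: the C version works in two passes, with `GET_DELTA` first measuring how much longer the escaped string will be so that `DO_ESCAPE` can copy the replacements into an exactly-sized new string object, returning the input unchanged (and uncopied) when nothing needs escaping.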
diff --git a/venv/lib/python3.10/site-packages/markupsafe/py.typed b/venv/lib/python3.10/site-packages/markupsafe/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/METADATA b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a2f4cae1a0dc9bacfb51a90e34ed1d3506bfcac0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/METADATA @@ -0,0 +1,90 @@ +Metadata-Version: 2.4 +Name: mistral_common +Version: 1.8.3 +Summary: Mistral-common is a library of common utilities for Mistral AI. +Author-email: bam4d +License-Expression: Apache-2.0 +Requires-Python: <3.14,>=3.9.0 +Description-Content-Type: text/markdown +Requires-Dist: pydantic<3.0,>=2.7 +Requires-Dist: jsonschema>=4.21.1 +Requires-Dist: sentencepiece>=0.2.0 +Requires-Dist: typing-extensions>=4.11.0 +Requires-Dist: tiktoken>=0.7.0 +Requires-Dist: pillow>=10.3.0 +Requires-Dist: requests>=2.0.0 +Requires-Dist: numpy>=1.25 +Requires-Dist: pydantic-extra-types[pycountry]>=2.10.5 +Provides-Extra: opencv +Requires-Dist: opencv-python-headless>=4.0.0; extra == "opencv" +Provides-Extra: soundfile +Requires-Dist: soundfile>=0.12.1; extra == "soundfile" +Provides-Extra: soxr +Requires-Dist: soxr>=0.5.0; extra == "soxr" +Provides-Extra: audio +Requires-Dist: mistral_common[soundfile]; extra == "audio" +Requires-Dist: mistral_common[soxr]; extra == "audio" +Provides-Extra: image +Requires-Dist: mistral_common[opencv]; extra == "image" +Provides-Extra: hf-hub +Requires-Dist: huggingface-hub>=0.32.4; extra == "hf-hub" +Provides-Extra: server +Requires-Dist: fastapi[standard]>=0.115.12; extra == "server" +Requires-Dist: pydantic-settings>=2.9.1; extra == "server" +Requires-Dist: click>=8.1.0; extra == "server" + +
+ +[Mistral AI logo] + +
+
+ +# Mistral-common + +[![PyPI version](https://img.shields.io/pypi/v/mistral-common?label=release&logo=pypi&logoColor=white)](https://pypi.org/project/mistral-common/) +[![Tests](https://img.shields.io/github/actions/workflow/status/mistralai/mistral-common/lint_build_test.yaml?label=tests&branch=main)](https://github.com/mistralai/mistral-common/actions/workflows/lint_build_test.yaml) +[![Documentation](https://img.shields.io/website?url=https%3A%2F%2Fmistralai.github.io%2Fmistral-common%2F&up_message=online&down_message=offline&label=docs)](https://mistralai.github.io/mistral-common/) +[![Python version](https://img.shields.io/badge/dynamic/json?query=info.requires_python&label=python&url=https%3A%2F%2Fpypi.org%2Fpypi%2Fmistral-common%2Fjson)](https://www.python.org/downloads/) +[![License](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](./LICENCE) + +
+ +## What is it? + +**mistral-common** is a set of tools to help you work with [Mistral AI](https://mistral.ai/) models. + +We open-source the tokenizers and the validation and normalization code that can be used with our models. + +This ensures that you can take full advantage of our models for the following features: + +- **tokenization** of text, images and tool calls. +- **validation and normalization** of requests, messages, tool calls, and responses. This is built on top of the [Pydantic](https://docs.pydantic.dev/latest/) library. + +We also version our tokenizers to guarantee backward compatibility for the models that we release. + +## Who is it for? + +This library is for you if you want to: + +- use our models in your own application. +- build your own models and want to use the same tokenization and validation code as we do. + +## How to use it? + +You can install the library using pip: +```sh +pip install mistral-common[opencv] +``` + +For more information, please refer to the [documentation](https://mistralai.github.io/mistral-common/). + +## How to contribute? + +We welcome contributions to this library. Please open an issue on our [GitHub repository](https://github.com/mistralai/mistral-common/issues) if you have any questions or suggestions. + +All of our features are tested to ensure correct behavior, but if you encounter a bug or have difficulty using `mistral-common`, please open an issue as well. + +## License + +This library is licensed under the Apache 2.0 License. See the [LICENCE](./LICENCE) file for more information. diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/RECORD b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..9fb83f2c9141ad95ffda95d4f2018b826d01cb63 --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/RECORD @@ -0,0 +1,106 @@ +../../../bin/mistral_common,sha256=tJ8vTJPFusq4R4NQPlhDqZaDFF-bNGE2X5O2OsaLp9U,303 +mistral_common-1.8.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +mistral_common-1.8.3.dist-info/METADATA,sha256=a3b9kmhiUzfrbKzFXiQHwB6qNE86o8TEOXW5rXtjLVE,3803 +mistral_common-1.8.3.dist-info/RECORD,, +mistral_common-1.8.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +mistral_common-1.8.3.dist-info/entry_points.txt,sha256=99lAxT6jReW4ZHGQtql5zua-HQyxvYqYjBpoNixAIvw,76 +mistral_common-1.8.3.dist-info/top_level.txt,sha256=erE7iWVoW3e9e4bF6zSa3hUYLkJir1ZeOeN5vIBfWSg,15 +mistral_common/.DS_Store,sha256=Q9_iboH-8HlPB5s_3lHaqDhwSg460_V-fBs2J5OlHz8,6148 +mistral_common/__init__.py,sha256=Ep-kqB0quYLZHNvIGhHgL7PZ_utthXoRM8Lgx4F5PxU,86 +mistral_common/__pycache__/__init__.cpython-310.pyc,, +mistral_common/__pycache__/audio.cpython-310.pyc,, +mistral_common/__pycache__/base.cpython-310.pyc,, +mistral_common/__pycache__/exceptions.cpython-310.pyc,, +mistral_common/__pycache__/image.cpython-310.pyc,, +mistral_common/__pycache__/multimodal.cpython-310.pyc,, +mistral_common/audio.py,sha256=XtKOIadUrBtgthAj4HFEuLf_yMKsNXnkfWqQ02ec2l8,13129 +mistral_common/base.py,sha256=S8QlbIBaNwijYMnPEVbXR0dBrmDiuWdIJeiUXN2dctw,304 +mistral_common/data/mistral_instruct_tokenizer_240216.model.v2,sha256=N_ADdN6khljuj10PIYlbm8VcsBA5OWB8gYW_0cbKH4k,587404 +mistral_common/data/mistral_instruct_tokenizer_240323.model.v3,sha256=mt3Ivc5ZiESK6Btykzb0OoEmIWCujadgZ0utq51MfTM,587591
+mistral_common/data/mistral_instruct_tokenizer_241114.model.v7,sha256=G5aLjcNS9CGSNnM3x4zMYeHq3cbWQaV5Ny1PIGlL63o,587562 +mistral_common/data/mistral_instruct_tokenizer_241114.model.v7m1,sha256=G5aLjcNS9CGSNnM3x4zMYeHq3cbWQaV5Ny1PIGlL63o,587562 +mistral_common/data/tekken_240718.json,sha256=7M0WZdLkd2l8M8t_Dapvbf78V6CmvOtm1L5SlS-CdRY,14801223 +mistral_common/data/tekken_240911.json,sha256=GUji1IsOc3fxu18SEPGuX5hJNOdXE_wH4kUnKbg2UxY,19280963 +mistral_common/data/tokenizer.model.v1,sha256=2t_VbXZnFcYdLveApSWrQ7jm2k3mhlvaPZX9714TQFU,493443 +mistral_common/exceptions.py,sha256=B3Y8Q0Q8THTDk7y2FUuERD8QCZdGzrrDOxKClHhADe4,4649 +mistral_common/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/experimental/__pycache__/__init__.cpython-310.pyc,, +mistral_common/experimental/__pycache__/think.cpython-310.pyc,, +mistral_common/experimental/__pycache__/tools.cpython-310.pyc,, +mistral_common/experimental/__pycache__/utils.cpython-310.pyc,, +mistral_common/experimental/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/experimental/app/__pycache__/__init__.cpython-310.pyc,, +mistral_common/experimental/app/__pycache__/main.cpython-310.pyc,, +mistral_common/experimental/app/__pycache__/models.cpython-310.pyc,, +mistral_common/experimental/app/__pycache__/routers.cpython-310.pyc,, +mistral_common/experimental/app/main.py,sha256=K43Ko0L3x6Qpg9oR26-l9EkB9XqlUqsJXTf_9TOS5fY,3732 +mistral_common/experimental/app/models.py,sha256=E1Ddi72acextiC72gnYlrv_qth3V-krKoIKoK4DeWb4,4483 +mistral_common/experimental/app/routers.py,sha256=YFnWfUCDB2Pa34k6_S7v5ofhlGP-7emikp-rmasUiLE,7925 +mistral_common/experimental/think.py,sha256=cdRARcNQG6wAY3PTkQmBZPPcyDbrreUbCAAv7vGkNhA,1647 +mistral_common/experimental/tools.py,sha256=BYcJMyaw0o03QDjxWxOO_ZZfYgxbi6xLAQTfto3WsMs,6474 +mistral_common/experimental/utils.py,sha256=SCIupnJ9TFOdUxmfnsZCFak_vO-U79WEJkoDb6-hDAw,1737 +mistral_common/image.py,sha256=1LeQOi1gjYYNPJo0bRaTJ3AybqqWp21DWa1Ke172YdA,3247 +mistral_common/multimodal.py,sha256=Kk3xhdQMnDNF5rM1z3TmxBPE3_eCljcJKGWeDKEWQrE,201 +mistral_common/protocol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/__pycache__/base.cpython-310.pyc,, +mistral_common/protocol/__pycache__/utils.cpython-310.pyc,, +mistral_common/protocol/base.py,sha256=HAlfFPhf-nIYajPhQOjSfcx5ChEWpXEHnkZsTopubgY,1489 +mistral_common/protocol/embedding/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/embedding/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/embedding/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/embedding/__pycache__/response.cpython-310.pyc,, +mistral_common/protocol/embedding/request.py,sha256=dCCbvC8OAen0dv00JfeMHEurcPGh9rYSOxBY78qEmlk,858 +mistral_common/protocol/embedding/response.py,sha256=lbgJWaNIhKRSe_2pNW_tREozGRRWo0S1ZH8TMwoDOVA,1926 +mistral_common/protocol/fim/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/fim/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/fim/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/fim/request.py,sha256=mIoIZYxBRhrKquPpNZApLdiP0e50AJB5TVEctWkXyOY,555 +mistral_common/protocol/instruct/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/instruct/__pycache__/__init__.cpython-310.pyc,, 
+mistral_common/protocol/instruct/__pycache__/converters.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/messages.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/normalize.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/response.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/tool_calls.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/validator.cpython-310.pyc,, +mistral_common/protocol/instruct/converters.py,sha256=sQRPlyPug6XdpraAwGah7Cy5zapep_USR-vT3bKn8tE,3610 +mistral_common/protocol/instruct/messages.py,sha256=zslvaqEOU1Y3wB3PymMaQDC_Vc2voVGfJ27sF9YiME0,21780 +mistral_common/protocol/instruct/normalize.py,sha256=h8Fl9bVDPg0ZYMgcD3-RqyCp5zrQ8RR8ET90pYQGOQ4,18810 +mistral_common/protocol/instruct/request.py,sha256=BCV2jkPIrNPdcMkLbAQMFKT9Yf8W-jQQ9CYqM2_wlKU,15066 +mistral_common/protocol/instruct/response.py,sha256=0QOJifL9JfsQWOZy3b4xw1RaWuzCbu3djSBujxOfoRM,5937 +mistral_common/protocol/instruct/tool_calls.py,sha256=yQYOqAc7zL0PqLjzb6wVQmPjl5R_sZkSBQCzlrG0yv4,4552 +mistral_common/protocol/instruct/validator.py,sha256=oSnHNPBFJfDcc73EzQdDWWvhtlAbEnFN16xmWs_rB3A,19084 +mistral_common/protocol/transcription/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/transcription/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/transcription/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/transcription/request.py,sha256=AfWJeGF3v5IH_osQ2gxTPq4rn6mGGCgTYlVJeE3Q9sE,4626 +mistral_common/protocol/utils.py,sha256=2OuTsQzJaMwKJnzIJIvGx8KhNgyy72WsMjqtORkT3ls,107 +mistral_common/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/instruct/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/instruct/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/instruct/__pycache__/request.cpython-310.pyc,, +mistral_common/tokens/instruct/request.py,sha256=JONqBmiUk1wbdSZ2c1Drr4Y1jDxx9mJZnp67FNYBQFQ,488 +mistral_common/tokens/tokenizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/tokenizers/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/audio.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/base.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/image.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/instruct.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/mistral.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/multimodal.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/sentencepiece.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/tekken.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/utils.cpython-310.pyc,, +mistral_common/tokens/tokenizers/audio.py,sha256=5zAPP9PWuFhEb9XMM1RUVeG6lCS8MosQKPyoNVRuzV0,8008 +mistral_common/tokens/tokenizers/base.py,sha256=0PAaxbnZ8r8Dny4XSmvcOJVcYS5ncuRxN43-H-QXneo,13592 +mistral_common/tokens/tokenizers/image.py,sha256=Rt4NCdKVML-8N3udbt4xAw4niDUdhTsz6r-Yj5MOOX4,7152 +mistral_common/tokens/tokenizers/instruct.py,sha256=tTrS19KxB_UFMH2uq8TmCqBySAWQpzxv9IJ9ZFBxGAo,45183 
+mistral_common/tokens/tokenizers/mistral.py,sha256=iI2RdwOfZYDmzKOZ1uPr31NyspdFsioEDo1EVKx5o8s,18401 +mistral_common/tokens/tokenizers/multimodal.py,sha256=gWLGNSPFgQ0sT5E-p8OPnv5N12ehnP92MUyHo3UQbqQ,219 +mistral_common/tokens/tokenizers/sentencepiece.py,sha256=BGbzlnFpHi0_GiEvWEOGVNE5M7BOS0_spmKqAyqjCkI,8970 +mistral_common/tokens/tokenizers/tekken.py,sha256=fR8t-hXsvorKCEPXzPrVkCCCMy2LHNr3Jcr09hvAyq0,22062 +mistral_common/tokens/tokenizers/utils.py,sha256=jU572wjB2HW-aNuRyf0pX_R92KXpAtlKNn2wYa4InN0,6520 diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/WHEEL b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e7fa31b6f3f78deb1022c1f7927f07d4d16da822 --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..39cc2443dd7f7d25b2dee6bd9993ab32d3b60e6a --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +mistral_common = mistral_common.experimental.app.main:cli diff --git a/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..26d660edb5751c8844816a6cf0baba690b65a8a4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/mistral_common-1.8.3.dist-info/top_level.txt @@ -0,0 +1 @@ +mistral_common diff --git a/venv/lib/python3.10/site-packages/ninja/__init__.py b/venv/lib/python3.10/site-packages/ninja/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..252a4b8337015af7d975e3813f2d90071caa58bf --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/__init__.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import os +import subprocess +import sys +import sysconfig +from collections.abc import Iterable +from typing import NoReturn + +from ._version import version as __version__ +from .ninja_syntax import Writer, escape, expand + +__all__ = ["BIN_DIR", "DATA", "Writer", "__version__", "escape", "expand", "ninja"] + + +def __dir__() -> list[str]: + return __all__ + + +def _get_ninja_dir() -> str: + ninja_exe = "ninja" + sysconfig.get_config_var("EXE") + + # Default path + path = os.path.join(sysconfig.get_path("scripts"), ninja_exe) + if os.path.isfile(path): + return os.path.dirname(path) + + # User path + if sys.version_info >= (3, 10): + user_scheme = sysconfig.get_preferred_scheme("user") + elif os.name == "nt": + user_scheme = "nt_user" + elif sys.platform.startswith("darwin") and getattr(sys, "_framework", None): + user_scheme = "osx_framework_user" + else: + user_scheme = "posix_user" + + path = sysconfig.get_path("scripts", scheme=user_scheme) + + if os.path.isfile(os.path.join(path, ninja_exe)): + return path + + # Fallback to python location + path = os.path.dirname(sys.executable) + if os.path.isfile(os.path.join(path, ninja_exe)): + return path + + return "" + + +BIN_DIR = _get_ninja_dir() + + +def _program(name: str, args: Iterable[str]) -> int: + cmd = os.path.join(BIN_DIR, name) + return 
subprocess.call([cmd, *args], close_fds=False) + + +def ninja() -> NoReturn: + raise SystemExit(_program('ninja', sys.argv[1:])) diff --git a/venv/lib/python3.10/site-packages/ninja/__main__.py b/venv/lib/python3.10/site-packages/ninja/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..87772abf7bee12bc06c887a199e791989b8ccdbc --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/__main__.py @@ -0,0 +1,6 @@ +from __future__ import annotations + +from ninja import ninja + +if __name__ == '__main__': + ninja() diff --git a/venv/lib/python3.10/site-packages/ninja/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/ninja/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a1cfeb8fc41c2796f785e938368499e2695599d Binary files /dev/null and b/venv/lib/python3.10/site-packages/ninja/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ninja/__pycache__/__main__.cpython-310.pyc b/venv/lib/python3.10/site-packages/ninja/__pycache__/__main__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24f0d30ee7e8d113938bedf4c50487b1ca5a2ff2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ninja/__pycache__/__main__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ninja/__pycache__/_version.cpython-310.pyc b/venv/lib/python3.10/site-packages/ninja/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..098dbca11bcf1868d570fcdc662a71207ef4600f Binary files /dev/null and b/venv/lib/python3.10/site-packages/ninja/__pycache__/_version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ninja/__pycache__/ninja_syntax.cpython-310.pyc b/venv/lib/python3.10/site-packages/ninja/__pycache__/ninja_syntax.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..58201babb209b67168492afacc120d5125bebbc3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/ninja/__pycache__/ninja_syntax.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/ninja/_version.py b/venv/lib/python3.10/site-packages/ninja/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..667df30e26167da3017fb177fcf95c652a4040b9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/_version.py @@ -0,0 +1 @@ +version = "1.13.0" diff --git a/venv/lib/python3.10/site-packages/ninja/_version.pyi b/venv/lib/python3.10/site-packages/ninja/_version.pyi new file mode 100644 index 0000000000000000000000000000000000000000..91744f98344db40b12025cf4e48d9e6320225968 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/_version.pyi @@ -0,0 +1,4 @@ +from __future__ import annotations + +version: str +version_tuple: tuple[int, int, int] | tuple[int, int, int, str, str] diff --git a/venv/lib/python3.10/site-packages/ninja/ninja_syntax.py b/venv/lib/python3.10/site-packages/ninja/ninja_syntax.py new file mode 100644 index 0000000000000000000000000000000000000000..2aa8456e9dbaf802ae8de7c83594ea1206e293e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/ninja_syntax.py @@ -0,0 +1,231 @@ +#!/usr/bin/python + +# Copyright 2011 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python module for generating .ninja files. + +Note that this is emphatically not a required piece of Ninja; it's +just a helpful utility for build-file-generation systems that already +use Python. +""" + +import re +import textwrap +from io import TextIOWrapper +from typing import Dict, List, Match, Optional, Tuple, Union + +def escape_path(word: str) -> str: + return word.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:') + +class Writer(object): + def __init__(self, output: TextIOWrapper, width: int = 78) -> None: + self.output = output + self.width = width + + def newline(self) -> None: + self.output.write('\n') + + def comment(self, text: str) -> None: + for line in textwrap.wrap(text, self.width - 2, break_long_words=False, + break_on_hyphens=False): + self.output.write('# ' + line + '\n') + + def variable( + self, + key: str, + value: Optional[Union[bool, int, float, str, List[str]]], + indent: int = 0, + ) -> None: + if value is None: + return + if isinstance(value, list): + value = ' '.join(filter(None, value)) # Filter out empty strings. + self._line('%s = %s' % (key, value), indent) + + def pool(self, name: str, depth: int) -> None: + self._line('pool %s' % name) + self.variable('depth', depth, indent=1) + + def rule( + self, + name: str, + command: str, + description: Optional[str] = None, + depfile: Optional[str] = None, + generator: bool = False, + pool: Optional[str] = None, + restat: bool = False, + rspfile: Optional[str] = None, + rspfile_content: Optional[str] = None, + deps: Optional[Union[str, List[str]]] = None, + ) -> None: + self._line('rule %s' % name) + self.variable('command', command, indent=1) + if description: + self.variable('description', description, indent=1) + if depfile: + self.variable('depfile', depfile, indent=1) + if generator: + self.variable('generator', '1', indent=1) + if pool: + self.variable('pool', pool, indent=1) + if restat: + self.variable('restat', '1', indent=1) + if rspfile: + self.variable('rspfile', rspfile, indent=1) + if rspfile_content: + self.variable('rspfile_content', rspfile_content, indent=1) + if deps: + self.variable('deps', deps, indent=1) + + def build( + self, + outputs: Union[str, List[str]], + rule: str, + inputs: Optional[Union[str, List[str]]] = None, + implicit: Optional[Union[str, List[str]]] = None, + order_only: Optional[Union[str, List[str]]] = None, + variables: Optional[ + Union[ + List[Tuple[str, Optional[Union[str, List[str]]]]], + Dict[str, Optional[Union[str, List[str]]]], + ] + ] = None, + implicit_outputs: Optional[Union[str, List[str]]] = None, + pool: Optional[str] = None, + dyndep: Optional[str] = None, + ) -> List[str]: + outputs = as_list(outputs) + out_outputs = [escape_path(x) for x in outputs] + all_inputs = [escape_path(x) for x in as_list(inputs)] + + if implicit: + implicit = [escape_path(x) for x in as_list(implicit)] + all_inputs.append('|') + all_inputs.extend(implicit) + if order_only: + order_only = [escape_path(x) for x in as_list(order_only)] + all_inputs.append('||') + all_inputs.extend(order_only) + if implicit_outputs: + implicit_outputs = [escape_path(x) + for x in 
as_list(implicit_outputs)] + out_outputs.append('|') + out_outputs.extend(implicit_outputs) + + self._line('build %s: %s' % (' '.join(out_outputs), + ' '.join([rule] + all_inputs))) + if pool is not None: + self._line(' pool = %s' % pool) + if dyndep is not None: + self._line(' dyndep = %s' % dyndep) + + if variables: + if isinstance(variables, dict): + iterator = iter(variables.items()) + else: + iterator = iter(variables) + + for key, val in iterator: + self.variable(key, val, indent=1) + + return outputs + + def include(self, path: str) -> None: + self._line('include %s' % path) + + def subninja(self, path: str) -> None: + self._line('subninja %s' % path) + + def default(self, paths: Union[str, List[str]]) -> None: + self._line('default %s' % ' '.join(as_list(paths))) + + def _count_dollars_before_index(self, s: str, i: int) -> int: + """Returns the number of '$' characters right in front of s[i].""" + dollar_count = 0 + dollar_index = i - 1 + while dollar_index > 0 and s[dollar_index] == '$': + dollar_count += 1 + dollar_index -= 1 + return dollar_count + + def _line(self, text: str, indent: int = 0) -> None: + """Write 'text' word-wrapped at self.width characters.""" + leading_space = ' ' * indent + while len(leading_space) + len(text) > self.width: + # The text is too wide; wrap if possible. + + # Find the rightmost space that would obey our width constraint and + # that's not an escaped space. + available_space = self.width - len(leading_space) - len(' $') + space = available_space + while True: + space = text.rfind(' ', 0, space) + if (space < 0 or + self._count_dollars_before_index(text, space) % 2 == 0): + break + + if space < 0: + # No such space; just use the first unescaped space we can find. + space = available_space - 1 + while True: + space = text.find(' ', space + 1) + if (space < 0 or + self._count_dollars_before_index(text, space) % 2 == 0): + break + if space < 0: + # Give up on breaking. + break + + self.output.write(leading_space + text[0:space] + ' $\n') + text = text[space+1:] + + # Subsequent lines are continuations, so indent them. + leading_space = ' ' * (indent+2) + + self.output.write(leading_space + text + '\n') + + def close(self) -> None: + self.output.close() + + +def as_list(input: Optional[Union[str, List[str]]]) -> List[str]: + if input is None: + return [] + if isinstance(input, list): + return input + return [input] + + +def escape(string: str) -> str: + """Escape a string such that it can be embedded into a Ninja file without + further interpretation.""" + assert '\n' not in string, 'Ninja syntax does not allow newlines' + # We only have one special metacharacter: '$'. + return string.replace('$', '$$') + + +def expand(string: str, vars: Dict[str, str], local_vars: Dict[str, str] = {}) -> str: + """Expand a string containing $vars as Ninja would. + + Note: doesn't handle the full Ninja variable syntax, but it's enough + to make configure.py's use of it work. 
+ """ + def exp(m: Match[str]) -> str: + var = m.group(1) + if var == '$': + return '$' + return local_vars.get(var, vars.get(var, '')) + return re.sub(r'\$(\$|\w*)', exp, string) diff --git a/venv/lib/python3.10/site-packages/ninja/ninja_syntax.pyi b/venv/lib/python3.10/site-packages/ninja/ninja_syntax.pyi new file mode 100644 index 0000000000000000000000000000000000000000..8f84bf75bd33a50ab34103342abb227c2d177adc --- /dev/null +++ b/venv/lib/python3.10/site-packages/ninja/ninja_syntax.pyi @@ -0,0 +1,37 @@ +from collections.abc import Mapping, Sequence +from os import PathLike + +def escape_path(word: str) -> str: ... + +class Writer: + output: str + width: int + + def __init__(self, output: str, width: int = ...): ... + def newline(self) -> None: ... + def comment(self, text: str) -> None: ... + def variable(self, key: str, value: list[str] | str, indent: int = ...) -> None: ... + def pool(self, name: str, depth: int) -> None: ... + def rule(self, name: str, command: str, description: str | None = None, + depfile: str | None = None, generator: bool = False, + pool: str | None = None, restat: bool = False, + rspfile: str | None = None, rspfile_content: str | None = None, + deps: str | None = None) -> None: + ... + + def build(self, outputs: list[str], rule: str, inputs: list[str] | None = None, + implicit: list[str] | None = None, order_only: list[str] | None = None, + variables: dict[str, str] | None = None, + implicit_outputs: list[str] | None = None, + pool: str | None = None, dyndep: str | None = None) -> None: + ... + def include(self, path: str | PathLike[str]) -> None: ... + def subninja(self, path: str | PathLike[str]) -> None: ... + def default(self, paths: Sequence[str | PathLike[str]]) -> None: ... + def close(self) -> None: ... + +def as_list(input: None | list[str] | str) -> list[str]: ... + +def escape(string: str) -> str: ... + +def expand(string: str, vars: Mapping[str, str], local_vars: Mapping[str, str]=...) -> str: ... 
diff --git a/venv/lib/python3.10/site-packages/ninja/py.typed b/venv/lib/python3.10/site-packages/ninja/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/METADATA b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..7eceb22d5120ac6d66cdac1ce05da9c261c8aa7a --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/METADATA @@ -0,0 +1,45 @@ +Metadata-Version: 2.4 +Name: nvidia-cuda-nvcc-cu12 +Version: 12.9.86 +Summary: CUDA nvcc +Home-page: https://developer.nvidia.com/cuda-zone +Author: Nvidia CUDA Installer Team +Author-email: compute_installer@nvidia.com +License: LicenseRef-NVIDIA-Proprietary +Keywords: cuda,nvidia,runtime,machine learning,deep learning +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: Other/Proprietary License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Requires-Python: >=3 +License-File: License.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary + +Compiler for CUDA applications. 
diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/RECORD b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..b743b7ad8ff8074e4a860b5b38a30b5060d945f6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/RECORD @@ -0,0 +1,37 @@ +nvidia/cuda_nvcc/bin/ptxas,sha256=mDsOkoOFWXn0LOv9gNQ_m254brhPA_dXC_lBxNOjxGE,34931056 +nvidia/cuda_nvcc/include/crt/common_functions.h,sha256=-U44f4yUGmwDPwd7Q_3Cz5if05xHGPSlAzz5zMylLSQ,13559 +nvidia/cuda_nvcc/include/crt/cudacc_ext.h,sha256=KW6n0ImOZKS0VqVmBHWTXtHI816hh88YeEgUg2aYdVU,3224 +nvidia/cuda_nvcc/include/crt/device_double_functions.h,sha256=A1vB3g0qwnNEfcpT1d9RiGDaxqPXXgYr-Vxe2oMHyxY,39938 +nvidia/cuda_nvcc/include/crt/device_double_functions.hpp,sha256=YYIbqYhb5Qmf8c4YfcC_jytg4FRwcXPjv3TFTwhb24E,8568 +nvidia/cuda_nvcc/include/crt/device_fp128_functions.h,sha256=3iCKrdmPp1NIjrlGR47dCZOgV9X6MmdfmjugrF6DEts,51047 +nvidia/cuda_nvcc/include/crt/device_functions.h,sha256=fJ2ptFQ1MOiZRE7AKB-q5nrZdz71JDrBgJ4XlK-fVc4,137923 +nvidia/cuda_nvcc/include/crt/device_functions.hpp,sha256=OVHiqBjday_aUFnDKJxTeI0VDZI8ZA6JIdUKeAKR4pA,37991 +nvidia/cuda_nvcc/include/crt/func_macro.h,sha256=EOpDlaM917bh9cwBiFBPF689DCMBw5hFarxLxFt-i74,1755 +nvidia/cuda_nvcc/include/crt/host_config.h,sha256=1eki3w5xuY00gIkdYbmSfZO2SoI8giZWYSTRIL2uFs0,12169 +nvidia/cuda_nvcc/include/crt/host_defines.h,sha256=AMGTpusQLiwY_DPSLkjdweyQmQpFFYdVRoZcZg6HVEY,11416 +nvidia/cuda_nvcc/include/crt/host_runtime.h,sha256=lOpmkxFZVkEp8dcMAGEZRITsh-19o9jy39kdSNLc3Ng,10284 +nvidia/cuda_nvcc/include/crt/math_functions.h,sha256=LyGJ0XUthi6WEi-YVITInha9A6SFoB4fEUUU9zii7U8,238284 +nvidia/cuda_nvcc/include/crt/math_functions.hpp,sha256=u-CGbd0R2FZWdKG-6bdmGSor9KT_wnmISj63lPQKASM,100207 +nvidia/cuda_nvcc/include/crt/mma.h,sha256=BooWALDWATvZupJaL7-GFQRQYOotNe_Fy11I5NGh2FA,62695 +nvidia/cuda_nvcc/include/crt/mma.hpp,sha256=spo0LX71tUCipxK517Bssj0nc-ZHf8oMWzvHoYYB_6I,66599 +nvidia/cuda_nvcc/include/crt/sm_100_rt.h,sha256=3cBiCU11OcjGYpr85edCN1q4m2z91FaGhBtuO0is4To,8987 +nvidia/cuda_nvcc/include/crt/sm_100_rt.hpp,sha256=vbI2CNCY08dDI7zXwp6BNceZKl0ceXG1lveq4w-VNao,6855 +nvidia/cuda_nvcc/include/crt/sm_70_rt.h,sha256=Kf830xymA-zmF7LsunFHLSNyhhT5UiJMocgoHBQeNns,6837 +nvidia/cuda_nvcc/include/crt/sm_70_rt.hpp,sha256=3a_rU-Y0MSB4htBDFY4PCQ_jXiWFTe7WT1ZyhMuCJOA,7837 +nvidia/cuda_nvcc/include/crt/sm_80_rt.h,sha256=MdJHWCRzLM__nDDf1go61rDsl9ydOW3oi6SZBfjUyc8,7743 +nvidia/cuda_nvcc/include/crt/sm_80_rt.hpp,sha256=o-rJu-jpehCeyABGgv-8dYRB7oJTCwuNdvSCq0VURdE,6705 +nvidia/cuda_nvcc/include/crt/sm_90_rt.h,sha256=an47m0XFBaJ3pUX9MlE4-nktP1jb3eJUXhQ3ntZtzc8,11445 +nvidia/cuda_nvcc/include/crt/sm_90_rt.hpp,sha256=YuqVygGV6rgtWtx1J9cPpEI3BXKQBII-Ez6oZFP3wrE,9228 +nvidia/cuda_nvcc/include/crt/storage_class.h,sha256=dzcOZ16pLaN8ejqHaXw4iHbBJ6fXWxfaU-sj2QjYzzg,4791 +nvidia/cuda_nvcc/include/fatbinary_section.h,sha256=NnuUfy358yGJx4enq0pBnetjv17UWa-nOlgYToUitrw,1809 +nvidia/cuda_nvcc/include/nvPTXCompiler.h,sha256=wtKxrK1FI7waelfjpSyebMVJhN-RTNPv6deViv7yUH4,14488 +nvidia/cuda_nvcc/nvvm/include/nvvm.h,sha256=kAky50NXmfqxSZzU7r4CQyeexxZM1OZGEkoQa4M-5H0,11705 +nvidia/cuda_nvcc/nvvm/lib64/libnvvm.so,sha256=eRW54-rtf2hndO6C-Zz6zrrvzQApb0S9uXnlfTY6fKg,62581536 +nvidia/cuda_nvcc/nvvm/libdevice/libdevice.10.bc,sha256=noS1BPJdJux3XvkfcSLmjax3-h93bxBEEo9QKDViZdg,486168 +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+nvidia_cuda_nvcc_cu12-12.9.86.dist-info/METADATA,sha256=Il-g_0SoVgj4c5p2BPcD1qDNck_En5BecsBfpD5W-z8,1687 +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/RECORD,, +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/WHEEL,sha256=Nh2Ev5kI-AJERgb0NFee1zrCiChBqq7oF05S-p7GLqk,144 +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/licenses/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 +nvidia_cuda_nvcc_cu12-12.9.86.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/REQUESTED b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/WHEEL b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..051a4e1881e1fc26bd6a1501537735c0bd86fc56 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-manylinux2010_x86_64 +Tag: py3-none-manylinux_2_12_x86_64 + diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/licenses/License.txt b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/licenses/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/licenses/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. + + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. 
+ + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important NoticeRead before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement”) is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. 
The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. 
Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. 
For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES’ +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. + +These exclusions and limitations of liability shall apply +regardless of whether NVIDIA or its affiliates have been +advised of the possibility of such damages, and regardless of +whether a remedy fails its essential purpose. These exclusions +and limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + ceasing to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement.
Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement, all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and, +if the assignee is a non-affiliate, you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. + +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and +constitutes “commercial items” consisting of “commercial +computer software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. +Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax.
You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. 
Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component: CUDA Runtime + Windows: cudart.dll, cudart_static.lib, cudadevrt.lib + Mac OSX: libcudart.dylib, libcudart_static.a, libcudadevrt.a + Linux: libcudart.so, libcudart_static.a, libcudadevrt.a + Android: libcudart.so, libcudart_static.a, libcudadevrt.a + +Component: CUDA FFT Library + Windows: cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + Mac OSX: libcufft.dylib, libcufft_static.a, libcufftw.dylib, libcufftw_static.a + Linux: libcufft.so, libcufft_static.a, libcufftw.so, libcufftw_static.a + Android: libcufft.so, libcufft_static.a, libcufftw.so, libcufftw_static.a + +Component: CUDA BLAS Library + Windows: cublas.dll, cublasLt.dll + Mac OSX: libcublas.dylib, libcublasLt.dylib, libcublas_static.a, libcublasLt_static.a + Linux: libcublas.so, libcublasLt.so, libcublas_static.a, libcublasLt_static.a + Android: libcublas.so, libcublasLt.so, libcublas_static.a, libcublasLt_static.a + +Component: NVIDIA "Drop-in" BLAS Library + Windows: nvblas.dll + Mac OSX: libnvblas.dylib + Linux: libnvblas.so + +Component: CUDA Sparse Matrix Library + Windows: cusparse.dll, cusparse.lib + Mac OSX: libcusparse.dylib, libcusparse_static.a + Linux: libcusparse.so, libcusparse_static.a + Android: libcusparse.so, libcusparse_static.a + +Component: CUDA Linear Solver Library + Windows: cusolver.dll, cusolver.lib + Mac OSX: libcusolver.dylib, libcusolver_static.a + Linux: libcusolver.so, libcusolver_static.a + Android: libcusolver.so, libcusolver_static.a + +Component: CUDA Random Number Generation Library + Windows: curand.dll, curand.lib + Mac OSX: libcurand.dylib, libcurand_static.a + Linux: libcurand.so, libcurand_static.a + Android: libcurand.so, libcurand_static.a + +Component: CUDA Accelerated Graph Library + +Component: NVIDIA Performance Primitives Library + Windows: nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, nppicc.lib, + nppicom.dll, nppicom.lib, nppidei.dll, nppidei.lib, nppif.dll, nppif.lib, + nppig.dll, nppig.lib, nppim.dll, nppim.lib, nppist.dll, nppist.lib, + nppisu.dll, nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib + Mac OSX: libnppc.dylib, libnppc_static.a, libnppial.dylib, libnppial_static.a, + libnppicc.dylib, libnppicc_static.a, libnppicom.dylib, libnppicom_static.a, + libnppidei.dylib, libnppidei_static.a, libnppif.dylib, libnppif_static.a, + libnppig.dylib, libnppig_static.a, libnppim.dylib, libnppim_static.a, + libnppist.dylib, libnppist_static.a, libnppisu.dylib, libnppisu_static.a, + libnppitc.dylib, libnppitc_static.a, libnpps.dylib, libnpps_static.a + Linux: libnppc.so, libnppc_static.a, libnppial.so, libnppial_static.a, + libnppicc.so, libnppicc_static.a, libnppicom.so, libnppicom_static.a, + libnppidei.so, libnppidei_static.a, libnppif.so, libnppif_static.a, + libnppig.so, libnppig_static.a, libnppim.so, libnppim_static.a, + libnppist.so, libnppist_static.a, libnppisu.so, libnppisu_static.a, + libnppitc.so, libnppitc_static.a, libnpps.so, libnpps_static.a + Android: libnppc.so, libnppc_static.a, libnppial.so, libnppial_static.a, + libnppicc.so, libnppicc_static.a, libnppicom.so, libnppicom_static.a, + libnppidei.so, libnppidei_static.a, libnppif.so, libnppif_static.a, + libnppig.so, libnppig_static.a, libnppim.so, libnppim_static.a, + libnppist.so, libnppist_static.a, libnppisu.so, libnppisu_static.a, + libnppitc.so, libnppitc_static.a, libnpps.so, libnpps_static.a + +Component: NVIDIA JPEG Library + Linux: libnvjpeg.so, libnvjpeg_static.a + +Component: Internal common library required for statically linking to + cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + Mac OSX: libculibos.a + Linux: libculibos.a + +Component: NVIDIA Runtime Compilation Library and Header + All: nvrtc.h + Windows: nvrtc.dll, nvrtc-builtins.dll + Mac OSX: libnvrtc.dylib, libnvrtc-builtins.dylib + Linux: libnvrtc.so, libnvrtc-builtins.so + +Component: NVIDIA Optimizing Compiler Library + Windows: nvvm.dll + Mac OSX: libnvvm.dylib + Linux: libnvvm.so + +Component: NVIDIA Common Device Math Functions Library + Windows: libdevice.10.bc + Mac OSX: libdevice.10.bc + Linux: libdevice.10.bc + +Component: CUDA Occupancy Calculation Header Library + All: cuda_occupancy.h + +Component: CUDA Half Precision Headers + All: cuda_fp16.h, cuda_fp16.hpp + +Component: CUDA Profiling Tools Interface (CUPTI) Library + Windows: cupti.dll + Mac OSX: libcupti.dylib + Linux: libcupti.so + +Component: NVIDIA Tools Extension Library + Windows: nvToolsExt.dll, nvToolsExt.lib + Mac OSX: libnvToolsExt.dylib + Linux: libnvToolsExt.so + +Component: NVIDIA CUDA Driver Libraries + Linux: libcuda.so, libnvidia-fatbinaryloader.so, + libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet these criteria: + + 1. The application was developed starting from an NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference.
+ http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. + + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use of the PCRE third party component (e.g. in + nvprof) is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. + Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a + just-in-time compiler that can be used to optimize pattern matching. These + are both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Email domain: freemail.hu + Copyright (c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Email domain: freemail.hu + Copyright (c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved. + + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6.
Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. 
Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. 
Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. 
The NPP library uses code from the Boost Math Toolkit + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. Portions of the Nsight Eclipse Edition are subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines use code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software.
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of the linmath.h header for CPU functions for + GL vector/matrix operations from LunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license.
+ +----------------- diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_nvcc_cu12-12.9.86.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/METADATA b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..7c8294ca2e0a794253b5ad048f89cbc91d0b645c --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/METADATA @@ -0,0 +1,45 @@ +Metadata-Version: 2.4 +Name: nvidia-cuda-runtime +Version: 13.0.96 +Summary: CUDA Runtime native Libraries +Home-page: https://developer.nvidia.com/cuda-zone +Author: Nvidia CUDA Installer Team +Author-email: compute_installer@nvidia.com +License-Expression: LicenseRef-NVIDIA-Proprietary +Keywords: cuda,nvidia,runtime,machine learning,deep learning +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Requires-Python: >=3 +License-File: License.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: license-expression +Dynamic: requires-python +Dynamic: summary + +CUDA Runtime native Libraries diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/RECORD b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..4ab439f1280dc8e9f6c8e4290a2cf5ec6b5227af --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/RECORD @@ -0,0 +1,100 @@ +nvidia/cu13/include/builtin_types.h,sha256=JxT9Vf2q2snxTBOL9ACzNmYzTWACO2VOVUu1KdFt7_g,3150 
+nvidia/cu13/include/channel_descriptor.h,sha256=0TKPhsHbLXH1lXQp22nPctupg1TuDSZZFYJQAuQ7vgw,21906 +nvidia/cu13/include/common_functions.h,sha256=22LTZRVcPZzEH6MJda7nNMCvMgIjSTe0OKR7sEQj6kc,3410 +nvidia/cu13/include/cooperative_groups.h,sha256=LQwcmz8pVDdgKlKLBhsMIYU5XNIhls_ZgXN258rXLJ0,55406 +nvidia/cu13/include/cooperative_groups/details/async.h,sha256=xsEHCZP3nuEY3l2p8SU2d1226XiXumUvDP_Gyh8PdVY,19122 +nvidia/cu13/include/cooperative_groups/details/coalesced_reduce.h,sha256=pBQgFY7i64V87XNATg1UEIQHVNYOItQtHjS5B4yn8pc,4257 +nvidia/cu13/include/cooperative_groups/details/coalesced_scan.h,sha256=DfZv5d5W0XJv-tZVhgrIdjLjs6aCx_u0oy1lDIpjo1Q,7314 +nvidia/cu13/include/cooperative_groups/details/driver_abi.h,sha256=v-ZUb4UgGKJk6NR2WCWHD3x_42y-togI1urFn70Gi-g,3964 +nvidia/cu13/include/cooperative_groups/details/functional.h,sha256=2BV8i8Bidz0kgxuYkJCAbwFxOIZRyzHgG-c_rVKhRzc,8905 +nvidia/cu13/include/cooperative_groups/details/helpers.h,sha256=rCoeWhq6WWy7EdeTOvCxWO83PCGqFKuVbdwtYiM9P0g,22316 +nvidia/cu13/include/cooperative_groups/details/info.h,sha256=FOrp3Ltt4PcbK2fAM5UX9jssFZtj_LqVShzLFcKiSaY,12465 +nvidia/cu13/include/cooperative_groups/details/invoke.h,sha256=Osq3K-tZuXHVCMQJ708PjPo-BwMhjhjApO4b0TYLFJg,8616 +nvidia/cu13/include/cooperative_groups/details/memory.h,sha256=hES3SfgXIBsj2MFrC_M5COXlOirSBuuhPMAJnWoI92w,5606 +nvidia/cu13/include/cooperative_groups/details/partitioning.h,sha256=AQz-TheqX3onqX2RmIUipzYUVB273zhLlHJw_kX9D2U,7153 +nvidia/cu13/include/cooperative_groups/details/reduce.h,sha256=MjqMDwT0TyWZk4JWcF3WHw8xtwMqyizA4C3zy7f8ee0,23296 +nvidia/cu13/include/cooperative_groups/details/scan.h,sha256=-Ttwb2AfEEY_tsmqJjR2dojkPpoRx387SoqxgvfdBtQ,17166 +nvidia/cu13/include/cooperative_groups/details/sync.h,sha256=Ed4K9QrPZi43ddSqZwv1X8NG_CTsXUowSQndoUv82LU,10795 +nvidia/cu13/include/cooperative_groups/memcpy_async.h,sha256=erOIHuObdfxRhBWfrXE3wsZF4B2GUuqwzQrsPwKPpbg,2960 +nvidia/cu13/include/cooperative_groups/reduce.h,sha256=B0hgDkqM-6ueqTTgb3b34A0RH4vGz8mBf5e2jT1dJ1o,2949 +nvidia/cu13/include/cooperative_groups/scan.h,sha256=2EU6T5cWNwftm2B7FicV31PojoI61yo5fHXGRYkGk40,2940 +nvidia/cu13/include/cuComplex.h,sha256=WpcgpaiPhU_o9sTPMcNTEZuyXDIc8x3sz4dUWSztL2g,12186 +nvidia/cu13/include/cuda.h,sha256=ZAJfganW0rEOnhFlTzUj5bBG0BuIPwucpHTkYydLgNQ,1146681 +nvidia/cu13/include/cudaEGL.h,sha256=iruZU9xSGAcJ29OEX4K_Uo1o4NGP9hggv2fiOZOfDQo,39955 +nvidia/cu13/include/cudaEGLTypedefs.h,sha256=zaSLukNV6mIWkUzbEcN8uqXUgd53l_F36A2BCokvx2I,4559 +nvidia/cu13/include/cudaGL.h,sha256=gMT1HPGa-siuji0gAsKYr4X45Lc29HKglC_ttNSGyUM,22501 +nvidia/cu13/include/cudaGLTypedefs.h,sha256=UYQowbOfBPYXaqhJfXERvJQDkiwGBNFY8UUVMjgg2Xg,5586 +nvidia/cu13/include/cudaProfilerTypedefs.h,sha256=x0JSTVgZdjy5UBqYVXtLEa_dixggZ6aomGGVDjSt3ro,3039 +nvidia/cu13/include/cudaTypedefs.h,sha256=2iNS7LBhFcJSOUybpyRy0dxSVWnTD5yD4towD854jy8,86518 +nvidia/cu13/include/cudaVDPAU.h,sha256=Np7Nc2Wjaz--hkpbhW6f9aapr-NbcPDAgkot0sJerco,12694 +nvidia/cu13/include/cudaVDPAUTypedefs.h,sha256=_KVyArHNpRJGvm3T1itFdmctDTDj-2AKWxDGurTT0lI,3756 +nvidia/cu13/include/cuda_awbarrier.h,sha256=3ZH-ZlXODhSiwSY9rqSni_EQwi25QMHP6Tm-zOdxBwE,9340 +nvidia/cu13/include/cuda_awbarrier_helpers.h,sha256=OCskCts5bCKl_RKBe9M74zKSIsVpePn44S_aJp1tFXE,12489 +nvidia/cu13/include/cuda_awbarrier_primitives.h,sha256=n5__E1jYYDhlgH-f3u8MQjtz57UZ7v5VshhMye1eicM,4699 +nvidia/cu13/include/cuda_bf16.h,sha256=Vl0J5nd-OtYuVRpDfLjNHOjioXgoGsq8RiCH0bRlTmw,204518 +nvidia/cu13/include/cuda_bf16.hpp,sha256=eJBEVb3-iz1VlpttFA4JnhI7rSzd_CPXzZag0_HRP4s,136552 
+nvidia/cu13/include/cuda_device_runtime_api.h,sha256=0xEOCNnb5gQFjfrh1cVMWLk4XOVLMrlBt5uFDq3Edl8,47099 +nvidia/cu13/include/cuda_egl_interop.h,sha256=awWBBEYvUFM7AURNp2mND8H7_5kGQLRswRveXYBy-3s,37509 +nvidia/cu13/include/cuda_fp16.h,sha256=dcVcEd2iXMSAcma3uxVaFJzCKm7eimwllMHSaIl9yyQ,206859 +nvidia/cu13/include/cuda_fp16.hpp,sha256=_eMOR83jBCGjUTos-QxtYS5PlamNv3qOQVzjkQ9_PSE,120941 +nvidia/cu13/include/cuda_fp4.h,sha256=pTEQf5rLfiaU_UMXgnnsS13NH5H9FtHgdeiNuW_NkHY,13823 +nvidia/cu13/include/cuda_fp4.hpp,sha256=UxcPibHCFDNAeVN5A_uC8WLrHarWJ4NQc2VU1wfgkok,36158 +nvidia/cu13/include/cuda_fp6.h,sha256=6xh0E4SNmjmJZD3H5_HoZe08bQ0loUE8y3cbO19-Ad4,13963 +nvidia/cu13/include/cuda_fp6.hpp,sha256=3ajRvDdJYFeWoPVbB8jo1pQHV3Me4AmN6-RH09q7R_0,58497 +nvidia/cu13/include/cuda_fp8.h,sha256=7wbDDNQZn5w0K_lI3HFIDFyHMJBAp8BldZihdVgEPUw,19143 +nvidia/cu13/include/cuda_fp8.hpp,sha256=iI82MB2OT5UPv1ctd_s-_jnLmV4Fk-hPPLQnx7boq-o,100356 +nvidia/cu13/include/cuda_gl_interop.h,sha256=VQEswFeOBF6JN6Q0pdlkvc5WT7bD1FnTfKewvANulCc,19150 +nvidia/cu13/include/cuda_occupancy.h,sha256=FjuPKW7yozx5ni3IxSWzcWbakhe8xqPB0mscEKyl4is,74222 +nvidia/cu13/include/cuda_pipeline.h,sha256=0enXG49wN4JajlQi3ahbp2ei_ufTY_Mznic7zfWmKHM,8130 +nvidia/cu13/include/cuda_pipeline_helpers.h,sha256=bo1L7e6vCuM-K3Il8K1z4wJUja5DyXQKdo_hSWUME-E,13852 +nvidia/cu13/include/cuda_pipeline_primitives.h,sha256=FnJJtuV6rHr6LgL56XDwilcSbFr6W1Hj6mf1AJaMI20,8675 +nvidia/cu13/include/cuda_runtime.h,sha256=4e8KJuSfF3YAT94SF-L6UmBa2xOTgbp5_pGRRdmTu2I,100474 +nvidia/cu13/include/cuda_runtime_api.h,sha256=P5HT-E8ar7F8zMR1gD-cSJtkdPKjvw-8daU4CCHJZuA,607504 +nvidia/cu13/include/cuda_vdpau_interop.h,sha256=bXQanWc2IFXZAKWNGl2xAz9nLvFmQpWyGrsDvfeS9FA,7727 +nvidia/cu13/include/cudart_platform.h,sha256=YN6sKhB0b9w5tGX1IYL7ulJVPrWAiX9A44qLv4EtW5Q,2717 +nvidia/cu13/include/device_atomic_functions.h,sha256=OR2jNSfSKzaFri74zh4Vtz5M0z9UDBU3rKeC1rYaVQs,9500 +nvidia/cu13/include/device_atomic_functions.hpp,sha256=0e7MOiNNUnnloXpB_r9WT5YOws5cxgzQQAzRCYvgaFA,10486 +nvidia/cu13/include/device_double_functions.h,sha256=KUxId5Z1fx8SWfLRTxPD7RB-zN7zslzb4n7JaJLfL3I,3452 +nvidia/cu13/include/device_functions.h,sha256=bWSrhTYE9NQlss7xMSMEVusvto9j2fgUDXWVH2W_cOA,3410 +nvidia/cu13/include/device_launch_parameters.h,sha256=H1_CC-vvAaS26ys4XsTFkMgTxUTciAjdjswjizkisvQ,3846 +nvidia/cu13/include/device_types.h,sha256=2LFxoZBJPoA5V0H1EbKTEaXDi3GDJPtzOPdRHDaucIQ,3588 +nvidia/cu13/include/driver_functions.h,sha256=cN3IjRAz2Mj2Pj35SyxJIkZNDDusnJqaqzBdMzpQKbA,4625 +nvidia/cu13/include/driver_types.h,sha256=C95FEykrvYDU-h0u4LYFBSJG1fEouwiraaRCuUICRRc,202752 +nvidia/cu13/include/host_config.h,sha256=BscH_GazAZbbotddVzL5RmafbQ-QjRx8f-I1O01IBW8,3380 +nvidia/cu13/include/host_defines.h,sha256=bBQwQF5C1N1c2qpLV56g1c-weu9Ysgz-gIf2Kn3uz_A,3386 +nvidia/cu13/include/library_types.h,sha256=fMzNnPQiFDhhcHBIZVF7u_g3Ata7teDAy6P8OkjL0p0,7443 +nvidia/cu13/include/math_constants.h,sha256=cV6hAyQe8X7f7MBtaKjjIJq3BycOUDp6I5cizJX5HLw,7608 +nvidia/cu13/include/math_functions.h,sha256=5XcC6j-fJKttvhwc4hZNoLHNw808a2ZYIOtZ7ry7yd0,3398 +nvidia/cu13/include/mma.h,sha256=IY_VenxuEncwGq92MhrWUb-Xswh0ekAXLy9Rbxhxa2Y,2932 +nvidia/cu13/include/sm_20_atomic_functions.h,sha256=x4ycINVq__l9B4SQPD-I48jQbKxxdBmgp8Vf2GO0Qfg,4478 +nvidia/cu13/include/sm_20_atomic_functions.hpp,sha256=1l5NLM8DhDbqYZ_E51LoqElQJXObkbwo57d3r-4uEbE,4107 +nvidia/cu13/include/sm_20_intrinsics.h,sha256=axeDr7y6nT1V6LzrSWNSaHUwXgiNjPbXn1T6Uh7hlNM,57702 +nvidia/cu13/include/sm_20_intrinsics.hpp,sha256=mJTejRhw1prNiP_ax1OPbkYlhEqBqO4nVI3DRDXIzpo,8392 
+nvidia/cu13/include/sm_30_intrinsics.h,sha256=b6W8Vxp9vD9OCJI6lZuGyZYXEdQ3Ei8PTAloHNkwCcQ,16978 +nvidia/cu13/include/sm_30_intrinsics.hpp,sha256=yX0ebd265tJ-BDhvluP2BhadPuWXpRZPI2eeQFFt5ys,24567 +nvidia/cu13/include/sm_32_atomic_functions.h,sha256=HGnZgQHACE2AAb6zabGUURc53IsVZelc2BSJqvs9OgY,5703 +nvidia/cu13/include/sm_32_atomic_functions.hpp,sha256=CQTTvOEYp-s5hqAgLvAon11vLYDrDp8cTHdel-XRzBQ,6592 +nvidia/cu13/include/sm_32_intrinsics.h,sha256=Xdkogdsjy1vh8u3eGu0i5xTmHxBGAjj6_vVGR-spdOE,33539 +nvidia/cu13/include/sm_32_intrinsics.hpp,sha256=Gl8aSLDLcit4W3pKQS19GsDG8RYcwD65HwYB_CeZe8M,70616 +nvidia/cu13/include/sm_35_atomic_functions.h,sha256=a3XoEsKRCEOf0Q_5Y__rMfmC4pScv4VkUggVgVJVn44,2909 +nvidia/cu13/include/sm_35_intrinsics.h,sha256=0mS5-LCgvZiTvL7-MG_4YwI-zWGvM-s4xyRuMkunMC8,2664 +nvidia/cu13/include/sm_60_atomic_functions.h,sha256=_anfNaJsvQpDEorYeUKIkbizYkwrinBcG_ZCiECtLqI,13178 +nvidia/cu13/include/sm_60_atomic_functions.hpp,sha256=cgIKddDn2B3QzYlzeBILAP1IRys74QCCxsH0QqaVGls,22903 +nvidia/cu13/include/sm_61_intrinsics.h,sha256=h_MBL1UUDxQX_qOddSImzqyFjcrhhm_63G97pGDyreU,10902 +nvidia/cu13/include/sm_61_intrinsics.hpp,sha256=N-nQvcBsPMT2Umy5zR69c9K1q366W-Jqe7NpoLTqTmg,6787 +nvidia/cu13/include/surface_indirect_functions.h,sha256=vy9QuFVV-ezZP-x2RT9RLp2qIUgdngACOCmalSfVFPA,10877 +nvidia/cu13/include/surface_types.h,sha256=XkFXD1nHbeSMgajR-UJE9uQ7TByzJnjdnUL4-yGiufk,4530 +nvidia/cu13/include/texture_indirect_functions.h,sha256=KfGFCJPzJt48G3BIpjyrBOhHtWVwx_SzWFDmnexyfAM,21283 +nvidia/cu13/include/texture_types.h,sha256=73ntVyg8r8fzKy5VIk6yuvC45GDeWepaLIqIk-M3Ri8,6360 +nvidia/cu13/include/vector_functions.h,sha256=8V4sxfayamFxQ1Ubiu1IvXTl2Zfsvp46er58tDAmLWs,8857 +nvidia/cu13/include/vector_functions.hpp,sha256=TS7z7_lDHrpOP-CufiEh0kATr7C3ZgtAUxsWQdkBBUE,12169 +nvidia/cu13/include/vector_types.h,sha256=82OOzq-VXoDpHy8yo_xBEGZT6nFF3Ax5kjgmp4s8VIY,19855 +nvidia/cu13/lib/libcudadevrt.a,sha256=2Gizz2IwyJS8JJJjntU4-19pE2wQxOnBIUA8lfhzZUE,1024340 +nvidia/cu13/lib/libcudart.so.13,sha256=lsQuQYzsGQVBhrlCnDIWA8wZC_JqGBBOGUCBF6KoF7A,704288 +nvidia/cu13/lib/libcudart_static.a,sha256=5oZEzTg5waqEo5SdJ12RCId-aRM-2Pjivtcl-jOcjVg,1361104 +nvidia_cuda_runtime-13.0.96.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nvidia_cuda_runtime-13.0.96.dist-info/METADATA,sha256=MGmePKdvb0H1JS56D84sLTA8RluGwbAi4Donmb0VCYs,1693 +nvidia_cuda_runtime-13.0.96.dist-info/RECORD,, +nvidia_cuda_runtime-13.0.96.dist-info/WHEEL,sha256=gy6FWQgpujK_dnYc155G2NL32NQjpi5ebTEXjh8SGZQ,144 +nvidia_cuda_runtime-13.0.96.dist-info/licenses/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 +nvidia_cuda_runtime-13.0.96.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/WHEEL b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..29738220b339f398b2cbbc9ac8ccb38a39bdd7b2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-manylinux2014_x86_64 +Tag: py3-none-manylinux_2_17_x86_64 + diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/licenses/License.txt b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/licenses/License.txt new file mode 100644 index 
0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/licenses/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. + + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. + + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important Notice - Read before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement") is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit ("SDK"). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent.
+ +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. 
+ +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. 
You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. 
+ +These exclusions and limitations of liability shall apply +regardless if NVIDIA or its affiliates have been advised of +the possibility of such damages, and regardless of whether a +remedy fails its essential purpose. These exclusions and +limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + stopping to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. (i) you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. (ii) you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. (iii) NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement. Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and if +to a non-affiliate you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to the +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. 
+ +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and is +“commercial items” consisting of “commercial computer +software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. +Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. 
Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component + +CUDA Runtime + +Windows + +cudart.dll, cudart_static.lib, cudadevrt.lib + +Mac OSX + +libcudart.dylib, libcudart_static.a, libcudadevrt.a + +Linux + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Android + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Component + +CUDA FFT Library + +Windows + +cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + +Mac OSX + +libcufft.dylib, libcufft_static.a, libcufftw.dylib, +libcufftw_static.a + +Linux + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Android + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Component + +CUDA BLAS Library + +Windows + +cublas.dll, cublasLt.dll + +Mac OSX + +libcublas.dylib, libcublasLt.dylib, libcublas_static.a, +libcublasLt_static.a + +Linux + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Android + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Component + +NVIDIA "Drop-in" BLAS Library + +Windows + +nvblas.dll + +Mac OSX + +libnvblas.dylib + +Linux + +libnvblas.so + +Component + +CUDA Sparse Matrix Library + +Windows + +cusparse.dll, cusparse.lib + +Mac OSX + +libcusparse.dylib, libcusparse_static.a + +Linux + +libcusparse.so, libcusparse_static.a + +Android + +libcusparse.so, libcusparse_static.a + +Component + +CUDA Linear Solver Library + +Windows + +cusolver.dll, cusolver.lib + +Mac OSX + +libcusolver.dylib, libcusolver_static.a + +Linux + +libcusolver.so, libcusolver_static.a + +Android + +libcusolver.so, libcusolver_static.a + +Component + +CUDA Random Number Generation Library + +Windows + +curand.dll, curand.lib + +Mac OSX + +libcurand.dylib, libcurand_static.a + +Linux + +libcurand.so, libcurand_static.a + +Android + +libcurand.so, libcurand_static.a + +Component + +CUDA Accelerated Graph Library + +Component + +NVIDIA Performance Primitives Library + +Windows + +nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, +nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, +nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, +nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, +nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, 
npps.lib + +Mac OSX + +libnppc.dylib, libnppc_static.a, libnppial.dylib, +libnppial_static.a, libnppicc.dylib, libnppicc_static.a, +libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, +libnppidei_static.a, libnppif.dylib, libnppif_static.a, +libnppig.dylib, libnppig_static.a, libnppim.dylib, +libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, +libnpps.dylib, libnpps_static.a + +Linux + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Android + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Component + +NVIDIA JPEG Library + +Linux + +libnvjpeg.so, libnvjpeg_static.a + +Component + +Internal common library required for statically linking to +cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + +Mac OSX + +libculibos.a + +Linux + +libculibos.a + +Component + +NVIDIA Runtime Compilation Library and Header + +All + +nvrtc.h + +Windows + +nvrtc.dll, nvrtc-builtins.dll + +Mac OSX + +libnvrtc.dylib, libnvrtc-builtins.dylib + +Linux + +libnvrtc.so, libnvrtc-builtins.so + +Component + +NVIDIA Optimizing Compiler Library + +Windows + +nvvm.dll + +Mac OSX + +libnvvm.dylib + +Linux + +libnvvm.so + +Component + +NVIDIA Common Device Math Functions Library + +Windows + +libdevice.10.bc + +Mac OSX + +libdevice.10.bc + +Linux + +libdevice.10.bc + +Component + +CUDA Occupancy Calculation Header Library + +All + +cuda_occupancy.h + +Component + +CUDA Half Precision Headers + +All + +cuda_fp16.h, cuda_fp16.hpp + +Component + +CUDA Profiling Tools Interface (CUPTI) Library + +Windows + +cupti.dll + +Mac OSX + +libcupti.dylib + +Linux + +libcupti.so + +Component + +NVIDIA Tools Extension Library + +Windows + +nvToolsExt.dll, nvToolsExt.lib + +Mac OSX + +libnvToolsExt.dylib + +Linux + +libnvToolsExt.so + +Component + +NVIDIA CUDA Driver Libraries + +Linux + +libcuda.so, libnvidia-fatbinaryloader.so, +libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet this criteria: + + 1. The application was developed starting from a NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. 
GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. + http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. 
+ + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use (e.g. nvprof) of the PCRE third party + component is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. + Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a just- + in-time compiler that can be used to optimize pattern matching. These are + both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Email domain: freemail.hu + Copyright(c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Email domain: freemail.hu + Copyright(c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved.
+ + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6. Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. 
+ + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. 
Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. The NPP library uses code from the Boost Math Toolkit, + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. 
Portions of the Nsight Eclipse Edition is subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines uses code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of linmath.h header for CPU functions for + GL vector/matrix operations from lunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license . + +----------------- diff --git a/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cuda_runtime-13.0.96.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/License.txt b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. + + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100+ CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. 
+ + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important NoticeRead before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement”) is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2. Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. 
The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. 
Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. 
For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. + +These exclusions and limitations of liability shall apply +regardless if NVIDIA or its affiliates have been advised of +the possibility of such damages, and regardless of whether a +remedy fails its essential purpose. These exclusions and +limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + stopping to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. (i) you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. (ii) you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. (iii) NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement. 
Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and if +to a non-affiliate you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to the +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. + +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and is +“commercial items” consisting of “commercial computer +software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. +Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. 
You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. 
Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component + +CUDA Runtime + +Windows + +cudart.dll, cudart_static.lib, cudadevrt.lib + +Mac OSX + +libcudart.dylib, libcudart_static.a, libcudadevrt.a + +Linux + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Android + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Component + +CUDA FFT Library + +Windows + +cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + +Mac OSX + +libcufft.dylib, libcufft_static.a, libcufftw.dylib, +libcufftw_static.a + +Linux + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Android + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Component + +CUDA BLAS Library + +Windows + +cublas.dll, cublasLt.dll + +Mac OSX + +libcublas.dylib, libcublasLt.dylib, libcublas_static.a, +libcublasLt_static.a + +Linux + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Android + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Component + +NVIDIA "Drop-in" BLAS Library + +Windows + +nvblas.dll + +Mac OSX + +libnvblas.dylib + +Linux + +libnvblas.so + +Component + +CUDA Sparse Matrix Library + +Windows + +cusparse.dll, cusparse.lib + +Mac OSX + +libcusparse.dylib, libcusparse_static.a + +Linux + +libcusparse.so, libcusparse_static.a + +Android + +libcusparse.so, libcusparse_static.a + +Component + +CUDA Linear Solver Library + +Windows + +cusolver.dll, cusolver.lib + +Mac OSX + +libcusolver.dylib, libcusolver_static.a + +Linux + +libcusolver.so, libcusolver_static.a + +Android + +libcusolver.so, libcusolver_static.a + +Component + +CUDA Random Number Generation Library + +Windows + +curand.dll, curand.lib + +Mac OSX + +libcurand.dylib, libcurand_static.a + +Linux + +libcurand.so, libcurand_static.a + +Android + +libcurand.so, libcurand_static.a + +Component + +CUDA Accelerated Graph Library + +Component + +NVIDIA Performance Primitives Library + +Windows + +nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, +nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, +nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, +nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, +nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib + +Mac OSX + +libnppc.dylib, libnppc_static.a, libnppial.dylib, +libnppial_static.a, libnppicc.dylib, libnppicc_static.a, +libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, +libnppidei_static.a, libnppif.dylib, libnppif_static.a, +libnppig.dylib, libnppig_static.a, libnppim.dylib, +libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, +libnpps.dylib, libnpps_static.a + +Linux + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Android + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Component + +NVIDIA JPEG Library + +Linux + +libnvjpeg.so, 
libnvjpeg_static.a + +Component + +Internal common library required for statically linking to +cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + +Mac OSX + +libculibos.a + +Linux + +libculibos.a + +Component + +NVIDIA Runtime Compilation Library and Header + +All + +nvrtc.h + +Windows + +nvrtc.dll, nvrtc-builtins.dll + +Mac OSX + +libnvrtc.dylib, libnvrtc-builtins.dylib + +Linux + +libnvrtc.so, libnvrtc-builtins.so + +Component + +NVIDIA Optimizing Compiler Library + +Windows + +nvvm.dll + +Mac OSX + +libnvvm.dylib + +Linux + +libnvvm.so + +Component + +NVIDIA Common Device Math Functions Library + +Windows + +libdevice.10.bc + +Mac OSX + +libdevice.10.bc + +Linux + +libdevice.10.bc + +Component + +CUDA Occupancy Calculation Header Library + +All + +cuda_occupancy.h + +Component + +CUDA Half Precision Headers + +All + +cuda_fp16.h, cuda_fp16.hpp + +Component + +CUDA Profiling Tools Interface (CUPTI) Library + +Windows + +cupti.dll + +Mac OSX + +libcupti.dylib + +Linux + +libcupti.so + +Component + +NVIDIA Tools Extension Library + +Windows + +nvToolsExt.dll, nvToolsExt.lib + +Mac OSX + +libnvToolsExt.dylib + +Linux + +libnvToolsExt.so + +Component + +NVIDIA CUDA Driver Libraries + +Linux + +libcuda.so, libnvidia-fatbinaryloader.so, +libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet this criteria: + + 1. The application was developed starting from a NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. 
+ http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. + + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use (e.g. nvprof) of the PCRE third party + component is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. + Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a just- + in-time compiler that can be used to optimize pattern matching. These are + both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved. + + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6. 
Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. 
Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. 
Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. 
The NPP library uses code from the Boost Math Toolkit, + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. Portions of the Nsight Eclipse Edition are subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines use code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software.
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of linmath.h header for CPU functions for + GL vector/matrix operations from lunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license.
+ +----------------- diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/METADATA b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..e8ef8db3f0102373f6e6897f513e5a80fb4e4dbd --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/METADATA @@ -0,0 +1,36 @@ +Metadata-Version: 2.1 +Name: nvidia-cusparse-cu12 +Version: 12.3.1.170 +Summary: CUSPARSE native runtime libraries +Home-page: https://developer.nvidia.com/cuda-zone +Author: Nvidia CUDA Installer Team +Author-email: cuda_installer@nvidia.com +License: NVIDIA Proprietary Software +Keywords: cuda,nvidia,runtime,machine learning,deep learning +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: Other/Proprietary License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Requires-Python: >=3 +License-File: License.txt +Requires-Dist: nvidia-nvjitlink-cu12 + +CUSPARSE native runtime libraries diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/RECORD b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d0f16dc961297b326cc5491595cfe1db32e94ddc --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/RECORD @@ -0,0 +1,17 @@ +nvidia/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/__pycache__/__init__.cpython-310.pyc,, +nvidia/cusparse/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cusparse/__pycache__/__init__.cpython-310.pyc,, +nvidia/cusparse/include/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cusparse/include/__pycache__/__init__.cpython-310.pyc,, +nvidia/cusparse/include/cusparse.h,sha256=g-jyaV5nBdnCExOVQOd7lf4lMBoR3KvriiNHUoXNgXw,295187 +nvidia/cusparse/include/cusparse_v2.h,sha256=jkH2A9hYc-TEF0vuQ_SurbhPNEHkYGUIRuxKXhFAqnw,2587 +nvidia/cusparse/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +nvidia/cusparse/lib/__pycache__/__init__.cpython-310.pyc,, +nvidia/cusparse/lib/libcusparse.so.12,sha256=y0KIyWVFOoAgZTwP2SzfzRK7WmRoQ9Zj6dTl9SD4kVw,281313984 +nvidia_cusparse_cu12-12.3.1.170.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nvidia_cusparse_cu12-12.3.1.170.dist-info/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 
+nvidia_cusparse_cu12-12.3.1.170.dist-info/METADATA,sha256=ivWTVzZv8-4bxy3u1Ce2wd3qUZeGJ3jTuH3-8u7AILM,1550 +nvidia_cusparse_cu12-12.3.1.170.dist-info/RECORD,, +nvidia_cusparse_cu12-12.3.1.170.dist-info/WHEEL,sha256=XDTs3wIbcE-BcRO08VJlZpA6z9OaC1mOKPCGGGwuM2g,109 +nvidia_cusparse_cu12-12.3.1.170.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/WHEEL b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e6c30e957cfb045017a9fef3430bb8ee87c4a074 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_cusparse_cu12-12.3.1.170.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/METADATA b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..f4d1730ff7d70ec6c4f81fc6b000e282c6f53753 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/METADATA @@ -0,0 +1,45 @@ +Metadata-Version: 2.4 +Name: nvidia-nvvm +Version: 13.0.88 +Summary: NVVM Libraries +Home-page: https://developer.nvidia.com/cuda-zone +Author: Nvidia CUDA Installer Team +Author-email: compute_installer@nvidia.com +License-Expression: LicenseRef-NVIDIA-Proprietary +Keywords: cuda,nvidia,runtime,machine learning,deep learning +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: Natural Language :: English +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Requires-Python: >=3 +License-File: License.txt +Dynamic: 
author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: license-expression +Dynamic: requires-python +Dynamic: summary + +Compiler IR for CUDA applications diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/RECORD b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..974a3986174b363c81aa5b744e55a29e55a25459 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/RECORD @@ -0,0 +1,10 @@ +nvidia/cu13/include/nvvm.h,sha256=5OSZrTul-7yZrCOpdFAT3l7cwItcmGtV0pzI1F_pIv8,11827 +nvidia/cu13/lib/libnvvm.so.4,sha256=y6ji72103mzekGdg2KDXAd8fNVmlcDohEyqvRRA3xSQ,63019680 +nvidia/cu13/nvvm/bin/cicc,sha256=R1qUhvHMyUCDI8x16i-hFZnwjp3uE3u3rHFQzlIIxCU,76506792 +nvidia/cu13/nvvm/libdevice/libdevice.10.bc,sha256=kTNNbhJ0j2y1u_ChzZZaVrzZPcT0ltLlxfjG5SMJQ1Y,464132 +nvidia_nvvm-13.0.88.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +nvidia_nvvm-13.0.88.dist-info/METADATA,sha256=EmuQ-YKECkybyhWH0q36zrGOiBuJjSBXGHxJkBsigYY,1674 +nvidia_nvvm-13.0.88.dist-info/RECORD,, +nvidia_nvvm-13.0.88.dist-info/WHEEL,sha256=Nh2Ev5kI-AJERgb0NFee1zrCiChBqq7oF05S-p7GLqk,144 +nvidia_nvvm-13.0.88.dist-info/licenses/License.txt,sha256=rW9YU_ugyg0VnQ9Y1JrkmDDC-Mk_epJki5zpCttMbM0,59262 +nvidia_nvvm-13.0.88.dist-info/top_level.txt,sha256=fTkAtiFuL16nUrB9ytDDtpytz2t0B4NvYTnRzwAhO14,7 diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/WHEEL b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..051a4e1881e1fc26bd6a1501537735c0bd86fc56 --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-manylinux2010_x86_64 +Tag: py3-none-manylinux_2_12_x86_64 + diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/licenses/License.txt b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/licenses/License.txt new file mode 100644 index 0000000000000000000000000000000000000000..b491c70e0aef319022ded661e111ddbd45b8a17f --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/licenses/License.txt @@ -0,0 +1,1568 @@ +End User License Agreement +-------------------------- + + +Preface +------- + +The Software License Agreement in Chapter 1 and the Supplement +in Chapter 2 contain license terms and conditions that govern +the use of NVIDIA software. By accepting this agreement, you +agree to comply with all the terms and conditions applicable +to the product(s) included herein. + + +NVIDIA Driver + + +Description + +This package contains the operating system driver and +fundamental system software components for NVIDIA GPUs. + + +NVIDIA CUDA Toolkit + + +Description + +The NVIDIA CUDA Toolkit provides command-line and graphical +tools for building, debugging and optimizing the performance +of applications accelerated by NVIDIA GPUs, runtime and math +libraries, and documentation including programming guides, +user manuals, and API references. 
+ + +Default Install Location of CUDA Toolkit + +Windows platform: + +%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.# + +Linux platform: + +/usr/local/cuda-#.# + +Mac platform: + +/Developer/NVIDIA/CUDA-#.# + + +NVIDIA CUDA Samples + + +Description + +This package includes over 100 CUDA examples that demonstrate +various CUDA programming principles, and efficient CUDA +implementation of algorithms in specific application domains. + + +Default Install Location of CUDA Samples + +Windows platform: + +%ProgramData%\NVIDIA Corporation\CUDA Samples\v#.# + +Linux platform: + +/usr/local/cuda-#.#/samples + +and + +$HOME/NVIDIA_CUDA-#.#_Samples + +Mac platform: + +/Developer/NVIDIA/CUDA-#.#/samples + + +NVIDIA Nsight Visual Studio Edition (Windows only) + + +Description + +NVIDIA Nsight Development Platform, Visual Studio Edition is a +development environment integrated into Microsoft Visual +Studio that provides tools for debugging, profiling, analyzing +and optimizing your GPU computing and graphics applications. + + +Default Install Location of Nsight Visual Studio Edition + +Windows platform: + +%ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.# + + +1. License Agreement for NVIDIA Software Development Kits +--------------------------------------------------------- + + +Release Date: July 26, 2018 +--------------------------- + + +Important Notice: Read before downloading, installing, +copying or using the licensed software: +------------------------------------------------------- + +This license agreement, including exhibits attached +("Agreement") is a legal agreement between you and NVIDIA +Corporation ("NVIDIA") and governs your use of a NVIDIA +software development kit (“SDK”). + +Each SDK has its own set of software and materials, but here +is a description of the types of items that may be included in +a SDK: source code, header files, APIs, data sets and assets +(examples include images, textures, models, scenes, videos, +native API input/output files), binary software, sample code, +libraries, utility programs, programming code and +documentation. + +This Agreement can be accepted only by an adult of legal age +of majority in the country in which the SDK is used. + +If you are entering into this Agreement on behalf of a company +or other legal entity, you represent that you have the legal +authority to bind the entity to this Agreement, in which case +“you” will mean the entity you represent. + +If you don’t have the required age or authority to accept +this Agreement, or if you don’t accept all the terms and +conditions of this Agreement, do not download, install or use +the SDK. + +You agree to use the SDK only for purposes that are permitted +by (a) this Agreement, and (b) any applicable law, regulation +or generally accepted practices or guidelines in the relevant +jurisdictions. + + +1.1. License + + +1.1.1. License Grant + +Subject to the terms of this Agreement, NVIDIA hereby grants +you a non-exclusive, non-transferable license, without the +right to sublicense (except as expressly provided in this +Agreement) to: + + 1. Install and use the SDK, + + 2. Modify and create derivative works of sample source code + delivered in the SDK, and + + 3. Distribute those portions of the SDK that are identified + in this Agreement as distributable, as incorporated in + object code format into a software application that meets + the distribution requirements indicated in this Agreement. + + +1.1.2.
Distribution Requirements + +These are the distribution requirements for you to exercise +the distribution grant: + + 1. Your application must have material additional + functionality, beyond the included portions of the SDK. + + 2. The distributable portions of the SDK shall only be + accessed by your application. + + 3. The following notice shall be included in modifications + and derivative works of sample source code distributed: + “This software contains source code provided by NVIDIA + Corporation.” + + 4. Unless a developer tool is identified in this Agreement + as distributable, it is delivered for your internal use + only. + + 5. The terms under which you distribute your application + must be consistent with the terms of this Agreement, + including (without limitation) terms relating to the + license grant and license restrictions and protection of + NVIDIA’s intellectual property rights. Additionally, you + agree that you will protect the privacy, security and + legal rights of your application users. + + 6. You agree to notify NVIDIA in writing of any known or + suspected distribution or use of the SDK not in compliance + with the requirements of this Agreement, and to enforce + the terms of your agreements with respect to distributed + SDK. + + +1.1.3. Authorized Users + +You may allow employees and contractors of your entity or of +your subsidiary(ies) to access and use the SDK from your +secure network to perform work on your behalf. + +If you are an academic institution you may allow users +enrolled or employed by the academic institution to access and +use the SDK from your secure network. + +You are responsible for the compliance with the terms of this +Agreement by your authorized users. If you become aware that +your authorized users didn’t follow the terms of this +Agreement, you agree to take reasonable steps to resolve the +non-compliance and prevent new occurrences. + + +1.1.4. Pre-Release SDK + +The SDK versions identified as alpha, beta, preview or +otherwise as pre-release, may not be fully functional, may +contain errors or design flaws, and may have reduced or +different security, privacy, accessibility, availability, and +reliability standards relative to commercial versions of +NVIDIA software and materials. Use of a pre-release SDK may +result in unexpected results, loss of data, project delays or +other unpredictable damage or loss. + +You may use a pre-release SDK at your own risk, understanding +that pre-release SDKs are not intended for use in production +or business-critical systems. + +NVIDIA may choose not to make available a commercial version +of any pre-release SDK. NVIDIA may also choose to abandon +development and terminate the availability of a pre-release +SDK at any time without liability. + + +1.1.5. Updates + +NVIDIA may, at its option, make available patches, workarounds +or other updates to this SDK. Unless the updates are provided +with their separate governing terms, they are deemed part of +the SDK licensed to you as provided in this Agreement. You +agree that the form and content of the SDK that NVIDIA +provides may change without prior notice to you. While NVIDIA +generally maintains compatibility between versions, NVIDIA may +in some cases make changes that introduce incompatibilities in +future versions of the SDK. + + +1.1.6. 
Third Party Licenses + +The SDK may come bundled with, or otherwise include or be +distributed with, third party software licensed by a NVIDIA +supplier and/or open source software provided under an open +source license. Use of third party software is subject to the +third-party license terms, or in the absence of third party +terms, the terms of this Agreement. Copyright to third party +software is held by the copyright holders indicated in the +third-party software or license. + + +1.1.7. Reservation of Rights + +NVIDIA reserves all rights, title, and interest in and to the +SDK, not expressly granted to you under this Agreement. + + +1.2. Limitations + +The following license limitations apply to your use of the +SDK: + + 1. You may not reverse engineer, decompile or disassemble, + or remove copyright or other proprietary notices from any + portion of the SDK or copies of the SDK. + + 2. Except as expressly provided in this Agreement, you may + not copy, sell, rent, sublicense, transfer, distribute, + modify, or create derivative works of any portion of the + SDK. For clarity, you may not distribute or sublicense the + SDK as a stand-alone product. + + 3. Unless you have an agreement with NVIDIA for this + purpose, you may not indicate that an application created + with the SDK is sponsored or endorsed by NVIDIA. + + 4. You may not bypass, disable, or circumvent any + encryption, security, digital rights management or + authentication mechanism in the SDK. + + 5. You may not use the SDK in any manner that would cause it + to become subject to an open source software license. As + examples, licenses that require as a condition of use, + modification, and/or distribution that the SDK be: + + a. Disclosed or distributed in source code form; + + b. Licensed for the purpose of making derivative works; + or + + c. Redistributable at no charge. + + 6. Unless you have an agreement with NVIDIA for this + purpose, you may not use the SDK with any system or + application where the use or failure of the system or + application can reasonably be expected to threaten or + result in personal injury, death, or catastrophic loss. + Examples include use in avionics, navigation, military, + medical, life support or other life critical applications. + NVIDIA does not design, test or manufacture the SDK for + these critical uses and NVIDIA shall not be liable to you + or any third party, in whole or in part, for any claims or + damages arising from such uses. + + 7. You agree to defend, indemnify and hold harmless NVIDIA + and its affiliates, and their respective employees, + contractors, agents, officers and directors, from and + against any and all claims, damages, obligations, losses, + liabilities, costs or debt, fines, restitutions and + expenses (including but not limited to attorney’s fees + and costs incident to establishing the right of + indemnification) arising out of or related to your use of + the SDK outside of the scope of this Agreement, or not in + compliance with its terms. + + +1.3. Ownership + + 1. NVIDIA or its licensors hold all rights, title and + interest in and to the SDK and its modifications and + derivative works, including their respective intellectual + property rights, subject to your rights described in this + section. This SDK may include software and materials from + NVIDIA’s licensors, and these licensors are intended + third party beneficiaries that may enforce this Agreement + with respect to their intellectual property rights. + + 2. 
You hold all rights, title and interest in and to your + applications and your derivative works of the sample + source code delivered in the SDK, including their + respective intellectual property rights, subject to + NVIDIA’s rights described in this section. + + 3. You may, but don’t have to, provide to NVIDIA + suggestions, feature requests or other feedback regarding + the SDK, including possible enhancements or modifications + to the SDK. For any feedback that you voluntarily provide, + you hereby grant NVIDIA and its affiliates a perpetual, + non-exclusive, worldwide, irrevocable license to use, + reproduce, modify, license, sublicense (through multiple + tiers of sublicensees), and distribute (through multiple + tiers of distributors) it without the payment of any + royalties or fees to you. NVIDIA will use feedback at its + choice. NVIDIA is constantly looking for ways to improve + its products, so you may send feedback to NVIDIA through + the developer portal at https://developer.nvidia.com. + + +1.4. No Warranties + +THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL +FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND +ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND +OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING, +BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE +ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO +WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF +DEALING OR COURSE OF TRADE. + + +1.5. Limitation of Liability + +TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS +AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL, +PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS +OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF +PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION +WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK, +WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH +OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE), +PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF +LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES +TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS +AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE +NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS +LIMIT. + +These exclusions and limitations of liability shall apply +regardless if NVIDIA or its affiliates have been advised of +the possibility of such damages, and regardless of whether a +remedy fails its essential purpose. These exclusions and +limitations of liability form an essential basis of the +bargain between the parties, and, absent any of these +exclusions or limitations of liability, the provisions of this +Agreement, including, without limitation, the economic terms, +would be substantially different. + + +1.6. Termination + + 1. This Agreement will continue to apply until terminated by + either you or NVIDIA as described below. + + 2. If you want to terminate this Agreement, you may do so by + stopping to use the SDK. + + 3. NVIDIA may, at any time, terminate this Agreement if: + + a. (i) you fail to comply with any term of this + Agreement and the non-compliance is not fixed within + thirty (30) days following notice from NVIDIA (or + immediately if you violate NVIDIA’s intellectual + property rights); + + b. (ii) you commence or participate in any legal + proceeding against NVIDIA with respect to the SDK; or + + c. 
(iii) NVIDIA decides to no longer provide the SDK in + a country or, in NVIDIA’s sole discretion, the + continued use of it is no longer commercially viable. + + 4. Upon any termination of this Agreement, you agree to + promptly discontinue use of the SDK and destroy all copies + in your possession or control. Your prior distributions in + accordance with this Agreement are not affected by the + termination of this Agreement. Upon written request, you + will certify in writing that you have complied with your + commitments under this section. Upon any termination of + this Agreement all provisions survive except for the + license grant provisions. + + +1.7. General + +If you wish to assign this Agreement or your rights and +obligations, including by merger, consolidation, dissolution +or operation of law, contact NVIDIA to ask for permission. Any +attempted assignment not approved by NVIDIA in writing shall +be void and of no effect. NVIDIA may assign, delegate or +transfer this Agreement and its rights and obligations, and if +to a non-affiliate you will be notified. + +You agree to cooperate with NVIDIA and provide reasonably +requested information to verify your compliance with this +Agreement. + +This Agreement will be governed in all respects by the laws of +the United States and of the State of Delaware as those laws +are applied to contracts entered into and performed entirely +within Delaware by Delaware residents, without regard to the +conflicts of laws principles. The United Nations Convention on +Contracts for the International Sale of Goods is specifically +disclaimed. You agree to all terms of this Agreement in the +English language. + +The state or federal courts residing in Santa Clara County, +California shall have exclusive jurisdiction over any dispute +or claim arising out of this Agreement. Notwithstanding this, +you agree that NVIDIA shall still be allowed to apply for +injunctive remedies or an equivalent type of urgent legal +relief in any jurisdiction. + +If any court of competent jurisdiction determines that any +provision of this Agreement is illegal, invalid or +unenforceable, such provision will be construed as limited to +the extent necessary to be consistent with and fully +enforceable under the law and the remaining provisions will +remain in full force and effect. Unless otherwise specified, +remedies are cumulative. + +Each party acknowledges and agrees that the other is an +independent contractor in the performance of this Agreement. + +The SDK has been developed entirely at private expense and is +“commercial items” consisting of “commercial computer +software” and “commercial computer software +documentation” provided with RESTRICTED RIGHTS. Use, +duplication or disclosure by the U.S. Government or a U.S. +Government subcontractor is subject to the restrictions in +this Agreement pursuant to DFARS 227.7202-3(a) or as set forth +in subparagraphs (c)(1) and (2) of the Commercial Computer +Software - Restricted Rights clause at FAR 52.227-19, as +applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas +Expressway, Santa Clara, CA 95051. + +The SDK is subject to United States export laws and +regulations. You agree that you will not ship, transfer or +export the SDK into any country, or use the SDK in any manner, +prohibited by the United States Bureau of Industry and +Security or economic sanctions regulations administered by the +U.S. 
Department of Treasury’s Office of Foreign Assets +Control (OFAC), or any applicable export laws, restrictions or +regulations. These laws include restrictions on destinations, +end users and end use. By accepting this Agreement, you +confirm that you are not a resident or citizen of any country +currently embargoed by the U.S. and that you are not otherwise +prohibited from receiving the SDK. + +Any notice delivered by NVIDIA to you under this Agreement +will be delivered via mail, email or fax. You agree that any +notices that NVIDIA sends you electronically will satisfy any +legal communication requirements. Please direct your legal +notices or other correspondence to NVIDIA Corporation, 2788 +San Tomas Expressway, Santa Clara, California 95051, United +States of America, Attention: Legal Department. + +This Agreement and any exhibits incorporated into this +Agreement constitute the entire agreement of the parties with +respect to the subject matter of this Agreement and supersede +all prior negotiations or documentation exchanged between the +parties relating to this SDK license. Any additional and/or +conflicting terms on documents issued by you are null, void, +and invalid. Any amendment or waiver under this Agreement +shall be in writing and signed by representatives of both +parties. + + +2. CUDA Toolkit Supplement to Software License Agreement for +NVIDIA Software Development Kits +------------------------------------------------------------ + + +Release date: August 16, 2018 +----------------------------- + +The terms in this supplement govern your use of the NVIDIA +CUDA Toolkit SDK under the terms of your license agreement +(“Agreement”) as modified by this supplement. Capitalized +terms used but not defined below have the meaning assigned to +them in the Agreement. + +This supplement is an exhibit to the Agreement and is +incorporated as an integral part of the Agreement. In the +event of conflict between the terms in this supplement and the +terms in the Agreement, the terms in this supplement govern. + + +2.1. License Scope + +The SDK is licensed for you to develop applications only for +use in systems with NVIDIA GPUs. + + +2.2. Distribution + +The portions of the SDK that are distributable under the +Agreement are listed in Attachment A. + + +2.3. Operating Systems + +Those portions of the SDK designed exclusively for use on the +Linux or FreeBSD operating systems, or other operating systems +derived from the source code to these operating systems, may +be copied and redistributed for use in accordance with this +Agreement, provided that the object code files are not +modified in any way (except for unzipping of compressed +files). + + +2.4. Audio and Video Encoders and Decoders + +You acknowledge and agree that it is your sole responsibility +to obtain any additional third-party licenses required to +make, have made, use, have used, sell, import, and offer for +sale your products or services that include or incorporate any +third-party software and content relating to audio and/or +video encoders and decoders from, including but not limited +to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A., +MPEG-LA, and Coding Technologies. NVIDIA does not grant to you +under this Agreement any necessary patent or other rights with +respect to any audio and/or video encoders and decoders. + + +2.5. 
Licensing + +If the distribution terms in this Agreement are not suitable +for your organization, or for any questions regarding this +Agreement, please contact NVIDIA at +nvidia-compute-license-questions@nvidia.com. + + +2.6. Attachment A + +The following portions of the SDK are distributable under the +Agreement: + +Component + +CUDA Runtime + +Windows + +cudart.dll, cudart_static.lib, cudadevrt.lib + +Mac OSX + +libcudart.dylib, libcudart_static.a, libcudadevrt.a + +Linux + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Android + +libcudart.so, libcudart_static.a, libcudadevrt.a + +Component + +CUDA FFT Library + +Windows + +cufft.dll, cufftw.dll, cufft.lib, cufftw.lib + +Mac OSX + +libcufft.dylib, libcufft_static.a, libcufftw.dylib, +libcufftw_static.a + +Linux + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Android + +libcufft.so, libcufft_static.a, libcufftw.so, +libcufftw_static.a + +Component + +CUDA BLAS Library + +Windows + +cublas.dll, cublasLt.dll + +Mac OSX + +libcublas.dylib, libcublasLt.dylib, libcublas_static.a, +libcublasLt_static.a + +Linux + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Android + +libcublas.so, libcublasLt.so, libcublas_static.a, +libcublasLt_static.a + +Component + +NVIDIA "Drop-in" BLAS Library + +Windows + +nvblas.dll + +Mac OSX + +libnvblas.dylib + +Linux + +libnvblas.so + +Component + +CUDA Sparse Matrix Library + +Windows + +cusparse.dll, cusparse.lib + +Mac OSX + +libcusparse.dylib, libcusparse_static.a + +Linux + +libcusparse.so, libcusparse_static.a + +Android + +libcusparse.so, libcusparse_static.a + +Component + +CUDA Linear Solver Library + +Windows + +cusolver.dll, cusolver.lib + +Mac OSX + +libcusolver.dylib, libcusolver_static.a + +Linux + +libcusolver.so, libcusolver_static.a + +Android + +libcusolver.so, libcusolver_static.a + +Component + +CUDA Random Number Generation Library + +Windows + +curand.dll, curand.lib + +Mac OSX + +libcurand.dylib, libcurand_static.a + +Linux + +libcurand.so, libcurand_static.a + +Android + +libcurand.so, libcurand_static.a + +Component + +CUDA Accelerated Graph Library + +Component + +NVIDIA Performance Primitives Library + +Windows + +nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll, +nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll, +nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib, +nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll, +nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib + +Mac OSX + +libnppc.dylib, libnppc_static.a, libnppial.dylib, +libnppial_static.a, libnppicc.dylib, libnppicc_static.a, +libnppicom.dylib, libnppicom_static.a, libnppidei.dylib, +libnppidei_static.a, libnppif.dylib, libnppif_static.a, +libnppig.dylib, libnppig_static.a, libnppim.dylib, +libnppisu_static.a, libnppitc.dylib, libnppitc_static.a, +libnpps.dylib, libnpps_static.a + +Linux + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, +libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Android + +libnppc.so, libnppc_static.a, libnppial.so, +libnppial_static.a, libnppicc.so, libnppicc_static.a, +libnppicom.so, libnppicom_static.a, libnppidei.so, +libnppidei_static.a, libnppif.so, libnppif_static.a +libnppig.so, libnppig_static.a, libnppim.so, 
+libnppim_static.a, libnppist.so, libnppist_static.a, +libnppisu.so, libnppisu_static.a, libnppitc.so +libnppitc_static.a, libnpps.so, libnpps_static.a + +Component + +NVIDIA JPEG Library + +Linux + +libnvjpeg.so, libnvjpeg_static.a + +Component + +Internal common library required for statically linking to +cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP + +Mac OSX + +libculibos.a + +Linux + +libculibos.a + +Component + +NVIDIA Runtime Compilation Library and Header + +All + +nvrtc.h + +Windows + +nvrtc.dll, nvrtc-builtins.dll + +Mac OSX + +libnvrtc.dylib, libnvrtc-builtins.dylib + +Linux + +libnvrtc.so, libnvrtc-builtins.so + +Component + +NVIDIA Optimizing Compiler Library + +Windows + +nvvm.dll + +Mac OSX + +libnvvm.dylib + +Linux + +libnvvm.so + +Component + +NVIDIA Common Device Math Functions Library + +Windows + +libdevice.10.bc + +Mac OSX + +libdevice.10.bc + +Linux + +libdevice.10.bc + +Component + +CUDA Occupancy Calculation Header Library + +All + +cuda_occupancy.h + +Component + +CUDA Half Precision Headers + +All + +cuda_fp16.h, cuda_fp16.hpp + +Component + +CUDA Profiling Tools Interface (CUPTI) Library + +Windows + +cupti.dll + +Mac OSX + +libcupti.dylib + +Linux + +libcupti.so + +Component + +NVIDIA Tools Extension Library + +Windows + +nvToolsExt.dll, nvToolsExt.lib + +Mac OSX + +libnvToolsExt.dylib + +Linux + +libnvToolsExt.so + +Component + +NVIDIA CUDA Driver Libraries + +Linux + +libcuda.so, libnvidia-fatbinaryloader.so, +libnvidia-ptxjitcompiler.so + +The NVIDIA CUDA Driver Libraries are only distributable in +applications that meet this criteria: + + 1. The application was developed starting from a NVIDIA CUDA + container obtained from Docker Hub or the NVIDIA GPU + Cloud, and + + 2. The resulting application is packaged as a Docker + container and distributed to users on Docker Hub or the + NVIDIA GPU Cloud only. + + +2.7. Attachment B + + +Additional Licensing Obligations + +The following third party components included in the SOFTWARE +are licensed to Licensee pursuant to the following terms and +conditions: + + 1. Licensee's use of the GDB third party component is + subject to the terms and conditions of GNU GPL v3: + + This product includes copyrighted third-party software licensed + under the terms of the GNU General Public License v3 ("GPL v3"). + All third-party software packages are copyright by their respective + authors. GPL v3 terms and conditions are hereby incorporated into + the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt + + Consistent with these licensing requirements, the software + listed below is provided under the terms of the specified + open source software licenses. To obtain source code for + software provided under licenses that require + redistribution of source code, including the GNU General + Public License (GPL) and GNU Lesser General Public License + (LGPL), contact oss-requests@nvidia.com. This offer is + valid for a period of three (3) years from the date of the + distribution of this product by NVIDIA CORPORATION. + + Component License + CUDA-GDB GPL v3 + + 2. Licensee represents and warrants that any and all third + party licensing and/or royalty payment obligations in + connection with Licensee's use of the H.264 video codecs + are solely the responsibility of Licensee. + + 3. Licensee's use of the Thrust library is subject to the + terms and conditions of the Apache License Version 2.0. + All third-party software packages are copyright by their + respective authors. 
Apache License Version 2.0 terms and + conditions are hereby incorporated into the Agreement by + this reference. + http://www.apache.org/licenses/LICENSE-2.0.html + + In addition, Licensee acknowledges the following notice: + Thrust includes source code from the Boost Iterator, + Tuple, System, and Random Number libraries. + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 4. Licensee's use of the LLVM third party component is + subject to the following terms and conditions: + + ====================================================== + LLVM Release License + ====================================================== + University of Illinois/NCSA + Open Source License + + Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. + All rights reserved. + + Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal with the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at Urbana- + Champaign, nor the names of its contributors may be used to endorse or + promote products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS WITH THE SOFTWARE. + + 5. Licensee's use (e.g. nvprof) of the PCRE third party + component is subject to the following terms and + conditions: + + ------------ + PCRE LICENCE + ------------ + PCRE is a library of functions to support regular expressions whose syntax + and semantics are as close as possible to those of the Perl 5 language. + Release 8 of PCRE is distributed under the terms of the "BSD" licence, as + specified below. The documentation for PCRE, supplied in the "doc" + directory, is distributed under the same terms as the software itself. The + basic library functions are written in C and are freestanding. Also + included in the distribution is a set of C++ wrapper functions, and a just- + in-time compiler that can be used to optimize pattern matching. These are + both optional features that can be omitted when the library is built. + + THE BASIC LIBRARY FUNCTIONS + --------------------------- + Written by: Philip Hazel + Email local part: ph10 + Email domain: cam.ac.uk + University of Cambridge Computing Service, + Cambridge, England. + Copyright (c) 1997-2012 University of Cambridge + All rights reserved. + + PCRE JUST-IN-TIME COMPILATION SUPPORT + ------------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2010-2012 Zoltan Herczeg + All rights reserved. + + STACK-LESS JUST-IN-TIME COMPILER + -------------------------------- + Written by: Zoltan Herczeg + Email local part: hzmester + Emain domain: freemail.hu + Copyright(c) 2009-2012 Zoltan Herczeg + All rights reserved. + + THE C++ WRAPPER FUNCTIONS + ------------------------- + Contributed by: Google Inc. + Copyright (c) 2007-2012, Google Inc. + All rights reserved. + + THE "BSD" LICENCE + ----------------- + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of the University of Cambridge nor the name of Google + Inc. nor the names of their contributors may be used to endorse or + promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 6. 
Some of the cuBLAS library routines were written by or + derived from code written by Vasily Volkov and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2007-2009, Regents of the University of California + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the University of California, Berkeley nor + the names of its contributors may be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 7. Some of the cuBLAS library routines were written by or + derived from code written by Davide Barbieri and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR + IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + 8. 
Some of the cuBLAS library routines were derived from + code developed by the University of Tennessee and are + subject to the Modified Berkeley Software Distribution + License as follows: + + Copyright (c) 2010 The University of Tennessee. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer listed in this license in the documentation and/or + other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 9. Some of the cuBLAS library routines were written by or + derived from code written by Jonathan Hogg and are subject + to the Modified Berkeley Software Distribution License as + follows: + + Copyright (c) 2012, The Science and Technology Facilities Council (STFC). + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the STFC nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 10. 
Some of the cuBLAS library routines were written by or + derived from code written by Ahmad M. Abdelfattah, David + Keyes, and Hatem Ltaief, and are subject to the Apache + License, Version 2.0, as follows: + + -- (C) Copyright 2013 King Abdullah University of Science and Technology + Authors: + Ahmad Abdelfattah (ahmad.ahmad@kaust.edu.sa) + David Keyes (david.keyes@kaust.edu.sa) + Hatem Ltaief (hatem.ltaief@kaust.edu.sa) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the King Abdullah University of Science and + Technology nor the names of its contributors may be used to endorse + or promote products derived from this software without specific prior + written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE + + 11. Some of the cuSPARSE library routines were written by or + derived from code written by Li-Wen Chang and are subject + to the NCSA Open Source License as follows: + + Copyright (c) 2012, University of Illinois. + + All rights reserved. + + Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal with the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimers in the documentation and/or other materials provided + with the distribution. + * Neither the names of IMPACT Group, University of Illinois, nor + the names of its contributors may be used to endorse or promote + products derived from this Software without specific prior + written permission. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE + SOFTWARE. + + 12. Some of the cuRAND library routines were written by or + derived from code written by Mutsuo Saito and Makoto + Matsumoto and are subject to the following license: + + Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima + University. All rights reserved. + + Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima + University and University of Tokyo. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of the Hiroshima University nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 13. Some of the cuRAND library routines were derived from + code developed by D. E. Shaw Research and are subject to + the following license: + + Copyright 2010-2011, D. E. Shaw Research. + + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions, and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions, and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of D. E. Shaw Research nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 14. Some of the Math library routines were written by or + derived from code developed by Norbert Juffa and are + subject to the following license: + + Copyright (c) 2015-2017, Norbert Juffa + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 15. Licensee's use of the lz4 third party component is + subject to the following terms and conditions: + + Copyright (C) 2011-2013, Yann Collet. + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + 16. 
The NPP library uses code from the Boost Math Toolkit, + and is subject to the following license: + + Boost Software License - Version 1.0 - August 17th, 2003 + . . . . + + Permission is hereby granted, free of charge, to any person or + organization obtaining a copy of the software and accompanying + documentation covered by this license (the "Software") to use, + reproduce, display, distribute, execute, and transmit the Software, + and to prepare derivative works of the Software, and to permit + third-parties to whom the Software is furnished to do so, all + subject to the following: + + The copyright notices in the Software and this entire statement, + including the above license grant, this restriction and the following + disclaimer, must be included in all copies of the Software, in whole + or in part, and all derivative works of the Software, unless such + copies or derivative works are solely in the form of machine-executable + object code generated by a source language processor. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND + NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR + ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR + OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + + 17. Portions of the Nsight Eclipse Edition is subject to the + following license: + + The Eclipse Foundation makes available all content in this plug-in + ("Content"). Unless otherwise indicated below, the Content is provided + to you under the terms and conditions of the Eclipse Public License + Version 1.0 ("EPL"). A copy of the EPL is available at http:// + www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program" + will mean the Content. + + If you did not receive this Content directly from the Eclipse + Foundation, the Content is being redistributed by another party + ("Redistributor") and different terms and conditions may apply to your + use of any object code in the Content. Check the Redistributor's + license that was provided with the Content. If no such license exists, + contact the Redistributor. Unless otherwise indicated below, the terms + and conditions of the EPL still apply to any source code in the + Content and such source code may be obtained at http://www.eclipse.org. + + 18. Some of the cuBLAS library routines uses code from + OpenAI, which is subject to the following license: + + License URL + https://github.com/openai/openai-gemm/blob/master/LICENSE + + License Text + The MIT License + + Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + 19. Licensee's use of the Visual Studio Setup Configuration + Samples is subject to the following license: + + The MIT License (MIT) + Copyright (C) Microsoft Corporation. All rights reserved. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + 20. Licensee's use of linmath.h header for CPU functions for + GL vector/matrix operations from lunarG is subject to the + Apache License Version 2.0. + + 21. The DX12-CUDA sample uses the d3dx12.h header, which is + subject to the MIT license . 
+ +----------------- diff --git a/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..862f7abf232cdfbb928609856247292e81c9decb --- /dev/null +++ b/venv/lib/python3.10/site-packages/nvidia_nvvm-13.0.88.dist-info/top_level.txt @@ -0,0 +1 @@ +nvidia diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..33ff5d8b2a1f7d1318676b651fbff40cbac72e22 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/METADATA @@ -0,0 +1,60 @@ +Metadata-Version: 2.3 +Name: opentelemetry-exporter-otlp +Version: 1.26.0 +Summary: OpenTelemetry Collector Exporters +Project-URL: Homepage, https://github.com/open-telemetry/opentelemetry-python/tree/main/exporter/opentelemetry-exporter-otlp +Author-email: OpenTelemetry Authors +License: Apache-2.0 +License-File: LICENSE +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: OpenTelemetry +Classifier: Framework :: OpenTelemetry :: Exporters +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: opentelemetry-exporter-otlp-proto-grpc==1.26.0 +Requires-Dist: opentelemetry-exporter-otlp-proto-http==1.26.0 +Description-Content-Type: text/x-rst + +OpenTelemetry Collector Exporters +================================= + +|pypi| + +.. |pypi| image:: https://badge.fury.io/py/opentelemetry-exporter-otlp.svg + :target: https://pypi.org/project/opentelemetry-exporter-otlp/ + +This library is provided as a convenience to install all supported OpenTelemetry Collector Exporters. Currently it installs: + +* opentelemetry-exporter-otlp-proto-grpc +* opentelemetry-exporter-otlp-proto-http + +In the future, additional packages will be available: +* opentelemetry-exporter-otlp-json-http + +To avoid unnecessary dependencies, users should install the specific package once they've determined their +preferred serialization and protocol method. 
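
[Editorial note, not part of the vendored files in this diff.] The METADATA above describes `opentelemetry-exporter-otlp` as a convenience package that only pins the two protocol-specific exporter distributions; actually wiring an exporter into the SDK happens in application code. Below is a minimal, hedged sketch of that wiring. The import paths are taken verbatim from the `entry_points.txt` and `RECORD` files added elsewhere in this change (`OTLPSpanExporter`, `TracerProvider`, `BatchSpanProcessor`); the endpoint assumes a collector on the conventional OTLP/gRPC port 4317, and `insecure=True` is an illustrative choice for local testing, not a recommendation.

::

    # Hedged usage sketch for the packages vendored in this diff; the
    # endpoint and the insecure flag are assumptions for a local collector.
    from opentelemetry import trace
    from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    provider = TracerProvider()
    # Batch finished spans in memory and ship them over OTLP/gRPC.
    provider.add_span_processor(
        BatchSpanProcessor(OTLPSpanExporter(endpoint="localhost:4317", insecure=True))
    )
    trace.set_tracer_provider(provider)

    with trace.get_tracer(__name__).start_as_current_span("example-span"):
        pass  # work done here is recorded and exported when the batch flushes
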
+ +Installation +------------ + +:: + + pip install opentelemetry-exporter-otlp + + +References +---------- + +* `OpenTelemetry Collector Exporter `_ +* `OpenTelemetry Collector `_ +* `OpenTelemetry `_ +* `OpenTelemetry Protocol Specification `_ diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..05a307c7f135cdf7033f0eecf646695a33868d34 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/RECORD @@ -0,0 +1,9 @@ +opentelemetry/exporter/otlp/__pycache__/version.cpython-310.pyc,, +opentelemetry/exporter/otlp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/exporter/otlp/version.py,sha256=ANYEMcxW_7kp7m-QhNKZUKat8Jf1JBtQ3N9YJF-3SLU,608 +opentelemetry_exporter_otlp-1.26.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +opentelemetry_exporter_otlp-1.26.0.dist-info/METADATA,sha256=l9J_xQmCTL9VxlAcqprFrDcW5tgULsTHz67tOdR9duE,2339 +opentelemetry_exporter_otlp-1.26.0.dist-info/RECORD,, +opentelemetry_exporter_otlp-1.26.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +opentelemetry_exporter_otlp-1.26.0.dist-info/entry_points.txt,sha256=MFtaNsYON8Trs4p59kPy7eeldsg0t6fhRuiO1Eh-BkI,332 +opentelemetry_exporter_otlp-1.26.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..cdd68a497cdfa8d3f2b837225beacef711b85047 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..75200c6af106f726538c5b68fcec887cd8bb6a7d --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/entry_points.txt @@ -0,0 +1,8 @@ +[opentelemetry_logs_exporter] +otlp = opentelemetry.exporter.otlp.proto.grpc._log_exporter:OTLPLogExporter + +[opentelemetry_metrics_exporter] +otlp = opentelemetry.exporter.otlp.proto.grpc.metric_exporter:OTLPMetricExporter + +[opentelemetry_traces_exporter] +otlp = opentelemetry.exporter.otlp.proto.grpc.trace_exporter:OTLPSpanExporter diff --git a/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_exporter_otlp-1.26.0.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a75aac54c63986137b14838874600972898fe2d4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/METADATA @@ -0,0 +1,45 @@ +Metadata-Version: 2.3 +Name: opentelemetry-sdk +Version: 1.26.0 +Summary: OpenTelemetry Python SDK +Project-URL: Homepage, https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-sdk +Author-email: OpenTelemetry Authors +License: Apache-2.0 +License-File: LICENSE +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: OpenTelemetry +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Typing :: Typed +Requires-Python: >=3.8 +Requires-Dist: opentelemetry-api==1.26.0 +Requires-Dist: opentelemetry-semantic-conventions==0.47b0 +Requires-Dist: typing-extensions>=3.7.4 +Description-Content-Type: text/x-rst + +OpenTelemetry Python SDK +============================================================================ + +|pypi| + +.. 
|pypi| image:: https://badge.fury.io/py/opentelemetry-sdk.svg + :target: https://pypi.org/project/opentelemetry-sdk/ + +Installation +------------ + +:: + + pip install opentelemetry-sdk + +References +---------- + +* `OpenTelemetry Project `_ diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..1f4e1f5e74fc78874742fb04adbac115a5a2f124 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/RECORD @@ -0,0 +1,95 @@ +opentelemetry/sdk/__init__.pyi,sha256=kQMbMw8wLQtWJ1bVBm7XoI06B_4Fv0un5hv3FKwrgRQ,669 +opentelemetry/sdk/_configuration/__init__.py,sha256=oDPvP2cqfQd_NLkOZx1UM_agWLXTNIB2bCrTjiX_xOk,14623 +opentelemetry/sdk/_configuration/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/_configuration/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/_logs/__init__.py,sha256=2wvbzweZC0i4b7coxY2J8awkvu2P2Idr2g_on5607sk,971 +opentelemetry/sdk/_logs/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/_logs/_internal/__init__.py,sha256=aA19USYnRsWnQ_4qvd3Q5a1BDRCxVHmYW3Hv0D55pIg,25118 +opentelemetry/sdk/_logs/_internal/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/_logs/_internal/export/__init__.py,sha256=l_wOU2L7v6kXUjN5JCkr4kD_oOvMgQR5aVl20uv2OKI,15253 +opentelemetry/sdk/_logs/_internal/export/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/_logs/_internal/export/__pycache__/in_memory_log_exporter.cpython-310.pyc,, +opentelemetry/sdk/_logs/_internal/export/in_memory_log_exporter.py,sha256=bkVQmGnkkxX3wFDNM_6Aumjjpw7Jjnvfzel_59byIAU,1667 +opentelemetry/sdk/_logs/export/__init__.py,sha256=nUHdXNgwqfDe0KoGkNBX7Xl_mo477iyK3N0D5BH9g2g,1120 +opentelemetry/sdk/_logs/export/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/_logs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/environment_variables/__init__.py,sha256=76PzKdhQOok7ZRrtdeTcXUESDZrykLFRH_FupoLOVhg,25493 +opentelemetry/sdk/environment_variables/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/environment_variables/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/error_handler/__init__.py,sha256=UIiY22B12B9D2SsgR_eG6l6814ynBIZTSBejIphoosA,4649 +opentelemetry/sdk/error_handler/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/error_handler/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/metrics/__init__.py,sha256=BGG-TUOnFM5fPd1k0jBVh6q-9m5hFyLdIb1Ry4XchfI,1268 +opentelemetry/sdk/metrics/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__init__.py,sha256=J00Ix8NB4Q_TnPRNgJezDs3iXZNkrTLN1AbRS_x7thI,19528 +opentelemetry/sdk/metrics/_internal/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/_view_instrument_match.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/aggregation.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/exceptions.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/instrument.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/measurement.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/measurement_consumer.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/metric_reader_storage.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/point.cpython-310.pyc,, 
+opentelemetry/sdk/metrics/_internal/__pycache__/sdk_configuration.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/__pycache__/view.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/_view_instrument_match.py,sha256=n0bdt0BsZoA0gUMLocrUFgZf47TexJxnGTg4Y04s8cY,5572 +opentelemetry/sdk/metrics/_internal/aggregation.py,sha256=Y0F4o5MkikJkhJNChiPAd27rnOG3zqBDiLwPzfmo2c8,46631 +opentelemetry/sdk/metrics/_internal/exceptions.py,sha256=_0bPg3suYoIXKJ7eCqG3S_gUKVcUAHp11vwThwp_yAg,675 +opentelemetry/sdk/metrics/_internal/exponential_histogram/__pycache__/buckets.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/buckets.py,sha256=D9Dmu6loQ-8AReViW2wchFUx009L49AKkbA1YWfcxgA,5946 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__init__.py,sha256=Rr25E11iiSwPIyGyZ9UiOqGfmlOPjyPmhWKm605eHhg,3860 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__pycache__/errors.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__pycache__/exponent_mapping.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__pycache__/ieee_754.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/__pycache__/logarithm_mapping.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/errors.py,sha256=6Q6jfsVluEKp5R_9ECLW8mq3ZooyX0w9WVz5e-YAhuY,886 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/exponent_mapping.py,sha256=k70o6Fd6zedo4VcI1TOTKh2RurdaAUMRU837sd5kO54,6130 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.md,sha256=8Nf8FGbZi26c6KckxIsJHH2sa0hJZ24QCeOmE9huJLg,4980 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/ieee_754.py,sha256=s8bGxpmyn6MP98lIniAZ71hh1MFMq-ADyo16g8Dzeks,5494 +opentelemetry/sdk/metrics/_internal/exponential_histogram/mapping/logarithm_mapping.py,sha256=qXN0ZalesyUgvyJx4bNZO_sd9mO_5oiqP4nWONQHnAU,5833 +opentelemetry/sdk/metrics/_internal/export/__init__.py,sha256=9DFA-QWJ8RhgK2E1cei1MfLNUVahF23uj0_2iJY2RGE,21151 +opentelemetry/sdk/metrics/_internal/export/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/metrics/_internal/instrument.py,sha256=ugD0o6SR_vloTo_Nulx3VR0IUKupkn_0vMCdn44p4n0,8611 +opentelemetry/sdk/metrics/_internal/measurement.py,sha256=oFyLgrizpDBI4R8VCwPeBtlhZOK-C9Lxbr-PzgOF3ew,959 +opentelemetry/sdk/metrics/_internal/measurement_consumer.py,sha256=Yj9dljEO_Hph_ZE632FwjHXDvwPQossB4PjOm2iGC10,4397 +opentelemetry/sdk/metrics/_internal/metric_reader_storage.py,sha256=KuRqPnCsfpi3C-7RWDUH2brfwX7_JfsHCCQ_ZEk2lvg,11872 +opentelemetry/sdk/metrics/_internal/point.py,sha256=PdNd-urYhxyXpceIgmrfKPEUL-2vWvWWSx-IYPcrCdE,7589 +opentelemetry/sdk/metrics/_internal/sdk_configuration.py,sha256=JG77yWdEH_MHzUIbvS_W2PiXKlcwOSd5wTiWAM0ihJo,1020 +opentelemetry/sdk/metrics/_internal/view.py,sha256=Uwd4a-WkTCyXWdBTHTmT9NdUjxpepKT7yIkHNAkDVNM,6369 +opentelemetry/sdk/metrics/export/__init__.py,sha256=5GE7tf3Ig7r2xSS_1eCuLeFmUbaf6UncNecjE_RbrsA,1627 +opentelemetry/sdk/metrics/export/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/metrics/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/metrics/view/__init__.py,sha256=kPqd6YQdIKp1AsO8li4TiYiAYvbTdKCZVl_fOHRAOkk,1130 +opentelemetry/sdk/metrics/view/__pycache__/__init__.cpython-310.pyc,, 
+opentelemetry/sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/resources/__init__.py,sha256=Cl93pBgzysGsQ-U08piROB0m8onsQYeA2eZDhTI-_Tk,15736 +opentelemetry/sdk/resources/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/trace/__init__.py,sha256=Txfo7mx5WBp3nVcCdBNWT8yiuKswtaTn38_InZCiYIs,45275 +opentelemetry/sdk/trace/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/trace/__pycache__/id_generator.cpython-310.pyc,, +opentelemetry/sdk/trace/__pycache__/sampling.cpython-310.pyc,, +opentelemetry/sdk/trace/export/__init__.py,sha256=GVHERvUTdq02W1sowOdz2urhVdCNbtU0w1A3KWcCIqE,17717 +opentelemetry/sdk/trace/export/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/trace/export/__pycache__/in_memory_span_exporter.cpython-310.pyc,, +opentelemetry/sdk/trace/export/in_memory_span_exporter.py,sha256=H_4TRaThMO1H6vUQ0OpQvzJk_fZH0OOsRAM1iZQXsR8,2112 +opentelemetry/sdk/trace/id_generator.py,sha256=YdMREB4UcPbdnhMADFSG1njru4PjyNF4RDCptjcE6Lc,1959 +opentelemetry/sdk/trace/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/trace/sampling.py,sha256=9dko9KQHQ_mYzW5rVLCCpEL9HXg3CfMXWaVFueFgcdo,16867 +opentelemetry/sdk/util/__init__.py,sha256=c73v6N7td5ToQ0Tfrn_56n_peN6RtqF5anVBWcWOhNE,4393 +opentelemetry/sdk/util/__init__.pyi,sha256=RFOnfLwZeldVdlnlEzUJwjL8wqAUwHdJ4anf5P_oBoE,2227 +opentelemetry/sdk/util/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/util/__pycache__/instrumentation.cpython-310.pyc,, +opentelemetry/sdk/util/instrumentation.py,sha256=ttszMZ0P2puS1PQLGM2APkB6pqh6oT89tAu1JXKr1FE,4833 +opentelemetry/sdk/util/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry/sdk/version/__init__.py,sha256=ANYEMcxW_7kp7m-QhNKZUKat8Jf1JBtQ3N9YJF-3SLU,608 +opentelemetry/sdk/version/__pycache__/__init__.cpython-310.pyc,, +opentelemetry/sdk/version/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opentelemetry_sdk-1.26.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +opentelemetry_sdk-1.26.0.dist-info/METADATA,sha256=3_W1W1mfgbh8RR6QIjj7PL8p1aQFV2dUbSwmi9F1w8w,1479 +opentelemetry_sdk-1.26.0.dist-info/RECORD,, +opentelemetry_sdk-1.26.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 +opentelemetry_sdk-1.26.0.dist-info/entry_points.txt,sha256=PjoiLYsPeicEzv6YPkzoPXBarSUGbG_BSHW43qZCEFY,1348 +opentelemetry_sdk-1.26.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..cdd68a497cdfa8d3f2b837225beacef711b85047 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.25.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..5cabd5296573e4b049af71fcaf7be3771073a5ce --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/entry_points.txt @@ -0,0 +1,35 @@ +[opentelemetry_environment_variables] +sdk = 
opentelemetry.sdk.environment_variables + +[opentelemetry_id_generator] +random = opentelemetry.sdk.trace.id_generator:RandomIdGenerator + +[opentelemetry_logger_provider] +sdk_logger_provider = opentelemetry.sdk._logs:LoggerProvider + +[opentelemetry_logs_exporter] +console = opentelemetry.sdk._logs.export:ConsoleLogExporter + +[opentelemetry_meter_provider] +sdk_meter_provider = opentelemetry.sdk.metrics:MeterProvider + +[opentelemetry_metrics_exporter] +console = opentelemetry.sdk.metrics.export:ConsoleMetricExporter + +[opentelemetry_resource_detector] +otel = opentelemetry.sdk.resources:OTELResourceDetector +process = opentelemetry.sdk.resources:ProcessResourceDetector + +[opentelemetry_tracer_provider] +sdk_tracer_provider = opentelemetry.sdk.trace:TracerProvider + +[opentelemetry_traces_exporter] +console = opentelemetry.sdk.trace.export:ConsoleSpanExporter + +[opentelemetry_traces_sampler] +always_off = opentelemetry.sdk.trace.sampling:_AlwaysOff +always_on = opentelemetry.sdk.trace.sampling:_AlwaysOn +parentbased_always_off = opentelemetry.sdk.trace.sampling:_ParentBasedAlwaysOff +parentbased_always_on = opentelemetry.sdk.trace.sampling:_ParentBasedAlwaysOn +parentbased_traceidratio = opentelemetry.sdk.trace.sampling:ParentBasedTraceIdRatio +traceidratio = opentelemetry.sdk.trace.sampling:TraceIdRatioBased diff --git a/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/venv/lib/python3.10/site-packages/opentelemetry_sdk-1.26.0.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/LICENSE b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..cac50cdfa9f73d7329854bd5528c6c8c3c0eb5d4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023- The Outlines developers + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/METADATA b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a2ad72b6a38e84ed8c5df5d64c78fdaa08d03c4b --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/METADATA @@ -0,0 +1,124 @@ +Metadata-Version: 2.1 +Name: outlines_core +Version: 0.1.26 +Summary: Structured Text Generation in Rust +Author: Outlines Developers +License: Apache-2.0 +Project-URL: homepage, https://github.com/dottxt-ai/outlines-core +Project-URL: documentation, https://dottxt-ai.github.io/outlines-core/ +Project-URL: repository, https://github.com/dottxt-ai/outlines-core +Keywords: machine learning,deep learning,language models,structured generation +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: Intended Audience :: Science/Research +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: interegular +Requires-Dist: jsonschema +Provides-Extra: test +Requires-Dist: pre-commit; extra == "test" +Requires-Dist: pydantic; extra == "test" +Requires-Dist: pytest; extra == "test" +Requires-Dist: pytest-benchmark; extra == "test" +Requires-Dist: pytest-cov; extra == "test" +Requires-Dist: pytest-mock; extra == "test" +Requires-Dist: coverage[toml]>=5.1; extra == "test" +Requires-Dist: diff-cover; extra == "test" +Requires-Dist: accelerate; extra == "test" +Requires-Dist: beartype<0.16.0; extra == "test" +Requires-Dist: huggingface_hub; extra == "test" +Requires-Dist: torch; extra == "test" +Requires-Dist: numpy; extra == "test" +Requires-Dist: scipy; extra == "test" +Requires-Dist: transformers; extra == "test" +Requires-Dist: datasets; extra == "test" +Requires-Dist: pillow; extra == "test" +Requires-Dist: asv; extra == "test" +Requires-Dist: psutil; extra == "test" +Requires-Dist: setuptools-rust; extra == "test" + +
+ +Outlines-core Logo + +[![Contributors][contributors-badge]][contributors] + +*Structured generation (in Rust).* +
+ +This package provides the core functionality for structured generation, formerly implemented in [Outlines][outlines], with a focus on performance and portability. + +# Install + +We provide bindings to the following languages: +- [Rust][rust-implementation] (Original implementation) +- [Python][python-bindings] + +The latest release of the Python bindings is available on PyPI using `pip`: + +``` shell +pip install outlines-core +``` + +The current development branch of `outlines-core` can be installed from GitHub, also using `pip`: + +``` shell +pip install git+https://github.com/outlines-dev/outlines-core +``` + +Or install it in a Rust project with Cargo: +``` bash +cargo add outlines-core +``` + +# How to contribute? + +## Setup + +First, fork the repository on GitHub and clone the fork locally: + +```bash +git clone git@github.com:YourUserName/outlines-core.git +cd outlines-core +``` + +Create a new virtual environment: + +``` bash +python -m venv .venv +source .venv/bin/activate +``` + +Then install the dependencies in editable mode, and install the pre-commit hooks: + +``` bash +pip install -e ".[test]" +pre-commit install +``` + +## Before pushing your code + +Run the tests: + +``` bash +pytest +``` + +And run the code style checks: + +``` bash +pre-commit run --all-files +``` + +[outlines]: https://github.com/dottxt-ai/outlines +[contributors]: https://github.com/outlines-dev/outlines-core/graphs/contributors +[contributors-badge]: https://img.shields.io/github/contributors/outlines-dev/outlines-core?style=flat-square&logo=github&logoColor=white&color=ECEFF4 +[rust-implementation]: https://github.com/outlines-dev/outlines-core/tree/readme/src +[python-bindings]: https://github.com/outlines-dev/outlines-core/tree/readme/python/outlines_core diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/RECORD b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..83b185e8f8aecfe1286a267e52a72cc5e8933394 --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/RECORD @@ -0,0 +1,21 @@ +outlines_core-0.1.26.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +outlines_core-0.1.26.dist-info/LICENSE,sha256=9xB47oqqPVZwSIdW8Zk7neOuZMlUagIy67vdWVxTddc,11354 +outlines_core-0.1.26.dist-info/METADATA,sha256=YNmRZrAWCy_JWcTTuWsTzD9qZu-Wdm5J8JTTolLRigM,3761 +outlines_core-0.1.26.dist-info/RECORD,, +outlines_core-0.1.26.dist-info/WHEEL,sha256=m2_gJPr7xnnGfiE013tVzgC-Sc1sFXYskwf7kyqFHXk,151 +outlines_core-0.1.26.dist-info/top_level.txt,sha256=45vDiTQKP-oMzuyEv9_QNERQrpBud0CXZ1BDiFJjyV4,14 +outlines_core/__init__.py,sha256=U-Sz4BgH-fmII5vv_AL_qSp2a2mLd6g6aHbEfnUPmKU,208 +outlines_core/__pycache__/__init__.cpython-310.pyc,, +outlines_core/__pycache__/_version.cpython-310.pyc,, +outlines_core/_version.py,sha256=pHptU6h1OxA8-tsynXa5Rz3N6XELevZ_27Ye-N1R-ds,413 +outlines_core/fsm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +outlines_core/fsm/__pycache__/__init__.cpython-310.pyc,, +outlines_core/fsm/__pycache__/guide.cpython-310.pyc,, +outlines_core/fsm/__pycache__/json_schema.cpython-310.pyc,, +outlines_core/fsm/__pycache__/regex.cpython-310.pyc,, +outlines_core/fsm/guide.py,sha256=PujEqMU_UgYHBfOmWsAvVbdEb1HeniffGHQj61ZtCBE,9584 +outlines_core/fsm/json_schema.py,sha256=nXgIMF6uVI7sYqy9b1kJOChFfNutvbmvwmOgFbHcnl0,230
+outlines_core/fsm/outlines_core_rs.cpython-310-x86_64-linux-gnu.so,sha256=6aIxvTDvgGRq12zZa5y5Aj6titOIrSB0zONm8k2jDSg,686840 +outlines_core/fsm/outlines_core_rs.pyi,sha256=1pvCFg3KDIDySgwhyzC4-r7tljU-DoWVr-AWs5KuhM0,2917 +outlines_core/fsm/regex.py,sha256=jw4pOwLhXXVlErnN6QUN56F6_nmPp8ATIDN-e3cl1y8,17243 +outlines_core/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/WHEEL b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..f5487b22db738ec9d4f3fcf1afc701af226d4171 --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.6.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..44737cc8a2c5439560c6d7f59de74e020d0b23a0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/outlines_core-0.1.26.dist-info/top_level.txt @@ -0,0 +1 @@ +outlines_core diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/METADATA b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..e218d3b1dba9c0205fe9da7ffc7897518c009af7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/METADATA @@ -0,0 +1,190 @@ +Metadata-Version: 2.1 +Name: partial-json-parser +Version: 0.2.1.1.post6 +Summary: Parse partial JSON generated by LLM +Keywords: JSON,parser,LLM,nlp +Author-Email: Muspi Merol +License: MIT +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Project-URL: repository, https://github.com/promplate/partial-json-parser +Project-URL: homepage, https://promplate.dev/partial-json-parser +Requires-Python: >=3.6 +Provides-Extra: playground +Requires-Dist: rich; extra == "playground" +Description-Content-Type: text/markdown + +# Partial JSON Parser + +Sometimes we need **LLMs (Large Language Models)** to produce **structured information** instead of natural language. The easiest way is to use JSON. + +But until the last token of the response arrives, the JSON is incomplete, which means you can't use `json.loads` to decode it. Yet we still want to stream the data to the user. + +Here comes `partial-json-parser`, a lightweight and customizable library for parsing partial JSON strings. Here is a [demo](https://promplate.dev/partial-json-parser). + +(Note that there is [a JavaScript implementation](https://github.com/promplate/partial-json-parser-js) too) + +## Installation + +```sh +pip install partial-json-parser # or poetry / pdm / uv +``` + +`partial-json-parser` is implemented purely in Python, with good type hints.
It is zero-dependency and works with Python 3.6+. + +You can run its demo playground by installing `rich` too, or: + +```sh +pip install partial-json-parser[playground] +``` + +Then run `json-playground` in your terminal, and you can try the parser interactively. + +## Usage + +```py +>>> from partial_json_parser import loads +>>> loads('{"key": "v') # {'key': 'v'} +``` + +Alternatively, you can use `ensure_json` to get the completed JSON string: + +```py +>>> from partial_json_parser import ensure_json +>>> ensure_json('{"key": "v') # '{"key": "v"}' +``` + +### Detailed Usage + +You can import the `loads` function and the `Allow` object from the library like this: + +```py +from partial_json_parser import loads, Allow +``` + +The `Allow` object is just an Enum of options. It determines which types may be partial; types not included in `allow` appear only once their completion can be ensured. + +### Parsing complete / partial JSON strings + +The `loads` function works just like the built-in `json.loads` when parsing a complete JSON string: + +```py +result = loads('{"key":"value"}') +print(result) # Outputs: {'key': 'value'} +``` + +You can parse a partial JSON string by passing an additional parameter to the `loads` function. This parameter is a **bitwise OR** of the constants from the `Allow` flag: + +(Note that you can directly import the constants you need from `partial-json-parser`) + +```py +from partial_json_parser import loads, Allow, STR, OBJ + +result = loads('{"key": "v', STR | OBJ) +print(result) # Outputs: {'key': 'v'} +``` + +In this example, `Allow.STR` tells the parser that it's okay if a string is incomplete, and `Allow.OBJ` tells it the same for objects (dicts). The parser then tries to return as much data as it can. + +If you don't allow partial strings, then it will not add `"key"` to the object because `"v` is not closed: + +```py +result = loads('{"key": "v', OBJ) +print(result) # Outputs: {} + +result = loads('{"key": "value"', OBJ) +print(result) # Outputs: {'key': 'value'} +``` + +Similarly, you can parse partial lists or even partial special values if you allow it: + +(Note that `allow` defaults to `Allow.ALL`) + +```py +result = loads('[ {"key1": "value1", "key2": [ "value2') +print(result) # Outputs: [{'key1': 'value1', 'key2': ['value2']}] + +result = loads("-Inf") +print(result) # Outputs: -inf +``` + +### Handling malformed JSON + +If the JSON string is malformed, the `loads` function will raise an error: + +```py +loads("wrong") # MalformedJSON: Malformed node or string on line 1 +``` + +## API Reference + +### loads(json_string, [allow_partial], [parser]) + +- `json_string` `(str)`: The (incomplete) JSON string to parse. +- `allow_partial` `(Allow | int)`: Specify what kind of partialness is allowed during JSON parsing (default: `Allow.ALL`). +- `parser` `(str) -> JSON`: An ordinary JSON parser. Default is `json.loads`. + +Completes the JSON string and parses it with the `parser` function. + +Returns the parsed Python value. + +Aliases: `decode`, `parse_json`. + +### ensure_json(json_string, [allow_partial]) + +- `json_string` `(str)`: The (incomplete) JSON string to complete. +- `allow_partial` `(Allow | int)`: Specify what kind of partialness is allowed during JSON parsing (default: `Allow.ALL`). + +Returns the completed JSON string. + +### fix(json_string, [allow_partial]) + +- `json_string` `(str)`: The (incomplete) JSON string to complete. +- `allow_partial` `(Allow | int)`: Specify what kind of partialness is allowed during JSON parsing (default: `Allow.ALL`).
+ +Returns a tuple of a slice of the input string and the completion. + +Note that this is a low-level API, only useful for debugging and demonstration. + +### Allow + +Enum class that specifies what kind of partialness is allowed during JSON parsing. It has the following members: + +- `STR`: Allow partial string. +- `NUM`: Allow partial number. +- `ARR`: Allow partial array. +- `OBJ`: Allow partial object. +- `NULL`: Allow partial null. +- `BOOL`: Allow partial boolean. +- `NAN`: Allow partial NaN. +- `INFINITY`: Allow partial Infinity. +- `_INFINITY`: Allow partial -Infinity. +- `INF`: Allow both partial Infinity and -Infinity. +- `SPECIAL`: Allow all special values. +- `ATOM`: Allow all atomic values. +- `COLLECTION`: Allow all collection values. +- `ALL`: Allow all values. + +## Testing + +To run the tests for this library, you should clone the repository and install the dependencies: + +```sh +git clone https://github.com/promplate/partial-json-parser.git +cd partial-json-parser +pdm install +``` + +Then, you can run the tests using [Hypothesis](https://hypothesis.works/) and [Pytest](https://pytest.org/): + +```sh +pdm test +``` + +Please note that while we strive to cover as many edge cases as possible, it's always possible that some cases might not be covered. + +## License + +This project is licensed under the MIT License. diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/RECORD b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..7e213c764f0d5bc02bf0aa7cb4d1cf5eb7eaed40 --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/RECORD @@ -0,0 +1,25 @@ +../../../bin/json-playground,sha256=q9ujbVYQkfUZDpz0klOMEpCMXVSeS6DyiGJEHN-VL6M,299 +partial_json_parser-0.2.1.1.post6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +partial_json_parser-0.2.1.1.post6.dist-info/METADATA,sha256=BMA9JQiQwac4inPSLLtrkAPMg-vaC1XutBbAnFCB-_w,6129 +partial_json_parser-0.2.1.1.post6.dist-info/RECORD,, +partial_json_parser-0.2.1.1.post6.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90 +partial_json_parser-0.2.1.1.post6.dist-info/entry_points.txt,sha256=hxlbMgxJr0YUZWohKVs2M7C51WVr6Zl-4nS2xaEUy6I,88 +partial_json_parser-0.2.1.1.post6.dist-info/licenses/LICENSE,sha256=8so_-qO2cMSE9StRLtzmfgGIWAcdpaawteW9vLF8Idw,1066 +partial_json_parser/__init__.py,sha256=EkYwp0oHEpYoiUqrdoiKJnJjlcxRVsQm20l1bXQxLuI,205 +partial_json_parser/__pycache__/__init__.cpython-310.pyc,, +partial_json_parser/__pycache__/options.cpython-310.pyc,, +partial_json_parser/__pycache__/playground.cpython-310.pyc,, +partial_json_parser/__pycache__/version.cpython-310.pyc,, +partial_json_parser/core/__pycache__/api.cpython-310.pyc,, +partial_json_parser/core/__pycache__/complete.cpython-310.pyc,, +partial_json_parser/core/__pycache__/exceptions.cpython-310.pyc,, +partial_json_parser/core/__pycache__/myelin.cpython-310.pyc,, +partial_json_parser/core/__pycache__/options.cpython-310.pyc,, +partial_json_parser/core/api.py,sha256=KLUChYmc9b0_yzIJ-y6ddETrw-4aNzVmVAsdPQolZQE,834 +partial_json_parser/core/complete.py,sha256=GJz6Q3gcP7DBb-I85m5PyHYhGymKeXTQ7p3ZwXADtfg,6796 +partial_json_parser/core/exceptions.py,sha256=mm6x1uugFRvesdBUDq-UfnarhXjbuwnRCVyoCzZYR0s,140 +partial_json_parser/core/myelin.py,sha256=Wi97Nutqm90gTXkX_XdfP0Y-viMA6QZ8TjHkDeBEAUY,9065 
+partial_json_parser/core/options.py,sha256=rXbCfmqP_uONvfCpcrxc9BZoN_MkLrEWlmiowdFG-sY,936 +partial_json_parser/options.py,sha256=gV1jaRXrRUUiS1FQd3wfnXlLLhUBANDR-fjv2Erh6xo,63 +partial_json_parser/playground.py,sha256=7rOcSWAcfNy7lfR7lXbcrQhZ0-7xkoI5kVIJE_rXmJQ,865 +partial_json_parser/version.py,sha256=gtGaUYib7rde9rvJLfzioDD3TS3MegpmOdx-EWPFZ6o,30 diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/WHEEL b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..45ec8c4e6bf53687a824041168247bae0953dd7c --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.4) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee5653ce0bf9a6bd927c0bdda9ab271294052763 --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +json-playground = partial_json_parser.playground:main + +[gui_scripts] + diff --git a/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..bdb1a67c0a1cf079a7a4419cae14bc00ad98fe6f --- /dev/null +++ b/venv/lib/python3.10/site-packages/partial_json_parser-0.2.1.1.post6.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Promplate + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
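As a companion to the `fix` API reference above, here is a minimal usage sketch. It assumes, as that reference implies, that `fix` is importable from the package top level; the completed output shown in the comment is illustrative, not taken from the package's test suite.

```py
from partial_json_parser import fix

# fix() is documented to return a slice of the input plus the completion,
# so concatenating the two pieces should yield a syntactically valid JSON string.
head, tail = fix('{"key": "v')
print(head + tail)  # illustrative output: '{"key": "v"}'
```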
diff --git a/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/METADATA b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a691b00d89a3449386e704c620a0d310491ab741 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/METADATA @@ -0,0 +1,767 @@ +Metadata-Version: 2.4 +Name: pydantic +Version: 2.11.7 +Summary: Data validation using Python type hints +Project-URL: Homepage, https://github.com/pydantic/pydantic +Project-URL: Documentation, https://docs.pydantic.dev +Project-URL: Funding, https://github.com/sponsors/samuelcolvin +Project-URL: Source, https://github.com/pydantic/pydantic +Project-URL: Changelog, https://docs.pydantic.dev/latest/changelog/ +Author-email: Samuel Colvin , Eric Jolibois , Hasan Ramezani , Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Terrence Dorsey , David Montague , Serge Matveenko , Marcelo Trylesinski , Sydney Runkle , David Hewitt , Alex Hall , Victorien Plot +License-Expression: MIT +License-File: LICENSE +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: Hypothesis +Classifier: Framework :: Pydantic +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Requires-Dist: annotated-types>=0.6.0 +Requires-Dist: pydantic-core==2.33.2 +Requires-Dist: typing-extensions>=4.12.2 +Requires-Dist: typing-inspection>=0.4.0 +Provides-Extra: email +Requires-Dist: email-validator>=2.0.0; extra == 'email' +Provides-Extra: timezone +Requires-Dist: tzdata; (python_version >= '3.9' and platform_system == 'Windows') and extra == 'timezone' +Description-Content-Type: text/markdown + +# Pydantic +[![CI](https://img.shields.io/github/actions/workflow/status/pydantic/pydantic/ci.yml?branch=main&logo=github&label=CI)](https://github.com/pydantic/pydantic/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) +[![Coverage](https://coverage-badge.samuelcolvin.workers.dev/pydantic/pydantic.svg)](https://coverage-badge.samuelcolvin.workers.dev/redirect/pydantic/pydantic) +[![pypi](https://img.shields.io/pypi/v/pydantic.svg)](https://pypi.python.org/pypi/pydantic) +[![CondaForge](https://img.shields.io/conda/v/conda-forge/pydantic.svg)](https://anaconda.org/conda-forge/pydantic) 
+[![downloads](https://static.pepy.tech/badge/pydantic/month)](https://pepy.tech/project/pydantic) +[![versions](https://img.shields.io/pypi/pyversions/pydantic.svg)](https://github.com/pydantic/pydantic) +[![license](https://img.shields.io/github/license/pydantic/pydantic.svg)](https://github.com/pydantic/pydantic/blob/main/LICENSE) +[![Pydantic v2](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v2.json)](https://docs.pydantic.dev/latest/contributing/#badges) +[![llms.txt](https://img.shields.io/badge/llms.txt-green)](https://docs.pydantic.dev/latest/llms.txt) + +Data validation using Python type hints. + +Fast and extensible, Pydantic plays nicely with your linters/IDE/brain. +Define how data should be in pure, canonical Python 3.9+; validate it with Pydantic. + +## Pydantic Logfire :fire: + +We've recently launched Pydantic Logfire to help you monitor your applications. +[Learn more](https://pydantic.dev/articles/logfire-announcement) + +## Pydantic V1.10 vs. V2 + +Pydantic V2 is a ground-up rewrite that offers many new features, performance improvements, and some breaking changes compared to Pydantic V1. + +If you're using Pydantic V1, you may want to look at the +[pydantic V1.10 Documentation](https://docs.pydantic.dev/) or the +[`1.10.X-fixes` git branch](https://github.com/pydantic/pydantic/tree/1.10.X-fixes). Pydantic V2 also ships with the latest version of Pydantic V1 built in so that you can incrementally upgrade your code base and projects: `from pydantic import v1 as pydantic_v1`. + +## Help + +See [documentation](https://docs.pydantic.dev/) for more details. + +## Installation + +Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`. +For more installation options to make Pydantic even faster, +see the [Install](https://docs.pydantic.dev/install/) section in the documentation. + +## A Simple Example + +```python +from datetime import datetime +from typing import Optional +from pydantic import BaseModel + +class User(BaseModel): + id: int + name: str = 'John Doe' + signup_ts: Optional[datetime] = None + friends: list[int] = [] + +external_data = {'id': '123', 'signup_ts': '2017-06-01 12:22', 'friends': [1, '2', b'3']} +user = User(**external_data) +print(user) +#> User id=123 name='John Doe' signup_ts=datetime.datetime(2017, 6, 1, 12, 22) friends=[1, 2, 3] +print(user.id) +#> 123 +``` + +## Contributing + +For guidance on setting up a development environment and how to make a +contribution to Pydantic, see +[Contributing to Pydantic](https://docs.pydantic.dev/contributing/). + +## Reporting a Security Vulnerability + +See our [security policy](https://github.com/pydantic/pydantic/security/policy).
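As a companion to the example above, here is a minimal sketch of what happens when validation fails. The `ValidationError` flow is standard Pydantic; the exact error payload shown in the comment is illustrative:

```python
from pydantic import BaseModel, ValidationError

class User(BaseModel):
    id: int
    name: str = 'John Doe'

try:
    # 'not an int' cannot be coerced to an int, so validation fails
    User(id='not an int')
except ValidationError as e:
    print(e.errors())
    # illustrative output: [{'type': 'int_parsing', 'loc': ('id',), ...}]
```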
+ +## Changelog + +## v2.11.7 (2025-06-14) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.7) + +### What's Changed + +#### Fixes + +* Copy `FieldInfo` instance if necessary during `FieldInfo` build by [@Viicos](https://github.com/Viicos) in [#11898](https://github.com/pydantic/pydantic/pull/11898) + +## v2.11.6 (2025-06-13) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.6) + +### What's Changed + +#### Fixes + +* Rebuild dataclass fields before schema generation by [@Viicos](https://github.com/Viicos) in [#11949](https://github.com/pydantic/pydantic/pull/11949) +* Always store the original field assignment on `FieldInfo` by [@Viicos](https://github.com/Viicos) in [#11946](https://github.com/pydantic/pydantic/pull/11946) + +## v2.11.5 (2025-05-22) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.5) + +### What's Changed + +#### Fixes + +* Check if `FieldInfo` is complete after applying type variable map by [@Viicos](https://github.com/Viicos) in [#11855](https://github.com/pydantic/pydantic/pull/11855) +* Do not delete mock validator/serializer in `model_rebuild()` by [@Viicos](https://github.com/Viicos) in [#11890](https://github.com/pydantic/pydantic/pull/11890) +* Do not duplicate metadata on model rebuild by [@Viicos](https://github.com/Viicos) in [#11902](https://github.com/pydantic/pydantic/pull/11902) + +## v2.11.4 (2025-04-29) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.4) + +### What's Changed + +#### Packaging + +* Bump `mkdocs-llmstxt` to v0.2.0 by [@Viicos](https://github.com/Viicos) in [#11725](https://github.com/pydantic/pydantic/pull/11725) + +#### Changes + +* Allow config and bases to be specified together in `create_model()` by [@Viicos](https://github.com/Viicos) in [#11714](https://github.com/pydantic/pydantic/pull/11714). + This change was backported as it was previously possible (although not meant to be supported) + to provide `model_config` as a field, which would make it possible to provide both configuration + and bases. 
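As a rough sketch of the `create_model()` behavior described in that entry (the model names and field definitions here are illustrative, not taken from the PR):

```python
from pydantic import BaseModel, ConfigDict, create_model

class Base(BaseModel):
    x: int

# With this change, __base__ and __config__ may be combined in a single
# create_model() call; previously this combination was rejected.
Model = create_model(
    'Model',
    __base__=Base,
    __config__=ConfigDict(extra='forbid'),
    y=(str, 'default'),
)

print(Model(x=1).y)
#> default
```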
+ +#### Fixes + +* Remove generics cache workaround by [@Viicos](https://github.com/Viicos) in [#11755](https://github.com/pydantic/pydantic/pull/11755) +* Remove coercion of decimal constraints by [@Viicos](https://github.com/Viicos) in [#11772](https://github.com/pydantic/pydantic/pull/11772) +* Fix crash when expanding root type in the mypy plugin by [@Viicos](https://github.com/Viicos) in [#11735](https://github.com/pydantic/pydantic/pull/11735) +* Fix issue with recursive generic models by [@Viicos](https://github.com/Viicos) in [#11775](https://github.com/pydantic/pydantic/pull/11775) +* Traverse `function-before` schemas during schema gathering by [@Viicos](https://github.com/Viicos) in [#11801](https://github.com/pydantic/pydantic/pull/11801) + +## v2.11.3 (2025-04-08) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.3) + +### What's Changed + +#### Packaging + +* Update V1 copy to v1.10.21 by [@Viicos](https://github.com/Viicos) in [#11706](https://github.com/pydantic/pydantic/pull/11706) + +#### Fixes + +* Preserve field description when rebuilding model fields by [@Viicos](https://github.com/Viicos) in [#11698](https://github.com/pydantic/pydantic/pull/11698) + +## v2.11.2 (2025-04-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.2) + +### What's Changed + +#### Fixes + +* Bump `pydantic-core` to v2.33.1 by [@Viicos](https://github.com/Viicos) in [#11678](https://github.com/pydantic/pydantic/pull/11678) +* Make sure `__pydantic_private__` exists before setting private attributes by [@Viicos](https://github.com/Viicos) in [#11666](https://github.com/pydantic/pydantic/pull/11666) +* Do not override `FieldInfo._complete` when using field from parent class by [@Viicos](https://github.com/Viicos) in [#11668](https://github.com/pydantic/pydantic/pull/11668) +* Provide the available definitions when applying discriminated unions by [@Viicos](https://github.com/Viicos) in [#11670](https://github.com/pydantic/pydantic/pull/11670) +* Do not expand root type in the mypy plugin for variables by [@Viicos](https://github.com/Viicos) in [#11676](https://github.com/pydantic/pydantic/pull/11676) +* Mention the attribute name in model fields deprecation message by [@Viicos](https://github.com/Viicos) in [#11674](https://github.com/pydantic/pydantic/pull/11674) +* Properly validate parameterized mappings by [@Viicos](https://github.com/Viicos) in [#11658](https://github.com/pydantic/pydantic/pull/11658) + +## v2.11.1 (2025-03-28) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.1) + +### What's Changed + +#### Fixes + +* Do not override `'definitions-ref'` schemas containing serialization schemas or metadata by [@Viicos](https://github.com/Viicos) in [#11644](https://github.com/pydantic/pydantic/pull/11644) + +## v2.11.0 (2025-03-27) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.0) + +### What's Changed + +Pydantic v2.11 is a version strongly focused on build time performance of Pydantic models (and core schema generation in general). +See the [blog post](https://pydantic.dev/articles/pydantic-v2-11-release) for more details. 
+ +#### Packaging + +* Bump `pydantic-core` to v2.33.0 by [@Viicos](https://github.com/Viicos) in [#11631](https://github.com/pydantic/pydantic/pull/11631) + +#### New Features + +* Add `encoded_string()` method to the URL types by [@YassinNouh21](https://github.com/YassinNouh21) in [#11580](https://github.com/pydantic/pydantic/pull/11580) +* Add support for `defer_build` with `@validate_call` decorator by [@Viicos](https://github.com/Viicos) in [#11584](https://github.com/pydantic/pydantic/pull/11584) +* Allow `@with_config` decorator to be used with keyword arguments by [@Viicos](https://github.com/Viicos) in [#11608](https://github.com/pydantic/pydantic/pull/11608) +* Simplify customization of default value inclusion in JSON Schema generation by [@Viicos](https://github.com/Viicos) in [#11634](https://github.com/pydantic/pydantic/pull/11634) +* Add `generate_arguments_schema()` function by [@Viicos](https://github.com/Viicos) in [#11572](https://github.com/pydantic/pydantic/pull/11572) + +#### Fixes + +* Allow generic typed dictionaries to be used for unpacked variadic keyword parameters by [@Viicos](https://github.com/Viicos) in [#11571](https://github.com/pydantic/pydantic/pull/11571) +* Fix runtime error when computing model string representation involving cached properties and self-referenced models by [@Viicos](https://github.com/Viicos) in [#11579](https://github.com/pydantic/pydantic/pull/11579) +* Preserve other steps when using the ellipsis in the pipeline API by [@Viicos](https://github.com/Viicos) in [#11626](https://github.com/pydantic/pydantic/pull/11626) +* Fix deferred discriminator application logic by [@Viicos](https://github.com/Viicos) in [#11591](https://github.com/pydantic/pydantic/pull/11591) + +### New Contributors + +* [@cmenon12](https://github.com/cmenon12) made their first contribution in [#11562](https://github.com/pydantic/pydantic/pull/11562) +* [@Jeukoh](https://github.com/Jeukoh) made their first contribution in [#11611](https://github.com/pydantic/pydantic/pull/11611) + +## v2.11.0b2 (2025-03-17) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.0b2) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to v2.32.0 by [@Viicos](https://github.com/Viicos) in [#11567](https://github.com/pydantic/pydantic/pull/11567) + +#### New Features + +* Add experimental support for free threading by [@Viicos](https://github.com/Viicos) in [#11516](https://github.com/pydantic/pydantic/pull/11516) + +#### Fixes + +* Fix `NotRequired` qualifier not taken into account in stringified annotation by [@Viicos](https://github.com/Viicos) in [#11559](https://github.com/pydantic/pydantic/pull/11559) + +### New Contributors + +* [@joren485](https://github.com/joren485) made their first contribution in [#11547](https://github.com/pydantic/pydantic/pull/11547) + +## v2.11.0b1 (2025-03-06) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.0b1) + +### What's Changed + +#### Packaging + +* Add a `check_pydantic_core_version()` function by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11324 +* Remove `greenlet` development dependency by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11351 +* Use the `typing-inspection` library by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11479 +* Bump `pydantic-core` to `v2.31.1` by [@sydney-runkle](https://github.com/sydney-runkle) in 
https://github.com/pydantic/pydantic/pull/11526 + +#### New Features + +* Support unsubstituted type variables with both a default and a bound or constraints by [@FyZzyss](https://github.com/FyZzyss) in https://github.com/pydantic/pydantic/pull/10789 +* Add a `default_factory_takes_validated_data` property to `FieldInfo` by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11034 +* Raise a better error when a generic alias is used inside `type[]` by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11088 +* Properly support PEP 695 generics syntax by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11189 +* Properly support type variable defaults by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11332 +* Add support for validating v6, v7, v8 UUIDs by [@astei](https://github.com/astei) in https://github.com/pydantic/pydantic/pull/11436 +* Improve alias configuration APIs by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11468 + +#### Changes + +* Rework `create_model` field definitions format by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11032 +* Raise a deprecation warning when a field is annotated as final with a default value by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11168 +* Deprecate accessing `model_fields` and `model_computed_fields` on instances by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11169 +* **Breaking Change:** Move core schema generation logic for path types inside the `GenerateSchema` class by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/10846 +* Remove Python 3.8 Support by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11258 +* Optimize calls to `get_type_ref` by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/10863 +* Disable `pydantic-core` core schema validation by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11271 + +#### Performance + +* Only evaluate `FieldInfo` annotations if required during schema building by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/10769 +* Improve `__setattr__` performance of Pydantic models by caching setter functions by [@MarkusSintonen](https://github.com/MarkusSintonen) in https://github.com/pydantic/pydantic/pull/10868 +* Improve annotation application performance by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11186 +* Improve performance of `_typing_extra` module by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11255 +* Refactor and optimize schema cleaning logic by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11244 +* Create a single dictionary when creating a `CoreConfig` instance by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11384 +* Bump `pydantic-core` and thus use `SchemaValidator` and `SchemaSerializer` caching by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11402 +* Reuse cached core schemas for parametrized generic Pydantic models by [@MarkusSintonen](https://github.com/MarkusSintonen) in 
https://github.com/pydantic/pydantic/pull/11434 + +#### Fixes + +* Improve `TypeAdapter` instance repr by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/10872 +* Use the correct frame when instantiating a parametrized `TypeAdapter` by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/10893 +* Infer final fields with a default value as class variables in the mypy plugin by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11121 +* Recursively unpack `Literal` values if using PEP 695 type aliases by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11114 +* Override `__subclasscheck__` on `ModelMetaclass` to avoid memory leak and performance issues by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11116 +* Remove unused `_extract_get_pydantic_json_schema()` parameter by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11155 +* Improve discriminated union error message for invalid union variants by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11161 +* Unpack PEP 695 type aliases if using the `Annotated` form by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11109 +* Add missing stacklevel in `deprecated_instance_property` warning by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11200 +* Copy `WithJsonSchema` schema to avoid sharing mutated data by [@thejcannon](https://github.com/thejcannon) in https://github.com/pydantic/pydantic/pull/11014 +* Do not cache parametrized models when in the process of parametrizing another model by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/10704 +* Add discriminated union related metadata entries to the `CoreMetadata` definition by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11216 +* Consolidate schema definitions logic in the `_Definitions` class by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11208 +* Support initializing root model fields with values of the `root` type in the mypy plugin by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11212 +* Fix various issues with dataclasses and `use_attribute_docstrings` by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11246 +* Only compute normalized decimal places if necessary in `decimal_places_validator` by [@misrasaurabh1](https://github.com/misrasaurabh1) in https://github.com/pydantic/pydantic/pull/11281 +* Add support for `validation_alias` in the mypy plugin by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11295 +* Fix JSON Schema reference collection with `"examples"` keys by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11305 +* Do not transform model serializer functions as class methods in the mypy plugin by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11298 +* Simplify `GenerateJsonSchema.literal_schema()` implementation by [@misrasaurabh1](https://github.com/misrasaurabh1) in https://github.com/pydantic/pydantic/pull/11321 +* Add additional allowed schemes for `ClickHouseDsn` by [@Maze21127](https://github.com/Maze21127) in https://github.com/pydantic/pydantic/pull/11319 +* Coerce decimal 
constraints to `Decimal` instances by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11350 +* Use the correct JSON Schema mode when handling function schemas by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11367 +* Improve exception message when encountering recursion errors during type evaluation by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11356 +* Always include `additionalProperties: True` for arbitrary dictionary schemas by [@austinyu](https://github.com/austinyu) in https://github.com/pydantic/pydantic/pull/11392 +* Expose `fallback` parameter in serialization methods by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11398 +* Fix path serialization behavior by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11416 +* Do not reuse validators and serializers during model rebuild by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11429 +* Collect model fields when rebuilding a model by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11388 +* Allow cached properties to be altered on frozen models by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11432 +* Fix tuple serialization for `Sequence` types by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11435 +* Fix: do not check for `__get_validators__` on classes where `__get_pydantic_core_schema__` is also defined by [@tlambert03](https://github.com/tlambert03) in https://github.com/pydantic/pydantic/pull/11444 +* Allow callable instances to be used as serializers by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11451 +* Improve error thrown when overriding field with a property by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11459 +* Fix JSON Schema generation with referenceable core schemas holding JSON metadata by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11475 +* Support strict specification on union member types by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11481 +* Implicitly set `validate_by_name` to `True` when `validate_by_alias` is `False` by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic/pull/11503 +* Change type of `Any` when synthesizing `BaseSettings.__init__` signature in the mypy plugin by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11497 +* Support type variable defaults referencing other type variables by [@Viicos](https://github.com/Viicos) in https://github.com/pydantic/pydantic/pull/11520 +* Fix `ValueError` on year zero by [@davidhewitt](https://github.com/davidhewitt) in https://github.com/pydantic/pydantic-core/pull/1583 +* `dataclass` `InitVar` shouldn't be required on serialization by [@sydney-runkle](https://github.com/sydney-runkle) in https://github.com/pydantic/pydantic-core/pull/1602 + +## New Contributors +* [@FyZzyss](https://github.com/FyZzyss) made their first contribution in https://github.com/pydantic/pydantic/pull/10789 +* [@tamird](https://github.com/tamird) made their first contribution in https://github.com/pydantic/pydantic/pull/10948 +* [@felixxm](https://github.com/felixxm) made their first 
contribution in https://github.com/pydantic/pydantic/pull/11077 +* [@alexprabhat99](https://github.com/alexprabhat99) made their first contribution in https://github.com/pydantic/pydantic/pull/11082 +* [@Kharianne](https://github.com/Kharianne) made their first contribution in https://github.com/pydantic/pydantic/pull/11111 +* [@mdaffad](https://github.com/mdaffad) made their first contribution in https://github.com/pydantic/pydantic/pull/11177 +* [@thejcannon](https://github.com/thejcannon) made their first contribution in https://github.com/pydantic/pydantic/pull/11014 +* [@thomasfrimannkoren](https://github.com/thomasfrimannkoren) made their first contribution in https://github.com/pydantic/pydantic/pull/11251 +* [@usernameMAI](https://github.com/usernameMAI) made their first contribution in https://github.com/pydantic/pydantic/pull/11275 +* [@ananiavito](https://github.com/ananiavito) made their first contribution in https://github.com/pydantic/pydantic/pull/11302 +* [@pawamoy](https://github.com/pawamoy) made their first contribution in https://github.com/pydantic/pydantic/pull/11311 +* [@Maze21127](https://github.com/Maze21127) made their first contribution in https://github.com/pydantic/pydantic/pull/11319 +* [@kauabh](https://github.com/kauabh) made their first contribution in https://github.com/pydantic/pydantic/pull/11369 +* [@jaceklaskowski](https://github.com/jaceklaskowski) made their first contribution in https://github.com/pydantic/pydantic/pull/11353 +* [@tmpbeing](https://github.com/tmpbeing) made their first contribution in https://github.com/pydantic/pydantic/pull/11375 +* [@petyosi](https://github.com/petyosi) made their first contribution in https://github.com/pydantic/pydantic/pull/11405 +* [@austinyu](https://github.com/austinyu) made their first contribution in https://github.com/pydantic/pydantic/pull/11392 +* [@mikeedjones](https://github.com/mikeedjones) made their first contribution in https://github.com/pydantic/pydantic/pull/11402 +* [@astei](https://github.com/astei) made their first contribution in https://github.com/pydantic/pydantic/pull/11436 +* [@dsayling](https://github.com/dsayling) made their first contribution in https://github.com/pydantic/pydantic/pull/11522 +* [@sobolevn](https://github.com/sobolevn) made their first contribution in https://github.com/pydantic/pydantic-core/pull/1645 + +## v2.11.0a2 (2025-02-10) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.0a2) + +### What's Changed + +Pydantic v2.11 is a version strongly focused on build time performance of Pydantic models (and core schema generation in general). +This is another early alpha release, meant to collect early feedback from users having issues with core schema builds. 
+ +#### Packaging + +* Bump `ruff` from 0.9.2 to 0.9.5 by [@Viicos](https://github.com/Viicos) in [#11407](https://github.com/pydantic/pydantic/pull/11407) +* Bump `pydantic-core` to v2.29.0 by [@mikeedjones](https://github.com/mikeedjones) in [#11402](https://github.com/pydantic/pydantic/pull/11402) +* Use locally-built rust with symbols & pgo by [@davidhewitt](https://github.com/davidhewitt) in [#11403](https://github.com/pydantic/pydantic/pull/11403) + + +#### Performance + +* Create a single dictionary when creating a `CoreConfig` instance by [@sydney-runkle](https://github.com/sydney-runkle) in [#11384](https://github.com/pydantic/pydantic/pull/11384) + +#### Fixes + +* Use the correct JSON Schema mode when handling function schemas by [@Viicos](https://github.com/Viicos) in [#11367](https://github.com/pydantic/pydantic/pull/11367) +* Fix JSON Schema reference logic with `examples` keys by [@Viicos](https://github.com/Viicos) in [#11366](https://github.com/pydantic/pydantic/pull/11366) +* Improve exception message when encountering recursion errors during type evaluation by [@Viicos](https://github.com/Viicos) in [#11356](https://github.com/pydantic/pydantic/pull/11356) +* Always include `additionalProperties: True` for arbitrary dictionary schemas by [@austinyu](https://github.com/austinyu) in [#11392](https://github.com/pydantic/pydantic/pull/11392) +* Expose `fallback` parameter in serialization methods by [@Viicos](https://github.com/Viicos) in [#11398](https://github.com/pydantic/pydantic/pull/11398) +* Fix path serialization behavior by [@sydney-runkle](https://github.com/sydney-runkle) in [#11416](https://github.com/pydantic/pydantic/pull/11416) + +### New Contributors + +* [@kauabh](https://github.com/kauabh) made their first contribution in [#11369](https://github.com/pydantic/pydantic/pull/11369) +* [@jaceklaskowski](https://github.com/jaceklaskowski) made their first contribution in [#11353](https://github.com/pydantic/pydantic/pull/11353) +* [@tmpbeing](https://github.com/tmpbeing) made their first contribution in [#11375](https://github.com/pydantic/pydantic/pull/11375) +* [@petyosi](https://github.com/petyosi) made their first contribution in [#11405](https://github.com/pydantic/pydantic/pull/11405) +* [@austinyu](https://github.com/austinyu) made their first contribution in [#11392](https://github.com/pydantic/pydantic/pull/11392) +* [@mikeedjones](https://github.com/mikeedjones) made their first contribution in [#11402](https://github.com/pydantic/pydantic/pull/11402) + +## v2.11.0a1 (2025-01-30) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.11.0a1) + +### What's Changed + +Pydantic v2.11 is a version strongly focused on build time performance of Pydantic models (and core schema generation in general). +This is an early alpha release, meant to collect early feedback from users having issues with core schema builds. 
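For readers unfamiliar with the term, the "core schema build" happens when a model class is created (or a `TypeAdapter` is constructed), not when instances are validated. A minimal sketch of how that cost can be observed; the model and its field layout are illustrative only:

```python
# Illustrative timing of the work v2.11 targets: core schema and validator
# construction, which runs once at class-definition time.
import time

from pydantic import BaseModel

start = time.perf_counter()

class Model(BaseModel):  # the core schema is built here, during class creation
    a: int
    b: list[dict[str, float]]

print(f"core schema build took {time.perf_counter() - start:.6f}s")
```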
+ +#### Packaging + +* Bump dawidd6/action-download-artifact from 6 to 7 by [@dependabot](https://github.com/dependabot) in [#11018](https://github.com/pydantic/pydantic/pull/11018) +* Re-enable memray related tests on Python 3.12+ by [@Viicos](https://github.com/Viicos) in [#11191](https://github.com/pydantic/pydantic/pull/11191) +* Bump astral-sh/setup-uv to 5 by [@dependabot](https://github.com/dependabot) in [#11205](https://github.com/pydantic/pydantic/pull/11205) +* Bump `ruff` to v0.9.0 by [@sydney-runkle](https://github.com/sydney-runkle) in [#11254](https://github.com/pydantic/pydantic/pull/11254) +* Regular `uv.lock` deps update by [@sydney-runkle](https://github.com/sydney-runkle) in [#11333](https://github.com/pydantic/pydantic/pull/11333) +* Add a `check_pydantic_core_version()` function by [@Viicos](https://github.com/Viicos) in [#11324](https://github.com/pydantic/pydantic/pull/11324) +* Remove `greenlet` development dependency by [@Viicos](https://github.com/Viicos) in [#11351](https://github.com/pydantic/pydantic/pull/11351) +* Bump `pydantic-core` to v2.28.0 by [@Viicos](https://github.com/Viicos) in [#11364](https://github.com/pydantic/pydantic/pull/11364) + +#### New Features + +* Support unsubstituted type variables with both a default and a bound or constraints by [@FyZzyss](https://github.com/FyZzyss) in [#10789](https://github.com/pydantic/pydantic/pull/10789) +* Add a `default_factory_takes_validated_data` property to `FieldInfo` by [@Viicos](https://github.com/Viicos) in [#11034](https://github.com/pydantic/pydantic/pull/11034) +* Raise a better error when a generic alias is used inside `type[]` by [@Viicos](https://github.com/Viicos) in [#11088](https://github.com/pydantic/pydantic/pull/11088) +* Properly support PEP 695 generics syntax by [@Viicos](https://github.com/Viicos) in [#11189](https://github.com/pydantic/pydantic/pull/11189) +* Properly support type variable defaults by [@Viicos](https://github.com/Viicos) in [#11332](https://github.com/pydantic/pydantic/pull/11332) + +#### Changes + +* Rework `create_model` field definitions format by [@Viicos](https://github.com/Viicos) in [#11032](https://github.com/pydantic/pydantic/pull/11032) +* Raise a deprecation warning when a field is annotated as final with a default value by [@Viicos](https://github.com/Viicos) in [#11168](https://github.com/pydantic/pydantic/pull/11168) +* Deprecate accessing `model_fields` and `model_computed_fields` on instances by [@Viicos](https://github.com/Viicos) in [#11169](https://github.com/pydantic/pydantic/pull/11169) +* Move core schema generation logic for path types inside the `GenerateSchema` class by [@sydney-runkle](https://github.com/sydney-runkle) in [#10846](https://github.com/pydantic/pydantic/pull/10846) +* Move `deque` schema gen to `GenerateSchema` class by [@sydney-runkle](https://github.com/sydney-runkle) in [#11239](https://github.com/pydantic/pydantic/pull/11239) +* Move `Mapping` schema gen to `GenerateSchema` to complete removal of `prepare_annotations_for_known_type` workaround by [@sydney-runkle](https://github.com/sydney-runkle) in [#11247](https://github.com/pydantic/pydantic/pull/11247) +* Remove Python 3.8 Support by [@sydney-runkle](https://github.com/sydney-runkle) in [#11258](https://github.com/pydantic/pydantic/pull/11258) +* Disable `pydantic-core` core schema validation by [@sydney-runkle](https://github.com/sydney-runkle) in [#11271](https://github.com/pydantic/pydantic/pull/11271) + +#### Performance + +* Only evaluate `FieldInfo` annotations if 
required during schema building by [@Viicos](https://github.com/Viicos) in [#10769](https://github.com/pydantic/pydantic/pull/10769) +* Optimize calls to `get_type_ref` by [@Viicos](https://github.com/Viicos) in [#10863](https://github.com/pydantic/pydantic/pull/10863) +* Improve `__setattr__` performance of Pydantic models by caching setter functions by [@MarkusSintonen](https://github.com/MarkusSintonen) in [#10868](https://github.com/pydantic/pydantic/pull/10868) +* Improve annotation application performance by [@Viicos](https://github.com/Viicos) in [#11186](https://github.com/pydantic/pydantic/pull/11186) +* Improve performance of `_typing_extra` module by [@Viicos](https://github.com/Viicos) in [#11255](https://github.com/pydantic/pydantic/pull/11255) +* Refactor and optimize schema cleaning logic by [@Viicos](https://github.com/Viicos) and [@MarkusSintonen](https://github.com/MarkusSintonen) in [#11244](https://github.com/pydantic/pydantic/pull/11244) + +#### Fixes + +* Add validation tests for `_internal/_validators.py` by [@tkasuz](https://github.com/tkasuz) in [#10763](https://github.com/pydantic/pydantic/pull/10763) +* Improve `TypeAdapter` instance repr by [@sydney-runkle](https://github.com/sydney-runkle) in [#10872](https://github.com/pydantic/pydantic/pull/10872) +* Revert "ci: use locally built pydantic-core with debug symbols" by [@sydney-runkle](https://github.com/sydney-runkle) in [#10942](https://github.com/pydantic/pydantic/pull/10942) +* Re-enable all FastAPI tests by [@tamird](https://github.com/tamird) in [#10948](https://github.com/pydantic/pydantic/pull/10948) +* Fix typo in HISTORY.md. by [@felixxm](https://github.com/felixxm) in [#11077](https://github.com/pydantic/pydantic/pull/11077) +* Infer final fields with a default value as class variables in the mypy plugin by [@Viicos](https://github.com/Viicos) in [#11121](https://github.com/pydantic/pydantic/pull/11121) +* Recursively unpack `Literal` values if using PEP 695 type aliases by [@Viicos](https://github.com/Viicos) in [#11114](https://github.com/pydantic/pydantic/pull/11114) +* Override `__subclasscheck__` on `ModelMetaclass` to avoid memory leak and performance issues by [@Viicos](https://github.com/Viicos) in [#11116](https://github.com/pydantic/pydantic/pull/11116) +* Remove unused `_extract_get_pydantic_json_schema()` parameter by [@Viicos](https://github.com/Viicos) in [#11155](https://github.com/pydantic/pydantic/pull/11155) +* Add FastAPI and SQLModel to third-party tests by [@sydney-runkle](https://github.com/sydney-runkle) in [#11044](https://github.com/pydantic/pydantic/pull/11044) +* Fix conditional expressions syntax for third-party tests by [@Viicos](https://github.com/Viicos) in [#11162](https://github.com/pydantic/pydantic/pull/11162) +* Move FastAPI tests to third-party workflow by [@Viicos](https://github.com/Viicos) in [#11164](https://github.com/pydantic/pydantic/pull/11164) +* Improve discriminated union error message for invalid union variants by [@Viicos](https://github.com/Viicos) in [#11161](https://github.com/pydantic/pydantic/pull/11161) +* Unpack PEP 695 type aliases if using the `Annotated` form by [@Viicos](https://github.com/Viicos) in [#11109](https://github.com/pydantic/pydantic/pull/11109) +* Include `openapi-python-client` check in issue creation for third-party failures, use `main` branch by [@sydney-runkle](https://github.com/sydney-runkle) in [#11182](https://github.com/pydantic/pydantic/pull/11182) +* Add pandera third-party tests by [@Viicos](https://github.com/Viicos)
in [#11193](https://github.com/pydantic/pydantic/pull/11193) +* Add ODMantic third-party tests by [@sydney-runkle](https://github.com/sydney-runkle) in [#11197](https://github.com/pydantic/pydantic/pull/11197) +* Add missing stacklevel in `deprecated_instance_property` warning by [@Viicos](https://github.com/Viicos) in [#11200](https://github.com/pydantic/pydantic/pull/11200) +* Copy `WithJsonSchema` schema to avoid sharing mutated data by [@thejcannon](https://github.com/thejcannon) in [#11014](https://github.com/pydantic/pydantic/pull/11014) +* Do not cache parametrized models when in the process of parametrizing another model by [@Viicos](https://github.com/Viicos) in [#10704](https://github.com/pydantic/pydantic/pull/10704) +* Re-enable Beanie third-party tests by [@Viicos](https://github.com/Viicos) in [#11214](https://github.com/pydantic/pydantic/pull/11214) +* Add discriminated union related metadata entries to the `CoreMetadata` definition by [@Viicos](https://github.com/Viicos) in [#11216](https://github.com/pydantic/pydantic/pull/11216) +* Consolidate schema definitions logic in the `_Definitions` class by [@Viicos](https://github.com/Viicos) in [#11208](https://github.com/pydantic/pydantic/pull/11208) +* Support initializing root model fields with values of the `root` type in the mypy plugin by [@Viicos](https://github.com/Viicos) in [#11212](https://github.com/pydantic/pydantic/pull/11212) +* Fix various issues with dataclasses and `use_attribute_docstrings` by [@Viicos](https://github.com/Viicos) in [#11246](https://github.com/pydantic/pydantic/pull/11246) +* Only compute normalized decimal places if necessary in `decimal_places_validator` by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#11281](https://github.com/pydantic/pydantic/pull/11281) +* Fix two misplaced sentences in validation errors documentation by [@ananiavito](https://github.com/ananiavito) in [#11302](https://github.com/pydantic/pydantic/pull/11302) +* Fix mkdocstrings inventory example in documentation by [@pawamoy](https://github.com/pawamoy) in [#11311](https://github.com/pydantic/pydantic/pull/11311) +* Add support for `validation_alias` in the mypy plugin by [@Viicos](https://github.com/Viicos) in [#11295](https://github.com/pydantic/pydantic/pull/11295) +* Do not transform model serializer functions as class methods in the mypy plugin by [@Viicos](https://github.com/Viicos) in [#11298](https://github.com/pydantic/pydantic/pull/11298) +* Simplify `GenerateJsonSchema.literal_schema()` implementation by [@misrasaurabh1](https://github.com/misrasaurabh1) in [#11321](https://github.com/pydantic/pydantic/pull/11321) +* Add additional allowed schemes for `ClickHouseDsn` by [@Maze21127](https://github.com/Maze21127) in [#11319](https://github.com/pydantic/pydantic/pull/11319) +* Coerce decimal constraints to `Decimal` instances by [@Viicos](https://github.com/Viicos) in [#11350](https://github.com/pydantic/pydantic/pull/11350) +* Fix `ValueError` on year zero by [@davidhewitt](https://github.com/davidhewitt) in [pydantic-core#1583](https://github.com/pydantic/pydantic-core/pull/1583) + +### New Contributors + +* [@FyZzyss](https://github.com/FyZzyss) made their first contribution in [#10789](https://github.com/pydantic/pydantic/pull/10789) +* [@tamird](https://github.com/tamird) made their first contribution in [#10948](https://github.com/pydantic/pydantic/pull/10948) +* [@felixxm](https://github.com/felixxm) made their first contribution in [#11077](https://github.com/pydantic/pydantic/pull/11077) +* 
[@alexprabhat99](https://github.com/alexprabhat99) made their first contribution in [#11082](https://github.com/pydantic/pydantic/pull/11082) +* [@Kharianne](https://github.com/Kharianne) made their first contribution in [#11111](https://github.com/pydantic/pydantic/pull/11111) +* [@mdaffad](https://github.com/mdaffad) made their first contribution in [#11177](https://github.com/pydantic/pydantic/pull/11177) +* [@thejcannon](https://github.com/thejcannon) made their first contribution in [#11014](https://github.com/pydantic/pydantic/pull/11014) +* [@thomasfrimannkoren](https://github.com/thomasfrimannkoren) made their first contribution in [#11251](https://github.com/pydantic/pydantic/pull/11251) +* [@usernameMAI](https://github.com/usernameMAI) made their first contribution in [#11275](https://github.com/pydantic/pydantic/pull/11275) +* [@ananiavito](https://github.com/ananiavito) made their first contribution in [#11302](https://github.com/pydantic/pydantic/pull/11302) +* [@pawamoy](https://github.com/pawamoy) made their first contribution in [#11311](https://github.com/pydantic/pydantic/pull/11311) +* [@Maze21127](https://github.com/Maze21127) made their first contribution in [#11319](https://github.com/pydantic/pydantic/pull/11319) + +## v2.10.6 (2025-01-23) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.6) + +### What's Changed + +#### Fixes + +* Fix JSON Schema reference collection with `'examples'` keys by [@Viicos](https://github.com/Viicos) in [#11325](https://github.com/pydantic/pydantic/pull/11325) +* Fix url python serialization by [@sydney-runkle](https://github.com/sydney-runkle) in [#11331](https://github.com/pydantic/pydantic/pull/11331) + +## v2.10.5 (2025-01-08) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.5) + +### What's Changed + +#### Fixes + +* Remove custom MRO implementation of Pydantic models by [@Viicos](https://github.com/Viicos) in [#11184](https://github.com/pydantic/pydantic/pull/11184) +* Fix URL serialization for unions by [@sydney-runkle](https://github.com/sydney-runkle) in [#11233](https://github.com/pydantic/pydantic/pull/11233) + +## v2.10.4 (2024-12-18) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.4) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to v2.27.2 by [@davidhewitt](https://github.com/davidhewitt) in [#11138](https://github.com/pydantic/pydantic/pull/11138) + +#### Fixes + +* Fix for comparison of `AnyUrl` objects by [@alexprabhat99](https://github.com/alexprabhat99) in [#11082](https://github.com/pydantic/pydantic/pull/11082) +* Properly fetch PEP 695 type params for functions, do not fetch annotations from signature by [@Viicos](https://github.com/Viicos) in [#11093](https://github.com/pydantic/pydantic/pull/11093) +* Include JSON Schema input core schema in function schemas by [@Viicos](https://github.com/Viicos) in [#11085](https://github.com/pydantic/pydantic/pull/11085) +* Add `len` to `_BaseUrl` to avoid TypeError by [@Kharianne](https://github.com/Kharianne) in [#11111](https://github.com/pydantic/pydantic/pull/11111) +* Make sure the type reference is removed from the seen references by [@Viicos](https://github.com/Viicos) in [#11143](https://github.com/pydantic/pydantic/pull/11143) + +### New Contributors + +* [@FyZzyss](https://github.com/FyZzyss) made their first contribution in [#10789](https://github.com/pydantic/pydantic/pull/10789) +* [@tamird](https://github.com/tamird) made their first contribution in 
[#10948](https://github.com/pydantic/pydantic/pull/10948) +* [@felixxm](https://github.com/felixxm) made their first contribution in [#11077](https://github.com/pydantic/pydantic/pull/11077) +* [@alexprabhat99](https://github.com/alexprabhat99) made their first contribution in [#11082](https://github.com/pydantic/pydantic/pull/11082) +* [@Kharianne](https://github.com/Kharianne) made their first contribution in [#11111](https://github.com/pydantic/pydantic/pull/11111) + +## v2.10.3 (2024-12-03) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.3) + +### What's Changed + +#### Fixes + +* Set fields when `defer_build` is set on Pydantic dataclasses by [@Viicos](https://github.com/Viicos) in [#10984](https://github.com/pydantic/pydantic/pull/10984) +* Do not resolve the JSON Schema reference for `dict` core schema keys by [@Viicos](https://github.com/Viicos) in [#10989](https://github.com/pydantic/pydantic/pull/10989) +* Use the globals of the function when evaluating the return type for `PlainSerializer` and `WrapSerializer` functions by [@Viicos](https://github.com/Viicos) in [#11008](https://github.com/pydantic/pydantic/pull/11008) +* Fix host required enforcement for urls to be compatible with v2.9 behavior by [@sydney-runkle](https://github.com/sydney-runkle) in [#11027](https://github.com/pydantic/pydantic/pull/11027) +* Add a `default_factory_takes_validated_data` property to `FieldInfo` by [@Viicos](https://github.com/Viicos) in [#11034](https://github.com/pydantic/pydantic/pull/11034) +* Fix url json schema in `serialization` mode by [@sydney-runkle](https://github.com/sydney-runkle) in [#11035](https://github.com/pydantic/pydantic/pull/11035) + +## v2.10.2 (2024-11-25) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.2) + +### What's Changed + +#### Fixes + +* Only evaluate FieldInfo annotations if required during schema building by [@Viicos](https://github.com/Viicos) in [#10769](https://github.com/pydantic/pydantic/pull/10769) +* Do not evaluate annotations for private fields by [@Viicos](https://github.com/Viicos) in [#10962](https://github.com/pydantic/pydantic/pull/10962) +* Support serialization as any for `Secret` types and `Url` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10947](https://github.com/pydantic/pydantic/pull/10947) +* Fix type hint of `Field.default` to be compatible with Python 3.8 and 3.9 by [@Viicos](https://github.com/Viicos) in [#10972](https://github.com/pydantic/pydantic/pull/10972) +* Add hashing support for URL types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10975](https://github.com/pydantic/pydantic/pull/10975) +* Hide `BaseModel.__replace__` definition from type checkers by [@Viicos](https://github.com/Viicos) in [#10979](https://github.com/pydantic/pydantic/pull/10979) + +## v2.10.1 (2024-11-21) + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.1) + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` version to `v2.27.1` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10938](https://github.com/pydantic/pydantic/pull/10938) + +#### Fixes + +* Use the correct frame when instantiating a parametrized `TypeAdapter` by [@Viicos](https://github.com/Viicos) in [#10893](https://github.com/pydantic/pydantic/pull/10893) +* Relax check for validated data in `default_factory` utils by [@sydney-runkle](https://github.com/sydney-runkle) in [#10909](https://github.com/pydantic/pydantic/pull/10909) +* Fix type checking issue with 
`model_fields` and `model_computed_fields` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10911](https://github.com/pydantic/pydantic/pull/10911) +* Use the parent configuration during schema generation for stdlib `dataclass`es by [@sydney-runkle](https://github.com/sydney-runkle) in [#10928](https://github.com/pydantic/pydantic/pull/10928) +* Use the `globals` of the function when evaluating the return type of serializers and `computed_field`s by [@Viicos](https://github.com/Viicos) in [#10929](https://github.com/pydantic/pydantic/pull/10929) +* Fix URL constraint application by [@sydney-runkle](https://github.com/sydney-runkle) in [#10922](https://github.com/pydantic/pydantic/pull/10922) +* Fix URL equality with different validation methods by [@sydney-runkle](https://github.com/sydney-runkle) in [#10934](https://github.com/pydantic/pydantic/pull/10934) +* Fix JSON schema title when specified as `''` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10936](https://github.com/pydantic/pydantic/pull/10936) +* Fix `python` mode serialization for `complex` inference by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic-core#1549](https://github.com/pydantic/pydantic-core/pull/1549) + +## v2.10.0 (2024-11-20) + +The code released in v2.10.0 is practically identical to that of v2.10.0b2. + +[GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.0) + +See the [v2.10 release blog post](https://pydantic.dev/articles/pydantic-v2-10-release) for the highlights! + +### What's Changed + +#### Packaging + +* Bump `pydantic-core` to `v2.27.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10825](https://github.com/pydantic/pydantic/pull/10825) +* Replaced pdm with uv by [@frfahim](https://github.com/frfahim) in [#10727](https://github.com/pydantic/pydantic/pull/10727) + +#### New Features + +* Support `fractions.Fraction` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10318](https://github.com/pydantic/pydantic/pull/10318) +* Support `Hashable` for json validation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10324](https://github.com/pydantic/pydantic/pull/10324) +* Add a `SocketPath` type for `linux` systems by [@theunkn0wn1](https://github.com/theunkn0wn1) in [#10378](https://github.com/pydantic/pydantic/pull/10378) +* Allow arbitrary refs in JSON schema `examples` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10417](https://github.com/pydantic/pydantic/pull/10417) +* Support `defer_build` for Pydantic dataclasses by [@Viicos](https://github.com/Viicos) in [#10313](https://github.com/pydantic/pydantic/pull/10313) +* Adding v1 / v2 incompatibility warning for nested v1 model by [@sydney-runkle](https://github.com/sydney-runkle) in [#10431](https://github.com/pydantic/pydantic/pull/10431) +* Add support for unpacked `TypedDict` to type hint variadic keyword arguments with `@validate_call` by [@Viicos](https://github.com/Viicos) in [#10416](https://github.com/pydantic/pydantic/pull/10416) +* Support compiled patterns in `protected_namespaces` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10522](https://github.com/pydantic/pydantic/pull/10522) +* Add support for `propertyNames` in JSON schema by [@FlorianSW](https://github.com/FlorianSW) in [#10478](https://github.com/pydantic/pydantic/pull/10478) +* Adding `__replace__` protocol for Python 3.13+ support by [@sydney-runkle](https://github.com/sydney-runkle) in [#10596](https://github.com/pydantic/pydantic/pull/10596)
+* Expose public `sort` method for JSON schema generation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10595](https://github.com/pydantic/pydantic/pull/10595) +* Add runtime validation of `@validate_call` callable argument by [@kc0506](https://github.com/kc0506) in [#10627](https://github.com/pydantic/pydantic/pull/10627) +* Add `experimental_allow_partial` support by [@samuelcolvin](https://github.com/samuelcolvin) in [#10748](https://github.com/pydantic/pydantic/pull/10748) +* Support default factories taking validated data as an argument by [@Viicos](https://github.com/Viicos) in [#10678](https://github.com/pydantic/pydantic/pull/10678) +* Allow subclassing `ValidationError` and `PydanticCustomError` by [@Youssefares](https://github.com/Youssefares) in [pydantic/pydantic-core#1413](https://github.com/pydantic/pydantic-core/pull/1413) +* Add `trailing-strings` support to `experimental_allow_partial` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10825](https://github.com/pydantic/pydantic/pull/10825) +* Add `rebuild()` method for `TypeAdapter` and simplify `defer_build` patterns by [@sydney-runkle](https://github.com/sydney-runkle) in [#10537](https://github.com/pydantic/pydantic/pull/10537) +* Improve `TypeAdapter` instance repr by [@sydney-runkle](https://github.com/sydney-runkle) in [#10872](https://github.com/pydantic/pydantic/pull/10872) + +#### Changes + +* Don't allow customization of `SchemaGenerator` until interface is more stable by [@sydney-runkle](https://github.com/sydney-runkle) in [#10303](https://github.com/pydantic/pydantic/pull/10303) +* Cleanly `defer_build` on `TypeAdapters`, removing experimental flag by [@sydney-runkle](https://github.com/sydney-runkle) in [#10329](https://github.com/pydantic/pydantic/pull/10329) +* Fix `mro` of generic subclass by [@kc0506](https://github.com/kc0506) in [#10100](https://github.com/pydantic/pydantic/pull/10100) +* Strip whitespaces on JSON Schema title generation by [@sydney-runkle](https://github.com/sydney-runkle) in [#10404](https://github.com/pydantic/pydantic/pull/10404) +* Use `b64decode` and `b64encode` for `Base64Bytes` type by [@sydney-runkle](https://github.com/sydney-runkle) in [#10486](https://github.com/pydantic/pydantic/pull/10486) +* Relax protected namespace config default by [@sydney-runkle](https://github.com/sydney-runkle) in [#10441](https://github.com/pydantic/pydantic/pull/10441) +* Revalidate parametrized generics if instance's origin is subclass of OG class by [@sydney-runkle](https://github.com/sydney-runkle) in [#10666](https://github.com/pydantic/pydantic/pull/10666) +* Warn if configuration is specified on the `@dataclass` decorator and with the `__pydantic_config__` attribute by [@sydney-runkle](https://github.com/sydney-runkle) in [#10406](https://github.com/pydantic/pydantic/pull/10406) +* Recommend against using `Ellipsis` (...) 
with `Field` by [@Viicos](https://github.com/Viicos) in [#10661](https://github.com/pydantic/pydantic/pull/10661) +* Migrate to subclassing instead of annotated approach for pydantic url types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10662](https://github.com/pydantic/pydantic/pull/10662) +* Change JSON schema generation of `Literal`s and `Enums` by [@Viicos](https://github.com/Viicos) in [#10692](https://github.com/pydantic/pydantic/pull/10692) +* Simplify unions involving `Any` or `Never` when replacing type variables by [@Viicos](https://github.com/Viicos) in [#10338](https://github.com/pydantic/pydantic/pull/10338) +* Do not require padding when decoding `base64` bytes by [@bschoenmaeckers](https://github.com/bschoenmaeckers) in [pydantic/pydantic-core#1448](https://github.com/pydantic/pydantic-core/pull/1448) +* Support dates all the way to 1BC by [@changhc](https://github.com/changhc) in [pydantic/speedate#77](https://github.com/pydantic/speedate/pull/77) + +#### Performance + +* Schema cleaning: skip unnecessary copies during schema walking by [@Viicos](https://github.com/Viicos) in [#10286](https://github.com/pydantic/pydantic/pull/10286) +* Refactor namespace logic for annotations evaluation by [@Viicos](https://github.com/Viicos) in [#10530](https://github.com/pydantic/pydantic/pull/10530) +* Improve email regexp on edge cases by [@AlekseyLobanov](https://github.com/AlekseyLobanov) in [#10601](https://github.com/pydantic/pydantic/pull/10601) +* `CoreMetadata` refactor with an emphasis on documentation, schema build time performance, and reducing complexity by [@sydney-runkle](https://github.com/sydney-runkle) in [#10675](https://github.com/pydantic/pydantic/pull/10675) + +#### Fixes + +* Remove guarding check on `computed_field` with `field_serializer` by [@nix010](https://github.com/nix010) in [#10390](https://github.com/pydantic/pydantic/pull/10390) +* Fix `Predicate` issue in `v2.9.0` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10321](https://github.com/pydantic/pydantic/pull/10321) +* Fixing `annotated-types` bound by [@sydney-runkle](https://github.com/sydney-runkle) in [#10327](https://github.com/pydantic/pydantic/pull/10327) +* Turn `tzdata` install requirement into optional `timezone` dependency by [@jakob-keller](https://github.com/jakob-keller) in [#10331](https://github.com/pydantic/pydantic/pull/10331) +* Use correct types namespace when building `namedtuple` core schemas by [@Viicos](https://github.com/Viicos) in [#10337](https://github.com/pydantic/pydantic/pull/10337) +* Fix evaluation of stringified annotations during namespace inspection by [@Viicos](https://github.com/Viicos) in [#10347](https://github.com/pydantic/pydantic/pull/10347) +* Fix `IncEx` type alias definition by [@Viicos](https://github.com/Viicos) in [#10339](https://github.com/pydantic/pydantic/pull/10339) +* Do not error when trying to evaluate annotations of private attributes by [@Viicos](https://github.com/Viicos) in [#10358](https://github.com/pydantic/pydantic/pull/10358) +* Fix nested type statement by [@kc0506](https://github.com/kc0506) in [#10369](https://github.com/pydantic/pydantic/pull/10369) +* Improve typing of `ModelMetaclass.mro` by [@Viicos](https://github.com/Viicos) in [#10372](https://github.com/pydantic/pydantic/pull/10372) +* Fix class access of deprecated `computed_field`s by [@Viicos](https://github.com/Viicos) in [#10391](https://github.com/pydantic/pydantic/pull/10391) +* Make sure `inspect.iscoroutinefunction` works on coroutines 
decorated with `@validate_call` by [@MovisLi](https://github.com/MovisLi) in [#10374](https://github.com/pydantic/pydantic/pull/10374) +* Fix `NameError` when using `validate_call` with PEP 695 on a class by [@kc0506](https://github.com/kc0506) in [#10380](https://github.com/pydantic/pydantic/pull/10380) +* Fix `ZoneInfo` with various invalid types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10408](https://github.com/pydantic/pydantic/pull/10408) +* Fix `PydanticUserError` on empty `model_config` with annotations by [@cdwilson](https://github.com/cdwilson) in [#10412](https://github.com/pydantic/pydantic/pull/10412) +* Fix variance issue in `_IncEx` type alias, only allow `True` by [@Viicos](https://github.com/Viicos) in [#10414](https://github.com/pydantic/pydantic/pull/10414) +* Fix serialization schema generation when using `PlainValidator` by [@Viicos](https://github.com/Viicos) in [#10427](https://github.com/pydantic/pydantic/pull/10427) +* Fix schema generation error when serialization schema holds references by [@Viicos](https://github.com/Viicos) in [#10444](https://github.com/pydantic/pydantic/pull/10444) +* Inline references if possible when generating schema for `json_schema_input_type` by [@Viicos](https://github.com/Viicos) in [#10439](https://github.com/pydantic/pydantic/pull/10439) +* Fix recursive arguments in `Representation` by [@Viicos](https://github.com/Viicos) in [#10480](https://github.com/pydantic/pydantic/pull/10480) +* Fix representation for builtin function types by [@kschwab](https://github.com/kschwab) in [#10479](https://github.com/pydantic/pydantic/pull/10479) +* Add python validators for decimal constraints (`max_digits` and `decimal_places`) by [@sydney-runkle](https://github.com/sydney-runkle) in [#10506](https://github.com/pydantic/pydantic/pull/10506) +* Only fetch `__pydantic_core_schema__` from the current class during schema generation by [@Viicos](https://github.com/Viicos) in [#10518](https://github.com/pydantic/pydantic/pull/10518) +* Fix `stacklevel` on deprecation warnings for `BaseModel` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10520](https://github.com/pydantic/pydantic/pull/10520) +* Fix warning `stacklevel` in `BaseModel.__init__` by [@Viicos](https://github.com/Viicos) in [#10526](https://github.com/pydantic/pydantic/pull/10526) +* Improve error handling for in-evaluable refs for discriminator application by [@sydney-runkle](https://github.com/sydney-runkle) in [#10440](https://github.com/pydantic/pydantic/pull/10440) +* Change the signature of `ConfigWrapper.core_config` to take the title directly by [@Viicos](https://github.com/Viicos) in [#10562](https://github.com/pydantic/pydantic/pull/10562) +* Do not use the previous config from the stack for dataclasses without config by [@Viicos](https://github.com/Viicos) in [#10576](https://github.com/pydantic/pydantic/pull/10576) +* Fix serialization for IP types with `mode='python'` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10594](https://github.com/pydantic/pydantic/pull/10594) +* Support constraint application for `Base64Etc` types by [@sydney-runkle](https://github.com/sydney-runkle) in [#10584](https://github.com/pydantic/pydantic/pull/10584) +* Fix `validate_call` ignoring `Field` in `Annotated` by [@kc0506](https://github.com/kc0506) in [#10610](https://github.com/pydantic/pydantic/pull/10610) +* Raise an error when `Self` is invalid by [@kc0506](https://github.com/kc0506) in [#10609](https://github.com/pydantic/pydantic/pull/10609) +* 
Using `core_schema.InvalidSchema` instead of metadata injection + checks by [@sydney-runkle](https://github.com/sydney-runkle) in [#10523](https://github.com/pydantic/pydantic/pull/10523) +* Tweak type alias logic by [@kc0506](https://github.com/kc0506) in [#10643](https://github.com/pydantic/pydantic/pull/10643) +* Support usage of `type` with `typing.Self` and type aliases by [@kc0506](https://github.com/kc0506) in [#10621](https://github.com/pydantic/pydantic/pull/10621) +* Use overloads for `Field` and `PrivateAttr` functions by [@Viicos](https://github.com/Viicos) in [#10651](https://github.com/pydantic/pydantic/pull/10651) +* Clean up the `mypy` plugin implementation by [@Viicos](https://github.com/Viicos) in [#10669](https://github.com/pydantic/pydantic/pull/10669) +* Properly check for `typing_extensions` variant of `TypeAliasType` by [@Daraan](https://github.com/Daraan) in [#10713](https://github.com/pydantic/pydantic/pull/10713) +* Allow any mapping in `BaseModel.model_copy()` by [@Viicos](https://github.com/Viicos) in [#10751](https://github.com/pydantic/pydantic/pull/10751) +* Fix `isinstance` behavior for urls by [@sydney-runkle](https://github.com/sydney-runkle) in [#10766](https://github.com/pydantic/pydantic/pull/10766) +* Ensure `cached_property` can be set on Pydantic models by [@Viicos](https://github.com/Viicos) in [#10774](https://github.com/pydantic/pydantic/pull/10774) +* Fix equality checks for primitives in literals by [@sydney-runkle](https://github.com/sydney-runkle) in [pydantic/pydantic-core#1459](https://github.com/pydantic/pydantic-core/pull/1459) +* Properly enforce `host_required` for URLs by [@Viicos](https://github.com/Viicos) in [pydantic/pydantic-core#1488](https://github.com/pydantic/pydantic-core/pull/1488) +* Fix when `coerce_numbers_to_str` enabled and string has invalid Unicode character by [@andrey-berenda](https://github.com/andrey-berenda) in [pydantic/pydantic-core#1515](https://github.com/pydantic/pydantic-core/pull/1515) +* Fix serializing `complex` values in `Enum`s by [@changhc](https://github.com/changhc) in [pydantic/pydantic-core#1524](https://github.com/pydantic/pydantic-core/pull/1524) +* Refactor `_typing_extra` module by [@Viicos](https://github.com/Viicos) in [#10725](https://github.com/pydantic/pydantic/pull/10725) +* Support intuitive equality for urls by [@sydney-runkle](https://github.com/sydney-runkle) in [#10798](https://github.com/pydantic/pydantic/pull/10798) +* Add `bytearray` to `TypeAdapter.validate_json` signature by [@samuelcolvin](https://github.com/samuelcolvin) in [#10802](https://github.com/pydantic/pydantic/pull/10802) +* Ensure class access of method descriptors is performed when used as a default with `Field` by [@Viicos](https://github.com/Viicos) in [#10816](https://github.com/pydantic/pydantic/pull/10816) +* Fix circular import with `validate_call` by [@sydney-runkle](https://github.com/sydney-runkle) in [#10807](https://github.com/pydantic/pydantic/pull/10807) +* Fix error when using type aliases referencing other type aliases by [@Viicos](https://github.com/Viicos) in [#10809](https://github.com/pydantic/pydantic/pull/10809) +* Fix `IncEx` type alias to be compatible with mypy by [@Viicos](https://github.com/Viicos) in [#10813](https://github.com/pydantic/pydantic/pull/10813) +* Make `__signature__` a lazy property, do not deepcopy defaults by [@Viicos](https://github.com/Viicos) in [#10818](https://github.com/pydantic/pydantic/pull/10818) +* Make `__signature__` lazy for dataclasses, too by 
[@sydney-runkle](https://github.com/sydney-runkle) in [#10832](https://github.com/pydantic/pydantic/pull/10832) +* Subclass all single host url classes from `AnyUrl` to preserve behavior from v2.9 by [@sydney-runkle](https://github.com/sydney-runkle) in [#10856](https://github.com/pydantic/pydantic/pull/10856) + +### New Contributors + +* [@jakob-keller](https://github.com/jakob-keller) made their first contribution in [#10331](https://github.com/pydantic/pydantic/pull/10331) +* [@MovisLi](https://github.com/MovisLi) made their first contribution in [#10374](https://github.com/pydantic/pydantic/pull/10374) +* [@joaopalmeiro](https://github.com/joaopalmeiro) made their first contribution in [#10405](https://github.com/pydantic/pydantic/pull/10405) +* [@theunkn0wn1](https://github.com/theunkn0wn1) made their first contribution in [#10378](https://github.com/pydantic/pydantic/pull/10378) +* [@cdwilson](https://github.com/cdwilson) made their first contribution in [#10412](https://github.com/pydantic/pydantic/pull/10412) +* [@dlax](https://github.com/dlax) made their first contribution in [#10421](https://github.com/pydantic/pydantic/pull/10421) +* [@kschwab](https://github.com/kschwab) made their first contribution in [#10479](https://github.com/pydantic/pydantic/pull/10479) +* [@santibreo](https://github.com/santibreo) made their first contribution in [#10453](https://github.com/pydantic/pydantic/pull/10453) +* [@FlorianSW](https://github.com/FlorianSW) made their first contribution in [#10478](https://github.com/pydantic/pydantic/pull/10478) +* [@tkasuz](https://github.com/tkasuz) made their first contribution in [#10555](https://github.com/pydantic/pydantic/pull/10555) +* [@AlekseyLobanov](https://github.com/AlekseyLobanov) made their first contribution in [#10601](https://github.com/pydantic/pydantic/pull/10601) +* [@NiclasvanEyk](https://github.com/NiclasvanEyk) made their first contribution in [#10667](https://github.com/pydantic/pydantic/pull/10667) +* [@mschoettle](https://github.com/mschoettle) made their first contribution in [#10677](https://github.com/pydantic/pydantic/pull/10677) +* [@Daraan](https://github.com/Daraan) made their first contribution in [#10713](https://github.com/pydantic/pydantic/pull/10713) +* [@k4nar](https://github.com/k4nar) made their first contribution in [#10736](https://github.com/pydantic/pydantic/pull/10736) +* [@UriyaHarpeness](https://github.com/UriyaHarpeness) made their first contribution in [#10740](https://github.com/pydantic/pydantic/pull/10740) +* [@frfahim](https://github.com/frfahim) made their first contribution in [#10727](https://github.com/pydantic/pydantic/pull/10727) + +## v2.10.0b2 (2024-11-13) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.0b2) for details. + +## v2.10.0b1 (2024-11-06) + +Pre-release, see [the GitHub release](https://github.com/pydantic/pydantic/releases/tag/v2.10.0b1) for details. + + +... see [here](https://docs.pydantic.dev/changelog/#v0322-2019-08-17) for earlier changes. 
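One v2.10 entry above that is easy to misread is "Support default factories taking validated data as an argument" ([#10678](https://github.com/pydantic/pydantic/pull/10678)). A minimal sketch of the behavior, assuming pydantic >= 2.10 (the model and field names are made up): a `default_factory` may declare a single parameter, which receives the fields validated so far, so a default can be derived from other fields:

```python
from pydantic import BaseModel, Field

class User(BaseModel):
    username: str
    # pydantic >= 2.10: a one-argument default_factory receives the
    # already-validated data, so defaults can depend on other fields.
    display_name: str = Field(default_factory=lambda data: data['username'].title())

print(User(username='ada lovelace').display_name)  # Ada Lovelace
```

Fields are validated in definition order, so the factory only sees fields declared before it; the companion `default_factory_takes_validated_data` property on `FieldInfo` ([#11034](https://github.com/pydantic/pydantic/pull/11034), also listed above) reports whether a given factory uses this form.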
diff --git a/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/RECORD b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ca488f964bc1b15c91178eff6f86175ee5d925cb --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/RECORD @@ -0,0 +1,215 @@ +pydantic-2.11.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pydantic-2.11.7.dist-info/METADATA,sha256=95G0C96Zfbf_F1sji_-X1Qz5UFqKowfgv3kdV-0a4oI,67970 +pydantic-2.11.7.dist-info/RECORD,, +pydantic-2.11.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +pydantic-2.11.7.dist-info/licenses/LICENSE,sha256=qeGG88oWte74QxjnpwFyE1GgDLe4rjpDlLZ7SeNSnvM,1129 +pydantic/__init__.py,sha256=D3_-0aRPoAF5EH4T4JPVOYLNEc-DeaCcDt6UzIjP_D0,15395 +pydantic/__pycache__/__init__.cpython-310.pyc,, +pydantic/__pycache__/_migration.cpython-310.pyc,, +pydantic/__pycache__/alias_generators.cpython-310.pyc,, +pydantic/__pycache__/aliases.cpython-310.pyc,, +pydantic/__pycache__/annotated_handlers.cpython-310.pyc,, +pydantic/__pycache__/class_validators.cpython-310.pyc,, +pydantic/__pycache__/color.cpython-310.pyc,, +pydantic/__pycache__/config.cpython-310.pyc,, +pydantic/__pycache__/dataclasses.cpython-310.pyc,, +pydantic/__pycache__/datetime_parse.cpython-310.pyc,, +pydantic/__pycache__/decorator.cpython-310.pyc,, +pydantic/__pycache__/env_settings.cpython-310.pyc,, +pydantic/__pycache__/error_wrappers.cpython-310.pyc,, +pydantic/__pycache__/errors.cpython-310.pyc,, +pydantic/__pycache__/fields.cpython-310.pyc,, +pydantic/__pycache__/functional_serializers.cpython-310.pyc,, +pydantic/__pycache__/functional_validators.cpython-310.pyc,, +pydantic/__pycache__/generics.cpython-310.pyc,, +pydantic/__pycache__/json.cpython-310.pyc,, +pydantic/__pycache__/json_schema.cpython-310.pyc,, +pydantic/__pycache__/main.cpython-310.pyc,, +pydantic/__pycache__/mypy.cpython-310.pyc,, +pydantic/__pycache__/networks.cpython-310.pyc,, +pydantic/__pycache__/parse.cpython-310.pyc,, +pydantic/__pycache__/root_model.cpython-310.pyc,, +pydantic/__pycache__/schema.cpython-310.pyc,, +pydantic/__pycache__/tools.cpython-310.pyc,, +pydantic/__pycache__/type_adapter.cpython-310.pyc,, +pydantic/__pycache__/types.cpython-310.pyc,, +pydantic/__pycache__/typing.cpython-310.pyc,, +pydantic/__pycache__/utils.cpython-310.pyc,, +pydantic/__pycache__/validate_call_decorator.cpython-310.pyc,, +pydantic/__pycache__/validators.cpython-310.pyc,, +pydantic/__pycache__/version.cpython-310.pyc,, +pydantic/__pycache__/warnings.cpython-310.pyc,, +pydantic/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/_internal/__pycache__/__init__.cpython-310.pyc,, +pydantic/_internal/__pycache__/_config.cpython-310.pyc,, +pydantic/_internal/__pycache__/_core_metadata.cpython-310.pyc,, +pydantic/_internal/__pycache__/_core_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_dataclasses.cpython-310.pyc,, +pydantic/_internal/__pycache__/_decorators.cpython-310.pyc,, +pydantic/_internal/__pycache__/_decorators_v1.cpython-310.pyc,, +pydantic/_internal/__pycache__/_discriminated_union.cpython-310.pyc,, +pydantic/_internal/__pycache__/_docs_extraction.cpython-310.pyc,, +pydantic/_internal/__pycache__/_fields.cpython-310.pyc,, +pydantic/_internal/__pycache__/_forward_ref.cpython-310.pyc,, +pydantic/_internal/__pycache__/_generate_schema.cpython-310.pyc,, +pydantic/_internal/__pycache__/_generics.cpython-310.pyc,, 
+pydantic/_internal/__pycache__/_git.cpython-310.pyc,, +pydantic/_internal/__pycache__/_import_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_internal_dataclass.cpython-310.pyc,, +pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-310.pyc,, +pydantic/_internal/__pycache__/_mock_val_ser.cpython-310.pyc,, +pydantic/_internal/__pycache__/_model_construction.cpython-310.pyc,, +pydantic/_internal/__pycache__/_namespace_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_repr.cpython-310.pyc,, +pydantic/_internal/__pycache__/_schema_gather.cpython-310.pyc,, +pydantic/_internal/__pycache__/_schema_generation_shared.cpython-310.pyc,, +pydantic/_internal/__pycache__/_serializers.cpython-310.pyc,, +pydantic/_internal/__pycache__/_signature.cpython-310.pyc,, +pydantic/_internal/__pycache__/_typing_extra.cpython-310.pyc,, +pydantic/_internal/__pycache__/_utils.cpython-310.pyc,, +pydantic/_internal/__pycache__/_validate_call.cpython-310.pyc,, +pydantic/_internal/__pycache__/_validators.cpython-310.pyc,, +pydantic/_internal/_config.py,sha256=WV07hp8xf0Q0yP9IwMvuGLQmu34AZl5sBs2JaOgCk9I,14253 +pydantic/_internal/_core_metadata.py,sha256=Y_g2t3i7uluK-wXCZvzJfRFMPUM23aBYLfae4FzBPy0,5162 +pydantic/_internal/_core_utils.py,sha256=_-ZuXhpi_0JDpZzz8jvGr82kgS3PEritWR22fjWpw48,6746 +pydantic/_internal/_dataclasses.py,sha256=GA-NO1cgYbce0UwZP-sfPe5AujHjhvgTKbPCyg9GGP8,8990 +pydantic/_internal/_decorators.py,sha256=NS7SKQvtDgnsAd37mjqtwPh19td57FJ69LsceO5SywI,32638 +pydantic/_internal/_decorators_v1.py,sha256=tfdfdpQKY4R2XCOwqHbZeoQMur6VNigRrfhudXBHx38,6185 +pydantic/_internal/_discriminated_union.py,sha256=aMl0SRSyQyHfW4-klnMTHNvwSRoqE3H3PRV_05vRsTg,25478 +pydantic/_internal/_docs_extraction.py,sha256=p-STFvLHUzxrj6bblpaAAYWmq4INxVCAdIupDgQYSIw,3831 +pydantic/_internal/_fields.py,sha256=tFmaX47Q2z8QCCPJ4K8MrPfgKDztx9clntzPxBv0OKo,23205 +pydantic/_internal/_forward_ref.py,sha256=5n3Y7-3AKLn8_FS3Yc7KutLiPUhyXmAtkEZOaFnonwM,611 +pydantic/_internal/_generate_schema.py,sha256=LWJsmvNdWDh1QxY4WelsFSw1_nScPwEfJdpwMZH5V4k,133821 +pydantic/_internal/_generics.py,sha256=D1_0xgqnL6TJQe_fFyaSk2Ug_F-kT_jRBfLjHFLCIqQ,23849 +pydantic/_internal/_git.py,sha256=IwPh3DPfa2Xq3rBuB9Nx8luR2A1i69QdeTfWWXIuCVg,809 +pydantic/_internal/_import_utils.py,sha256=TRhxD5OuY6CUosioBdBcJUs0om7IIONiZdYAV7zQ8jM,402 +pydantic/_internal/_internal_dataclass.py,sha256=_bedc1XbuuygRGiLZqkUkwwFpQaoR1hKLlR501nyySY,144 +pydantic/_internal/_known_annotated_metadata.py,sha256=lYAPiUhfSgfpY6qH9xJPJTEMoowv27QmcyOgQzys90U,16213 +pydantic/_internal/_mock_val_ser.py,sha256=wmRRFSBvqfcLbI41PsFliB4u2AZ3mJpZeiERbD3xKTo,8885 +pydantic/_internal/_model_construction.py,sha256=2Qa5Y4EgBojkhsVHu0OjpphUIlWYuVXMg1KC2opc00s,35228 +pydantic/_internal/_namespace_utils.py,sha256=CMG7nEAXVb-Idqyd3CgdulRrM-zEXOPe3kYEDBqnSKw,12878 +pydantic/_internal/_repr.py,sha256=t7GNyaUU8xvqwlDHxVE2IyDeaNZrK7p01ojQPP0UI_o,5081 +pydantic/_internal/_schema_gather.py,sha256=VLEv51TYEeeND2czsyrmJq1MVnJqTOmnLan7VG44c8A,9114 +pydantic/_internal/_schema_generation_shared.py,sha256=F_rbQbrkoomgxsskdHpP0jUJ7TCfe0BADAEkq6CJ4nM,4842 +pydantic/_internal/_serializers.py,sha256=qQ3Rak4J6bqbnjGCRjiAY4M8poLo0s5qH46sXZSQQuA,1474 +pydantic/_internal/_signature.py,sha256=8EljPJe4pSnapuirG5DkBAgD1hggHxEAyzFPH-9H0zE,6779 +pydantic/_internal/_typing_extra.py,sha256=PO3u2JmX3JKlTFy0Ew95iyjAgYHgJsqqskev4zooB2I,28216 +pydantic/_internal/_utils.py,sha256=iRmCSO0uoFhAL_ChHaYSCKrswpSrRHYoO_YQSFfCJxU,15344 
+pydantic/_internal/_validate_call.py,sha256=PfdVnSzhXOrENtaDoDw3PFWPVYD5W_gNYPe8p3Ug6Lg,5321 +pydantic/_internal/_validators.py,sha256=TJcR9bxcPXjzntN6Qgib8cyPRkFZQxHW32SoKGEcp0k,20610 +pydantic/_migration.py,sha256=_6VCCVWNYB7fDpbP2MqW4bXXqo17C5_J907u9zNJQbM,11907 +pydantic/alias_generators.py,sha256=KM1n3u4JfLSBl1UuYg3hoYHzXJD-yvgrnq8u1ccwh_A,2124 +pydantic/aliases.py,sha256=vhCHyoSWnX-EJ-wWb5qj4xyRssgGWnTQfzQp4GSZ9ug,4937 +pydantic/annotated_handlers.py,sha256=WfyFSqwoEIFXBh7T73PycKloI1DiX45GWi0-JOsCR4Y,4407 +pydantic/class_validators.py,sha256=i_V3j-PYdGLSLmj_IJZekTRjunO8SIVz8LMlquPyP7E,148 +pydantic/color.py,sha256=AzqGfVQHF92_ZctDcue0DM4yTp2P6tekkwRINTWrLIo,21481 +pydantic/config.py,sha256=roz_FbfFPoVpJVpB1G7dJ8A3swghQjdN-ozrBxbLShM,42048 +pydantic/dataclasses.py,sha256=K2e76b_Cj1yvwcwfJVR7nQnLoPdetVig5yHVMGuzkpE,16644 +pydantic/datetime_parse.py,sha256=QC-WgMxMr_wQ_mNXUS7AVf-2hLEhvvsPY1PQyhSGOdk,150 +pydantic/decorator.py,sha256=YX-jUApu5AKaVWKPoaV-n-4l7UbS69GEt9Ra3hszmKI,145 +pydantic/deprecated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/deprecated/__pycache__/__init__.cpython-310.pyc,, +pydantic/deprecated/__pycache__/class_validators.cpython-310.pyc,, +pydantic/deprecated/__pycache__/config.cpython-310.pyc,, +pydantic/deprecated/__pycache__/copy_internals.cpython-310.pyc,, +pydantic/deprecated/__pycache__/decorator.cpython-310.pyc,, +pydantic/deprecated/__pycache__/json.cpython-310.pyc,, +pydantic/deprecated/__pycache__/parse.cpython-310.pyc,, +pydantic/deprecated/__pycache__/tools.cpython-310.pyc,, +pydantic/deprecated/class_validators.py,sha256=rwfP165xity36foy1NNCg4Jf9Sul44sJLW-A5sseahI,10245 +pydantic/deprecated/config.py,sha256=k_lsVk57paxLJOcBueH07cu1OgEgWdVBxm6lfaC3CCU,2663 +pydantic/deprecated/copy_internals.py,sha256=Ku0LHLEU0WcoIInNHls7PjuBvpLFTQ4Uus77jQ3Yi08,7616 +pydantic/deprecated/decorator.py,sha256=TBm6bJ7wJsNih_8Wq5IzDcwP32m9_vfxs96desLuk00,10845 +pydantic/deprecated/json.py,sha256=HlWCG35RRrxyzuTS6LTQiZBwRhmDZWmeqQH8rLW6wA8,4657 +pydantic/deprecated/parse.py,sha256=Gzd6b_g8zJXcuE7QRq5adhx_EMJahXfcpXCF0RgrqqI,2511 +pydantic/deprecated/tools.py,sha256=Nrm9oFRZWp8-jlfvPgJILEsywp4YzZD52XIGPDLxHcI,3330 +pydantic/env_settings.py,sha256=6IHeeWEqlUPRUv3V-AXiF_W91fg2Jw_M3O0l34J_eyA,148 +pydantic/error_wrappers.py,sha256=RK6mqATc9yMD-KBD9IJS9HpKCprWHd8wo84Bnm-3fR8,150 +pydantic/errors.py,sha256=7ctBNCtt57kZFx71Ls2H86IufQARv4wPKf8DhdsVn5w,6002 +pydantic/experimental/__init__.py,sha256=j08eROfz-xW4k_X9W4m2AW26IVdyF3Eg1OzlIGA11vk,328 +pydantic/experimental/__pycache__/__init__.cpython-310.pyc,, +pydantic/experimental/__pycache__/arguments_schema.cpython-310.pyc,, +pydantic/experimental/__pycache__/pipeline.cpython-310.pyc,, +pydantic/experimental/arguments_schema.py,sha256=EFnjX_ulp-tPyUjQX5pmQtug1OFL_Acc8bcMbLd-fVY,1866 +pydantic/experimental/pipeline.py,sha256=znbMBvir3xvPA20Xj8Moco1oJMPf1VYVrIQ8KQNtDlM,23910 +pydantic/fields.py,sha256=9Ky1nTKaMhThaNkVEkJOFHQHGq2FCKSwA6-zwUB-KWo,64416 +pydantic/functional_serializers.py,sha256=3m81unH3lYovdMi00oZywlHhn1KDz9X2CO3iTtBya6A,17102 +pydantic/functional_validators.py,sha256=-yY6uj_9_GAI4aqqfZlzyGdzs06huzy6zNWD7TJp3_0,29560 +pydantic/generics.py,sha256=0ZqZ9O9annIj_3mGBRqps4htey3b5lV1-d2tUxPMMnA,144 +pydantic/json.py,sha256=ZH8RkI7h4Bz-zp8OdTAxbJUoVvcoU-jhMdRZ0B-k0xc,140 +pydantic/json_schema.py,sha256=KhsS_MWPox0PYqklnhJcb_3uiCVrEOgyhG53cUZv6QA,115430 +pydantic/main.py,sha256=v67a4-nFooC-GJ1oHgS__Vm399Ygp_NH-1WzHXwjFM0,81012 
+pydantic/mypy.py,sha256=ta-lBmVd8P4S7px2qmWm-qyqSkBdqfBeOIzMilU0ifY,59265 +pydantic/networks.py,sha256=_YpSnBR2kMfoWX76sdq34cfCH-MWr5or0ve0tow7OWo,41446 +pydantic/parse.py,sha256=wkd82dgtvWtD895U_I6E1htqMlGhBSYEV39cuBSeo3A,141 +pydantic/plugin/__init__.py,sha256=5cXMmu5xL4LVZhWPE1XD8ozHZ-qEC2-s4seLe8tbN_Y,6965 +pydantic/plugin/__pycache__/__init__.cpython-310.pyc,, +pydantic/plugin/__pycache__/_loader.cpython-310.pyc,, +pydantic/plugin/__pycache__/_schema_validator.cpython-310.pyc,, +pydantic/plugin/_loader.py,sha256=nI3SEKr0mlCB556kvbyBXjYQw9b_s8UTKE9Q6iESX6s,2167 +pydantic/plugin/_schema_validator.py,sha256=QbmqsG33MBmftNQ2nNiuN22LhbrexUA7ipDVv3J02BU,5267 +pydantic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/root_model.py,sha256=SCXhpRCgZgfqE9AGVJTC7kMAojKffL7PV4i0qcwOMm0,6279 +pydantic/schema.py,sha256=Vqqjvq_LnapVknebUd3Bp_J1p2gXZZnZRgL48bVEG7o,142 +pydantic/tools.py,sha256=iHQpd8SJ5DCTtPV5atAV06T89bjSaMFeZZ2LX9lasZY,141 +pydantic/type_adapter.py,sha256=Y3NE0YhFwxwoqrYU9caWymLWp1Avq4sRUdb5s01RoJk,31171 +pydantic/types.py,sha256=mWTvQH_Wt_CccQcEHYjcUWpyoj1U04WOnrMsMYod_64,104781 +pydantic/typing.py,sha256=P7feA35MwTcLsR1uL7db0S-oydBxobmXa55YDoBgajQ,138 +pydantic/utils.py,sha256=15nR2QpqTBFlQV4TNtTItMyTJx_fbyV-gPmIEY1Gooc,141 +pydantic/v1/__init__.py,sha256=SxQPklgBs4XHJwE6BZ9qoewYoGiNyYUnmHzEFCZbfnI,2946 +pydantic/v1/__pycache__/__init__.cpython-310.pyc,, +pydantic/v1/__pycache__/_hypothesis_plugin.cpython-310.pyc,, +pydantic/v1/__pycache__/annotated_types.cpython-310.pyc,, +pydantic/v1/__pycache__/class_validators.cpython-310.pyc,, +pydantic/v1/__pycache__/color.cpython-310.pyc,, +pydantic/v1/__pycache__/config.cpython-310.pyc,, +pydantic/v1/__pycache__/dataclasses.cpython-310.pyc,, +pydantic/v1/__pycache__/datetime_parse.cpython-310.pyc,, +pydantic/v1/__pycache__/decorator.cpython-310.pyc,, +pydantic/v1/__pycache__/env_settings.cpython-310.pyc,, +pydantic/v1/__pycache__/error_wrappers.cpython-310.pyc,, +pydantic/v1/__pycache__/errors.cpython-310.pyc,, +pydantic/v1/__pycache__/fields.cpython-310.pyc,, +pydantic/v1/__pycache__/generics.cpython-310.pyc,, +pydantic/v1/__pycache__/json.cpython-310.pyc,, +pydantic/v1/__pycache__/main.cpython-310.pyc,, +pydantic/v1/__pycache__/mypy.cpython-310.pyc,, +pydantic/v1/__pycache__/networks.cpython-310.pyc,, +pydantic/v1/__pycache__/parse.cpython-310.pyc,, +pydantic/v1/__pycache__/schema.cpython-310.pyc,, +pydantic/v1/__pycache__/tools.cpython-310.pyc,, +pydantic/v1/__pycache__/types.cpython-310.pyc,, +pydantic/v1/__pycache__/typing.cpython-310.pyc,, +pydantic/v1/__pycache__/utils.cpython-310.pyc,, +pydantic/v1/__pycache__/validators.cpython-310.pyc,, +pydantic/v1/__pycache__/version.cpython-310.pyc,, +pydantic/v1/_hypothesis_plugin.py,sha256=5ES5xWuw1FQAsymLezy8QgnVz0ZpVfU3jkmT74H27VQ,14847 +pydantic/v1/annotated_types.py,sha256=uk2NAAxqiNELKjiHhyhxKaIOh8F1lYW_LzrW3X7oZBc,3157 +pydantic/v1/class_validators.py,sha256=ULOaIUgYUDBsHL7EEVEarcM-UubKUggoN8hSbDonsFE,14672 +pydantic/v1/color.py,sha256=iZABLYp6OVoo2AFkP9Ipri_wSc6-Kklu8YuhSartd5g,16844 +pydantic/v1/config.py,sha256=a6P0Wer9x4cbwKW7Xv8poSUqM4WP-RLWwX6YMpYq9AA,6532 +pydantic/v1/dataclasses.py,sha256=784cqvInbwIPWr9usfpX3ch7z4t3J2tTK6N067_wk1o,18172 +pydantic/v1/datetime_parse.py,sha256=4Qy1kQpq3rNVZJeIHeSPDpuS2Bvhp1KPtzJG1xu-H00,7724 +pydantic/v1/decorator.py,sha256=zaaxxxoWPCm818D1bs0yhapRjXm32V8G0ZHWCdM1uXA,10339 +pydantic/v1/env_settings.py,sha256=A9VXwtRl02AY-jH0C0ouy5VNw3fi6F_pkzuHDjgAAOM,14105 
+pydantic/v1/error_wrappers.py,sha256=6625Mfw9qkC2NwitB_JFAWe8B-Xv6zBU7rL9k28tfyo,5196 +pydantic/v1/errors.py,sha256=mIwPED5vGM5Q5v4C4Z1JPldTRH-omvEylH6ksMhOmPw,17726 +pydantic/v1/fields.py,sha256=VqWJCriUNiEyptXroDVJ501JpVA0en2VANcksqXL2b8,50649 +pydantic/v1/generics.py,sha256=VzC9YUV-EbPpQ3aAfk1cNFej79_IzznkQ7WrmTTZS9E,17871 +pydantic/v1/json.py,sha256=WQ5Hy_hIpfdR3YS8k6N2E6KMJzsdbBi_ldWOPJaV81M,3390 +pydantic/v1/main.py,sha256=zuNpdN5Q0V0wG2UUTKt0HUy3XJ4OAvPSZDdiXY-FIzs,44824 +pydantic/v1/mypy.py,sha256=AiZYkv127-WsgL9vwvLqj0dS8dz-HUMbH9Yvvlq4bfE,38949 +pydantic/v1/networks.py,sha256=HYNtKAfOmOnKJpsDg1g6SIkj9WPhU_-i8l5e2JKBpG4,22124 +pydantic/v1/parse.py,sha256=BJtdqiZRtav9VRFCmOxoY-KImQmjPy-A_NoojiFUZxY,1821 +pydantic/v1/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pydantic/v1/schema.py,sha256=aqBuA--cq8gAVkim5BJPFASHzOZ8dFtmFX_fNGr6ip4,47801 +pydantic/v1/tools.py,sha256=1lDdXHk0jL5uP3u5RCYAvUAlGClgAO-45lkq9j7fyBA,2881 +pydantic/v1/types.py,sha256=Fltx5GoP_qaUmAktlGz7nFeJa13yNy3FY1-RcMzEVt8,35455 +pydantic/v1/typing.py,sha256=HNtuKvgH4EHIeb2ytkd7VSyG6mxP9RKqEqEql-1ab14,19720 +pydantic/v1/utils.py,sha256=M5FRyfNUb1A2mk9laGgCVdfHHb3AtQgrjO5qfyBf4xA,25989 +pydantic/v1/validators.py,sha256=lyUkn1MWhHxlCX5ZfEgFj_CAHojoiPcaQeMdEM9XviU,22187 +pydantic/v1/version.py,sha256=HXnXW-1bMW5qKhlr5RgOEPohrZDCDSuyy8-gi8GCgZo,1039 +pydantic/validate_call_decorator.py,sha256=8jqLlgXTjWEj4dXDg0wI3EGQKkb0JnCsL_JSUjbU5Sg,4389 +pydantic/validators.py,sha256=pwbIJXVb1CV2mAE4w_EGfNj7DwzsKaWw_tTL6cviTus,146 +pydantic/version.py,sha256=JDhisYPKOiY2NwByzNV_hrl-cVn9ITA_ghLwiSB-2f8,2710 +pydantic/warnings.py,sha256=gqDTQ2FX7wGLZJV3XboQSiRXKHknss3pfIOXL0BDXTk,3772 diff --git a/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/WHEEL b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..488c6260c10f2e88fa1fae58a63fccec8d600cd1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic-2.11.7.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 to present Pydantic Services Inc. and individual contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..8d14294c79f40770d26d70373245dcd4fdad961c --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/METADATA @@ -0,0 +1,215 @@ +Metadata-Version: 2.4 +Name: pytest +Version: 8.4.2 +Summary: pytest: simple powerful testing with Python +Author: Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin, Others (See AUTHORS) +License: MIT +Project-URL: Changelog, https://docs.pytest.org/en/stable/changelog.html +Project-URL: Contact, https://docs.pytest.org/en/stable/contact.html +Project-URL: Funding, https://docs.pytest.org/en/stable/sponsor.html +Project-URL: Homepage, https://docs.pytest.org/en/latest/ +Project-URL: Source, https://github.com/pytest-dev/pytest +Project-URL: Tracker, https://github.com/pytest-dev/pytest/issues +Keywords: test,unittest +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Utilities +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: colorama>=0.4; sys_platform == "win32" +Requires-Dist: exceptiongroup>=1; python_version < "3.11" +Requires-Dist: iniconfig>=1 +Requires-Dist: packaging>=20 +Requires-Dist: pluggy<2,>=1.5 +Requires-Dist: pygments>=2.7.2 +Requires-Dist: tomli>=1; python_version < "3.11" +Provides-Extra: dev +Requires-Dist: argcomplete; extra == "dev" +Requires-Dist: attrs>=19.2; extra == "dev" +Requires-Dist: hypothesis>=3.56; extra == "dev" +Requires-Dist: mock; extra == "dev" +Requires-Dist: requests; extra == "dev" +Requires-Dist: setuptools; extra == "dev" +Requires-Dist: xmlschema; extra == "dev" +Dynamic: license-file + +.. image:: https://github.com/pytest-dev/pytest/raw/main/doc/en/img/pytest_logo_curves.svg + :target: https://docs.pytest.org/en/stable/ + :align: center + :height: 200 + :alt: pytest + + +------ + +.. image:: https://img.shields.io/pypi/v/pytest.svg + :target: https://pypi.org/project/pytest/ + +.. 
image:: https://img.shields.io/conda/vn/conda-forge/pytest.svg + :target: https://anaconda.org/conda-forge/pytest + +.. image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.org/project/pytest/ + +.. image:: https://codecov.io/gh/pytest-dev/pytest/branch/main/graph/badge.svg + :target: https://codecov.io/gh/pytest-dev/pytest + :alt: Code coverage Status + +.. image:: https://github.com/pytest-dev/pytest/actions/workflows/test.yml/badge.svg + :target: https://github.com/pytest-dev/pytest/actions?query=workflow%3Atest + +.. image:: https://results.pre-commit.ci/badge/github/pytest-dev/pytest/main.svg + :target: https://results.pre-commit.ci/latest/github/pytest-dev/pytest/main + :alt: pre-commit.ci status + +.. image:: https://www.codetriage.com/pytest-dev/pytest/badges/users.svg + :target: https://www.codetriage.com/pytest-dev/pytest + +.. image:: https://readthedocs.org/projects/pytest/badge/?version=latest + :target: https://pytest.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/badge/Discord-pytest--dev-blue + :target: https://discord.com/invite/pytest-dev + :alt: Discord + +.. image:: https://img.shields.io/badge/Libera%20chat-%23pytest-orange + :target: https://web.libera.chat/#pytest + :alt: Libera chat + + +The ``pytest`` framework makes it easy to write small tests, yet +scales to support complex functional testing for applications and libraries. + +An example of a simple test: + +.. code-block:: python + + # content of test_sample.py + def inc(x): + return x + 1 + + + def test_answer(): + assert inc(3) == 5 + + +To execute it:: + + $ pytest + ============================= test session starts ============================= + collected 1 items + + test_sample.py F + + ================================== FAILURES =================================== + _________________________________ test_answer _________________________________ + + def test_answer(): + > assert inc(3) == 5 + E assert 4 == 5 + E + where 4 = inc(3) + + test_sample.py:5: AssertionError + ========================== 1 failed in 0.04 seconds =========================== + + +Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started `_ for more examples. + + +Features +-------- + +- Detailed info on failing `assert statements `_ (no need to remember ``self.assert*`` names) + +- `Auto-discovery + `_ + of test modules and functions + +- `Modular fixtures `_ for + managing small or parametrized long-lived test resources + +- Can run `unittest `_ (or trial) + test suites out of the box + +- Python 3.9+ or PyPy3 + +- Rich plugin architecture, with over 1300+ `external plugins `_ and thriving community + + +Documentation +------------- + +For full documentation, including installation, tutorials and PDF documents, please see https://docs.pytest.org/en/stable/. + + +Bugs/Requests +------------- + +Please use the `GitHub issue tracker `_ to submit bugs or request features. + + +Changelog +--------- + +Consult the `Changelog `__ page for fixes and enhancements of each version. + + +Support pytest +-------------- + +`Open Collective`_ is an online funding platform for open and transparent communities. +It provides tools to raise money and share your finances in full transparency. + +It is the platform of choice for individuals and companies that want to make one-time or +monthly donations directly to the project. + +See more details in the `pytest collective`_. + +.. 
_Open Collective: https://opencollective.com +.. _pytest collective: https://opencollective.com/pytest + + +pytest for enterprise +--------------------- + +Available as part of the Tidelift Subscription. + +The maintainers of pytest and thousands of other packages are working with Tidelift to deliver commercial support and +maintenance for the open source dependencies you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. + +`Learn more. `_ + +Security +^^^^^^^^ + +pytest has never been associated with a security vulnerability, but in any case, to report a +security vulnerability please use the `Tidelift security contact `_. +Tidelift will coordinate the fix and disclosure. + + +License +------- + +Copyright Holger Krekel and others, 2004. + +Distributed under the terms of the `MIT`_ license, pytest is free and open source software. + +.. _`MIT`: https://github.com/pytest-dev/pytest/blob/main/LICENSE diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2364666158d0ffc0c67f6ea36efcbed13df68bc2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/RECORD @@ -0,0 +1,156 @@ +../../../bin/py.test,sha256=MU-anqUwBvlScDEA5gF4dwRmtVkQx1ZyICQszZG3898,291 +../../../bin/pytest,sha256=MU-anqUwBvlScDEA5gF4dwRmtVkQx1ZyICQszZG3898,291 +__pycache__/py.cpython-310.pyc,, +_pytest/__init__.py,sha256=4IdRJhnW5XG2KlaJkOxn5_TC9WeQ5tXDSF7tbb4vEso,391 +_pytest/__pycache__/__init__.cpython-310.pyc,, +_pytest/__pycache__/_argcomplete.cpython-310.pyc,, +_pytest/__pycache__/_version.cpython-310.pyc,, +_pytest/__pycache__/cacheprovider.cpython-310.pyc,, +_pytest/__pycache__/capture.cpython-310.pyc,, +_pytest/__pycache__/compat.cpython-310.pyc,, +_pytest/__pycache__/debugging.cpython-310.pyc,, +_pytest/__pycache__/deprecated.cpython-310.pyc,, +_pytest/__pycache__/doctest.cpython-310.pyc,, +_pytest/__pycache__/faulthandler.cpython-310.pyc,, +_pytest/__pycache__/fixtures.cpython-310.pyc,, +_pytest/__pycache__/freeze_support.cpython-310.pyc,, +_pytest/__pycache__/helpconfig.cpython-310.pyc,, +_pytest/__pycache__/hookspec.cpython-310.pyc,, +_pytest/__pycache__/junitxml.cpython-310.pyc,, +_pytest/__pycache__/legacypath.cpython-310.pyc,, +_pytest/__pycache__/logging.cpython-310.pyc,, +_pytest/__pycache__/main.cpython-310.pyc,, +_pytest/__pycache__/monkeypatch.cpython-310.pyc,, +_pytest/__pycache__/nodes.cpython-310.pyc,, +_pytest/__pycache__/outcomes.cpython-310.pyc,, +_pytest/__pycache__/pastebin.cpython-310.pyc,, +_pytest/__pycache__/pathlib.cpython-310.pyc,, +_pytest/__pycache__/pytester.cpython-310.pyc,, +_pytest/__pycache__/pytester_assertions.cpython-310.pyc,, +_pytest/__pycache__/python.cpython-310.pyc,, +_pytest/__pycache__/python_api.cpython-310.pyc,, +_pytest/__pycache__/raises.cpython-310.pyc,, +_pytest/__pycache__/recwarn.cpython-310.pyc,, +_pytest/__pycache__/reports.cpython-310.pyc,, +_pytest/__pycache__/runner.cpython-310.pyc,, +_pytest/__pycache__/scope.cpython-310.pyc,, +_pytest/__pycache__/setuponly.cpython-310.pyc,, +_pytest/__pycache__/setupplan.cpython-310.pyc,, +_pytest/__pycache__/skipping.cpython-310.pyc,, +_pytest/__pycache__/stash.cpython-310.pyc,, +_pytest/__pycache__/stepwise.cpython-310.pyc,, +_pytest/__pycache__/terminal.cpython-310.pyc,, +_pytest/__pycache__/threadexception.cpython-310.pyc,, 
+_pytest/__pycache__/timing.cpython-310.pyc,, +_pytest/__pycache__/tmpdir.cpython-310.pyc,, +_pytest/__pycache__/tracemalloc.cpython-310.pyc,, +_pytest/__pycache__/unittest.cpython-310.pyc,, +_pytest/__pycache__/unraisableexception.cpython-310.pyc,, +_pytest/__pycache__/warning_types.cpython-310.pyc,, +_pytest/__pycache__/warnings.cpython-310.pyc,, +_pytest/_argcomplete.py,sha256=gh0pna66p4LVb2D8ST4568WGxvdInGT43m6slYhqNqU,3776 +_pytest/_code/__init__.py,sha256=BKbowoYQADKjAJmTWdQ8SSQLbBBsh0-dZj3TGjtn6yM,521 +_pytest/_code/__pycache__/__init__.cpython-310.pyc,, +_pytest/_code/__pycache__/code.cpython-310.pyc,, +_pytest/_code/__pycache__/source.cpython-310.pyc,, +_pytest/_code/code.py,sha256=3WXnSecVdF1TgU7oRQV6b3Rfe6XuXPNWxsKdbBDep40,55913 +_pytest/_code/source.py,sha256=tsswD_1rYd8F7P9yloO1OqWWEYMw3_m5Z8Hr3SnA7pE,7773 +_pytest/_io/__init__.py,sha256=pkLF29VEFr6Dlr3eOtJL8sf47RLFt1Jf4X1DZBPlYmc,190 +_pytest/_io/__pycache__/__init__.cpython-310.pyc,, +_pytest/_io/__pycache__/pprint.cpython-310.pyc,, +_pytest/_io/__pycache__/saferepr.cpython-310.pyc,, +_pytest/_io/__pycache__/terminalwriter.cpython-310.pyc,, +_pytest/_io/__pycache__/wcwidth.cpython-310.pyc,, +_pytest/_io/pprint.py,sha256=GLBKL6dmnRr92GnVMkNzMkKqx08Op7tdJSeh3AewonY,19622 +_pytest/_io/saferepr.py,sha256=Hhx5F-75iz03hdk-WO86Bmy9RBuRHsuJj-YUzozfrgo,4082 +_pytest/_io/terminalwriter.py,sha256=T67ZhHYSIaOP3RtQcxELknyMbVl1DOZ_buDPGGiAJEY,8849 +_pytest/_io/wcwidth.py,sha256=cUEJ74UhweICwbKvU2q6noZcNgD0QlBEB9CfakGYaqA,1289 +_pytest/_py/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +_pytest/_py/__pycache__/__init__.cpython-310.pyc,, +_pytest/_py/__pycache__/error.cpython-310.pyc,, +_pytest/_py/__pycache__/path.cpython-310.pyc,, +_pytest/_py/error.py,sha256=kGQ7F8_fZ6YVBhAx-u9mkTQBTx0qIxxnVMC0CgiOd70,3475 +_pytest/_py/path.py,sha256=OnxtzhK8fTiuDdO1SEFgePeKNtcVx7R2E6CU0k08QAo,49220 +_pytest/_version.py,sha256=1PTJe17TiDwm1rOzsZVTOjR0QbsAJhs03xr5VUWIkgM,704 +_pytest/assertion/__init__.py,sha256=OjnJm4j6VHgwYjKvW8d-KFefjEdOSONFF4z10o9r7eg,7120 +_pytest/assertion/__pycache__/__init__.cpython-310.pyc,, +_pytest/assertion/__pycache__/rewrite.cpython-310.pyc,, +_pytest/assertion/__pycache__/truncate.cpython-310.pyc,, +_pytest/assertion/__pycache__/util.cpython-310.pyc,, +_pytest/assertion/rewrite.py,sha256=8jEEirkl74WF8wmhAiRwQ4rix3_6sd4OmGk-ZVR8MWw,48636 +_pytest/assertion/truncate.py,sha256=W4IyhGT0fqdUwgZTLWnw34_r4aFrtI4Bdadcgbs-Vrg,5437 +_pytest/assertion/util.py,sha256=3fgPprVDV7uCaC5-yJ6jvxzp2QqXxe7TxekldwuJl-0,20713 +_pytest/cacheprovider.py,sha256=rgBJnzmvsfJmQj-KtDG1gmmzCuPzU9qZbf-cYvurYDA,22375 +_pytest/capture.py,sha256=kulumJdRdHu7zoosOr4lfHR0ce6LsOthau9Byrw8xV4,36829 +_pytest/compat.py,sha256=L5QaTlFoFrztKCp7nkXAdnKdVVKmRDNpGE1SGa6NzIs,10662 +_pytest/config/__init__.py,sha256=mghX197CfFOJmGqYrs9h9auGnkbnLau45UaVpLlkHto,72712 +_pytest/config/__pycache__/__init__.cpython-310.pyc,, +_pytest/config/__pycache__/argparsing.cpython-310.pyc,, +_pytest/config/__pycache__/compat.cpython-310.pyc,, +_pytest/config/__pycache__/exceptions.cpython-310.pyc,, +_pytest/config/__pycache__/findpaths.cpython-310.pyc,, +_pytest/config/argparsing.py,sha256=k5hPuI0ZaeMqItS0xRO6deuJyV7mAKH10coNZLr48Vs,19102 +_pytest/config/compat.py,sha256=djDt_XTPwXDIgnnopti2ZVrqtwzO5hFWiMhgU5dgIM4,2947 +_pytest/config/exceptions.py,sha256=lUKnOtpRqK-qNL6JfOP-8tRqpmHU34CVxguR5y0Qfbw,288 +_pytest/config/findpaths.py,sha256=47u1MMxdFg1g-IsXfi2Pa67W21B8Y5rw2LoMQmUKYb4,8404 +_pytest/debugging.py,sha256=JkV7Ob7wQ53TFGkQ0Ta96jAMYGubgdXiEs39T7FPzHQ,13947 
+_pytest/deprecated.py,sha256=sO9UiqEdy9Z-NCvDoYYA0QtafYogAb7lP5M9N_Hpnak,3147 +_pytest/doctest.py,sha256=TLSgJwd2PP59vS4Wuu1hU1caX-ozsXD9Rmqj-sb1Xfk,26259 +_pytest/faulthandler.py,sha256=bkhURB2--RMSIcWhm2ifza4-GlzIUP_5Elu7T7e-LDs,3683 +_pytest/fixtures.py,sha256=Qd_XLB5tphmQa_hJRWlMHLm5GbrRgwMnj6w1YwyMd0g,77746 +_pytest/freeze_support.py,sha256=X94IxipqebeA_HgzJh8dbjqGnrtEQFuMIC5hK7SGWXw,1300 +_pytest/helpconfig.py,sha256=LlPCtN_YyMVcfhn2DKstBA-N2IEMfMyPzWB-3RVu2cE,9386 +_pytest/hookspec.py,sha256=ylzm14WXDtMaIL1RNLrEcViS_MhSjqshWCdt-T7xHnI,42849 +_pytest/junitxml.py,sha256=UeqT-yASK4ql8sQSuc-Ua22vcZzeRw9sosUEML7UE10,25441 +_pytest/legacypath.py,sha256=_l6v8akNMfTc5TAjvbc6M-_t157p9QE6-118WM0DRt8,16588 +_pytest/logging.py,sha256=TZ67JQP_3Ylt0p11D2J68L_os9glsuggMvec0Hljtb8,35234 +_pytest/main.py,sha256=HPyHQ_0ZKEnSMJNT3j64tC3Ng4AeHRGxFp28dRmDM9c,37689 +_pytest/mark/__init__.py,sha256=nBC3MU-fKXOJ8_QELTl5YyOtFc36ef_59lbKXDKY6is,9885 +_pytest/mark/__pycache__/__init__.cpython-310.pyc,, +_pytest/mark/__pycache__/expression.cpython-310.pyc,, +_pytest/mark/__pycache__/structures.cpython-310.pyc,, +_pytest/mark/expression.py,sha256=R5KUyktUiRQGJngXosvksgbkMLWBmYqELhSRV_6eXx0,10154 +_pytest/mark/structures.py,sha256=49SHF81RJQF_SIM_M9J37tDTqNBAQvf7ps19RfVURjI,22972 +_pytest/monkeypatch.py,sha256=nfA7kmITAJ1wbjy-RR0iB52XxiPaQpgsqnIEGaut1cU,14625 +_pytest/nodes.py,sha256=aTsDhbLEVkZ2cgC8UXQW53bBDD5Y7l7ZNwB0kb5Nho4,26540 +_pytest/outcomes.py,sha256=DPRyqSzsRn-0ycMvb1LL7kEoL1bxNPc5Rk4hC9xomrw,10502 +_pytest/pastebin.py,sha256=p92zJtSNz9-xDEFzqQ3zemYggXRaDnxD6X4IyitevbA,4155 +_pytest/pathlib.py,sha256=gSeAg1m6qnEXdYYrMr--Cn5cFqLoyZI9YN3UXwMbZvo,37622 +_pytest/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +_pytest/pytester.py,sha256=zWYjgf-56aPmradO9Ug4wnhLa6SRL5aB3K_0O_uyohc,61960 +_pytest/pytester_assertions.py,sha256=xX_HbFPB-Rz_NNDttTY39ft7_wZLvPgQQBVevSCeVmA,2253 +_pytest/python.py,sha256=6_MahzgGWtQYw1TO7tmVYpJgVVh8ZkUB6fjRlOQHggI,66627 +_pytest/python_api.py,sha256=qBzXX85i_uXlDquZVnvcGRGMP1sppFLq7_FBG6BRu_0,31323 +_pytest/raises.py,sha256=IuHTdArTq6pO34hjNFEHaKZ95ZUJEk9gWS0UxUJI3Fw,60196 +_pytest/recwarn.py,sha256=lNRs-KreTNBr5HoZIqWj4m6VRO7_1Ff-gcBhmYhg_lI,13245 +_pytest/reports.py,sha256=yiIT-XerbgHou8D7dScoL9YvpBryBldbJitXSXfWORA,21406 +_pytest/runner.py,sha256=EPJDPMpz76D5dyxswZARmm6F1n9axh8YFUnBTk5kOM8,19543 +_pytest/scope.py,sha256=pB7jsiisth16PBFacV1Yxd3Pj3YAx2dmlSmGbG4mw6A,2738 +_pytest/setuponly.py,sha256=BsRrC4ERDVr42-2G_L0AxhNU4XVwbMsy5S0lOvKr8wA,3167 +_pytest/setupplan.py,sha256=l-ycFNxDZPyY52wh4f7yaqhzZ7SW1ijSKnQLmqzDZWA,1184 +_pytest/skipping.py,sha256=k8zuhWw8WlolGpBe_av51QfaPpnmOYYUPd-Z6huoAWA,10623 +_pytest/stash.py,sha256=5pE3kDx4q855TW9aVvYTdrkkKlMDU6-xiX4luKpJEgI,3090 +_pytest/stepwise.py,sha256=kD81DrnhnclKBmMfauwQmbeMbYUvuw07w5WnNkmIdEQ,7689 +_pytest/terminal.py,sha256=q99zvW3AFxD1OFfk0QvL7AehHWVbPMWf7k5EE73Wmxk,60419 +_pytest/threadexception.py,sha256=hTccpzZUrrQkDROVFAqHgXwAU481ca4Mq4CA4YB7my4,4953 +_pytest/timing.py,sha256=08clP5PJAL4VzzTqlw8_f4R9mL_MnzNqz7Ji56IIPvA,3065 +_pytest/tmpdir.py,sha256=I2kYwJAWDB9rk14WL_RKsnOnACIdX0CsFYkr515FA-4,11263 +_pytest/tracemalloc.py,sha256=lCUB_YUAb6R1vqq_b-LSYSXy-Tidbn2m7tfzmWAUrjk,778 +_pytest/unittest.py,sha256=-ifovmTfh-RnLGB1c9UCBPpg0rHQMXaadz08fUfqHkc,19249 +_pytest/unraisableexception.py,sha256=dNaBpBHkOB4pOISoaMdau2ojrGoc_i4ux76DVXLLT-w,5179 +_pytest/warning_types.py,sha256=4bNTmyyVvq1npipU4Z_irSgmPQumKOiMylvAn7g8MX8,4239 +_pytest/warnings.py,sha256=YTT4OJZKTgM7xqk348-NHZMHWCmMknxww6bDwibRBQs,5237 
+py.py,sha256=txZ1tdmEW6CBTp6Idn-I2sOzzA0xKNoCi9Re27Uj6HE,329 +pytest-8.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pytest-8.4.2.dist-info/METADATA,sha256=k9bF7wqXFNU3FiQwNQN7d_p9X5cFlsSEM4h89X9_Z1o,7656 +pytest-8.4.2.dist-info/RECORD,, +pytest-8.4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91 +pytest-8.4.2.dist-info/entry_points.txt,sha256=8IPrHPH3LNZQ7v5tNEOcNTZYk_SheNg64jsTM9erqL4,77 +pytest-8.4.2.dist-info/licenses/AUTHORS,sha256=wgF9YogmLPnDmuzI5R3zQiqfK7yh5LVdKEkQIgcMdXc,7369 +pytest-8.4.2.dist-info/licenses/LICENSE,sha256=yoNqX57Mo7LzUCMPqiCkj7ixRWU7VWjXhIYt-GRwa5s,1091 +pytest-8.4.2.dist-info/top_level.txt,sha256=yyhjvmXH7-JOaoQIdmNQHPuoBCxOyXS3jIths_6C8A4,18 +pytest/__init__.py,sha256=Zpk6XjkFAF4JgRWbR5TRCxrazzQaWKRNaWrSxEQtzcY,5373 +pytest/__main__.py,sha256=oVDrGGo7N0TNyzXntUblcgTKbhHGWtivcX5TC7tEcKo,154 +pytest/__pycache__/__init__.cpython-310.pyc,, +pytest/__pycache__/__main__.cpython-310.pyc,, +pytest/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e7fa31b6f3f78deb1022c1f7927f07d4d16da822 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..192205dfa5fda066e479ba073379747ae0abbba5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +py.test = pytest:console_main +pytest = pytest:console_main diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/AUTHORS b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/AUTHORS new file mode 100644 index 0000000000000000000000000000000000000000..df338aa8c6e0b6645de42001b5eedd64c8dc59a1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/AUTHORS @@ -0,0 +1,497 @@ +Holger Krekel, holger at merlinux eu +merlinux GmbH, Germany, office at merlinux eu + +Contributors include:: + +Aaron Coleman +Abdeali JK +Abdelrahman Elbehery +Abhijeet Kasurde +Adam Johnson +Adam Stewart +Adam Uhlir +Ahn Ki-Wook +Akhilesh Ramakrishnan +Akiomi Kamakura +Alan Velasco +Alessio Izzo +Alex Jones +Alex Lambson +Alexander Johnson +Alexander King +Alexei Kozlenok +Alice Purcell +Allan Feldman +Aly Sivji +Amir Elkess +Ammar Askar +Anatoly Bubenkoff +Anders Hovmöller +Andras Mitzki +Andras Tim +Andrea Cimatoribus +Andreas Motl +Andreas Zeidler +Andrew Pikul +Andrew Shapton +Andrey Paramonov +Andrzej Klajnert +Andrzej Ostrowski +Andy Freeland +Anita Hammer +Anthon van der Neut +Anthony Shaw +Anthony Sottile +Anton Grinevich +Anton Lodder +Anton Zhilin +Antony Lee +Arel Cordero +Arias Emmanuel +Ariel Pillemer +Armin Rigo +Aron Coyle +Aron Curzon +Arthur Richard +Ashish Kurmi +Ashley Whetter +Aviral Verma +Aviv Palivoda +Babak Keyvani +Bahram Farahmand +Barney Gale +Ben Brown +Ben Gartner +Ben Leith +Ben Webb +Benjamin Peterson +Benjamin Schubert +Bernard Pratz +Bo Wu +Bob Ippolito +Brian Dorsey +Brian Larsen +Brian Maissy +Brian Okken +Brianna Laugher +Bruno Oliveira +Cal Jacobson 
+Cal Leeming +Carl Friedrich Bolz +Carlos Jenkins +Ceridwen +Charles Cloud +Charles Machalow +Charnjit SiNGH (CCSJ) +Cheuk Ting Ho +Chris Mahoney +Chris Lamb +Chris NeJame +Chris Rose +Chris Wheeler +Christian Boelsen +Christian Clauss +Christian Fetzer +Christian Neumüller +Christian Theunert +Christian Tismer +Christine Mecklenborg +Christoph Buelter +Christopher Dignam +Christopher Gilling +Christopher Head +Claire Cecil +Claudio Madotto +Clément M.T. Robert +Cornelius Riemenschneider +CrazyMerlyn +Cristian Vera +Cyrus Maden +Daara Shaw +Damian Skrzypczak +Daniel Grana +Daniel Hahler +Daniel Miller +Daniel Nuri +Daniel Sánchez Castelló +Daniel Valenzuela Zenteno +Daniel Wandschneider +Daniele Procida +Danielle Jenkins +Daniil Galiev +Dave Hunt +David Díaz-Barquero +David Mohr +David Paul Röthlisberger +David Peled +David Szotten +David Vierra +Daw-Ran Liou +Debi Mishra +Denis Kirisov +Denivy Braiam Rück +Deysha Rivera +Dheeraj C K +Dhiren Serai +Diego Russo +Dmitry Dygalo +Dmitry Pribysh +Dominic Mortlock +Duncan Betts +Edison Gustavo Muenz +Edoardo Batini +Edson Tadeu M. Manoel +Eduardo Schettino +Edward Haigh +Eero Vaher +Eli Boyarski +Elizaveta Shashkova +Éloi Rivard +Emil Hjelm +Endre Galaczi +Eric Hunsberger +Eric Liu +Eric Siegerman +Eric Yuan +Erik Aronesty +Erik Hasse +Erik M. Bray +Ethan Wass +Evan Kepner +Evgeny Seliverstov +Fabian Sturm +Fabien Zarifian +Fabio Zadrozny +Farbod Ahmadian +faph +Felix Hofstätter +Felix Nieuwenhuizen +Feng Ma +Florian Bruhin +Florian Dahlitz +Floris Bruynooghe +Frank Hoffmann +Fraser Stark +Gabriel Landau +Gabriel Reis +Garvit Shubham +Gene Wood +George Kussumoto +Georgy Dyuldin +Gergely Kalmár +Gleb Nikonorov +Graeme Smecher +Graham Horler +Greg Price +Gregory Lee +Grig Gheorghiu +Grigorii Eremeev (budulianin) +Guido Wesdorp +Guoqiang Zhang +Harald Armin Massa +Harshna +Henk-Jaap Wagenaar +Holger Kohr +Hugo van Kemenade +Hui Wang (coldnight) +Ian Bicking +Ian Lesperance +Ilya Konstantinov +Ionuț Turturică +Isaac Virshup +Israel Fruchter +Itxaso Aizpurua +Iwan Briquemont +Jaap Broekhuizen +Jake VanderPlas +Jakob van Santen +Jakub Mitoraj +James Bourbeau +James Frost +Jan Balster +Janne Vanhala +Jason R. Coombs +Javier Domingo Cansino +Javier Romero +Jeff Rackauckas +Jeff Widman +Jenni Rinker +Jens Tröger +Jiajun Xu +John Eddie Ayson +John Litborn +John Towler +Jon Parise +Jon Sonesen +Jonas Obrist +Jordan Guymon +Jordan Moldow +Jordan Speicher +Joseph Hunkeler +Joseph Sawaya +Josh Karpel +Joshua Bronson +Julian Valentin +Jurko Gospodnetić +Justice Ndou +Justyna Janczyszyn +Kale Kundert +Kamran Ahmad +Kenny Y +Karl O. Pinc +Karthikeyan Singaravelan +Katarzyna Jachim +Katarzyna Król +Katerina Koukiou +Keri Volans +Kevin C +Kevin Cox +Kevin Hierro Carrasco +Kevin J. Foley +Kian Eliasi +Kian-Meng Ang +Kodi B. Arfer +Kojo Idrissa +Kostis Anagnostopoulos +Kristoffer Nordström +Kyle Altendorf +Lawrence Mitchell +Lee Kamentsky +Leonardus Chen +Lev Maximov +Levon Saldamli +Lewis Cowles +Liam DeVoe +Llandy Riveron Del Risco +Loic Esteve +lovetheguitar +Lukas Bednar +Luke Murphy +Maciek Fijalkowski +Maggie Chung +Maho +Maik Figura +Mandeep Bhutani +Manuel Krebber +Marc Mueller +Marc Schlaich +Marcelo Duarte Trevisani +Marcin Augustynów +Marcin Bachry +Marc Bresson +Marco Gorelli +Mark Abramowitz +Mark Dickinson +Mark Vong +Marko Pacak +Markus Unterwaditzer +Martijn Faassen +Martin Altmayer +Martin K. 
Scherer +Martin Prusse +Mathieu Clabaut +Matt Bachmann +Matt Duck +Matt Williams +Matthias Hafner +Maxim Filipenko +Maximilian Cosmo Sitter +mbyt +Michael Aquilina +Michael Birtwell +Michael Droettboom +Michael Goerz +Michael Krebs +Michael Seifert +Michael Vogt +Michal Wajszczuk +Michał Górny +Michał Zięba +Mickey Pashov +Mihai Capotă +Mihail Milushev +Mike Hoyle (hoylemd) +Mike Lundy +Milan Lesnek +Miro Hrončok +mrbean-bremen +Nathan Goldbaum +Nathan Rousseau +Nathaniel Compton +Nathaniel Waisbrot +Nauman Ahmed +Ned Batchelder +Neil Martin +Neven Mundar +Nicholas Devenish +Nicholas Murphy +Niclas Olofsson +Nicolas Delaby +Nicolas Simonds +Nico Vidal +Nikolay Kondratyev +Nipunn Koorapati +Oleg Pidsadnyi +Oleg Sushchenko +Oleksandr Zavertniev +Olga Matoula +Oliver Bestwalter +Omar Kohl +Omer Hadari +Ondřej Súkup +Oscar Benjamin +Parth Patel +Patrick Hayes +Patrick Lannigan +Paul Müller +Paul Reece +Pauli Virtanen +Pavel Karateev +Pavel Zhukov +Paweł Adamczak +Pedro Algarvio +Peter Gessler +Petter Strandmark +Philipp Loose +Pierre Sassoulas +Pieter Mulder +Piotr Banaszkiewicz +Piotr Helm +Poulami Sau +Prakhar Gurunani +Prashant Anand +Prashant Sharma +Pulkit Goyal +Punyashloka Biswal +Quentin Pradet +q0w +Ralf Schmitt +Ralph Giles +Ram Rachum +Ran Benita +Raphael Castaneda +Raphael Pierzina +Rafal Semik +Reza Mousavi +Raquel Alegre +Ravi Chandra +Reagan Lee +Rob Arrow +Robert Holt +Roberto Aldera +Roberto Polli +Roland Puntaier +Romain Dorgueil +Roman Bolshakov +Ronny Pfannschmidt +Ross Lawley +Ruaridh Williamson +Russel Winder +Russell Martin +Ryan Puddephatt +Ryan Wooden +Sadra Barikbin +Saiprasad Kale +Samuel Colvin +Samuel Dion-Girardeau +Samuel Jirovec +Samuel Searles-Bryant +Samuel Therrien (Avasam) +Samuele Pedroni +Sanket Duthade +Sankt Petersbug +Saravanan Padmanaban +Sean Malloy +Segev Finer +Serhii Mozghovyi +Seth Junot +Shantanu Jain +Sharad Nair +Shaygan Hooshyari +Shubham Adep +Simon Blanchard +Simon Gomizelj +Simon Holesch +Simon Kerr +Skylar Downes +Srinivas Reddy Thatiparthy +Stefaan Lippens +Stefan Farmbauer +Stefan Scherfke +Stefan Zimmermann +Stefanie Molin +Stefano Taschini +Steffen Allner +Stephan Obermann +Sven +Sven-Hendrik Haase +Sviatoslav Sydorenko +Sylvain Marié +Tadek Teleżyński +Takafumi Arakaki +Takumi Otani +Taneli Hukkinen +Tanvi Mehta +Tanya Agarwal +Tarcisio Fischer +Tareq Alayan +Tatiana Ovary +Ted Xiao +Terje Runde +Thomas Grainger +Thomas Hisch +Tianyu Dongfang +Tim Hoffmann +Tim Strazny +TJ Bruno +Tobias Diez +Tobias Petersen +Tom Dalton +Tom Viner +Tomáš Gavenčiak +Tomer Keren +Tony Narlock +Tor Colvin +Trevor Bekolay +Tushar Sadhwani +Tyler Goodlet +Tyler Smart +Tzu-ping Chung +Vasily Kuznetsov +Victor Maryama +Victor Rodriguez +Victor Uriarte +Vidar T. 
Fauske +Vijay Arora +Virendra Patil +Virgil Dupras +Vitaly Lashmanov +Vivaan Verma +Vlad Dragos +Vlad Radziuk +Vladyslav Rachek +Volodymyr Kochetkov +Volodymyr Piskun +Wei Lin +Wil Cooley +Will Riley +William Lee +Wim Glenn +Wouter van Ackooy +Xixi Zhao +Xuan Luong +Xuecong Liao +Yannick Péroux +Yao Xiao +Yoav Caspi +Yuliang Shao +Yusuke Kadowaki +Yutian Li +Yuval Shimon +Zac Hatfield-Dodds +Zach Snicker +Zachary Kneupper +Zachary OBrien +Zhouxin Qiu +Zoltán Máté +Zsolt Cserna diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..c3f1657fce94589bd1ec7cead810639047f3d359 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2004 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
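Editor's note: the pytest README recorded in METADATA above highlights plain-``assert`` introspection and modular fixtures. A minimal sketch combining both (hypothetical file name, illustrative only, not part of the recorded wheel):

.. code-block:: python

    # content of test_fixture_sample.py -- illustrative only
    import pytest


    @pytest.fixture
    def numbers():
        # a small test resource managed by pytest's fixture system
        return [1, 2, 3]


    @pytest.mark.parametrize("factor, expected", [(1, 6), (2, 12)])
    def test_scaled_sum(numbers, factor, expected):
        # a plain assert; on failure pytest reports each sub-expression
        assert sum(n * factor for n in numbers) == expected

Running ``pytest`` on this file collects and runs both parametrized cases; no ``self.assert*`` helpers are needed.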
diff --git a/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..3084ae51ecc94b5979ae9075ebb0e08fbda8bdbd --- /dev/null +++ b/venv/lib/python3.10/site-packages/pytest-8.4.2.dist-info/top_level.txt @@ -0,0 +1,3 @@ +_pytest +py +pytest diff --git a/venv/lib/python3.10/site-packages/pyximport/__init__.py b/venv/lib/python3.10/site-packages/pyximport/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..378c42281f903e292bd7c9cf3263849812cd09af --- /dev/null +++ b/venv/lib/python3.10/site-packages/pyximport/__init__.py @@ -0,0 +1,4 @@ +from .pyximport import * + +# replicate docstring +from .pyximport import __doc__ diff --git a/venv/lib/python3.10/site-packages/pyximport/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pyximport/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c8f9a512d7ae11550361291d374789a6ee684b0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pyximport/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyxbuild.cpython-310.pyc b/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyxbuild.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..769bace116aba9ede82ffab83fbbc888821c6afd Binary files /dev/null and b/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyxbuild.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyximport.cpython-310.pyc b/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyximport.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe5baf55840c3ebb13bb200553f21f171bd0dfda Binary files /dev/null and b/venv/lib/python3.10/site-packages/pyximport/__pycache__/pyximport.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pyximport/pyxbuild.py b/venv/lib/python3.10/site-packages/pyximport/pyxbuild.py new file mode 100644 index 0000000000000000000000000000000000000000..61f9747eec4ea549db9970500f6199e8aaa4e7a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pyximport/pyxbuild.py @@ -0,0 +1,160 @@ +"""Build a Pyrex file from .pyx source to .so loadable module using +the installed distutils infrastructure. 
Call: + +out_fname = pyx_to_dll("foo.pyx") +""" +import os +import sys + +from distutils.errors import DistutilsArgError, DistutilsError, CCompilerError +from distutils.extension import Extension +from distutils.util import grok_environment_error +try: + from Cython.Distutils.build_ext import build_ext + HAS_CYTHON = True +except ImportError: + HAS_CYTHON = False + +DEBUG = 0 + +_reloads={} + + +def pyx_to_dll(filename, ext=None, force_rebuild=0, build_in_temp=False, pyxbuild_dir=None, + setup_args=None, reload_support=False, inplace=False): + """Compile a PYX file to a DLL and return the name of the generated .so + or .dll .""" + assert os.path.exists(filename), "Could not find %s" % os.path.abspath(filename) + + path, name = os.path.split(os.path.abspath(filename)) + + if not ext: + modname, extension = os.path.splitext(name) + assert extension in (".pyx", ".py"), extension + if not HAS_CYTHON: + filename = filename[:-len(extension)] + '.c' + ext = Extension(name=modname, sources=[filename]) + + if setup_args is None: + setup_args = {} + if not pyxbuild_dir: + pyxbuild_dir = os.path.join(path, "_pyxbld") + + package_base_dir = path + for package_name in ext.name.split('.')[-2::-1]: + package_base_dir, pname = os.path.split(package_base_dir) + if pname != package_name: + # something is wrong - package path doesn't match file path + package_base_dir = None + break + + script_args=setup_args.get("script_args",[]) + if DEBUG or "--verbose" in script_args: + quiet = "--verbose" + else: + quiet = "--quiet" + if build_in_temp: + args = [quiet, "build_ext", '--cython-c-in-temp'] + else: + args = [quiet, "build_ext"] + if force_rebuild: + args.append("--force") + if inplace and package_base_dir: + args.extend(['--build-lib', package_base_dir]) + if ext.name == '__init__' or ext.name.endswith('.__init__'): + # package => provide __path__ early + if not hasattr(ext, 'cython_directives'): + ext.cython_directives = {'set_initial_path' : 'SOURCEFILE'} + elif 'set_initial_path' not in ext.cython_directives: + ext.cython_directives['set_initial_path'] = 'SOURCEFILE' + + sargs = setup_args.copy() + sargs.update({ + "script_name": None, + "script_args": args + script_args, + }) + # late import, in case setuptools replaced it + from distutils.dist import Distribution + dist = Distribution(sargs) + if not dist.ext_modules: + dist.ext_modules = [] + dist.ext_modules.append(ext) + if HAS_CYTHON: + dist.cmdclass = {'build_ext': build_ext} + build = dist.get_command_obj('build') + build.build_base = pyxbuild_dir + + cfgfiles = dist.find_config_files() + dist.parse_config_files(cfgfiles) + + try: + ok = dist.parse_command_line() + except DistutilsArgError: + raise + + if DEBUG: + print("options (after parsing command line):") + dist.dump_option_dicts() + assert ok + + + try: + obj_build_ext = dist.get_command_obj("build_ext") + dist.run_commands() + so_path = obj_build_ext.get_outputs()[0] + if obj_build_ext.inplace: + # Python distutils get_outputs() returns a wrong so_path + # when --inplace ; see https://bugs.python.org/issue5977 + # workaround: + so_path = os.path.join(os.path.dirname(filename), + os.path.basename(so_path)) + if reload_support: + org_path = so_path + timestamp = os.path.getmtime(org_path) + global _reloads + last_timestamp, last_path, count = _reloads.get(org_path, (None,None,0) ) + if last_timestamp == timestamp: + so_path = last_path + else: + basename = os.path.basename(org_path) + while count < 100: + count += 1 + r_path = os.path.join(obj_build_ext.build_lib, + basename + '.reload%s' %
count) + try: + import shutil # late import / reload_support is: debugging + try: + # Try to unlink first --- if the .so file + # is mmapped by another process, + # overwriting its contents corrupts the + # loaded image (on Linux) and crashes the + # other process. On Windows, unlinking an + # open file just fails. + if os.path.isfile(r_path): + os.unlink(r_path) + except OSError: + continue + shutil.copy2(org_path, r_path) + so_path = r_path + except IOError: + continue + break + else: + # used up all 100 slots + raise ImportError("reload count for %s reached maximum" % org_path) + _reloads[org_path]=(timestamp, so_path, count) + return so_path + except KeyboardInterrupt: + sys.exit(1) + except (IOError, os.error): + exc = sys.exc_info()[1] + error = grok_environment_error(exc) + + if DEBUG: + sys.stderr.write(error + "\n") + raise + + +if __name__=="__main__": + pyx_to_dll("dummy.pyx") + from . import test diff --git a/venv/lib/python3.10/site-packages/pyximport/pyximport.py b/venv/lib/python3.10/site-packages/pyximport/pyximport.py new file mode 100644 index 0000000000000000000000000000000000000000..7b69db6cdc3b9df61d07bba936cef9f60574d88c --- /dev/null +++ b/venv/lib/python3.10/site-packages/pyximport/pyximport.py @@ -0,0 +1,482 @@ +""" +Import hooks; when installed with the install() function, these hooks +allow importing .pyx files as if they were Python modules. + +If you want the hook installed every time you run Python +you can add it to your Python version by adding these lines to +sitecustomize.py (which you can create from scratch in site-packages +if it doesn't exist there or somewhere else on your python path):: + + import pyximport + pyximport.install() + +For instance on the Mac with a non-system Python 2.3, you could create +sitecustomize.py with only those two lines at +/usr/local/lib/python2.3/site-packages/sitecustomize.py . + +A custom distutils.core.Extension instance and setup() args +(Distribution) for the build can be defined by a .pyxbld +file like: + +# examplemod.pyxbld +def make_ext(modname, pyxfilename): + from distutils.extension import Extension + return Extension(name = modname, + sources=[pyxfilename, 'hello.c'], + include_dirs=['/myinclude'] ) +def make_setup_args(): + return dict(script_args=["--compiler=mingw32"]) + +Extra dependencies can be defined by a .pyxdep file. +See README. + +Since Cython 0.11, the :mod:`pyximport` module also has experimental +compilation support for normal Python modules. This allows you to +automatically run Cython on every .pyx and .py module that Python +imports, including parts of the standard library and installed +packages. Cython will still fail to compile a lot of Python modules, +in which case the import mechanism will fall back to loading the +Python source modules instead. The .py import mechanism is installed +like this:: + + pyximport.install(pyimport = True) + +Running this module as a top-level script will run a test and then print +the documentation.
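+
+As an editor's illustrative sketch (not part of the original docstring;
+``fastmod`` is a hypothetical module name), a minimal session looks like::
+
+    import pyximport
+    pyximport.install(language_level=3)
+
+    import fastmod  # locates fastmod.pyx, compiles it, imports the extension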
+""" + +import glob +import importlib +import os +import sys +from importlib.abc import MetaPathFinder +from importlib.machinery import ExtensionFileLoader, SourceFileLoader +from importlib.util import spec_from_file_location + +mod_name = "pyximport" + +PY_EXT = ".py" +PYX_EXT = ".pyx" +PYXDEP_EXT = ".pyxdep" +PYXBLD_EXT = ".pyxbld" + +DEBUG_IMPORT = False + + +def _print(message, args): + if args: + message = message % args + print(message) + + +def _debug(message, *args): + if DEBUG_IMPORT: + _print(message, args) + + +def _info(message, *args): + _print(message, args) + + +def load_source(file_path): + import importlib.util + from importlib.machinery import SourceFileLoader + spec = importlib.util.spec_from_file_location("XXXX", file_path, loader=SourceFileLoader("XXXX", file_path)) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def get_distutils_extension(modname, pyxfilename, language_level=None): +# try: +# import hashlib +# except ImportError: +# import md5 as hashlib +# extra = "_" + hashlib.md5(open(pyxfilename).read()).hexdigest() +# modname = modname + extra + extension_mod,setup_args = handle_special_build(modname, pyxfilename) + if not extension_mod: + if not isinstance(pyxfilename, str): + # distutils is stupid in Py2 and requires exactly 'str' + # => encode accidentally coerced unicode strings back to str + pyxfilename = pyxfilename.encode(sys.getfilesystemencoding()) + from distutils.extension import Extension + extension_mod = Extension(name = modname, sources=[pyxfilename]) + if language_level is not None: + extension_mod.cython_directives = {'language_level': language_level} + return extension_mod,setup_args + + +def handle_special_build(modname, pyxfilename): + special_build = os.path.splitext(pyxfilename)[0] + PYXBLD_EXT + ext = None + setup_args={} + if os.path.exists(special_build): + # globls = {} + # locs = {} + # execfile(special_build, globls, locs) + # ext = locs["make_ext"](modname, pyxfilename) + mod = load_source(special_build) + make_ext = getattr(mod,'make_ext',None) + if make_ext: + ext = make_ext(modname, pyxfilename) + assert ext and ext.sources, "make_ext in %s did not return Extension" % special_build + make_setup_args = getattr(mod, 'make_setup_args',None) + if make_setup_args: + setup_args = make_setup_args() + assert isinstance(setup_args,dict), ("make_setup_args in %s did not return a dict" + % special_build) + assert ext or setup_args, ("neither make_ext nor make_setup_args %s" + % special_build) + ext.sources = [os.path.join(os.path.dirname(special_build), source) + for source in ext.sources] + return ext, setup_args + + +def handle_dependencies(pyxfilename): + testing = '_test_files' in globals() + dependfile = os.path.splitext(pyxfilename)[0] + PYXDEP_EXT + + # by default let distutils decide whether to rebuild on its own + # (it has a better idea of what the output file will be) + + # but we know more about dependencies so force a rebuild if + # some of the dependencies are newer than the pyxfile. 
+ if os.path.exists(dependfile): + with open(dependfile) as fid: + depends = fid.readlines() + depends = [depend.strip() for depend in depends] + + # gather dependencies in the "files" variable + # the dependency file is itself a dependency + files = [dependfile] + for depend in depends: + fullpath = os.path.join(os.path.dirname(dependfile), + depend) + files.extend(glob.glob(fullpath)) + + # only for unit testing to see we did the right thing + if testing: + _test_files[:] = [] #$pycheck_no + + # if any file that the pyxfile depends upon is newer than + # the pyx file, 'touch' the pyx file so that distutils will + # be tricked into rebuilding it. + for file in files: + from distutils.dep_util import newer + if newer(file, pyxfilename): + _debug("Rebuilding %s because of %s", pyxfilename, file) + filetime = os.path.getmtime(file) + os.utime(pyxfilename, (filetime, filetime)) + if testing: + _test_files.append(file) + + +def build_module(name, pyxfilename, pyxbuild_dir=None, inplace=False, language_level=None): + assert os.path.exists(pyxfilename), "Path does not exist: %s" % pyxfilename + handle_dependencies(pyxfilename) + + extension_mod, setup_args = get_distutils_extension(name, pyxfilename, language_level) + build_in_temp = pyxargs.build_in_temp + sargs = pyxargs.setup_args.copy() + sargs.update(setup_args) + build_in_temp = sargs.pop('build_in_temp',build_in_temp) + + from . import pyxbuild + olddir = os.getcwd() + common = '' + if pyxbuild_dir and sys.platform == 'win32': + # Windows concatenates the pyxbuild_dir to the pyxfilename when + # compiling, and then complains that the filename is too long + common = os.path.commonprefix([pyxbuild_dir, pyxfilename]) + if len(common) > 30: + pyxfilename = os.path.relpath(pyxfilename, common) + pyxbuild_dir = os.path.relpath(pyxbuild_dir, common) + os.chdir(common) + try: + so_path = pyxbuild.pyx_to_dll(pyxfilename, extension_mod, + build_in_temp=build_in_temp, + pyxbuild_dir=pyxbuild_dir, + setup_args=sargs, + inplace=inplace, + reload_support=pyxargs.reload_support) + finally: + os.chdir(olddir) + so_path = os.path.join(common, so_path) + assert os.path.exists(so_path), "Cannot find: %s" % so_path + + junkpath = os.path.join(os.path.dirname(so_path), name+"_*") #very dangerous with --inplace ? yes, indeed, trying to eat my files ;) + junkstuff = glob.glob(junkpath) + for path in junkstuff: + if path != so_path: + try: + os.remove(path) + except IOError: + _info("Couldn't remove %s", path) + + return so_path + + +# import hooks + +class PyxImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PYX_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + + def find_spec(self, fullname, path, target=None): + if not path: + path = [os.getcwd()] + sys.path # top level import -- + if "." 
in fullname: + *parents, name = fullname.split(".") + else: + name = fullname + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + + return None # we don't know how to import this + + +class PyImportMetaFinder(MetaPathFinder): + + def __init__(self, extension=PY_EXT, pyxbuild_dir=None, inplace=False, language_level=None): + self.pyxbuild_dir = pyxbuild_dir + self.inplace = inplace + self.language_level = language_level + self.extension = extension + self.uncompilable_modules = {} + self.blocked_modules = ['Cython', 'pyxbuild', 'pyximport.pyxbuild', + 'distutils', 'cython'] + self.blocked_packages = ['Cython.', 'distutils.'] + self.found = False + + def find_spec(self, fullname, path, target=None): + if self.found: + return None + if fullname in sys.modules: + return None + if any([fullname.startswith(pkg) for pkg in self.blocked_packages]): + return None + if fullname in self.blocked_modules: + # prevent infinite recursion + return None + + self.blocked_modules.append(fullname) + name = fullname + if not path: + path = [os.getcwd()] + sys.path # top level import -- + try: + for entry in path: + if os.path.isdir(os.path.join(entry, name)): + # this module has child modules + filename = os.path.join(entry, name, "__init__" + self.extension) + submodule_locations = [os.path.join(entry, name)] + else: + filename = os.path.join(entry, name + self.extension) + submodule_locations = None + if not os.path.exists(filename): + continue + + self.found = True + return spec_from_file_location( + fullname, filename, + loader=PyxImportLoader(filename, self.pyxbuild_dir, self.inplace, self.language_level), + submodule_search_locations=submodule_locations) + finally: + self.blocked_modules.pop() + + return None # we don't know how to import this + + +class PyxImportLoader(ExtensionFileLoader): + + def __init__(self, filename, pyxbuild_dir, inplace, language_level): + module_name = os.path.splitext(os.path.basename(filename))[0] + super().__init__(module_name, filename) + self._pyxbuild_dir = pyxbuild_dir + self._inplace = inplace + self._language_level = language_level + + def create_module(self, spec): + try: + so_path = build_module(spec.name, pyxfilename=spec.origin, pyxbuild_dir=self._pyxbuild_dir, + inplace=self._inplace, language_level=self._language_level) + self.path = so_path + spec.origin = so_path + return super().create_module(spec) + except Exception as failure_exc: + _debug("Failed to load extension module: %r" % failure_exc) + if pyxargs.load_py_module_on_import_failure and spec.origin.endswith(PY_EXT): + spec = importlib.util.spec_from_file_location(spec.name, spec.origin, + loader=SourceFileLoader(spec.name, spec.origin)) + mod = importlib.util.module_from_spec(spec) + assert mod.__file__ in (spec.origin, spec.origin + 'c', spec.origin + 'o'), (mod.__file__, spec.origin) + return mod + else: + tb = sys.exc_info()[2] + import traceback + exc = ImportError("Building module %s failed: %s" % ( + spec.name, traceback.format_exception_only(*sys.exc_info()[:2]))) + raise exc.with_traceback(tb) + + 
def exec_module(self, module): + try: + return super().exec_module(module) + except Exception as failure_exc: + import traceback + _debug("Failed to load extension module: %r" % failure_exc) + raise ImportError("Executing module %s failed %s" % ( + module.__file__, traceback.format_exception_only(*sys.exc_info()[:2]))) + + +#install args +class PyxArgs(object): + build_dir=True + build_in_temp=True + setup_args={} #None + + +def _have_importers(): + has_py_importer = False + has_pyx_importer = False + for importer in sys.meta_path: + if isinstance(importer, PyxImportMetaFinder): + if isinstance(importer, PyImportMetaFinder): + has_py_importer = True + else: + has_pyx_importer = True + + return has_py_importer, has_pyx_importer + + +def install(pyximport=True, pyimport=False, build_dir=None, build_in_temp=True, + setup_args=None, reload_support=False, + load_py_module_on_import_failure=False, inplace=False, + language_level=None): + """ Main entry point for pyxinstall. + + Call this to install the ``.pyx`` import hook in + your meta-path for a single Python process. If you want it to be + installed whenever you use Python, add it to your ``sitecustomize`` + (as described above). + + :param pyximport: If set to False, does not try to import ``.pyx`` files. + + :param pyimport: You can pass ``pyimport=True`` to also + install the ``.py`` import hook + in your meta-path. Note, however, that it is rather experimental, + will not work at all for some ``.py`` files and packages, and will + heavily slow down your imports due to search and compilation. + Use at your own risk. + + :param build_dir: By default, compiled modules will end up in a ``.pyxbld`` + directory in the user's home directory. Passing a different path + as ``build_dir`` will override this. + + :param build_in_temp: If ``False``, will produce the C files locally. Working + with complex dependencies and debugging becomes more easy. This + can principally interfere with existing files of the same name. + + :param setup_args: Dict of arguments for Distribution. + See ``distutils.core.setup()``. + + :param reload_support: Enables support for dynamic + ``reload(my_module)``, e.g. after a change in the Cython code. + Additional files ``.reloadNN`` may arise on that account, when + the previously loaded module file cannot be overwritten. + + :param load_py_module_on_import_failure: If the compilation of a ``.py`` + file succeeds, but the subsequent import fails for some reason, + retry the import with the normal ``.py`` module instead of the + compiled module. Note that this may lead to unpredictable results + for modules that change the system state during their import, as + the second import will rerun these modifications in whatever state + the system was left after the import of the compiled module + failed. + + :param inplace: Install the compiled module + (``.so`` for Linux and Mac / ``.pyd`` for Windows) + next to the source file. + + :param language_level: The source language level to use: 2 or 3. + The default is to use the language level of the current Python + runtime for .py files and Py2 for ``.pyx`` files. 
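+
+    Example (editor's illustrative sketch, not from the original docstring;
+    the cache path below is an arbitrary assumption)::
+
+        import os
+        import pyximport
+
+        py_imp, pyx_imp = pyximport.install(
+            build_dir=os.path.expanduser("~/.cache/pyxbld"),
+            language_level=3)
+        # ... import some .pyx modules here ...
+        pyximport.uninstall(py_imp, pyx_imp)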
+ """ + if setup_args is None: + setup_args = {} + if not build_dir: + build_dir = os.path.join(os.path.expanduser('~'), '.pyxbld') + + global pyxargs + pyxargs = PyxArgs() #$pycheck_no + pyxargs.build_dir = build_dir + pyxargs.build_in_temp = build_in_temp + pyxargs.setup_args = (setup_args or {}).copy() + pyxargs.reload_support = reload_support + pyxargs.load_py_module_on_import_failure = load_py_module_on_import_failure + + has_py_importer, has_pyx_importer = _have_importers() + py_importer, pyx_importer = None, None + + if pyimport and not has_py_importer: + py_importer = PyImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + # make sure we import Cython before we install the import hook + import Cython.Compiler.Main, Cython.Compiler.Pipeline, Cython.Compiler.Optimize + sys.meta_path.insert(0, py_importer) + + if pyximport and not has_pyx_importer: + pyx_importer = PyxImportMetaFinder(pyxbuild_dir=build_dir, inplace=inplace, + language_level=language_level) + sys.meta_path.append(pyx_importer) + + return py_importer, pyx_importer + + +def uninstall(py_importer, pyx_importer): + """ + Uninstall an import hook. + """ + try: + sys.meta_path.remove(py_importer) + except ValueError: + pass + + try: + sys.meta_path.remove(pyx_importer) + except ValueError: + pass + + +# MAIN + +def show_docs(): + import __main__ + __main__.__name__ = mod_name + for name in dir(__main__): + item = getattr(__main__, name) + try: + setattr(item, "__module__", mod_name) + except (AttributeError, TypeError): + pass + help(__main__) + + +if __name__ == '__main__': + show_docs() diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__init__.py b/venv/lib/python3.10/site-packages/rapidfuzz/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b27b746691bb27f5e2e7b831eba9acaefac8dcda --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/__init__.py @@ -0,0 +1,33 @@ +""" +rapid string matching library +""" + +from __future__ import annotations + +__author__: str = "Max Bachmann" +__license__: str = "MIT" +__version__: str = "3.14.1" + +from rapidfuzz import distance, fuzz, process, utils + +__all__ = ["distance", "fuzz", "get_include", "process", "utils"] + + +def get_include(): + """ + Return the directory that contains the RapidFuzz \\*.h header files. + Extension modules that need to compile against RapidFuzz should use this + function to locate the appropriate include directory. + Notes + ----- + When using ``distutils``, for example in ``setup.py``. + :: + import rapidfuzz_capi + ... + Extension('extension_name', ... + include_dirs=[rapidfuzz_capi.get_include()]) + ... 
+ """ + from pathlib import Path + + return str(Path(__file__).parent) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__init__.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..ffd9870f723f864eccaf7a2f16751fdc3e4774c2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/__init__.pyi @@ -0,0 +1,12 @@ +from __future__ import annotations + +__author__: str +__license__: str +__version__: str + +from rapidfuzz import ( + distance as distance, + fuzz as fuzz, + process as process, + utils as utils, +) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe6660d34c3a39397e6aa33e4195f1a72d7b710b Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_common_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_common_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4ea1f4618d86e5332add480edf299c2bebdc9a39 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_common_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_feature_detector.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_feature_detector.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5822fec0b11d1c00706b2fe45679156f7339d8bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_feature_detector.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..045824f25b03d9eb0e3374425aa2339e6820ceb4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/_utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1dd8f6e5ef29ef78f10a6b13d305161a833f38a4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ae3a225eb8760f3882e564a792ccc33c8664fa2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/fuzz_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..366bf641da520c7c572c9b338f1ff4407c36c502 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_cpp.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_cpp.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..410cec22d7b9c106f172cb268a679c93d6d8841b Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_cpp.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b420bdd6f33d71d2432c05913333e508cc0aea3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/process_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81f3935e72ce7df16028083d9b0862ec1c3894be Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd435621ccfd858793cbf9f3c1c0a7868fe81bb1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pycache__/utils_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__init__.py b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..217dbee03df8552bae0d68d115610d4527fc8c43 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__init__.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +from pathlib import Path + + +def get_PyInstaller_tests(): + return [str(Path(__file__).parent)] diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..682410acd7a17f25bb695c78ebcd83233bfcfe67 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/test_rapidfuzz_packaging.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/test_rapidfuzz_packaging.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4fe48584b315c652a15fb19229cdb6a3348522d9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/__pycache__/test_rapidfuzz_packaging.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/test_rapidfuzz_packaging.py b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/test_rapidfuzz_packaging.py new file mode 100644 index 0000000000000000000000000000000000000000..5bda451149958867854b2add2a99f3bd9b420b5b --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/__pyinstaller/test_rapidfuzz_packaging.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +import subprocess + +from PyInstaller import __main__ as pyi_main + + +# Test out the package by importing it, then running 
functions from it. +def test_pyi_hooksample(tmp_path): + app_name = "userapp" + workpath = tmp_path / "build" + distpath = tmp_path / "dist" + app = tmp_path / (app_name + ".py") + app.write_text( + "\n".join( + [ + "import rapidfuzz", + "from rapidfuzz.distance import metrics_py", + "from rapidfuzz.distance import metrics_cpp", + "rapidfuzz.distance.Levenshtein.distance('test', 'teste')", + "metrics_py.levenshtein_distance('test', 'teste')", + "metrics_cpp.levenshtein_distance('test', 'teste')", + ] + ) + ) + args = [ + # Place all generated files in ``tmp_path``. + "--workpath", + str(workpath), + "--distpath", + str(distpath), + "--specpath", + str(tmp_path), + str(app), + ] + pyi_main.run(args) + subprocess.run([str(distpath / app_name / app_name)], check=True) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/_common_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/_common_py.py new file mode 100644 index 0000000000000000000000000000000000000000..f37eb23526687d131319a6b1141a1a109b7b2a75 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/_common_py.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2023 Max Bachmann + +from __future__ import annotations + +from array import array +from collections.abc import Hashable, Sequence + + +def conv_sequence(s: Sequence[Hashable]) -> Sequence[Hashable]: + if isinstance(s, str): + return [ord(x) for x in s] + + if isinstance(s, bytes): + return s + + if isinstance(s, array): + if s.typecode in ("u", "w"): + return [ord(x) for x in s] + + return s + + if s is None: + return s + + res = [] + for elem in s: + if isinstance(elem, str) and len(elem) == 1: + res.append(ord(elem)) + elif isinstance(elem, int) and elem == -1: + res.append(-1) + else: + res.append(hash(elem)) + + return res + + +def conv_sequences(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> tuple[Sequence[Hashable], Sequence[Hashable]]: + if isinstance(s1, str) and isinstance(s2, str): + return s1, s2 + + if isinstance(s1, bytes) and isinstance(s2, bytes): + return s1, s2 + + return conv_sequence(s1), conv_sequence(s2) + + +def common_prefix(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> int: + prefix_len = 0 + for ch1, ch2 in zip(s1, s2): + if ch1 != ch2: + break + + prefix_len += 1 + + return prefix_len + + +def common_suffix(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> int: + suffix_len = 0 + for ch1, ch2 in zip(reversed(s1), reversed(s2)): + if ch1 != ch2: + break + + suffix_len += 1 + + return suffix_len + + +def common_affix(s1: Sequence[Hashable], s2: Sequence[Hashable]) -> tuple[int, int]: + prefix_len = common_prefix(s1, s2) + suffix_len = common_suffix(s1[prefix_len:], s2[prefix_len:]) + return (prefix_len, suffix_len) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector.py b/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector.py new file mode 100644 index 0000000000000000000000000000000000000000..967591039420e073f312a64a462e4c94d9928f80 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector.py @@ -0,0 +1,15 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +__all__ = ["AVX2", "SSE2", "supports"] + +try: + from rapidfuzz._feature_detector_cpp import AVX2, SSE2, supports +except ImportError: + SSE2 = 1 + AVX2 = 2 + + def supports(features): + _ = features + return False diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector_cpp.cpython-310-x86_64-linux-gnu.so 
b/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector_cpp.cpython-310-x86_64-linux-gnu.so new file mode 100644 index 0000000000000000000000000000000000000000..453c003c104aafd68d59352b6ae1fbdaa55a92f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/_feature_detector_cpp.cpython-310-x86_64-linux-gnu.so differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/_utils.py b/venv/lib/python3.10/site-packages/rapidfuzz/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7da61ef5f54824cbbaa81ce4fb3142467ff5aad2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/_utils.py @@ -0,0 +1,85 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +import sys +from math import isnan +from typing import Any, Callable + +pandas_NA = None + + +def setupPandas(): + global pandas_NA # noqa: PLW0603 + if pandas_NA is None: + pandas = sys.modules.get("pandas") + if hasattr(pandas, "NA"): + pandas_NA = pandas.NA + + +setupPandas() + + +class ScorerFlag: + RESULT_F64 = 1 << 5 + RESULT_I64 = 1 << 6 + RESULT_SIZE_T = 1 << 7 + SYMMETRIC = 1 << 11 + + +def _get_scorer_flags_distance(**_kwargs: Any) -> dict[str, Any]: + return { + "optimal_score": 0, + "worst_score": 2**63 - 1, + "flags": ScorerFlag.RESULT_SIZE_T | ScorerFlag.SYMMETRIC, + } + + +def _get_scorer_flags_similarity(**_kwargs: Any) -> dict[str, Any]: + return { + "optimal_score": 2**63 - 1, + "worst_score": 0, + "flags": ScorerFlag.RESULT_SIZE_T | ScorerFlag.SYMMETRIC, + } + + +def _get_scorer_flags_normalized_distance(**_kwargs: Any) -> dict[str, Any]: + return { + "optimal_score": 0, + "worst_score": 1, + "flags": ScorerFlag.RESULT_F64 | ScorerFlag.SYMMETRIC, + } + + +def _get_scorer_flags_normalized_similarity(**_kwargs: Any) -> dict[str, Any]: + return { + "optimal_score": 1, + "worst_score": 0, + "flags": ScorerFlag.RESULT_F64 | ScorerFlag.SYMMETRIC, + } + + +def is_none(s: Any) -> bool: + if s is None or s is pandas_NA: + return True + + return isinstance(s, float) and isnan(s) + + +def add_scorer_attrs(func: Any, cached_scorer_call: dict[str, Callable[..., dict[str, Any]]]): + func._RF_ScorerPy = cached_scorer_call + # used to detect the function hasn't been wrapped afterwards + func._RF_OriginalScorer = func + + +default_distance_attribute: dict[str, Callable[..., dict[str, Any]]] = {"get_scorer_flags": _get_scorer_flags_distance} +default_similarity_attribute: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_similarity +} +default_normalized_distance_attribute: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_normalized_distance +} +default_normalized_similarity_attribute: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_normalized_similarity +} diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.py new file mode 100644 index 0000000000000000000000000000000000000000..0af55a3bad23d5a41e4185874418700340199901 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", 
"normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + damerau_levenshtein_distance as distance, + damerau_levenshtein_normalized_distance as normalized_distance, + damerau_levenshtein_normalized_similarity as normalized_similarity, + damerau_levenshtein_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.pyi new file mode 100644 index 0000000000000000000000000000000000000000..db0de6253bf0d81102f9e27d2598b1836e428efc --- /dev/null +++ 
b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein.pyi @@ -0,0 +1,75 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... 
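A quick usage sketch of the DamerauLevenshtein API stubbed above (a minimal illustration, not part of the vendored patch; it assumes the rapidfuzz 3.14.1 in this venv is importable, and the expected values follow from the doctests in DamerauLevenshtein_py.py below):

# Minimal usage sketch for rapidfuzz.distance.DamerauLevenshtein.
from rapidfuzz.distance import DamerauLevenshtein

# "CA" -> "AC" (transposition) -> "ABC" (insertion): distance 2.
assert DamerauLevenshtein.distance("CA", "ABC") == 2
# similarity = max(len1, len2) - distance = 3 - 2 = 1.
assert DamerauLevenshtein.similarity("CA", "ABC") == 1
# normalized_distance = distance / max(len1, len2) = 2 / 3.
print(DamerauLevenshtein.normalized_distance("CA", "ABC"))  # ~0.6667
# An illustrative processor (here a strip+lower lambda, not from the source)
# runs on both inputs before they are compared.
print(DamerauLevenshtein.distance(" CA ", "abc", processor=lambda s: s.strip().lower()))  # 2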
diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein_py.py new file mode 100644 index 0000000000000000000000000000000000000000..9f8fca198ac77162f967c046017b2d78a697c6c9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/DamerauLevenshtein_py.py @@ -0,0 +1,233 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas + + +def _damerau_levenshtein_distance_zhao(s1, s2): + maxVal = max(len(s1), len(s2)) + 1 + last_row_id = {} + last_row_id_get = last_row_id.get + size = len(s2) + 2 + FR = [maxVal] * size + R1 = [maxVal] * size + R = list(range(size)) + R[-1] = maxVal + + for i in range(1, len(s1) + 1): + R, R1 = R1, R + last_col_id = -1 + last_i2l1 = R[0] + R[0] = i + T = maxVal + + for j in range(1, len(s2) + 1): + diag = R1[j - 1] + (s1[i - 1] != s2[j - 1]) + left = R[j - 1] + 1 + up = R1[j] + 1 + temp = min(diag, left, up) + + if s1[i - 1] == s2[j - 1]: + last_col_id = j  # last occurrence of s1_i + FR[j] = R1[j - 2]  # save H_k-1,j-2 + T = last_i2l1  # save H_i-2,l-1 + else: + k = last_row_id_get(s2[j - 1], -1) + l = last_col_id  # noqa: E741 + + if (j - l) == 1: + transpose = FR[j] + (i - k) + temp = min(temp, transpose) + elif (i - k) == 1: + transpose = T + (j - l) + temp = min(temp, transpose) + + last_i2l1 = R[j] + R[j] = temp + + last_row_id[s1[i - 1]] = i + + return R[len(s2)] + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the Damerau-Levenshtein distance. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Examples + -------- + Find the Damerau-Levenshtein distance between two strings: + + >>> from rapidfuzz.distance import DamerauLevenshtein + >>> DamerauLevenshtein.distance("CA", "ABC") + 2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + dist = _damerau_levenshtein_distance_zhao(s1, s2) + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the Damerau-Levenshtein similarity in the range [max, 0]. + + This is calculated as ``max(len1, len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour.
+ + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2) + sim = maximum - dist + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Damerau-Levenshtein distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is 1.0, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2) + norm_dist = dist / maximum if maximum else 0 + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Damerau-Levenshtein similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. 
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + norm_dist = normalized_distance(s1, s2) + norm_sim = 1.0 - norm_dist + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.py new file mode 100644 index 0000000000000000000000000000000000000000..b9450c5649797d16791b95f6082c6815fd0fe7e5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.py @@ -0,0 +1,116 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = [ + "distance", + "editops", + "normalized_distance", + "normalized_similarity", + "opcodes", + "similarity", +] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as 
normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + hamming_distance as distance, + hamming_editops as editops, + hamming_normalized_distance as normalized_distance, + hamming_normalized_similarity as normalized_similarity, + hamming_opcodes as opcodes, + hamming_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.pyi new file mode 100644 index 0000000000000000000000000000000000000000..4a06b371c9f7b52fda45b668ee46746683feffc8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming.pyi @@ -0,0 +1,113 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +from rapidfuzz.distance import Editops, Opcodes + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + pad: bool = True, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + pad: bool = True, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + pad: bool = True, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + pad: bool = True, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + pad: bool = True, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + pad: bool = True, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + pad: bool = True, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + pad: bool = True, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def editops( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Editops: ... 
+@overload +def editops( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Editops: ... +@overload +def opcodes( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Opcodes: ... +@overload +def opcodes( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Opcodes: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming_py.py new file mode 100644 index 0000000000000000000000000000000000000000..90881e92d6b09e6d5be57d307c0591d85a935196 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Hamming_py.py @@ -0,0 +1,322 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas +from rapidfuzz.distance._initialize_py import Editop, Editops + + +def distance( + s1, + s2, + *, + pad=True, + processor=None, + score_cutoff=None, +): + """ + Calculates the Hamming distance between two strings. + The Hamming distance is defined as the number of positions + where the two strings differ. It describes the minimum + number of substitutions required to transform s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int or None, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Raises + ------ + ValueError + If s1 and s2 have a different length + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + + if not pad and len(s1) != len(s2): + msg = "Sequences are not the same length." + raise ValueError(msg) + + min_len = min(len(s1), len(s2)) + dist = max(len(s1), len(s2)) + for i in range(min_len): + dist -= s1[i] == s2[i] + + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + pad=True, + processor=None, + score_cutoff=None, +): + """ + Calculates the Hamming similarity between two strings. + + This is calculated as ``max(len1, len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour.
+ score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + similarity : int + similarity between s1 and s2 + + Raises + ------ + ValueError + If s1 and s2 have a different length + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2, pad=pad) + sim = maximum - dist + + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + pad=True, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Hamming distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is 1.0, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + + Raises + ------ + ValueError + If s1 and s2 have a different length + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2, pad=pad) + norm_dist = dist / maximum if maximum else 0 + + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1.0 + + +def normalized_similarity( + s1, + s2, + *, + pad=True, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Hamming similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour.
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + + Raises + ------ + ValueError + If s1 and s2 have a different length + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + s1, s2 = conv_sequences(s1, s2) + norm_dist = normalized_distance(s1, s2, pad=pad, processor=processor) + norm_sim = 1 - norm_dist + + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0.0 + + +def editops( + s1, + s2, + *, + pad=True, + processor=None, +): + """ + Return Editops describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + + Returns + ------- + editops : Editops + edit operations required to turn s1 into s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + + if not pad and len(s1) != len(s2): + msg = "Sequences are not the same length." + raise ValueError(msg) + + ops_list = [] + min_len = min(len(s1), len(s2)) + for i in range(min_len): + if s1[i] != s2[i]: + ops_list.append(Editop("replace", i, i)) + + for i in range(min_len, len(s1)): + ops_list.append(Editop("delete", i, len(s2))) + + for i in range(min_len, len(s2)): + ops_list.append(Editop("insert", len(s1), i)) + + # sidestep input validation + ops = Editops.__new__(Editops) + ops._src_len = len(s1) + ops._dest_len = len(s2) + ops._editops = ops_list + return ops + + +def opcodes( + s1, + s2, + *, + pad=True, + processor=None, +): + """ + Return Opcodes describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + pad : bool, optional + whether strings should be padded if there is a length difference. + If pad is False and strings have a different length + a ValueError is raised instead. Default is True. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour.
+ + Returns + ------- + opcodes : Opcodes + edit operations required to turn s1 into s2 + """ + return editops(s1, s2, pad=pad, processor=processor).as_opcodes() diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.py new file mode 100644 index 0000000000000000000000000000000000000000..2cc6b38de5d322d320921a2711a2a246634bf1d8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.py @@ -0,0 +1,116 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = [ + "distance", + "editops", + "normalized_distance", + "normalized_similarity", + "opcodes", + "similarity", +] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as 
normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + indel_distance as distance, + indel_editops as editops, + indel_normalized_distance as normalized_distance, + indel_normalized_similarity as normalized_similarity, + indel_opcodes as opcodes, + indel_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.pyi new file mode 100644 index 0000000000000000000000000000000000000000..c662f99baf4e5a188523265b03a5a59b0ab89e9a --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel.pyi @@ -0,0 +1,105 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +from rapidfuzz.distance import Editops, Opcodes + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def editops( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Editops: ... +@overload +def editops( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Editops: ... +@overload +def opcodes( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Opcodes: ... +@overload +def opcodes( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Opcodes: ... 
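A short sanity-check sketch for the Indel facade above (illustrative only, not part of the vendored patch; the expected values are taken from the doctests in Indel_py.py below and assume this venv's rapidfuzz is importable):

# Usage sketch for rapidfuzz.distance.Indel: insertions/deletions only,
# i.e. Levenshtein with substitutions weighted 2.
from rapidfuzz.distance import Indel

assert Indel.distance("lewenstein", "levenshtein") == 3
# With score_cutoff=1 the result is capped at score_cutoff + 1 = 2.
assert Indel.distance("lewenstein", "levenshtein", score_cutoff=1) == 2
# editops describes an alignment built from delete/insert operations only.
for tag, src_pos, dest_pos in Indel.editops("qabxcd", "abycdf"):
    print(tag, src_pos, dest_pos)  # e.g. "delete 0 0" first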
diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel_py.py new file mode 100644 index 0000000000000000000000000000000000000000..c8f29a93b9c2e83adc9f19033be58c85f4ba79d2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Indel_py.py @@ -0,0 +1,358 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas +from rapidfuzz.distance.LCSseq_py import ( + _block_similarity as lcs_seq_block_similarity, + editops as lcs_seq_editops, + opcodes as lcs_seq_opcodes, + similarity as lcs_seq_similarity, +) + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the minimum number of insertions and deletions + required to change one sequence into the other. This is equivalent to the + Levenshtein distance with a substitution weight of 2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Examples + -------- + Find the Indel distance between two strings: + + >>> from rapidfuzz.distance import Indel + >>> Indel.distance("lewenstein", "levenshtein") + 3 + + Setting a maximum distance allows the implementation to select + a more efficient algorithm: + + >>> Indel.distance("lewenstein", "levenshtein", score_cutoff=1) + 2 + + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = len(s1) + len(s2) + lcs_sim = lcs_seq_similarity(s1, s2) + dist = maximum - 2 * lcs_sim + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def _block_distance( + block, + s1, + s2, + score_cutoff=None, +): + maximum = len(s1) + len(s2) + lcs_sim = lcs_seq_block_similarity(block, s1, s2) + dist = maximum - 2 * lcs_sim + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the Indel similarity in the range [max, 0]. + + This is calculated as ``(len1 + len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour.
+ + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = len(s1) + len(s2) + dist = distance(s1, s2) + sim = maximum - dist + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Indel distance in the range [1, 0]. + + This is calculated as ``distance / (len1 + len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is 1.0, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = len(s1) + len(s2) + dist = distance(s1, s2) + norm_dist = dist / maximum if maximum else 0 + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def _block_normalized_distance( + block, + s1, + s2, + score_cutoff=None, +): + maximum = len(s1) + len(s2) + dist = _block_distance(block, s1, s2) + norm_dist = dist / maximum if maximum else 0 + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized Indel similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour.
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + + Examples + -------- + Find the normalized Indel similarity between two strings: + + >>> from rapidfuzz.distance import Indel + >>> Indel.normalized_similarity("lewenstein", "levenshtein") + 0.8571428571428572 + + Setting a score_cutoff allows the implementation to select + a more efficient algorithm: + + >>> Indel.normalized_similarity("lewenstein", "levenshtein", score_cutoff=0.9) + 0.0 + + When a different processor is used, s1 and s2 do not have to be strings + + >>> Indel.normalized_similarity(["lewenstein"], ["levenshtein"], processor=lambda s: s[0]) + 0.8571428571428572 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + norm_dist = normalized_distance(s1, s2) + norm_sim = 1.0 - norm_dist + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 + + +def _block_normalized_similarity( + block, + s1, + s2, + score_cutoff=None, +): + norm_dist = _block_normalized_distance(block, s1, s2) + norm_sim = 1.0 - norm_dist + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 + + +def editops( + s1, + s2, + *, + processor=None, +): + """ + Return Editops describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + + Returns + ------- + editops : Editops + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described in [6]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [6] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004). + + Examples + -------- + >>> from rapidfuzz.distance import Indel + >>> for tag, src_pos, dest_pos in Indel.editops("qabxcd", "abycdf"): + ...    print(("%7s s1[%d] s2[%d]" % (tag, src_pos, dest_pos))) + delete s1[0] s2[0] + delete s1[3] s2[2] + insert s1[4] s2[2] + insert s1[6] s2[5] + """ + return lcs_seq_editops(s1, s2, processor=processor) + + +def opcodes( + s1, + s2, + *, + processor=None, +): + """ + Return Opcodes describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + + Returns + ------- + opcodes : Opcodes + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described in [7]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [7] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004). + + Examples + -------- + >>> from rapidfuzz.distance import Indel + + >>> a = "qabxcd" + >>> b = "abycdf" + >>> for tag, i1, i2, j1, j2 in Indel.opcodes(a, b): + ...    print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % + ...
(tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) + delete a[0:1] (q) b[0:0] () + equal a[1:3] (ab) b[0:2] (ab) + delete a[3:4] (x) b[2:2] () + insert a[4:4] () b[2:3] (y) + equal a[4:6] (cd) b[3:5] (cd) + insert a[6:6] () b[5:6] (f) + """ + return lcs_seq_opcodes(s1, s2, processor=processor) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.py new file mode 100644 index 0000000000000000000000000000000000000000..1f87f54df79262ab57687f9a2a39c37283dc9e9e --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", "normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity as normalized_similarity, + jaro_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + jaro_distance as distance, + jaro_normalized_distance as normalized_distance, + jaro_normalized_similarity 
as normalized_similarity, + jaro_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.pyi new file mode 100644 index 0000000000000000000000000000000000000000..d8ec0b6c45cde4ad9d04945c6af079dc56f38b52 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro.pyi @@ -0,0 +1,75 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... 
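A note on the generated dispatcher above: Jaro.py resolves its backend once, at import time, from the RAPIDFUZZ_IMPLEMENTATION environment variable ("cpp" or "python"), falling back from the AVX2 build to SSE2, to the generic C++ module, and finally to the pure-Python metrics_py implementation. Below is a minimal sketch (illustrative only, not part of the patch) of pinning the pure-Python backend; it relies only on the environment variable check and public API shown in the diff above:

    import os

    # The backend is chosen when rapidfuzz.distance.Jaro is first imported,
    # so the variable has to be set before that import happens.
    os.environ["RAPIDFUZZ_IMPLEMENTATION"] = "python"

    from rapidfuzz.distance import Jaro

    # Jaro similarity is a float in [0.0, 1.0]; identical strings give 1.0.
    print(Jaro.similarity("lewenstein", "levenshtein"))

The same if/elif/else cascade is repeated verbatim in every generated module in this directory (JaroWinkler.py, LCSseq.py, Levenshtein.py, OSA.py below), only with different symbol names.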
diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.py new file mode 100644 index 0000000000000000000000000000000000000000..53736647e3cd0e4c097ddd8a1196d6f8fce4b5db --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", "normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as normalized_similarity, + jaro_winkler_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + jaro_winkler_distance as distance, + jaro_winkler_normalized_distance as normalized_distance, + jaro_winkler_normalized_similarity as 
normalized_similarity, + jaro_winkler_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.pyi new file mode 100644 index 0000000000000000000000000000000000000000..72b309d73e5cc336f6f6cf64b32cb1659f21598d --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler.pyi @@ -0,0 +1,83 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + prefix_weight: float = 0.1, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + prefix_weight: float = 0.1, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + prefix_weight: float = 0.1, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + prefix_weight: float = 0.1, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + prefix_weight: float = 0.1, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + prefix_weight: float = 0.1, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + prefix_weight: float = 0.1, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + prefix_weight: float = 0.1, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler_py.py new file mode 100644 index 0000000000000000000000000000000000000000..4e2338cbdad2b1a33d04f69d9f3cbd5fae42f33c --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/JaroWinkler_py.py @@ -0,0 +1,235 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas +from rapidfuzz.distance import Jaro_py as Jaro + + +def similarity( + s1, + s2, + *, + prefix_weight=0.1, + processor=None, + score_cutoff=None, +): + """ + Calculates the jaro winkler similarity + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + prefix_weight : float, optional + Weight used for the common prefix of the two strings. 
+ Has to be between 0 and 0.25. Default is 0.1. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 1.0 + + Raises + ------ + ValueError + If prefix_weight is invalid + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if score_cutoff is None: + score_cutoff = 0 + + if prefix_weight > 1.0 or prefix_weight < 0.0: + msg = "prefix_weight has to be in the range 0.0 - 1.0" + raise ValueError(msg) + + s1, s2 = conv_sequences(s1, s2) + P_len = len(s1) + T_len = len(s2) + min_len = min(P_len, T_len) + prefix = 0 + max_prefix = min(min_len, 4) + + for _ in range(max_prefix): + if s1[prefix] != s2[prefix]: + break + prefix += 1 + + jaro_score_cutoff = score_cutoff + if jaro_score_cutoff > 0.7: + prefix_sim = prefix * prefix_weight + + if prefix_sim >= 1.0: + jaro_score_cutoff = 0.7 + else: + jaro_score_cutoff = max(0.7, (prefix_sim - jaro_score_cutoff) / (prefix_sim - 1.0)) + + Sim = Jaro.similarity(s1, s2, score_cutoff=jaro_score_cutoff) + if Sim > 0.7: + Sim += prefix * prefix_weight * (1.0 - Sim) + Sim = min(Sim, 1.0) + + return Sim if Sim >= score_cutoff else 0 + + +def normalized_similarity( + s1, + s2, + *, + prefix_weight=0.1, + processor=None, + score_cutoff=None, +): + """ + Calculates the normalized jaro winkler similarity + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + prefix_weight : float, optional + Weight used for the common prefix of the two strings. + Has to be between 0 and 0.25. Default is 0.1. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + normalized similarity : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + + Raises + ------ + ValueError + If prefix_weight is invalid + """ + return similarity( + s1, + s2, + prefix_weight=prefix_weight, + processor=processor, + score_cutoff=score_cutoff, + ) + + +def distance( + s1, + s2, + *, + prefix_weight=0.1, + processor=None, + score_cutoff=None, +): + """ + Calculates the jaro winkler distance + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + prefix_weight : float, optional + Weight used for the common prefix of the two strings. + Has to be between 0 and 0.25. Default is 0.1. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. 
+ + Returns + ------- + distance : float + distance between s1 and s2 as a float between 1.0 and 0.0 + + Raises + ------ + ValueError + If prefix_weight is invalid + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + cutoff_distance = None if (score_cutoff is None or score_cutoff > 1.0) else 1.0 - score_cutoff + sim = similarity(s1, s2, prefix_weight=prefix_weight, score_cutoff=cutoff_distance) + dist = 1.0 - sim + return dist if (score_cutoff is None or dist <= score_cutoff) else 1.0 + + +def normalized_distance( + s1, + s2, + *, + prefix_weight=0.1, + processor=None, + score_cutoff=None, +): + """ + Calculates the normalized jaro winkler distance + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + prefix_weight : float, optional + Weight used for the common prefix of the two strings. + Has to be between 0 and 0.25. Default is 0.1. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + normalized distance : float + normalized distance between s1 and s2 as a float between 1.0 and 0.0 + + Raises + ------ + ValueError + If prefix_weight is invalid + """ + return distance( + s1, + s2, + prefix_weight=prefix_weight, + processor=processor, + score_cutoff=score_cutoff, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro_py.py new file mode 100644 index 0000000000000000000000000000000000000000..1924633aff749a79b443d037764157ee12bbfb3e --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Jaro_py.py @@ -0,0 +1,255 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas + + +def _jaro_calculate_similarity(pattern_len, text_len, common_chars, transpositions): + transpositions //= 2 + sim = 0.0 + sim += common_chars / pattern_len + sim += common_chars / text_len + sim += (common_chars - transpositions) / common_chars + return sim / 3.0 + + +def _jaro_length_filter(pattern_len, text_len, score_cutoff): + """ + filter matches below score_cutoff based on string lengths + """ + if not pattern_len or not text_len: + return False + + sim = _jaro_calculate_similarity(pattern_len, text_len, min(pattern_len, text_len), 0) + return sim >= score_cutoff + + +def _jaro_common_char_filter(pattern_len, text_len, common_chars, score_cutoff): + """ + filter matches below score_cutoff based on string lengths and common characters + """ + if not common_chars: + return False + + sim = _jaro_calculate_similarity(pattern_len, text_len, common_chars, 0) + return sim >= score_cutoff + + +def _jaro_bounds(s1, s2): + """ + find bounds and skip out of bound parts of the sequences + """ + pattern_len = len(s1) + text_len = len(s2) + + # since jaro uses a sliding window some parts of T/P might never be in + # range and can be removed ahead of time + bound = 0 + if text_len > pattern_len: + bound = text_len // 2 - 1 + if text_len > pattern_len + bound: + s2 = s2[:
pattern_len + bound] + else: + bound = pattern_len // 2 - 1 + if pattern_len > text_len + bound: + s1 = s1[: text_len + bound] + return s1, s2, bound + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the jaro similarity + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if not s1 and not s2: + return 1.0 + + if score_cutoff is None: + score_cutoff = 0 + + s1, s2 = conv_sequences(s1, s2) + pattern_len = len(s1) + text_len = len(s2) + + # short circuit if score_cutoff can not be reached + if not _jaro_length_filter(pattern_len, text_len, score_cutoff): + return 0 + + if pattern_len == 1 and text_len == 1: + return float(s1[0] == s2[0]) + + s1, s2, bound = _jaro_bounds(s1, s2) + + s1_flags = [False] * pattern_len + s2_flags = [False] * text_len + + # todo use bitparallel implementation + # looking only within search range, count & flag matched pairs + common_chars = 0 + for i, s1_ch in enumerate(s1): + low = max(0, i - bound) + hi = min(i + bound, text_len - 1) + for j in range(low, hi + 1): + if not s2_flags[j] and s2[j] == s1_ch: + s1_flags[i] = s2_flags[j] = True + common_chars += 1 + break + + # short circuit if score_cutoff can not be reached + if not _jaro_common_char_filter(pattern_len, text_len, common_chars, score_cutoff): + return 0 + + # todo use bitparallel implementation + # count transpositions + k = trans_count = 0 + for i, s1_f in enumerate(s1_flags): + if s1_f: + for j in range(k, text_len): + if s2_flags[j]: + k = j + 1 + break + if s1[i] != s2[j]: + trans_count += 1 + + return _jaro_calculate_similarity(pattern_len, text_len, common_chars, trans_count) + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the normalized jaro similarity + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + normalized similarity : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + """ + return similarity(s1, s2, processor=processor, score_cutoff=score_cutoff) + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the jaro distance + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. 
+ processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + distance : float + distance between s1 and s2 as a float between 1.0 and 0.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + cutoff_distance = None if (score_cutoff is None or score_cutoff > 1.0) else 1.0 - score_cutoff + sim = similarity(s1, s2, score_cutoff=cutoff_distance) + dist = 1.0 - sim + return dist if (score_cutoff is None or dist <= score_cutoff) else 1.0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the normalized jaro distance + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For ratio < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + normalized distance : float + normalized distance between s1 and s2 as a float between 1.0 and 0.0 + """ + return distance(s1, s2, processor=processor, score_cutoff=score_cutoff) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.py new file mode 100644 index 0000000000000000000000000000000000000000..77f506e0625d8521a088e8e0295384cad3688a95 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.py @@ -0,0 +1,116 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = [ + "distance", + "editops", + "normalized_distance", + "normalized_similarity", + "opcodes", + "similarity", +] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as 
editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + lcs_seq_distance as distance, + lcs_seq_editops as editops, + lcs_seq_normalized_distance as normalized_distance, + lcs_seq_normalized_similarity as normalized_similarity, + lcs_seq_opcodes as opcodes, + lcs_seq_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.pyi new file mode 100644 index 0000000000000000000000000000000000000000..c662f99baf4e5a188523265b03a5a59b0ab89e9a --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq.pyi @@ -0,0 +1,105 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +from rapidfuzz.distance import Editops, Opcodes + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... 
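For orientation between the stubs (an illustrative aside, not part of the upstream file): LCSseq_py.py further below defines the distance from the similarity as distance(s1, s2) == max(len(s1), len(s2)) - similarity(s1, s2). A small check built only on the public API typed here, with values taken from the doctests later in this diff:

    from rapidfuzz.distance import LCSseq

    s1, s2 = "lewenstein", "levenshtein"
    # The longest common subsequence of the two strings ("leenstein") has length 9.
    assert LCSseq.similarity(s1, s2) == 9
    # distance == max(len1, len2) - similarity == 11 - 9 == 2
    assert LCSseq.distance(s1, s2) == max(len(s1), len(s2)) - LCSseq.similarity(s1, s2) == 2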
+@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def editops( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Editops: ... +@overload +def editops( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Editops: ... +@overload +def opcodes( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, +) -> Opcodes: ... +@overload +def opcodes( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], +) -> Opcodes: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq_py.py new file mode 100644 index 0000000000000000000000000000000000000000..00b26af4c7c475ec9a76cb669b0a9effa22b6075 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/LCSseq_py.py @@ -0,0 +1,426 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import common_affix, conv_sequences +from rapidfuzz._utils import is_none, setupPandas +from rapidfuzz.distance._initialize_py import Editop, Editops + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the length of the longest common subsequence + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if not s1: + return 0 + + s1, s2 = conv_sequences(s1, s2) + S = (1 << len(s1)) - 1 + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + for ch2 in s2: + Matches = block_get(ch2, 0) + u = S & Matches + S = (S + u) | (S - u) + + # calculate the equivalent of popcount(~S) in C.
This breaks for len(s1) == 0 + res = bin(S)[-len(s1) :].count("0") + return res if (score_cutoff is None or res >= score_cutoff) else 0 + + +def _block_similarity( + block, + s1, + s2, + score_cutoff=None, +): + if not s1: + return 0 + + S = (1 << len(s1)) - 1 + block_get = block.get + + for ch2 in s2: + Matches = block_get(ch2, 0) + u = S & Matches + S = (S + u) | (S - u) + + # calculate the equivalent of popcount(~S) in C. This breaks for len(s1) == 0 + res = bin(S)[-len(s1) :].count("0") + return res if (score_cutoff is None or res >= score_cutoff) else 0 + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the LCS distance in the range [0, max]. + + This is calculated as ``max(len1, len2) - similarity``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Examples + -------- + Find the LCS distance between two strings: + + >>> from rapidfuzz.distance import LCSseq + >>> LCSseq.distance("lewenstein", "levenshtein") + 2 + + Setting a maximum distance allows the implementation to select + a more efficient implementation: + + >>> LCSseq.distance("lewenstein", "levenshtein", score_cutoff=1) + 2 + + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + sim = similarity(s1, s2) + dist = maximum - sim + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized LCS distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if not s1 and not s2: + return 0 + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + norm_dist = distance(s1, s2) / maximum + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized LCS similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare.
+ s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + + Examples + -------- + Find the normalized LCS similarity between two strings: + + >>> from rapidfuzz.distance import LCSseq + >>> LCSseq.normalized_similarity("lewenstein", "levenshtein") + 0.8181818181818181 + + Setting a score_cutoff allows the implementation to select + a more efficient implementation: + + >>> LCSseq.normalized_similarity("lewenstein", "levenshtein", score_cutoff=0.9) + 0.0 + + When a different processor is used s1 and s2 do not have to be strings + + >>> LCSseq.normalized_similarity(["lewenstein"], ["levenshtein"], processor=lambda s: s[0]) + 0.81818181818181 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + norm_sim = 1.0 - normalized_distance(s1, s2) + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 + + +def _matrix(s1, s2): + if not s1: + return (0, []) + + S = (1 << len(s1)) - 1 + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + matrix = [] + for ch2 in s2: + Matches = block_get(ch2, 0) + u = S & Matches + S = (S + u) | (S - u) + matrix.append(S) + + # calculate the equivalent of popcount(~S) in C. This breaks for len(s1) == 0 + sim = bin(S)[-len(s1) :].count("0") + return (sim, matrix) + + +def editops( + s1, + s2, + *, + processor=None, +): + """ + Return Editops describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + + Returns + ------- + editops : Editops + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described in [6]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [6] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004). + + Examples + -------- + >>> from rapidfuzz.distance import LCSseq + >>> for tag, src_pos, dest_pos in LCSseq.editops("qabxcd", "abycdf"): + ... 
print(("%7s s1[%d] s2[%d]" % (tag, src_pos, dest_pos))) + delete s1[0] s2[0] + delete s1[3] s2[2] + insert s1[4] s2[2] + insert s1[6] s2[5] + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + prefix_len, suffix_len = common_affix(s1, s2) + s1 = s1[prefix_len : len(s1) - suffix_len] + s2 = s2[prefix_len : len(s2) - suffix_len] + sim, matrix = _matrix(s1, s2) + + editops = Editops([], 0, 0) + editops._src_len = len(s1) + prefix_len + suffix_len + editops._dest_len = len(s2) + prefix_len + suffix_len + + dist = len(s1) + len(s2) - 2 * sim + if dist == 0: + return editops + + editop_list = [None] * dist + col = len(s1) + row = len(s2) + while row != 0 and col != 0: + # deletion + if matrix[row - 1] & (1 << (col - 1)): + dist -= 1 + col -= 1 + editop_list[dist] = Editop("delete", col + prefix_len, row + prefix_len) + else: + row -= 1 + + # insertion + if row and not (matrix[row - 1] & (1 << (col - 1))): + dist -= 1 + editop_list[dist] = Editop("insert", col + prefix_len, row + prefix_len) + # match + else: + col -= 1 + + while col != 0: + dist -= 1 + col -= 1 + editop_list[dist] = Editop("delete", col + prefix_len, row + prefix_len) + + while row != 0: + dist -= 1 + row -= 1 + editop_list[dist] = Editop("insert", col + prefix_len, row + prefix_len) + + editops._editops = editop_list + return editops + + +def opcodes( + s1, + s2, + *, + processor=None, +): + """ + Return Opcodes describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + + Returns + ------- + opcodes : Opcodes + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described in [7]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [7] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004). + + Examples + -------- + >>> from rapidfuzz.distance import LCSseq + + >>> a = "qabxcd" + >>> b = "abycdf" + >>> for tag, i1, i2, j1, j2 in LCSseq.opcodes(a, b): + ... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % + ... 
(tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) + delete a[0:1] (q) b[0:0] () + equal a[1:3] (ab) b[0:2] (ab) + delete a[3:4] (x) b[2:2] () + insert a[4:4] () b[2:3] (y) + equal a[4:6] (cd) b[3:5] (cd) + insert a[6:6] () b[5:6] (f) + """ + return editops(s1, s2, processor=processor).as_opcodes() diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.py new file mode 100644 index 0000000000000000000000000000000000000000..9a22f326b06bce42fc64de53523657d02612f379 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.py @@ -0,0 +1,116 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = [ + "distance", + "editops", + "normalized_distance", + "normalized_similarity", + "opcodes", + "similarity", +] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity 
as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + levenshtein_distance as distance, + levenshtein_editops as editops, + levenshtein_normalized_distance as normalized_distance, + levenshtein_normalized_similarity as normalized_similarity, + levenshtein_opcodes as opcodes, + levenshtein_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.pyi new file mode 100644 index 0000000000000000000000000000000000000000..8bd26720dfe101640e1e1da59416ff25e7cdfc3a --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein.pyi @@ -0,0 +1,131 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +""" +The Levenshtein (edit) distance is a string metric to measure the +difference between two strings/sequences s1 and s2. +It's defined as the minimum number of insertions, deletions or +substitutions required to transform s1 into s2. +""" + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +from rapidfuzz.distance import Editops, Opcodes + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: None = None, + score_cutoff: int | None = None, + score_hint: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, + score_hint: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: None = None, + score_cutoff: float | None = 0, + score_hint: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, + score_hint: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: None = None, + score_cutoff: int | None = None, + score_hint: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, + score_hint: int | None = None, +) -> int: ... 
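A short illustration of the weights parameter typed above (a sketch, not part of the upstream stub file; the expected values come from the doctests in Levenshtein_py.py later in this diff). The tuple is ordered (insertion, deletion, substitution):

    from rapidfuzz.distance import Levenshtein

    # Uniform weights: "lewenstein" -> "levenshtein" needs one substitution
    # (w -> v) and one insertion (h), so the distance is 2.
    assert Levenshtein.distance("lewenstein", "levenshtein") == 2

    # Making a substitution cost 2 prices the same alignment at 2 + 1 == 3.
    # Levenshtein_py.py special-cases weights=(1, 1, 2), where the metric
    # reduces to the Indel distance (a substitution equals delete + insert).
    assert Levenshtein.distance("lewenstein", "levenshtein", weights=(1, 1, 2)) == 3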
+@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: None = None, + score_cutoff: float | None = 0, + score_hint: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + weights: tuple[int, int, int] | None = (1, 1, 1), + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, + score_hint: float | None = 0, +) -> float: ... +@overload +def editops( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_hint: int | None = None, +) -> Editops: ... +@overload +def editops( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_hint: int | None = None, +) -> Editops: ... +@overload +def opcodes( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_hint: int | None = None, +) -> Opcodes: ... +@overload +def opcodes( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_hint: int | None = None, +) -> Opcodes: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein_py.py new file mode 100644 index 0000000000000000000000000000000000000000..25d568e5e8fb5dcf0ace2b6a4aec87cf608a18f9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Levenshtein_py.py @@ -0,0 +1,571 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import common_affix, conv_sequences +from rapidfuzz._utils import is_none, setupPandas +from rapidfuzz.distance import Indel_py as Indel +from rapidfuzz.distance._initialize_py import Editop, Editops + + +def _levenshtein_maximum(s1, s2, weights): + len1 = len(s1) + len2 = len(s2) + insert, delete, replace = weights + + max_dist = len1 * delete + len2 * insert + + if len1 >= len2: + max_dist = min(max_dist, len2 * replace + (len1 - len2) * delete) + else: + max_dist = min(max_dist, len1 * replace + (len2 - len1) * insert) + + return max_dist + + +def _uniform_generic(s1, s2, weights): + len1 = len(s1) + insert, delete, replace = weights + cache = list(range(0, (len1 + 1) * delete, delete)) + + for ch2 in s2: + temp = cache[0] + cache[0] += insert + for i in range(len1): + x = temp + if s1[i] != ch2: + x = min(cache[i] + delete, cache[i + 1] + insert, temp + replace) + temp = cache[i + 1] + cache[i + 1] = x + + return cache[-1] + + +def _uniform_distance(s1, s2): + if not s1: + return len(s2) + + VP = (1 << len(s1)) - 1 + VN = 0 + currDist = len(s1) + mask = 1 << (len(s1) - 1) + + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + for ch2 in s2: + # Step 1: Computing D0 + PM_j = block_get(ch2, 0) + X = PM_j + D0 = (((X & VP) + VP) ^ VP) | X | VN + # Step 2: Computing HP and HN + HP = VN | ~(D0 | VP) + HN = D0 & VP + # Step 3: Computing the value D[m,j] + currDist += (HP & mask) != 0 + currDist -= (HN & mask) != 0 + # Step 4: Computing Vp and VN + HP = (HP << 1) | 1 + HN = HN << 1 + VP = HN | ~(D0 | HP) + VN = HP & D0 + + return currDist + + +def distance( + s1, + s2, + *, + weights=(1, 1, 1), + processor=None, + score_cutoff=None, + 
score_hint=None, +): + """ + Calculates the minimum number of insertions, deletions, and substitutions + required to change one sequence into the other according to Levenshtein with custom + costs for insertion, deletion and substitution + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + weights : tuple[int, int, int] or None, optional + The weights for the three operations in the form + (insertion, deletion, substitution). Default is (1, 1, 1), + which gives all three operations a weight of 1. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + score_hint : int, optional + Expected distance between s1 and s2. This is used to select a + faster implementation. Default is None, which deactivates this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Raises + ------ + ValueError + If unsupported weights are provided a ValueError is thrown + + Examples + -------- + Find the Levenshtein distance between two strings: + + >>> from rapidfuzz.distance import Levenshtein + >>> Levenshtein.distance("lewenstein", "levenshtein") + 2 + + Setting a maximum distance allows the implementation to select + a more efficient implementation: + + >>> Levenshtein.distance("lewenstein", "levenshtein", score_cutoff=1) + 2 + + It is possible to select different weights by passing a `weights` + tuple. + + >>> Levenshtein.distance("lewenstein", "levenshtein", weights=(1,1,2)) + 3 + """ + _ = score_hint + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + if weights is None or weights == (1, 1, 1): + dist = _uniform_distance(s1, s2) + elif weights == (1, 1, 2): + dist = Indel.distance(s1, s2) + else: + dist = _uniform_generic(s1, s2, weights) + + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + weights=(1, 1, 1), + processor=None, + score_cutoff=None, + score_hint=None, +): + """ + Calculates the levenshtein similarity in the range [max, 0] using custom + costs for insertion, deletion and substitution. + + This is calculated as ``max - distance``, where max is the maximal possible + Levenshtein distance given the lengths of the sequences s1/s2 and the weights. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + weights : tuple[int, int, int] or None, optional + The weights for the three operations in the form + (insertion, deletion, substitution). Default is (1, 1, 1), + which gives all three operations a weight of 1. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour. + score_hint : int, optional + Expected similarity between s1 and s2. This is used to select a + faster implementation.
Default is None, which deactivates this behaviour. + + Returns + ------- + similarity : int + similarity between s1 and s2 + + Raises + ------ + ValueError + If unsupported weights are provided a ValueError is thrown + """ + _ = score_hint + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + weights = weights or (1, 1, 1) + maximum = _levenshtein_maximum(s1, s2, weights) + dist = distance(s1, s2, weights=weights) + sim = maximum - dist + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + weights=(1, 1, 1), + processor=None, + score_cutoff=None, + score_hint=None, +): + """ + Calculates a normalized levenshtein distance in the range [1, 0] using custom + costs for insertion, deletion and substitution. + + This is calculated as ``distance / max``, where max is the maximal possible + Levenshtein distance given the lengths of the sequences s1/s2 and the weights. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + weights : tuple[int, int, int] or None, optional + The weights for the three operations in the form + (insertion, deletion, substitution). Default is (1, 1, 1), + which gives all three operations a weight of 1. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is None, + which deactivates this behaviour. + score_hint : float, optional + Expected normalized distance between s1 and s2. This is used to select a + faster implementation. Default is None, which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 1.0 and 0.0 + + Raises + ------ + ValueError + If unsupported weights are provided a ValueError is thrown + """ + _ = score_hint + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + weights = weights or (1, 1, 1) + maximum = _levenshtein_maximum(s1, s2, weights) + dist = distance(s1, s2, weights=weights) + norm_dist = dist / maximum if maximum else 0 + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def normalized_similarity( + s1, + s2, + *, + weights=(1, 1, 1), + processor=None, + score_cutoff=None, + score_hint=None, +): + """ + Calculates a normalized levenshtein similarity in the range [0, 1] using custom + costs for insertion, deletion and substitution. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + weights : tuple[int, int, int] or None, optional + The weights for the three operations in the form + (insertion, deletion, substitution). Default is (1, 1, 1), + which gives all three operations a weight of 1. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. 
Default is None, + which deactivates this behaviour. + score_hint : float, optional + Expected normalized similarity between s1 and s2. This is used to select a + faster implementation. Default is None, which deactivates this behaviour. + + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + + Raises + ------ + ValueError + If unsupported weights are provided a ValueError is thrown + + Examples + -------- + Find the normalized Levenshtein similarity between two strings: + + >>> from rapidfuzz.distance import Levenshtein + >>> Levenshtein.normalized_similarity("lewenstein", "levenshtein") + 0.81818181818181 + + Setting a score_cutoff allows the implementation to select + a more efficient implementation: + + >>> Levenshtein.normalized_similarity("lewenstein", "levenshtein", score_cutoff=0.85) + 0.0 + + It is possible to select different weights by passing a `weights` + tuple. + + >>> Levenshtein.normalized_similarity("lewenstein", "levenshtein", weights=(1,1,2)) + 0.85714285714285 + + When a different processor is used s1 and s2 do not have to be strings + + >>> Levenshtein.normalized_similarity(["lewenstein"], ["levenshtein"], processor=lambda s: s[0]) + 0.81818181818181 + """ + _ = score_hint + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + weights = weights or (1, 1, 1) + norm_dist = normalized_distance(s1, s2, weights=weights) + norm_sim = 1.0 - norm_dist + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 + + +def _matrix(s1, s2): + if not s1: + return (len(s2), [], []) + + VP = (1 << len(s1)) - 1 + VN = 0 + currDist = len(s1) + mask = 1 << (len(s1) - 1) + + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + matrix_VP = [] + matrix_VN = [] + for ch2 in s2: + # Step 1: Computing D0 + PM_j = block_get(ch2, 0) + X = PM_j + D0 = (((X & VP) + VP) ^ VP) | X | VN + # Step 2: Computing HP and HN + HP = VN | ~(D0 | VP) + HN = D0 & VP + # Step 3: Computing the value D[m,j] + currDist += (HP & mask) != 0 + currDist -= (HN & mask) != 0 + # Step 4: Computing Vp and VN + HP = (HP << 1) | 1 + HN = HN << 1 + VP = HN | ~(D0 | HP) + VN = HP & D0 + + matrix_VP.append(VP) + matrix_VN.append(VN) + + return (currDist, matrix_VP, matrix_VN) + + +def editops( + s1, + s2, + *, + processor=None, + score_hint=None, +): + """ + Return Editops describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_hint : int, optional + Expected distance between s1 and s2. This is used to select a + faster implementation. Default is None, which deactivates this behaviour. + + Returns + ------- + editops : Editops + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described in [8]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [8] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004).
+ + Examples + -------- + >>> from rapidfuzz.distance import Levenshtein + >>> for tag, src_pos, dest_pos in Levenshtein.editops("qabxcd", "abycdf"): + ... print(("%7s s1[%d] s2[%d]" % (tag, src_pos, dest_pos))) + delete s1[0] s2[0] + replace s1[3] s2[2] + insert s1[6] s2[5] + """ + _ = score_hint + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + prefix_len, suffix_len = common_affix(s1, s2) + s1 = s1[prefix_len : len(s1) - suffix_len] + s2 = s2[prefix_len : len(s2) - suffix_len] + dist, VP, VN = _matrix(s1, s2) + + editops = Editops([], 0, 0) + editops._src_len = len(s1) + prefix_len + suffix_len + editops._dest_len = len(s2) + prefix_len + suffix_len + + if dist == 0: + return editops + + editop_list = [None] * dist + col = len(s1) + row = len(s2) + while row != 0 and col != 0: + # deletion + if VP[row - 1] & (1 << (col - 1)): + dist -= 1 + col -= 1 + editop_list[dist] = Editop("delete", col + prefix_len, row + prefix_len) + else: + row -= 1 + + # insertion + if row and (VN[row - 1] & (1 << (col - 1))): + dist -= 1 + editop_list[dist] = Editop("insert", col + prefix_len, row + prefix_len) + else: + col -= 1 + + # replace (Matches are not recorded) + if s1[col] != s2[row]: + dist -= 1 + editop_list[dist] = Editop("replace", col + prefix_len, row + prefix_len) + + while col != 0: + dist -= 1 + col -= 1 + editop_list[dist] = Editop("delete", col + prefix_len, row + prefix_len) + + while row != 0: + dist -= 1 + row -= 1 + editop_list[dist] = Editop("insert", col + prefix_len, row + prefix_len) + + editops._editops = editop_list + return editops + + +def opcodes( + s1, + s2, + *, + processor=None, + score_hint=None, +): + """ + Return Opcodes describing how to turn s1 into s2. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor : callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_hint : int, optional + Expected distance between s1 and s2. This is used to select a + faster implementation. Default is None, which deactivates this behaviour. + + Returns + ------- + opcodes : Opcodes + edit operations required to turn s1 into s2 + + Notes + ----- + The alignment is calculated using an algorithm of Heikki Hyyrö, which is + described [9]_. It has a time complexity and memory usage of ``O([N/64] * M)``. + + References + ---------- + .. [9] Hyyrö, Heikki. "A Note on Bit-Parallel Alignment Computation." + Stringology (2004). + + Examples + -------- + >>> from rapidfuzz.distance import Levenshtein + + >>> a = "qabxcd" + >>> b = "abycdf" + >>> for tag, i1, i2, j1, j2 in Levenshtein.opcodes("qabxcd", "abycdf"): + ... print(("%7s a[%d:%d] (%s) b[%d:%d] (%s)" % + ...
(tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))) + delete a[0:1] (q) b[0:0] () + equal a[1:3] (ab) b[0:2] (ab) + replace a[3:4] (x) b[2:3] (y) + equal a[4:6] (cd) b[3:5] (cd) + insert a[6:6] () b[5:6] (f) + """ + return editops(s1, s2, processor=processor, score_hint=score_hint).as_opcodes() diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.py new file mode 100644 index 0000000000000000000000000000000000000000..803cf3e380673e37905ac46e6bb923a5e7d91144 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", "normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + osa_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + osa_distance as distance, + osa_normalized_distance as normalized_distance, + osa_normalized_similarity as normalized_similarity, + 
osa_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.pyi new file mode 100644 index 0000000000000000000000000000000000000000..db0de6253bf0d81102f9e27d2598b1836e428efc --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA.pyi @@ -0,0 +1,75 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... 
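[Editor's note] Every generated front-end module in this diff (OSA here, Postfix and Prefix below, and `_initialize` later) uses the same fallback ladder: AVX2 C++ extension, then SSE2, then the generic C++ extension, then the pure-Python module. Setting RAPIDFUZZ_IMPLEMENTATION to "cpp" or "python" pins one side of the ladder. A minimal usage sketch, assuming a working install:

import os

# Pin the pure-Python backend. This has to happen before rapidfuzz is first
# imported, because the generated modules read the variable at import time.
os.environ["RAPIDFUZZ_IMPLEMENTATION"] = "python"

from rapidfuzz.distance import OSA

print(OSA.distance("CA", "AC"))  # 1, a single adjacent transposition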
diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA_py.py new file mode 100644 index 0000000000000000000000000000000000000000..43076538783d06b625611bb93104bcefc4575614 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/OSA_py.py @@ -0,0 +1,232 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas + + +def _osa_distance_hyrroe2003(s1, s2): + if not s1: + return len(s2) + + VP = (1 << len(s1)) - 1 + VN = 0 + D0 = 0 + PM_j_old = 0 + currDist = len(s1) + mask = 1 << (len(s1) - 1) + + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + for ch2 in s2: + # Step 1: Computing D0 + PM_j = block_get(ch2, 0) + TR = (((~D0) & PM_j) << 1) & PM_j_old + D0 = (((PM_j & VP) + VP) ^ VP) | PM_j | VN + D0 = D0 | TR + + # Step 2: Computing HP and HN + HP = VN | ~(D0 | VP) + HN = D0 & VP + + # Step 3: Computing the value D[m,j] + currDist += (HP & mask) != 0 + currDist -= (HN & mask) != 0 + + # Step 4: Computing Vp and VN + HP = (HP << 1) | 1 + HN = HN << 1 + VP = HN | ~(D0 | HP) + VN = HP & D0 + PM_j_old = PM_j + + return currDist + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the optimal string alignment (OSA) distance. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + + Examples + -------- + Find the OSA distance between two strings: + + >>> from rapidfuzz.distance import OSA + >>> OSA.distance("CA", "AC") + 1 + >>> OSA.distance("CA", "ABC") + 3 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + dist = _osa_distance_hyrroe2003(s1, s2) + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the optimal string alignment (OSA) similarity in the range [max, 0]. + + This is calculated as ``max(len1, len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour.
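[Editor's note] `_osa_distance_hyrroe2003` extends the Levenshtein recurrence with the transposition mask TR, built from the previous pattern-match mask PM_j_old. The textbook O(len1 * len2) dynamic program below (an independent reference sketch; `osa_dp` is not library code) spells out the semantics the bit-parallel version implements: an adjacent transposition costs one operation, but no substring is edited twice:

def osa_dp(s1, s2):
    # Optimal string alignment: insert/delete/substitute cost 1, plus a
    # transposition of two adjacent characters that were not edited before.
    m, n = len(s1), len(s2)
    d = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(m + 1):
        d[i][0] = i
    for j in range(n + 1):
        d[0][j] = j
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            cost = 0 if s1[i - 1] == s2[j - 1] else 1
            d[i][j] = min(d[i - 1][j] + 1,         # deletion
                          d[i][j - 1] + 1,         # insertion
                          d[i - 1][j - 1] + cost)  # substitution
            if i > 1 and j > 1 and s1[i - 1] == s2[j - 2] and s1[i - 2] == s2[j - 1]:
                d[i][j] = min(d[i][j], d[i - 2][j - 2] + 1)  # transposition
    return d[m][n]


assert osa_dp("CA", "AC") == 1   # one adjacent transposition
assert osa_dp("CA", "ABC") == 3  # the transposed pair cannot be edited again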
+ + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2) + sim = maximum - dist + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized optimal string alignment (OSA) distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + dist = distance(s1, s2) + norm_dist = dist / maximum if maximum else 0 + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized optimal string alignment (OSA) similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour.
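[Editor's note] The three OSA scores are tied together by the identities norm_dist = distance / max(len1, len2) and norm_sim = 1 - norm_dist, both taken directly from the function bodies above. A small check, assuming the pure-Python backend:

from rapidfuzz.distance import OSA

s1, s2 = "CA", "ABC"
dist = OSA.distance(s1, s2)  # 3 edit operations

assert OSA.similarity(s1, s2) == max(len(s1), len(s2)) - dist           # 0
assert OSA.normalized_distance(s1, s2) == dist / max(len(s1), len(s2))  # 1.0
assert OSA.normalized_similarity(s1, s2) == 1.0 - OSA.normalized_distance(s1, s2)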
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + norm_dist = normalized_distance(s1, s2) + norm_sim = 1.0 - norm_dist + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0 diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.py new file mode 100644 index 0000000000000000000000000000000000000000..df8f71c1509ffb8d648741ff951544f707d1ac4d --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", "normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as 
normalized_similarity, + postfix_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + postfix_distance as distance, + postfix_normalized_distance as normalized_distance, + postfix_normalized_similarity as normalized_similarity, + postfix_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.pyi new file mode 100644 index 0000000000000000000000000000000000000000..db0de6253bf0d81102f9e27d2598b1836e428efc --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix.pyi @@ -0,0 +1,75 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix_py.py new file mode 100644 index 0000000000000000000000000000000000000000..3fe19fd39f297b3f3306ee56034adedf648ac6a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Postfix_py.py @@ -0,0 +1,182 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the postfix distance between two strings. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. 
Default is None, which deactivates this behaviour. + score_cutoff : int or None, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + sim = similarity(s1, s2) + dist = maximum - sim + + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the postfix similarity between two strings. + + This is calculated as ``max(len1, len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + sim = 0 + for ch1, ch2 in zip(reversed(s1), reversed(s2)): + if ch1 != ch2: + break + sim += 1 + + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized postfix distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + norm_sim = normalized_similarity(s1, s2, processor=processor) + norm_dist = 1.0 - norm_sim + + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1.0 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized postfix similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour.
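[Editor's note] The postfix metrics are far simpler than the edit distances above: similarity is just the length of the common suffix (the reversed zip loop in `similarity`), and distance is max(len1, len2) minus that length, as in the corrected formulas. A short usage sketch:

from rapidfuzz.distance import Postfix

# The common suffix of the two inputs is " pie", 4 characters long.
assert Postfix.similarity("apple pie", "pumpkin pie") == 4
# distance is the longer length minus the common suffix length.
assert Postfix.distance("apple pie", "pumpkin pie") == max(9, 11) - 4  # 7
# normalized_similarity scales the suffix length by the longer input.
assert Postfix.normalized_similarity("apple pie", "pumpkin pie") == 4 / 11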
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + sim = similarity(s1, s2) + norm_sim = sim / maximum if maximum else 1.0 + + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0.0 diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.py new file mode 100644 index 0000000000000000000000000000000000000000..e20c19d59b4a772c5021d6e998879ffd7aec07b4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["distance", "normalized_distance", "normalized_similarity", "similarity"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) +elif _impl == "python": + from rapidfuzz.distance.metrics_py import ( + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance.metrics_cpp import ( # pyright: ignore[reportMissingImports] + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as 
normalized_similarity, + prefix_similarity as similarity, + ) + + imported = True + + if not imported: + from rapidfuzz.distance.metrics_py import ( + prefix_distance as distance, + prefix_normalized_distance as normalized_distance, + prefix_normalized_similarity as normalized_similarity, + prefix_similarity as similarity, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.pyi new file mode 100644 index 0000000000000000000000000000000000000000..db0de6253bf0d81102f9e27d2598b1836e428efc --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix.pyi @@ -0,0 +1,75 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_distance( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_distance( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: int | None = None, +) -> int: ... +@overload +def similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: int | None = None, +) -> int: ... +@overload +def normalized_similarity( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def normalized_similarity( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix_py.py new file mode 100644 index 0000000000000000000000000000000000000000..7e118acca0923cf42e15069a9efd8e2ac2e8f21c --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/Prefix_py.py @@ -0,0 +1,182 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import is_none, setupPandas + + +def distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the Prefix distance between two strings. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. 
+ score_cutoff : int or None, optional + Maximum distance between s1 and s2, that is + considered as a result. If the distance is bigger than score_cutoff, + score_cutoff + 1 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + distance : int + distance between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + sim = similarity(s1, s2) + dist = maximum - sim + + return dist if (score_cutoff is None or dist <= score_cutoff) else score_cutoff + 1 + + +def similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the prefix similarity between two strings. + + This is calculated as ``max(len1, len2) - distance``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : int, optional + Minimum similarity between s1 and s2, that is + considered as a result. If the similarity is smaller than score_cutoff, + 0 is returned instead. Default is None, which deactivates + this behaviour. + + Returns + ------- + similarity : int + similarity between s1 and s2 + """ + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + sim = 0 + for ch1, ch2 in zip(s1, s2): + if ch1 != ch2: + break + sim += 1 + + return sim if (score_cutoff is None or sim >= score_cutoff) else 0 + + +def normalized_distance( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized prefix distance in the range [1, 0]. + + This is calculated as ``distance / max(len1, len2)``. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_dist > score_cutoff 1.0 is returned instead. Default is None, + which deactivates this behaviour. + + Returns + ------- + norm_dist : float + normalized distance between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 1.0 + + norm_sim = normalized_similarity(s1, s2, processor=processor) + norm_dist = 1.0 - norm_sim + + return norm_dist if (score_cutoff is None or norm_dist <= score_cutoff) else 1.0 + + +def normalized_similarity( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a normalized prefix similarity in the range [0, 1]. + + This is calculated as ``1 - normalized_distance`` + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 1.0. + For norm_sim < score_cutoff 0 is returned instead. Default is None, + which deactivates this behaviour.
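[Editor's note] Prefix mirrors Postfix with the loop running from the front of both sequences: similarity is the common-prefix length and distance is max(len1, len2) minus it. For example:

from rapidfuzz.distance import Prefix

# "progress" and "programmer" share the 5-character prefix "progr".
assert Prefix.similarity("progress", "programmer") == 5
assert Prefix.distance("progress", "programmer") == max(8, 10) - 5  # 5
assert Prefix.normalized_distance("progress", "programmer") == 1 - 5 / 10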
+ + Returns + ------- + norm_sim : float + normalized similarity between s1 and s2 as a float between 0 and 1.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0.0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + maximum = max(len(s1), len(s2)) + sim = similarity(s1, s2) + norm_sim = sim / maximum if maximum else 1.0 + + return norm_sim if (score_cutoff is None or norm_sim >= score_cutoff) else 0.0 diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5beec47a7a604b19a20a2040221ae4b5ff52944e --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.py @@ -0,0 +1,37 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from . import ( + OSA, + DamerauLevenshtein, + Hamming, + Indel, + Jaro, + JaroWinkler, + LCSseq, + Levenshtein, + Postfix, + Prefix, +) +from ._initialize import Editop, Editops, MatchingBlock, Opcode, Opcodes, ScoreAlignment + +__all__ = [ + "OSA", + "DamerauLevenshtein", + "Editop", + "Editops", + "Hamming", + "Indel", + "Jaro", + "JaroWinkler", + "LCSseq", + "Levenshtein", + "MatchingBlock", + "Opcode", + "Opcodes", + "Postfix", + "Prefix", + "ScoreAlignment", +] diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..219b44731ca75a1eb230ae15dc5c9d75dfd9173a --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__init__.pyi @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from . 
import ( + OSA as OSA, + DamerauLevenshtein as DamerauLevenshtein, + Hamming as Hamming, + Indel as Indel, + Jaro as Jaro, + JaroWinkler as JaroWinkler, + LCSseq as LCSseq, + Levenshtein as Levenshtein, + Postfix as Postfix, + Prefix as Prefix, +) +from ._initialize import ( + Editop as Editop, + Editops as Editops, + MatchingBlock as MatchingBlock, + Opcode as Opcode, + Opcodes as Opcodes, + ScoreAlignment as ScoreAlignment, +) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4550c3b43e7852ab2cbbe984a77a3fc12fa44998 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55bd17d78e4abd4d13dd68fbb9adf3d678f90118 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/DamerauLevenshtein_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..163cf5a7164f0d1927cdb0674a9ec5c955d848d4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7807441f247d86b448fef094dce7aac87319cc21 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Hamming_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e4e55a3d80b87529e96d62c4f9365042a21bfcd Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6319faf766435ec2e85293087a4d0c12625f3578 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Indel_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..deb33ad47debc6bd552d1bc8ed33b98c847422c0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..56f3ffaa92443e9a3d48c4e4e206fc24643ce828 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa879cc462fd34e706ba2217fa42b07749129905 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/JaroWinkler_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4d6e8bd88d41495014abaece49f3d38691a6322 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Jaro_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96160613d8985f1c252d1511e5436967374852c1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4cfb6203e82fb88c38a7ac23c9d74f7e10ead587 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/LCSseq_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94a224b3970fe3f2b1c8e96de3e005be80c227f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b484247854d6845ce805e608e8bcc4659b15de0c Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Levenshtein_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fed1e2aab9253098c09a51db33597b9870fd3541 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..22794056fc3f1d0e20623b69fa1e2bbe9bcd8e90 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/OSA_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74de57196dfb1592157b11cb7102d0db73ef6ac4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bda33ce1f597724a9a683f957f120a0e7e2bd4e6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Postfix_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3bfcaf4019db5dc4a0fde53cf96097a8c6e9f498 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3cbf74eddd4543b7d55ee3e7b42eee2c67572611 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/Prefix_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..40861376f5d9329d39b1b9a5ac64018006bb308c Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aef5064949ed966c73122c30101a87738e1aaea2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cf22f63a743414fb981785b97a9bbed2e41aecf5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/_initialize_py.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/metrics_py.cpython-310.pyc b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/metrics_py.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..35611090e28f7dcda6906a07e481923b26bd631d Binary files /dev/null and b/venv/lib/python3.10/site-packages/rapidfuzz/distance/__pycache__/metrics_py.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.py new file mode 100644 index 0000000000000000000000000000000000000000..a38fcdb60671c2240a8d72f214c2961346e7efdf --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.py @@ -0,0 +1,109 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["Editop", "Editops", "MatchingBlock", "Opcode", "Opcodes", "ScoreAlignment"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance._initialize_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance._initialize_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) + + imported = True + + if not imported: + from rapidfuzz.distance._initialize_cpp import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) +elif _impl == "python": + from rapidfuzz.distance._initialize_py import ( + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance._initialize_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.distance._initialize_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.distance._initialize_cpp import ( # pyright: ignore[reportMissingImports] + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) + + imported = True + + if not imported: + from rapidfuzz.distance._initialize_py import ( + Editop, + Editops, + MatchingBlock, + Opcode, + Opcodes, + ScoreAlignment, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.pyi new file mode 100644 index 0000000000000000000000000000000000000000..3d6eaea8c198b2c1c9c9317ebd1e60a73dfd03de --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize.pyi @@ -0,0 +1,133 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import 
annotations + +from collections.abc import Iterator + +_AnyOpList = list[Editop | tuple[str, int, int]] | list[Opcode | tuple[str, int, int, int, int]] + +class MatchingBlock: + a: int + b: int + size: int + + def __init__(self, a: int, b: int, size: int): ... + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __getitem__(self, i: int) -> int: ... + def __iter__(self) -> Iterator[int]: ... + def __repr__(self) -> str: ... + +class Editop: + tag: str + src_pos: int + dest_pos: int + + def __init__(self, tag: str, src_pos: int, dest_pos: int): ... + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __getitem__(self, i: int) -> int | str: ... + def __iter__(self) -> Iterator[int | str]: ... + def __repr__(self) -> str: ... + +class Editops: + _src_len: int + _dest_len: int + _editops: list[Editop] + + def __init__( + self, + editops: _AnyOpList | None = None, + src_len: int = 0, + dest_len: int = 0, + ): ... + @classmethod + def from_opcodes(cls, opcodes: Opcodes) -> Editops: ... + def as_matching_blocks(self) -> list[MatchingBlock]: ... + def as_list(self) -> list[Editop]: ... + def copy(self) -> Editops: ... + def inverse(self) -> Editops: ... + def remove_subsequence(self, subsequence: Editops) -> None: ... + def apply(self, source_string: str, destination_string: str) -> str: ... + @property + def src_len(self) -> int: ... + @src_len.setter + def src_len(self, value: int) -> None: ... + @property + def dest_len(self) -> int: ... + @dest_len.setter + def dest_len(self, value: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + def __delitem__(self, key: int | slice) -> None: ... + def __getitem__(self, key: int | slice) -> Editops | Editop: ... + def __iter__(self) -> Iterator[Editop]: ... + def __repr__(self) -> str: ... + +class Opcode: + tag: str + src_start: int + src_end: int + dest_start: int + dest_end: int + + def __init__(self, tag: str, src_start: int, src_end: int, dest_start: int, dest_end: int): ... + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __getitem__(self, i: int) -> int | str: ... + def __iter__(self) -> Iterator[int | str]: ... + +class Opcodes: + _src_len: int + _dest_len: int + _opcodes: list[Opcode] + + def __init__( + self, + opcodes: _AnyOpList | None = None, + src_len: int = 0, + dest_len: int = 0, + ): ... + @classmethod + def from_editops(cls, editops: Editops) -> Opcodes: ... + def as_editops(self) -> Editops: ... + def as_matching_blocks(self) -> list[MatchingBlock]: ... + def as_list(self) -> list[Opcode]: ... + def copy(self) -> Opcodes: ... + def inverse(self) -> Opcodes: ... + def apply(self, source_string: str, destination_string: str) -> str: ... + @property + def src_len(self) -> int: ... + @src_len.setter + def src_len(self, value: int) -> None: ... + @property + def dest_len(self) -> int: ... + @dest_len.setter + def dest_len(self, value: int) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __len__(self) -> int: ... + def __getitem__(self, key: int) -> Opcode: ... + def __iter__(self) -> Iterator[Opcode]: ... + def __repr__(self) -> str: ... + +class ScoreAlignment: + score: int | float + src_start: int + src_end: int + dest_start: int + dest_end: int + + def __init__( + self, + score: int | float, + src_start: int, + src_end: int, + dest_start: int, + dest_end: int, + ): ... + def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... 
+ def __getitem__(self, i: int) -> int | float: ... + def __iter__(self) -> Iterator[int | float]: ... + def __repr__(self) -> str: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize_py.py new file mode 100644 index 0000000000000000000000000000000000000000..791020a3b3a2d8029e796d0bbba1bf716a78d1f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/_initialize_py.py @@ -0,0 +1,884 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + + +def _list_to_editops( + ops, + src_len, + dest_len, +): + if not ops: + return [] + + if len(ops[0]) == 5: + return Opcodes(ops, src_len, dest_len).as_editops()._editops + + blocks = [] + for op in ops: + edit_type, src_pos, dest_pos = op + + if src_pos > src_len or dest_pos > dest_len: + msg = "List of edit operations invalid" + raise ValueError(msg) + + if src_pos == src_len and edit_type != "insert": + msg = "List of edit operations invalid" + raise ValueError(msg) + if dest_pos == dest_len and edit_type != "delete": + msg = "List of edit operations invalid" + raise ValueError(msg) + + # keep operations are not relevant in editops + if edit_type == "equal": + continue + + blocks.append(Editop(edit_type, src_pos, dest_pos)) + + # validate order of editops + for i in range(len(blocks) - 1): + if blocks[i + 1].src_pos < blocks[i].src_pos or blocks[i + 1].dest_pos < blocks[i].dest_pos: + msg = "List of edit operations out of order" + raise ValueError(msg) + if blocks[i + 1].src_pos == blocks[i].src_pos and blocks[i + 1].dest_pos == blocks[i].dest_pos: + msg = "Duplicated edit operation" + raise ValueError(msg) + + return blocks + + +def _list_to_opcodes( + ops, + src_len, + dest_len, +): + if not ops or len(ops[0]) == 3: + return Editops(ops, src_len, dest_len).as_opcodes()._opcodes + + blocks = [] + for op in ops: + edit_type, src_start, src_end, dest_start, dest_end = op + + if src_end > src_len or dest_end > dest_len: + msg = "List of edit operations invalid" + raise ValueError(msg) + if src_end < src_start or dest_end < dest_start: + msg = "List of edit operations invalid" + raise ValueError(msg) + + if edit_type in {"equal", "replace"} and (src_end - src_start != dest_end - dest_start or src_start == src_end): + msg = "List of edit operations invalid" + raise ValueError(msg) + if edit_type == "insert" and (src_start != src_end or dest_start == dest_end): + msg = "List of edit operations invalid" + raise ValueError(msg) + if edit_type == "delete" and (src_start == src_end or dest_start != dest_end): + msg = "List of edit operations invalid" + raise ValueError(msg) + + # merge similar adjacent blocks + if blocks and ( + blocks[-1].tag == edit_type and blocks[-1].src_end == src_start and blocks[-1].dest_end == dest_start + ): + blocks[-1].src_end = src_end + blocks[-1].dest_end = dest_end + continue + + blocks.append(Opcode(edit_type, src_start, src_end, dest_start, dest_end)) + + # check if edit operations span the complete string + if blocks[0].src_start != 0 or blocks[0].dest_start != 0: + msg = "List of edit operations does not start at position 0" + raise ValueError(msg) + if blocks[-1].src_end != src_len or blocks[-1].dest_end != dest_len: + msg = "List of edit operations does not end at the string ends" + raise ValueError(msg) + for i in range(len(blocks) - 1): + if blocks[i + 1].src_start != blocks[i].src_end or blocks[i + 1].dest_start != blocks[i].dest_end: + msg = "List 
of edit operations is not continuous" + raise ValueError(msg) + + return blocks + + +class MatchingBlock: + """ + Triple describing matching subsequences + """ + + def __init__(self, a, b, size): + self.a = a + self.b = b + self.size = size + + def __len__(self): + return 3 + + def __eq__(self, other): + try: + if len(other) != 3: + return False + + return bool(other[0] == self.a and other[1] == self.b and other[2] == self.size) + except TypeError: + return False + + def __getitem__(self, i): + if i in {0, -3}: + return self.a + if i in {1, -2}: + return self.b + if i in {2, -1}: + return self.size + + msg = "MatchingBlock index out of range" + raise IndexError(msg) + + def __iter__(self): + for i in range(3): + yield self[i] + + def __repr__(self): + return f"MatchingBlock(a={self.a}, b={self.b}, size={self.size})" + + +class Editop: + """ + Tuple like object describing an edit operation. + It is in the form (tag, src_pos, dest_pos) + + The tags are strings, with these meanings: + + +-----------+---------------------------------------------------+ + | tag | explanation | + +===========+===================================================+ + | 'replace' | src[src_pos] should be replaced by dest[dest_pos] | + +-----------+---------------------------------------------------+ + | 'delete' | src[src_pos] should be deleted | + +-----------+---------------------------------------------------+ + | 'insert' | dest[dest_pos] should be inserted at src[src_pos] | + +-----------+---------------------------------------------------+ + """ + + def __init__(self, tag, src_pos, dest_pos): + self.tag = tag + self.src_pos = src_pos + self.dest_pos = dest_pos + + def __len__(self): + return 3 + + def __eq__(self, other): + try: + if len(other) != 3: + return False + + return bool(other[0] == self.tag and other[1] == self.src_pos and other[2] == self.dest_pos) + except TypeError: + return False + + def __getitem__(self, i): + if i in {0, -3}: + return self.tag + if i in {1, -2}: + return self.src_pos + if i in {2, -1}: + return self.dest_pos + + msg = "Editop index out of range" + raise IndexError(msg) + + def __iter__(self): + for i in range(3): + yield self[i] + + def __repr__(self): + return f"Editop(tag={self.tag!r}, src_pos={self.src_pos}, dest_pos={self.dest_pos})" + + +class Editops: + """ + List like object of Editops describing how to turn s1 into s2. 
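+
+    A rough usage sketch (illustrative; the concrete edit operations for this
+    pair are listed in the ``inverse`` example further below):
+
+    >>> from rapidfuzz.distance import Levenshtein
+    >>> ops = Levenshtein.editops('spam', 'park')
+    >>> ops.apply('spam', 'park')
+    'park'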
+ """ + + def __init__( + self, + editops=None, + src_len=0, + dest_len=0, + ): + self._src_len = src_len + self._dest_len = dest_len + self._editops = _list_to_editops(editops, src_len, dest_len) + + @classmethod + def from_opcodes(cls, opcodes): + """ + Create Editops from Opcodes + + Parameters + ---------- + opcodes : Opcodes + opcodes to convert to editops + + Returns + ------- + editops : Editops + Opcodes converted to Editops + """ + return opcodes.as_editops() + + def as_opcodes(self): + """ + Convert to Opcodes + + Returns + ------- + opcodes : Opcodes + Editops converted to Opcodes + """ + x = Opcodes.__new__(Opcodes) + x._src_len = self._src_len + x._dest_len = self._dest_len + blocks = [] + src_pos = 0 + dest_pos = 0 + i = 0 + while i < len(self._editops): + if src_pos < self._editops[i].src_pos or dest_pos < self._editops[i].dest_pos: + blocks.append( + Opcode( + "equal", + src_pos, + self._editops[i].src_pos, + dest_pos, + self._editops[i].dest_pos, + ) + ) + src_pos = self._editops[i].src_pos + dest_pos = self._editops[i].dest_pos + + src_begin = src_pos + dest_begin = dest_pos + tag = self._editops[i].tag + while ( + i < len(self._editops) + and self._editops[i].tag == tag + and src_pos == self._editops[i].src_pos + and dest_pos == self._editops[i].dest_pos + ): + if tag == "replace": + src_pos += 1 + dest_pos += 1 + elif tag == "insert": + dest_pos += 1 + elif tag == "delete": + src_pos += 1 + + i += 1 + + blocks.append(Opcode(tag, src_begin, src_pos, dest_begin, dest_pos)) + + if src_pos < self.src_len or dest_pos < self.dest_len: + blocks.append(Opcode("equal", src_pos, self.src_len, dest_pos, self.dest_len)) + + x._opcodes = blocks + return x + + def as_matching_blocks(self): + """ + Convert to matching blocks + + Returns + ------- + matching blocks : list[MatchingBlock] + Editops converted to matching blocks + """ + blocks = [] + src_pos = 0 + dest_pos = 0 + for op in self: + if src_pos < op.src_pos or dest_pos < op.dest_pos: + length = min(op.src_pos - src_pos, op.dest_pos - dest_pos) + if length > 0: + blocks.append(MatchingBlock(src_pos, dest_pos, length)) + src_pos = op.src_pos + dest_pos = op.dest_pos + + if op.tag == "replace": + src_pos += 1 + dest_pos += 1 + elif op.tag == "delete": + src_pos += 1 + elif op.tag == "insert": + dest_pos += 1 + + if src_pos < self.src_len or dest_pos < self.dest_len: + length = min(self.src_len - src_pos, self.dest_len - dest_pos) + if length > 0: + blocks.append(MatchingBlock(src_pos, dest_pos, length)) + + blocks.append(MatchingBlock(self.src_len, self.dest_len, 0)) + return blocks + + def as_list(self): + """ + Convert Editops to a list of tuples. + + This is the equivalent of ``[x for x in editops]`` + """ + return [tuple(op) for op in self._editops] + + def copy(self): + """ + performs copy of Editops + """ + x = Editops.__new__(Editops) + x._src_len = self._src_len + x._dest_len = self._dest_len + x._editops = self._editops[::] + return x + + def inverse(self): + """ + Invert Editops, so it describes how to transform the destination string to + the source string. 
+
+        Returns
+        -------
+        editops : Editops
+            inverted Editops
+
+        Examples
+        --------
+        >>> from rapidfuzz.distance import Levenshtein
+        >>> Levenshtein.editops('spam', 'park')
+        [Editop(tag=delete, src_pos=0, dest_pos=0),
+         Editop(tag=replace, src_pos=3, dest_pos=2),
+         Editop(tag=insert, src_pos=4, dest_pos=3)]
+
+        >>> Levenshtein.editops('spam', 'park').inverse()
+        [Editop(tag=insert, src_pos=0, dest_pos=0),
+         Editop(tag=replace, src_pos=2, dest_pos=3),
+         Editop(tag=delete, src_pos=3, dest_pos=4)]
+        """
+        blocks = []
+        for op in self:
+            tag = op.tag
+            if tag == "delete":
+                tag = "insert"
+            elif tag == "insert":
+                tag = "delete"
+
+            blocks.append(Editop(tag, op.dest_pos, op.src_pos))
+
+        x = Editops.__new__(Editops)
+        x._src_len = self.dest_len
+        x._dest_len = self.src_len
+        x._editops = blocks
+        return x
+
+    def remove_subsequence(self, subsequence):
+        """
+        remove a subsequence
+
+        Parameters
+        ----------
+        subsequence : Editops
+            subsequence to remove (has to be a subset of editops)
+
+        Returns
+        -------
+        sequence : Editops
+            a copy of the editops without the subsequence
+        """
+        result = Editops.__new__(Editops)
+        result._src_len = self._src_len
+        result._dest_len = self._dest_len
+
+        if len(subsequence) > len(self):
+            msg = "subsequence is not a subsequence"
+            raise ValueError(msg)
+
+        result._editops = [None] * (len(self) - len(subsequence))
+
+        # offset to correct for removed edit operations
+        offset = 0
+        op_pos = 0
+        result_pos = 0
+
+        for sop in subsequence:
+            while op_pos != len(self) and sop != self._editops[op_pos]:
+                # copy into a fresh Editop: Editops defines no __setitem__ and the
+                # original operations must not be mutated when the offset is applied
+                op = self._editops[op_pos]
+                result._editops[result_pos] = Editop(op.tag, op.src_pos + offset, op.dest_pos)
+                result_pos += 1
+                op_pos += 1
+
+            # element of subsequence not part of the sequence
+            if op_pos == len(self):
+                msg = "subsequence is not a subsequence"
+                raise ValueError(msg)
+
+            if sop.tag == "insert":
+                offset += 1
+            elif sop.tag == "delete":
+                offset -= 1
+
+            op_pos += 1
+
+        # add remaining elements
+        while op_pos != len(self):
+            op = self._editops[op_pos]
+            result._editops[result_pos] = Editop(op.tag, op.src_pos + offset, op.dest_pos)
+            result_pos += 1
+            op_pos += 1
+
+        return result
+
+    def apply(self, source_string, destination_string):
+        """
+        apply editops to source_string
+
+        Parameters
+        ----------
+        source_string : str | bytes
+            string to apply editops to
+        destination_string : str | bytes
+            string to use for replacements / insertions into source_string
+
+        Returns
+        -------
+        mod_string : str
+            modified source_string
+
+        """
+        res_str = ""
+        src_pos = 0
+
+        for op in self._editops:
+            # matches between last and current editop
+            while src_pos < op.src_pos:
+                res_str += source_string[src_pos]
+                src_pos += 1
+
+            if op.tag == "replace":
+                res_str += destination_string[op.dest_pos]
+                src_pos += 1
+            elif op.tag == "insert":
+                res_str += destination_string[op.dest_pos]
+            elif op.tag == "delete":
+                src_pos += 1
+
+        # matches after the last editop
+        while src_pos < len(source_string):
+            res_str += source_string[src_pos]
+            src_pos += 1
+
+        return res_str
+
+    @property
+    def src_len(self):
+        return self._src_len
+
+    @src_len.setter
+    def src_len(self, value):
+        self._src_len = value
+
+    @property
+    def dest_len(self):
+        return self._dest_len
+
+    @dest_len.setter
+    def dest_len(self, value):
+        self._dest_len = value
+
+    def __eq__(self, other):
+        if not isinstance(other, Editops):
+            return False
+
+        return self.dest_len == other.dest_len and self.src_len == other.src_len and self._editops == other._editops
+
+    def __len__(self):
+        return len(self._editops)
+
+    def __delitem__(self, key):
+        del
self._editops[key] + + def __getitem__(self, key): + if isinstance(key, int): + return self._editops[key] + + start, stop, step = key.indices(len(self._editops)) + if step < 0: + msg = "step sizes below 0 lead to an invalid order of editops" + raise ValueError(msg) + + x = Editops.__new__(Editops) + x._src_len = self._src_len + x._dest_len = self._dest_len + x._editops = self._editops[start:stop:step] + return x + + def __iter__(self): + yield from self._editops + + def __repr__(self): + return ( + "Editops([" + ", ".join(repr(op) for op in self) + f"], src_len={self.src_len}, dest_len={self.dest_len})" + ) + + +class Opcode: + """ + Tuple like object describing an edit operation. + It is in the form (tag, src_start, src_end, dest_start, dest_end) + + The tags are strings, with these meanings: + + +-----------+-----------------------------------------------------+ + | tag | explanation | + +===========+=====================================================+ + | 'replace' | src[src_start:src_end] should be | + | | replaced by dest[dest_start:dest_end] | + +-----------+-----------------------------------------------------+ + | 'delete' | src[src_start:src_end] should be deleted. | + | | Note that dest_start==dest_end in this case. | + +-----------+-----------------------------------------------------+ + | 'insert' | dest[dest_start:dest_end] should be inserted | + | | at src[src_start:src_start]. | + | | Note that src_start==src_end in this case. | + +-----------+-----------------------------------------------------+ + | 'equal' | src[src_start:src_end] == dest[dest_start:dest_end] | + +-----------+-----------------------------------------------------+ + + Note + ---- + Opcode is compatible with the tuples returned by difflib's SequenceMatcher to make them + interoperable + """ + + def __init__(self, tag, src_start, src_end, dest_start, dest_end): + self.tag = tag + self.src_start = src_start + self.src_end = src_end + self.dest_start = dest_start + self.dest_end = dest_end + + def __len__(self): + return 5 + + def __eq__(self, other): + try: + if len(other) != 5: + return False + + return bool( + other[0] == self.tag + and other[1] == self.src_start + and other[2] == self.src_end + and other[3] == self.dest_start + and other[4] == self.dest_end + ) + except TypeError: + return False + + def __getitem__(self, i): + if i in {0, -5}: + return self.tag + if i in {1, -4}: + return self.src_start + if i in {2, -3}: + return self.src_end + if i in {3, -2}: + return self.dest_start + if i in {4, -1}: + return self.dest_end + + msg = "Opcode index out of range" + raise IndexError(msg) + + def __iter__(self): + for i in range(5): + yield self[i] + + def __repr__(self): + return ( + f"Opcode(tag={self.tag!r}, src_start={self.src_start}, src_end={self.src_end}, " + f"dest_start={self.dest_start}, dest_end={self.dest_end})" + ) + + +class Opcodes: + """ + List like object of Opcodes describing how to turn s1 into s2. + The first Opcode has src_start == dest_start == 0, and remaining tuples + have src_start == the src_end from the tuple preceding it, + and likewise for dest_start == the previous dest_end. 
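+
+    A rough usage sketch (illustrative; the individual opcodes for this pair
+    are listed in the ``inverse`` example further below):
+
+    >>> from rapidfuzz.distance import Levenshtein
+    >>> ops = Levenshtein.opcodes('spam', 'park')
+    >>> len(ops)
+    4
+    >>> ops.apply('spam', 'park')
+    'park'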
+ """ + + def __init__( + self, + opcodes=None, + src_len=0, + dest_len=0, + ): + self._src_len = src_len + self._dest_len = dest_len + self._opcodes = _list_to_opcodes(opcodes, src_len, dest_len) + + @classmethod + def from_editops(cls, editops): + """ + Create Opcodes from Editops + + Parameters + ---------- + editops : Editops + editops to convert to opcodes + + Returns + ------- + opcodes : Opcodes + Editops converted to Opcodes + """ + return editops.as_opcodes() + + def as_editops(self): + """ + Convert Opcodes to Editops + + Returns + ------- + editops : Editops + Opcodes converted to Editops + """ + x = Editops.__new__(Editops) + x._src_len = self._src_len + x._dest_len = self._dest_len + blocks = [] + for op in self: + if op.tag == "replace": + for j in range(op.src_end - op.src_start): + blocks.append(Editop("replace", op.src_start + j, op.dest_start + j)) + elif op.tag == "insert": + for j in range(op.dest_end - op.dest_start): + blocks.append(Editop("insert", op.src_start, op.dest_start + j)) + elif op.tag == "delete": + for j in range(op.src_end - op.src_start): + blocks.append(Editop("delete", op.src_start + j, op.dest_start)) + + x._editops = blocks + return x + + def as_matching_blocks(self): + """ + Convert to matching blocks + + Returns + ------- + matching blocks : list[MatchingBlock] + Opcodes converted to matching blocks + """ + blocks = [] + for op in self: + if op.tag == "equal": + length = min(op.src_end - op.src_start, op.dest_end - op.dest_start) + if length > 0: + blocks.append(MatchingBlock(op.src_start, op.dest_start, length)) + + blocks.append(MatchingBlock(self.src_len, self.dest_len, 0)) + return blocks + + def as_list(self): + """ + Convert Opcodes to a list of tuples, which is compatible + with the opcodes of difflibs SequenceMatcher. + + This is the equivalent of ``[x for x in opcodes]`` + """ + return [tuple(op) for op in self._opcodes] + + def copy(self): + """ + performs copy of Opcodes + """ + x = Opcodes.__new__(Opcodes) + x._src_len = self._src_len + x._dest_len = self._dest_len + x._opcodes = self._opcodes[::] + return x + + def inverse(self): + """ + Invert Opcodes, so it describes how to transform the destination string to + the source string. 
+ + Returns + ------- + opcodes : Opcodes + inverted Opcodes + + Examples + -------- + >>> from rapidfuzz.distance import Levenshtein + >>> Levenshtein.opcodes('spam', 'park') + [Opcode(tag=delete, src_start=0, src_end=1, dest_start=0, dest_end=0), + Opcode(tag=equal, src_start=1, src_end=3, dest_start=0, dest_end=2), + Opcode(tag=replace, src_start=3, src_end=4, dest_start=2, dest_end=3), + Opcode(tag=insert, src_start=4, src_end=4, dest_start=3, dest_end=4)] + + >>> Levenshtein.opcodes('spam', 'park').inverse() + [Opcode(tag=insert, src_start=0, src_end=0, dest_start=0, dest_end=1), + Opcode(tag=equal, src_start=0, src_end=2, dest_start=1, dest_end=3), + Opcode(tag=replace, src_start=2, src_end=3, dest_start=3, dest_end=4), + Opcode(tag=delete, src_start=3, src_end=4, dest_start=4, dest_end=4)] + """ + blocks = [] + for op in self: + tag = op.tag + if tag == "delete": + tag = "insert" + elif tag == "insert": + tag = "delete" + + blocks.append(Opcode(tag, op.dest_start, op.dest_end, op.src_start, op.src_end)) + + x = Opcodes.__new__(Opcodes) + x._src_len = self.dest_len + x._dest_len = self.src_len + x._opcodes = blocks + return x + + def apply(self, source_string, destination_string): + """ + apply opcodes to source_string + + Parameters + ---------- + source_string : str | bytes + string to apply opcodes to + destination_string : str | bytes + string to use for replacements / insertions into source_string + + Returns + ------- + mod_string : str + modified source_string + + """ + res_str = "" + + for op in self._opcodes: + if op.tag == "equal": + res_str += source_string[op.src_start : op.src_end] + elif op.tag in {"replace", "insert"}: + res_str += destination_string[op.dest_start : op.dest_end] + + return res_str + + @property + def src_len(self): + return self._src_len + + @src_len.setter + def src_len(self, value): + self._src_len = value + + @property + def dest_len(self): + return self._dest_len + + @dest_len.setter + def dest_len(self, value): + self._dest_len = value + + def __eq__(self, other): + if not isinstance(other, Opcodes): + return False + + return self.dest_len == other.dest_len and self.src_len == other.src_len and self._opcodes == other._opcodes + + def __len__(self): + return len(self._opcodes) + + def __getitem__(self, key): + if isinstance(key, int): + return self._opcodes[key] + + msg = "Expected index" + raise TypeError(msg) + + def __iter__(self): + yield from self._opcodes + + def __repr__(self): + return ( + "Opcodes([" + ", ".join(repr(op) for op in self) + f"], src_len={self.src_len}, dest_len={self.dest_len})" + ) + + +class ScoreAlignment: + """ + Tuple like object describing the position of the compared strings in + src and dest. 
+
+    It indicates that the score has been calculated between
+    src[src_start:src_end] and dest[dest_start:dest_end]
+    """
+
+    def __init__(
+        self,
+        score,
+        src_start,
+        src_end,
+        dest_start,
+        dest_end,
+    ):
+        self.score = score
+        self.src_start = src_start
+        self.src_end = src_end
+        self.dest_start = dest_start
+        self.dest_end = dest_end
+
+    def __len__(self):
+        return 5
+
+    def __eq__(self, other):
+        try:
+            if len(other) != 5:
+                return False
+
+            return bool(
+                other[0] == self.score
+                and other[1] == self.src_start
+                and other[2] == self.src_end
+                and other[3] == self.dest_start
+                and other[4] == self.dest_end
+            )
+        except TypeError:
+            return False
+
+    def __getitem__(self, i):
+        if i in {0, -5}:
+            return self.score
+        if i in {1, -4}:
+            return self.src_start
+        if i in {2, -3}:
+            return self.src_end
+        if i in {3, -2}:
+            return self.dest_start
+        if i in {4, -1}:
+            return self.dest_end
+
+        msg = "ScoreAlignment index out of range"
+        raise IndexError(msg)
+
+    def __iter__(self):
+        for i in range(5):
+            yield self[i]
+
+    def __repr__(self):
+        return (
+            f"ScoreAlignment(score={self.score}, src_start={self.src_start}, "
+            f"src_end={self.src_end}, dest_start={self.dest_start}, dest_end={self.dest_end})"
+        )
diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/distance/metrics_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/distance/metrics_py.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a7fa5b7d4c80d8c30e0bd32ed545b776b2c5502
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/rapidfuzz/distance/metrics_py.py
@@ -0,0 +1,299 @@
+# SPDX-License-Identifier: MIT
+# Copyright (C) 2022 Max Bachmann
+
+from __future__ import annotations
+
+from typing import Any, Callable
+
+from rapidfuzz._utils import (
+    ScorerFlag,
+    add_scorer_attrs,
+    default_distance_attribute as dist_attr,
+    default_normalized_distance_attribute as norm_dist_attr,
+    default_normalized_similarity_attribute as norm_sim_attr,
+    default_similarity_attribute as sim_attr,
+)
+
+# DamerauLevenshtein
+from rapidfuzz.distance.DamerauLevenshtein_py import (
+    distance as damerau_levenshtein_distance,
+    normalized_distance as damerau_levenshtein_normalized_distance,
+    normalized_similarity as damerau_levenshtein_normalized_similarity,
+    similarity as damerau_levenshtein_similarity,
+)
+
+# Hamming
+from rapidfuzz.distance.Hamming_py import (
+    distance as hamming_distance,
+    editops as hamming_editops,
+    normalized_distance as hamming_normalized_distance,
+    normalized_similarity as hamming_normalized_similarity,
+    opcodes as hamming_opcodes,
+    similarity as hamming_similarity,
+)
+
+# Indel
+from rapidfuzz.distance.Indel_py import (
+    distance as indel_distance,
+    editops as indel_editops,
+    normalized_distance as indel_normalized_distance,
+    normalized_similarity as indel_normalized_similarity,
+    opcodes as indel_opcodes,
+    similarity as indel_similarity,
+)
+
+# Jaro
+from rapidfuzz.distance.Jaro_py import (
+    distance as jaro_distance,
+    normalized_distance as jaro_normalized_distance,
+    normalized_similarity as jaro_normalized_similarity,
+    similarity as jaro_similarity,
+)
+
+# JaroWinkler
+from rapidfuzz.distance.JaroWinkler_py import (
+    distance as jaro_winkler_distance,
+    normalized_distance as jaro_winkler_normalized_distance,
+    normalized_similarity as jaro_winkler_normalized_similarity,
+    similarity as jaro_winkler_similarity,
+)
+
+# LCSseq
+from rapidfuzz.distance.LCSseq_py import (
+    distance as lcs_seq_distance,
+    editops as lcs_seq_editops,
+    normalized_distance as
lcs_seq_normalized_distance, + normalized_similarity as lcs_seq_normalized_similarity, + opcodes as lcs_seq_opcodes, + similarity as lcs_seq_similarity, +) + +# Levenshtein +from rapidfuzz.distance.Levenshtein_py import ( + distance as levenshtein_distance, + editops as levenshtein_editops, + normalized_distance as levenshtein_normalized_distance, + normalized_similarity as levenshtein_normalized_similarity, + opcodes as levenshtein_opcodes, + similarity as levenshtein_similarity, +) + +# OSA +from rapidfuzz.distance.OSA_py import ( + distance as osa_distance, + normalized_distance as osa_normalized_distance, + normalized_similarity as osa_normalized_similarity, + similarity as osa_similarity, +) + +# Postfix +from rapidfuzz.distance.Postfix_py import ( + distance as postfix_distance, + normalized_distance as postfix_normalized_distance, + normalized_similarity as postfix_normalized_similarity, + similarity as postfix_similarity, +) + +# Prefix +from rapidfuzz.distance.Prefix_py import ( + distance as prefix_distance, + normalized_distance as prefix_normalized_distance, + normalized_similarity as prefix_normalized_similarity, + similarity as prefix_similarity, +) + +__all__ = [] + +add_scorer_attrs(osa_distance, dist_attr) +add_scorer_attrs(osa_similarity, sim_attr) +add_scorer_attrs(osa_normalized_distance, norm_dist_attr) +add_scorer_attrs(osa_normalized_similarity, norm_sim_attr) + +__all__ += [ + "osa_distance", + "osa_normalized_distance", + "osa_normalized_similarity", + "osa_similarity", +] + + +add_scorer_attrs(prefix_distance, dist_attr) +add_scorer_attrs(prefix_similarity, sim_attr) +add_scorer_attrs(prefix_normalized_distance, norm_dist_attr) +add_scorer_attrs(prefix_normalized_similarity, norm_sim_attr) + +__all__ += [ + "prefix_distance", + "prefix_normalized_distance", + "prefix_normalized_similarity", + "prefix_similarity", +] + + +add_scorer_attrs(postfix_distance, dist_attr) +add_scorer_attrs(postfix_similarity, sim_attr) +add_scorer_attrs(postfix_normalized_distance, norm_dist_attr) +add_scorer_attrs(postfix_normalized_similarity, norm_sim_attr) + +__all__ += [ + "postfix_distance", + "postfix_normalized_distance", + "postfix_normalized_similarity", + "postfix_similarity", +] + + +add_scorer_attrs(jaro_distance, norm_dist_attr) +add_scorer_attrs(jaro_similarity, norm_sim_attr) +add_scorer_attrs(jaro_normalized_distance, norm_dist_attr) +add_scorer_attrs(jaro_normalized_similarity, norm_sim_attr) + +__all__ += [ + "jaro_distance", + "jaro_normalized_distance", + "jaro_normalized_similarity", + "jaro_similarity", +] + + +add_scorer_attrs(jaro_winkler_distance, norm_dist_attr) +add_scorer_attrs(jaro_winkler_similarity, norm_sim_attr) +add_scorer_attrs(jaro_winkler_normalized_distance, norm_dist_attr) +add_scorer_attrs(jaro_winkler_normalized_similarity, norm_sim_attr) + +__all__ += [ + "jaro_winkler_distance", + "jaro_winkler_normalized_distance", + "jaro_winkler_normalized_similarity", + "jaro_winkler_similarity", +] + + +add_scorer_attrs(damerau_levenshtein_distance, dist_attr) +add_scorer_attrs(damerau_levenshtein_similarity, sim_attr) +add_scorer_attrs(damerau_levenshtein_normalized_distance, norm_dist_attr) +add_scorer_attrs(damerau_levenshtein_normalized_similarity, norm_sim_attr) + +__all__ += [ + "damerau_levenshtein_distance", + "damerau_levenshtein_normalized_distance", + "damerau_levenshtein_normalized_similarity", + "damerau_levenshtein_similarity", +] + + +def _get_scorer_flags_levenshtein_distance(weights: tuple[int, int, int] | None = (1, 1, 1)) -> dict[str, 
Any]: + flags = ScorerFlag.RESULT_SIZE_T + if weights is None or weights[0] == weights[1]: + flags |= ScorerFlag.SYMMETRIC + + return { + "optimal_score": 0, + "worst_score": 2**63 - 1, + "flags": flags, + } + + +def _get_scorer_flags_levenshtein_similarity(weights: tuple[int, int, int] | None = (1, 1, 1)) -> dict[str, Any]: + flags = ScorerFlag.RESULT_SIZE_T + if weights is None or weights[0] == weights[1]: + flags |= ScorerFlag.SYMMETRIC + + return { + "optimal_score": 2**63 - 1, + "worst_score": 0, + "flags": flags, + } + + +def _get_scorer_flags_levenshtein_normalized_distance( + weights: tuple[int, int, int] | None = (1, 1, 1) +) -> dict[str, Any]: + flags = ScorerFlag.RESULT_F64 + if weights is None or weights[0] == weights[1]: + flags |= ScorerFlag.SYMMETRIC + + return {"optimal_score": 0, "worst_score": 1, "flags": flags} + + +def _get_scorer_flags_levenshtein_normalized_similarity( + weights: tuple[int, int, int] | None = (1, 1, 1) +) -> dict[str, Any]: + flags = ScorerFlag.RESULT_F64 + if weights is None or weights[0] == weights[1]: + flags |= ScorerFlag.SYMMETRIC + + return {"optimal_score": 1, "worst_score": 0, "flags": flags} + + +levenshtein_dist_attr: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_levenshtein_distance +} +levenshtein_sim_attr: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_levenshtein_similarity +} +levenshtein_norm_dist_attr: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_levenshtein_normalized_distance +} +levenshtein_norm_sim_attr: dict[str, Callable[..., dict[str, Any]]] = { + "get_scorer_flags": _get_scorer_flags_levenshtein_normalized_similarity +} + +add_scorer_attrs(levenshtein_distance, levenshtein_dist_attr) +add_scorer_attrs(levenshtein_similarity, levenshtein_sim_attr) +add_scorer_attrs(levenshtein_normalized_distance, levenshtein_norm_dist_attr) +add_scorer_attrs(levenshtein_normalized_similarity, levenshtein_norm_sim_attr) + +__all__ += [ + "levenshtein_distance", + "levenshtein_editops", + "levenshtein_normalized_distance", + "levenshtein_normalized_similarity", + "levenshtein_opcodes", + "levenshtein_similarity", +] + + +add_scorer_attrs(lcs_seq_distance, dist_attr) +add_scorer_attrs(lcs_seq_similarity, sim_attr) +add_scorer_attrs(lcs_seq_normalized_distance, norm_dist_attr) +add_scorer_attrs(lcs_seq_normalized_similarity, norm_sim_attr) + +__all__ += [ + "lcs_seq_distance", + "lcs_seq_editops", + "lcs_seq_normalized_distance", + "lcs_seq_normalized_similarity", + "lcs_seq_opcodes", + "lcs_seq_similarity", +] + + +add_scorer_attrs(indel_distance, dist_attr) +add_scorer_attrs(indel_similarity, sim_attr) +add_scorer_attrs(indel_normalized_distance, norm_dist_attr) +add_scorer_attrs(indel_normalized_similarity, norm_sim_attr) + +__all__ += [ + "indel_distance", + "indel_editops", + "indel_normalized_distance", + "indel_normalized_similarity", + "indel_opcodes", + "indel_similarity", +] + + +add_scorer_attrs(hamming_distance, dist_attr) +add_scorer_attrs(hamming_similarity, sim_attr) +add_scorer_attrs(hamming_normalized_distance, norm_dist_attr) +add_scorer_attrs(hamming_normalized_similarity, norm_sim_attr) + +__all__ += [ + "hamming_distance", + "hamming_editops", + "hamming_normalized_distance", + "hamming_normalized_similarity", + "hamming_opcodes", + "hamming_similarity", +] diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.py b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.py new file mode 100644 index 
0000000000000000000000000000000000000000..9bc0db9ed7076ea3d3104bb6cf38ba8ebb48f941 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.py @@ -0,0 +1,161 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = [ + "QRatio", + "WRatio", + "partial_ratio", + "partial_ratio_alignment", + "partial_token_ratio", + "partial_token_set_ratio", + "partial_token_sort_ratio", + "ratio", + "token_ratio", + "token_set_ratio", + "token_sort_ratio", +] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.fuzz_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.fuzz_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) + + imported = True + + if not imported: + from rapidfuzz.fuzz_cpp import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) +elif _impl == "python": + from rapidfuzz.fuzz_py import ( + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.fuzz_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.fuzz_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.fuzz_cpp import ( # pyright: ignore[reportMissingImports] + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) + + imported = True + + if not imported: + from rapidfuzz.fuzz_py import ( + QRatio, + WRatio, + partial_ratio, + partial_ratio_alignment, + partial_token_ratio, + partial_token_set_ratio, + partial_token_sort_ratio, + ratio, + token_ratio, + token_set_ratio, + token_sort_ratio, + ) diff --git 
a/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.pyi new file mode 100644 index 0000000000000000000000000000000000000000..0eb38bb84cf95630e269089576e4ca8316abd227 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz.pyi @@ -0,0 +1,189 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2021 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import Callable, TypeVar, overload + +from rapidfuzz.distance import ScoreAlignment + +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") + +@overload +def ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_ratio_alignment( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> ScoreAlignment | None: ... +@overload +def partial_ratio_alignment( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> ScoreAlignment | None: ... +@overload +def token_sort_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def token_sort_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def token_set_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def token_set_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def token_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def token_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_token_sort_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_token_sort_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... 
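+# Note: every scorer in this stub repeats the same two overloads -- plain
+# sequences with processor=None, or arbitrary objects plus a processor
+# callable that maps them to sequences. A rough illustration (not itself
+# part of the stubbed API):
+#
+#     ratio("a!", "a?", processor=lambda s: s.rstrip("!?"))  # compares "a" with "a"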
+@overload +def partial_token_set_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_token_set_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_token_ratio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def partial_token_ratio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def WRatio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def WRatio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... +@overload +def QRatio( + s1: Sequence[Hashable], + s2: Sequence[Hashable], + *, + processor: None = None, + score_cutoff: float | None = 0, +) -> float: ... +@overload +def QRatio( + s1: _UnprocessedType1, + s2: _UnprocessedType2, + *, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = 0, +) -> float: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/fuzz_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz_py.py new file mode 100644 index 0000000000000000000000000000000000000000..69cfee657f2c16ef6851aa9bd6c08421342df305 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/fuzz_py.py @@ -0,0 +1,877 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from math import ceil + +from rapidfuzz._common_py import conv_sequences +from rapidfuzz._utils import ScorerFlag, add_scorer_attrs, is_none, setupPandas +from rapidfuzz.distance import ScoreAlignment +from rapidfuzz.distance.Indel_py import ( + _block_normalized_similarity as indel_block_normalized_similarity, + distance as indel_distance, + normalized_similarity as indel_normalized_similarity, +) + + +def get_scorer_flags_fuzz(**_kwargs): + return { + "optimal_score": 100, + "worst_score": 0, + "flags": ScorerFlag.RESULT_F64 | ScorerFlag.SYMMETRIC, + } + + +fuzz_attribute = {"get_scorer_flags": get_scorer_flags_fuzz} + + +def _norm_distance(dist, lensum, score_cutoff): + score = (100 - 100 * dist / lensum) if lensum else 100 + return score if score >= score_cutoff else 0 + + +def _split_sequence(seq): + if isinstance(seq, (str, bytes)): + return seq.split() + + splitted_seq = [[]] + for x in seq: + ch = x if isinstance(x, str) else chr(x) + if ch.isspace(): + splitted_seq.append([]) + else: + splitted_seq[-1].append(x) + + return [tuple(x) for x in splitted_seq if x] + + +def _join_splitted_sequence(seq_list): + if not seq_list: + return "" + if isinstance(next(iter(seq_list)), str): + return " ".join(seq_list) + if isinstance(next(iter(seq_list)), bytes): + return b" ".join(seq_list) + + joined = [] + for seq in seq_list: + joined += seq + joined += [ord(" ")] + return joined[:-1] + + +def ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates the normalized Indel similarity. 
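+
+    In this pure-Python backend the score is a thin wrapper around the Indel
+    metric, roughly (a sketch of the function body below):
+
+    .. code-block:: python
+
+        score = Indel.normalized_similarity(s1, s2) * 100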
+ + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + See Also + -------- + rapidfuzz.distance.Indel.normalized_similarity : Normalized Indel similarity + + Notes + ----- + .. image:: img/ratio.svg + + Examples + -------- + >>> fuzz.ratio("this is a test", "this is a test!") + 96.55171966552734 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if score_cutoff is not None: + score_cutoff /= 100 + + score = indel_normalized_similarity(s1, s2, processor=processor, score_cutoff=score_cutoff) + return score * 100 + + +def _partial_ratio_impl(s1, s2, score_cutoff): + """ + implementation of partial_ratio. This assumes len(s1) <= len(s2). + """ + s1_char_set = set(s1) + len1 = len(s1) + len2 = len(s2) + + res = ScoreAlignment(0, 0, len1, 0, len1) + + block = {} + block_get = block.get + x = 1 + for ch1 in s1: + block[ch1] = block_get(ch1, 0) | x + x <<= 1 + + for i in range(1, len1): + substr_last = s2[i - 1] + if substr_last not in s1_char_set: + continue + + # todo cache map + ls_ratio = indel_block_normalized_similarity(block, s1, s2[:i], score_cutoff=score_cutoff) + if ls_ratio > res.score: + res.score = score_cutoff = ls_ratio + res.dest_start = 0 + res.dest_end = i + if res.score == 1: + res.score = 100 + return res + + for i in range(len2 - len1): + substr_last = s2[i + len1 - 1] + if substr_last not in s1_char_set: + continue + + # todo cache map + ls_ratio = indel_block_normalized_similarity(block, s1, s2[i : i + len1], score_cutoff=score_cutoff) + if ls_ratio > res.score: + res.score = score_cutoff = ls_ratio + res.dest_start = i + res.dest_end = i + len1 + if res.score == 1: + res.score = 100 + return res + + for i in range(len2 - len1, len2): + substr_first = s2[i] + if substr_first not in s1_char_set: + continue + + # todo cache map + ls_ratio = indel_block_normalized_similarity(block, s1, s2[i:], score_cutoff=score_cutoff) + if ls_ratio > res.score: + res.score = score_cutoff = ls_ratio + res.dest_start = i + res.dest_end = len2 + if res.score == 1: + res.score = 100 + return res + + res.score *= 100 + return res + + +def partial_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Searches for the optimal alignment of the shorter string in the + longer string and returns the fuzz.ratio for this alignment. + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. 
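+
+    Since the scorer is symmetric, the shorter input is always used as the
+    needle, so swapping the arguments does not change the score (rough sketch):
+
+    .. code-block:: python
+
+        fuzz.partial_ratio("test", "this is a test!")  # same score as
+        fuzz.partial_ratio("this is a test!", "test")  # the reversed call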
+ + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + Depending on the length of the needle (shorter string) different + implementations are used to improve the performance. + + short needle (length ≤ 64): + When using a short needle length the fuzz.ratio is calculated for all + alignments that could result in an optimal alignment. It is + guaranteed to find the optimal alignment. For short needles this is very + fast, since for them fuzz.ratio runs in ``O(N)`` time. This results in a worst + case performance of ``O(NM)``. + + .. image:: img/partial_ratio_short_needle.svg + + long needle (length > 64): + For long needles a similar implementation to FuzzyWuzzy is used. + This implementation only considers alignments which start at one + of the longest common substrings. This results in a worst case performance + of ``O(N[N/64]M)``. However usually most of the alignments can be skipped. + The following Python code shows the concept: + + .. code-block:: python + + blocks = SequenceMatcher(None, needle, longer, False).get_matching_blocks() + score = 0 + for block in blocks: + long_start = block[1] - block[0] if (block[1] - block[0]) > 0 else 0 + long_end = long_start + len(shorter) + long_substr = longer[long_start:long_end] + score = max(score, fuzz.ratio(needle, long_substr)) + + This is a lot faster than checking all possible alignments. However it + only finds one of the best alignments and not necessarily the optimal one. + + .. image:: img/partial_ratio_long_needle.svg + + Examples + -------- + >>> fuzz.partial_ratio("this is a test", "this is a test!") + 100.0 + """ + alignment = partial_ratio_alignment(s1, s2, processor=processor, score_cutoff=score_cutoff) + if alignment is None: + return 0 + + return alignment.score + + +def partial_ratio_alignment( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Searches for the optimal alignment of the shorter string in the + longer string and returns the fuzz.ratio and the corresponding + alignment. + + Parameters + ---------- + s1 : str | bytes + First string to compare. + s2 : str | bytes + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff None is returned instead. Default is 0, + which deactivates this behaviour. 
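+
+    Note that, unlike the other scorers in this module, a failed cutoff yields
+    ``None`` rather than 0 (rough illustration; the strings share no characters):
+
+    .. code-block:: python
+
+        fuzz.partial_ratio_alignment("abcd", "wxyz", score_cutoff=90)  # -> None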
+ + Returns + ------- + alignment : ScoreAlignment, optional + alignment between s1 and s2 with the score as a float between 0 and 100 + + Examples + -------- + >>> s1 = "a certain string" + >>> s2 = "cetain" + >>> res = fuzz.partial_ratio_alignment(s1, s2) + >>> res + ScoreAlignment(score=83.33333333333334, src_start=2, src_end=8, dest_start=0, dest_end=6) + + Using the alignment information it is possible to calculate the same fuzz.ratio + + >>> fuzz.ratio(s1[res.src_start:res.src_end], s2[res.dest_start:res.dest_end]) + 83.33333333333334 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return None + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if score_cutoff is None: + score_cutoff = 0 + + if not s1 and not s2: + return ScoreAlignment(100.0, 0, 0, 0, 0) + s1, s2 = conv_sequences(s1, s2) + len1 = len(s1) + len2 = len(s2) + if len1 <= len2: + shorter = s1 + longer = s2 + else: + shorter = s2 + longer = s1 + + res = _partial_ratio_impl(shorter, longer, score_cutoff / 100) + if res.score != 100 and len1 == len2: + score_cutoff = max(score_cutoff, res.score) + res2 = _partial_ratio_impl(longer, shorter, score_cutoff / 100) + if res2.score > res.score: + res = ScoreAlignment(res2.score, res2.dest_start, res2.dest_end, res2.src_start, res2.src_end) + + if res.score < score_cutoff: + return None + + if len1 <= len2: + return res + + return ScoreAlignment(res.score, res.dest_start, res.dest_end, res.src_start, res.src_end) + + +def token_sort_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Sorts the words in the strings and calculates the fuzz.ratio between them + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/token_sort_ratio.svg + + Examples + -------- + >>> fuzz.token_sort_ratio("fuzzy wuzzy was a bear", "wuzzy fuzzy was a bear") + 100.0 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + sorted_s1 = _join_splitted_sequence(sorted(_split_sequence(s1))) + sorted_s2 = _join_splitted_sequence(sorted(_split_sequence(s2))) + return ratio(sorted_s1, sorted_s2, score_cutoff=score_cutoff) + + +def token_set_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Compares the words in the strings based on unique and common words between them + using fuzz.ratio + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. 
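+
+    Conceptually the score is built from the token set algebra of the two
+    inputs (a rough sketch of the implementation below):
+
+    .. code-block:: python
+
+        sect = tokens_a & tokens_b     # common words
+        diff_ab = tokens_a - tokens_b  # words unique to s1
+        diff_ba = tokens_b - tokens_a  # words unique to s2
+        # best of: the ratio between the two differences, and two
+        # length-based comparisons of sect against sect + each difference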
+ + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/token_set_ratio.svg + + Examples + -------- + >>> fuzz.token_sort_ratio("fuzzy was a bear", "fuzzy fuzzy was a bear") + 83.8709716796875 + >>> fuzz.token_set_ratio("fuzzy was a bear", "fuzzy fuzzy was a bear") + 100.0 + # Returns 100.0 if one string is a subset of the other, regardless of extra content in the longer string + >>> fuzz.token_set_ratio("fuzzy was a bear but not a dog", "fuzzy was a bear") + 100.0 + # Score is reduced only when there is explicit disagreement in the two strings + >>> fuzz.token_set_ratio("fuzzy was a bear but not a dog", "fuzzy was a bear but not a cat") + 92.3076923076923 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if score_cutoff is None: + score_cutoff = 0 + + s1, s2 = conv_sequences(s1, s2) + + tokens_a = set(_split_sequence(s1)) + tokens_b = set(_split_sequence(s2)) + + # in FuzzyWuzzy this returns 0. For sake of compatibility return 0 here as well + # see https://github.com/rapidfuzz/RapidFuzz/issues/110 + if not tokens_a or not tokens_b: + return 0 + + intersect = tokens_a.intersection(tokens_b) + diff_ab = tokens_a.difference(tokens_b) + diff_ba = tokens_b.difference(tokens_a) + + # one sentence is part of the other one + if intersect and (not diff_ab or not diff_ba): + return 100 + + diff_ab_joined = _join_splitted_sequence(sorted(diff_ab)) + diff_ba_joined = _join_splitted_sequence(sorted(diff_ba)) + + ab_len = len(diff_ab_joined) + ba_len = len(diff_ba_joined) + # todo is length sum without joining faster? + sect_len = len(_join_splitted_sequence(intersect)) + + # string length sect+ab <-> sect and sect+ba <-> sect + sect_ab_len = sect_len + (sect_len != 0) + ab_len + sect_ba_len = sect_len + (sect_len != 0) + ba_len + + result = 0.0 + cutoff_distance = ceil((sect_ab_len + sect_ba_len) * (1 - score_cutoff / 100)) + dist = indel_distance(diff_ab_joined, diff_ba_joined, score_cutoff=cutoff_distance) + + if dist <= cutoff_distance: + result = _norm_distance(dist, sect_ab_len + sect_ba_len, score_cutoff) + + # exit early since the other ratios are 0 + if not sect_len: + return result + + # levenshtein distance sect+ab <-> sect and sect+ba <-> sect + # since only sect is similar in them the distance can be calculated based on + # the length difference + sect_ab_dist = (sect_len != 0) + ab_len + sect_ab_ratio = _norm_distance(sect_ab_dist, sect_len + sect_ab_len, score_cutoff) + + sect_ba_dist = (sect_len != 0) + ba_len + sect_ba_ratio = _norm_distance(sect_ba_dist, sect_len + sect_ba_len, score_cutoff) + + return max(result, sect_ab_ratio, sect_ba_ratio) + + +def token_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Helper method that returns the maximum of fuzz.token_set_ratio and fuzz.token_sort_ratio + (faster than manually executing the two functions) + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. 
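+
+    In this backend the result is simply the maximum of the two underlying
+    scorers (a sketch of the body below):
+
+    .. code-block:: python
+
+        max(fuzz.token_set_ratio(s1, s2), fuzz.token_sort_ratio(s1, s2))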
+ + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/token_ratio.svg + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + # todo write combined implementation + return max( + token_set_ratio(s1, s2, processor=None, score_cutoff=score_cutoff), + token_sort_ratio(s1, s2, processor=None, score_cutoff=score_cutoff), + ) + + +def partial_token_sort_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + sorts the words in the strings and calculates the fuzz.partial_ratio between them + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/partial_token_sort_ratio.svg + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + sorted_s1 = _join_splitted_sequence(sorted(_split_sequence(s1))) + sorted_s2 = _join_splitted_sequence(sorted(_split_sequence(s2))) + return partial_ratio(sorted_s1, sorted_s2, score_cutoff=score_cutoff) + + +def partial_token_set_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Compares the words in the strings based on unique and common words between them + using fuzz.partial_ratio + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/partial_token_set_ratio.svg + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + s1, s2 = conv_sequences(s1, s2) + + tokens_a = set(_split_sequence(s1)) + tokens_b = set(_split_sequence(s2)) + # in FuzzyWuzzy this returns 0. 
For sake of compatibility return 0 here as well + # see https://github.com/rapidfuzz/RapidFuzz/issues/110 + if not tokens_a or not tokens_b: + return 0 + + # exit early when there is a common word in both sequences + if tokens_a.intersection(tokens_b): + return 100 + + diff_ab = _join_splitted_sequence(sorted(tokens_a.difference(tokens_b))) + diff_ba = _join_splitted_sequence(sorted(tokens_b.difference(tokens_a))) + return partial_ratio(diff_ab, diff_ba, score_cutoff=score_cutoff) + + +def partial_token_ratio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Helper method that returns the maximum of fuzz.partial_token_set_ratio and + fuzz.partial_token_sort_ratio (faster than manually executing the two functions) + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/partial_token_ratio.svg + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + if score_cutoff is None: + score_cutoff = 0 + + s1, s2 = conv_sequences(s1, s2) + + tokens_split_a = _split_sequence(s1) + tokens_split_b = _split_sequence(s2) + tokens_a = set(tokens_split_a) + tokens_b = set(tokens_split_b) + + # exit early when there is a common word in both sequences + if tokens_a.intersection(tokens_b): + return 100 + + diff_ab = tokens_a.difference(tokens_b) + diff_ba = tokens_b.difference(tokens_a) + + result = partial_ratio( + _join_splitted_sequence(sorted(tokens_split_a)), + _join_splitted_sequence(sorted(tokens_split_b)), + score_cutoff=score_cutoff, + ) + + # do not calculate the same partial_ratio twice + if len(tokens_split_a) == len(diff_ab) and len(tokens_split_b) == len(diff_ba): + return result + + score_cutoff = max(score_cutoff, result) + return max( + result, + partial_ratio( + _join_splitted_sequence(sorted(diff_ab)), + _join_splitted_sequence(sorted(diff_ba)), + score_cutoff=score_cutoff, + ), + ) + + +def WRatio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a weighted ratio based on the other ratio algorithms + + Parameters + ---------- + s1 : str + First string to compare. + s2 : str + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Notes + ----- + .. image:: img/WRatio.svg + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + UNBASE_SCALE = 0.95 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + + # in FuzzyWuzzy this returns 0. 
For sake of compatibility return 0 here as well + # see https://github.com/rapidfuzz/RapidFuzz/issues/110 + if not s1 or not s2: + return 0 + + if score_cutoff is None: + score_cutoff = 0 + + len1 = len(s1) + len2 = len(s2) + len_ratio = len1 / len2 if len1 > len2 else len2 / len1 + + end_ratio = ratio(s1, s2, score_cutoff=score_cutoff) + if len_ratio < 1.5: + score_cutoff = max(score_cutoff, end_ratio) / UNBASE_SCALE + return max( + end_ratio, + token_ratio(s1, s2, score_cutoff=score_cutoff, processor=None) * UNBASE_SCALE, + ) + + PARTIAL_SCALE = 0.9 if len_ratio <= 8.0 else 0.6 + score_cutoff = max(score_cutoff, end_ratio) / PARTIAL_SCALE + end_ratio = max(end_ratio, partial_ratio(s1, s2, score_cutoff=score_cutoff) * PARTIAL_SCALE) + + score_cutoff = max(score_cutoff, end_ratio) / UNBASE_SCALE + return max( + end_ratio, + partial_token_ratio(s1, s2, score_cutoff=score_cutoff, processor=None) * UNBASE_SCALE * PARTIAL_SCALE, + ) + + +def QRatio( + s1, + s2, + *, + processor=None, + score_cutoff=None, +): + """ + Calculates a quick ratio between two strings using fuzz.ratio. + + Since v3.0 this behaves similar to fuzz.ratio with the exception that this + returns 0 when comparing two empty strings + + Parameters + ---------- + s1 : Sequence[Hashable] + First string to compare. + s2 : Sequence[Hashable] + Second string to compare. + processor: callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : float, optional + Optional argument for a score threshold as a float between 0 and 100. + For ratio < score_cutoff 0 is returned instead. Default is 0, + which deactivates this behaviour. + + Returns + ------- + similarity : float + similarity between s1 and s2 as a float between 0 and 100 + + Examples + -------- + >>> fuzz.QRatio("this is a test", "this is a test!") + 96.55171966552734 + """ + setupPandas() + if is_none(s1) or is_none(s2): + return 0 + + if processor is not None: + s1 = processor(s1) + s2 = processor(s2) + # in FuzzyWuzzy this returns 0. 
For sake of compatibility return 0 here as well + # see https://github.com/rapidfuzz/RapidFuzz/issues/110 + if not s1 or not s2: + return 0 + + return ratio(s1, s2, score_cutoff=score_cutoff) + + +add_scorer_attrs(ratio, fuzz_attribute) +add_scorer_attrs(partial_ratio, fuzz_attribute) +add_scorer_attrs(token_sort_ratio, fuzz_attribute) +add_scorer_attrs(token_set_ratio, fuzz_attribute) +add_scorer_attrs(token_ratio, fuzz_attribute) +add_scorer_attrs(partial_token_sort_ratio, fuzz_attribute) +add_scorer_attrs(partial_token_set_ratio, fuzz_attribute) +add_scorer_attrs(partial_token_ratio, fuzz_attribute) +add_scorer_attrs(WRatio, fuzz_attribute) +add_scorer_attrs(QRatio, fuzz_attribute) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/process.py b/venv/lib/python3.10/site-packages/rapidfuzz/process.py new file mode 100644 index 0000000000000000000000000000000000000000..5b6fc001a05a6ffc8d9c44b39fc6075ac3d665d9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/process.py @@ -0,0 +1,95 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["cdist", "cpdist", "extract", "extractOne", "extract_iter"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.process_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.process_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) + + imported = True + + if not imported: + from rapidfuzz.process_cpp import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) +elif _impl == "python": + from rapidfuzz.process_py import cdist, cpdist, extract, extract_iter, extractOne +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.process_cpp_avx2 import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.process_cpp_sse2 import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.process_cpp import ( # pyright: ignore[reportMissingImports] + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) + + imported = True + + if not imported: + from rapidfuzz.process_py import ( + cdist, + cpdist, + extract, + extract_iter, + extractOne, + ) diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/process.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/process.pyi new file mode 100644 index 0000000000000000000000000000000000000000..983d68fa005560bb5d3277d1f6f1a93d141bb717 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/process.pyi @@ -0,0 +1,430 @@ +from __future__ import annotations + +from collections.abc import Collection, Generator, Hashable, Iterable, Mapping, Sequence +from typing import ( + Any, + Callable, + Protocol, + 
TypeVar, + overload, +) + +from rapidfuzz.fuzz import WRatio, ratio + +_StringType = Sequence[Hashable] +_StringType1 = TypeVar("_StringType1", bound=Sequence[Hashable]) +_StringType2 = TypeVar("_StringType2", bound=Sequence[Hashable]) +_UnprocessedType1 = TypeVar("_UnprocessedType1") +_UnprocessedType2 = TypeVar("_UnprocessedType2") +_KeyType = TypeVar("_KeyType") +_ResultType = TypeVar("_ResultType", int, float) + +_StringType1_contra = TypeVar("_StringType1_contra", contravariant=True, bound=Sequence[Hashable]) +_StringType2_contra = TypeVar("_StringType2_contra", contravariant=True, bound=Sequence[Hashable]) +_ResultType_contra = TypeVar("_ResultType_contra", int, float, contravariant=True) +_ResultType_co = TypeVar("_ResultType_co", int, float, covariant=True) + +class _Scorer(Protocol[_StringType1_contra, _StringType2_contra, _ResultType_contra, _ResultType_co]): + def __call__( + self, __s1: _StringType1_contra, __s2: _StringType2_contra, *, score_cutoff: _ResultType_contra | None + ) -> _ResultType_co: ... + +# mypy wants defaults to be valid for every possible parameterization of a generic function +# so add separate overloads for the default version +@overload +def extractOne( + query: Sequence[Hashable] | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_StringType2, float, _KeyType] | None: ... +@overload +def extractOne( + query: Sequence[Hashable] | None, + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_StringType2, float, int] | None: ... +@overload +def extractOne( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_UnprocessedType2, float, _KeyType] | None: ... +@overload +def extractOne( + query: _UnprocessedType1 | None, + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_UnprocessedType2, float, int] | None: ... +@overload +def extractOne( + query: _StringType1 | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_StringType2, _ResultType, _KeyType] | None: ... 
+@overload +def extractOne( + query: _StringType1 | None, + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_StringType2, _ResultType, int] | None: ... +@overload +def extractOne( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_UnprocessedType2, _ResultType, _KeyType] | None: ... +@overload +def extractOne( + query: _UnprocessedType1 | None, + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> tuple[_UnprocessedType2, _ResultType, int] | None: ... + +# mypy wants defaults to be valid for every possible parameterization of a generic function +# so add separate overloads for the default version +@overload +def extract( + query: Sequence[Hashable] | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + limit: int | None = 5, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_StringType2, float, _KeyType]]: ... +@overload +def extract( + query: Sequence[Hashable] | None, + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + limit: int | None = 5, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_StringType2, float, int]]: ... +@overload +def extract( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + limit: int | None = 5, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_UnprocessedType2, float, _KeyType]]: ... +@overload +def extract( + query: _UnprocessedType1 | None, + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + limit: int | None = 5, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_UnprocessedType2, float, int]]: ... 
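+# NOTE: an illustrative sketch of how these overloads resolve, not part of the
+# stub itself (assuming a scorer like `Levenshtein.distance`, whose score type is int):
+#   extract("abcd", ["abce"])                                    # list[tuple[str, float, int]]
+#   extract("abcd", ["abce"], scorer=Levenshtein.distance)       # list[tuple[str, int, int]]
+#   extract("abcd", {"k": "abce"}, scorer=Levenshtein.distance)  # list[tuple[str, int, str]]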
+@overload +def extract( + query: _StringType1 | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + limit: int | None = 5, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_StringType2, _ResultType, _KeyType]]: ... +@overload +def extract( + query: _StringType1 | None, + choices: Collection[_StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + limit: int | None = 5, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_StringType2, _ResultType, int]]: ... +@overload +def extract( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + limit: int | None = 5, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_UnprocessedType2, _ResultType, _KeyType]]: ... +@overload +def extract( + query: _UnprocessedType1 | None, + choices: Collection[_UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + limit: int | None = 5, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> list[tuple[_UnprocessedType2, _ResultType, int]]: ... + +# mypy wants defaults to be valid for every possible parameterization of a generic function +# so add separate overloads for the default version +@overload +def extract_iter( + query: Sequence[Hashable] | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_StringType2, float, _KeyType], None, None]: ... +@overload +def extract_iter( + query: Sequence[Hashable] | None, + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_StringType2, float, int], None, None]: ... +@overload +def extract_iter( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_UnprocessedType2, float, _KeyType], None, None]: ... 
+@overload +def extract_iter( + query: _UnprocessedType1 | None, + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = WRatio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_UnprocessedType2, float, int], None, None]: ... +@overload +def extract_iter( + query: _StringType1 | None, + choices: Mapping[_KeyType, _StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_StringType2, _ResultType, _KeyType], None, None]: ... +@overload +def extract_iter( + query: _StringType1 | None, + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_StringType2, _ResultType, int], None, None]: ... +@overload +def extract_iter( + query: _UnprocessedType1 | None, + choices: Mapping[_KeyType, _UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_UnprocessedType2, _ResultType, _KeyType], None, None]: ... +@overload +def extract_iter( + query: _UnprocessedType1 | None, + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + scorer_kwargs: dict[str, Any] | None = None, +) -> Generator[tuple[_UnprocessedType2, _ResultType, int], None, None]: ... + +try: + import numpy.typing as npt + + @overload + def cdist( + queries: Iterable[Sequence[Hashable] | None], + choices: Iterable[Sequence[Hashable] | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = ratio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + score_multiplier: float = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cdist( + queries: Iterable[_UnprocessedType1 | None], + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = ratio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + score_multiplier: float = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... 
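+    # NOTE: an illustrative sketch, not part of the stub: cdist scores every query
+    # against every choice and returns a len(queries) x len(choices) matrix, e.g.
+    #   cdist(["abcd"], ["abce", "abcd"])  # array([[ 75., 100.]], dtype=float32)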
+ @overload + def cdist( + queries: Iterable[_StringType1 | None], + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + score_multiplier: _ResultType = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cdist( + queries: Iterable[_UnprocessedType1 | None], + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + score_multiplier: _ResultType = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cpdist( + queries: Iterable[Sequence[Hashable] | None], + choices: Iterable[Sequence[Hashable] | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = ratio, + processor: None = None, + score_cutoff: float | None = None, + score_hint: float | None = None, + score_multiplier: float = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cpdist( + queries: Iterable[_UnprocessedType1 | None], + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[Sequence[Hashable], Sequence[Hashable], float, float] = ratio, + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], Sequence[Hashable]], + score_cutoff: float | None = None, + score_hint: float | None = None, + score_multiplier: float = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cpdist( + queries: Iterable[_StringType1 | None], + choices: Iterable[_StringType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType2, _ResultType, _ResultType], + processor: None = None, + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + score_multiplier: _ResultType = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... + @overload + def cpdist( + queries: Iterable[_UnprocessedType1 | None], + choices: Iterable[_UnprocessedType2 | None], + *, + scorer: _Scorer[_StringType1, _StringType1, _ResultType, _ResultType], + processor: Callable[[_UnprocessedType1 | _UnprocessedType2], _StringType1], + score_cutoff: _ResultType | None = None, + score_hint: _ResultType | None = None, + score_multiplier: _ResultType = 1, + dtype: npt.DTypeLike | None = None, + workers: int = 1, + scorer_kwargs: dict[str, Any] | None = None, + ) -> npt.NDArray[Any]: ... 
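+    # NOTE: an illustrative sketch, not part of the stub: unlike cdist, cpdist
+    # scores only corresponding pairs, so queries and choices must be equally long:
+    #   cpdist(["abcd", "abce"], ["abcd", "abcd"])  # array([100.,  75.], dtype=float32)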
+ +except ImportError: + pass diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/process_cpp.py b/venv/lib/python3.10/site-packages/rapidfuzz/process_cpp.py new file mode 100644 index 0000000000000000000000000000000000000000..8f69624d76a100c417e08d831fa95e3a71ec6b5f --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/process_cpp.py @@ -0,0 +1,125 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +from rapidfuzz.fuzz import ratio +from rapidfuzz.process_cpp_impl import ( + FLOAT32 as _FLOAT32, + FLOAT64 as _FLOAT64, + INT8 as _INT8, + INT16 as _INT16, + INT32 as _INT32, + INT64 as _INT64, + UINT8 as _UINT8, + UINT16 as _UINT16, + UINT32 as _UINT32, + UINT64 as _UINT64, + cdist as _cdist, + cpdist as _cpdist, + extract, + extract_iter, + extractOne, +) + +__all__ = ["cdist", "cpdist", "extract", "extractOne", "extract_iter"] + + +def _dtype_to_type_num(dtype): + import numpy as np + + if dtype is None: + return None + + dtype = np.dtype(dtype) + if dtype == np.int32: + return _INT32 + if dtype == np.int8: + return _INT8 + if dtype == np.int16: + return _INT16 + if dtype == np.int64: + return _INT64 + if dtype == np.uint8: + return _UINT8 + if dtype == np.uint16: + return _UINT16 + if dtype == np.uint32: + return _UINT32 + if dtype == np.uint64: + return _UINT64 + if dtype == np.float32: + return _FLOAT32 + if dtype == np.float64: + return _FLOAT64 + + msg = f"unsupported dtype: {dtype}" + raise TypeError(msg) + + +def cdist( + queries, + choices, + *, + scorer=ratio, + processor=None, + score_cutoff=None, + score_hint=None, + score_multiplier=1, + dtype=None, + workers=1, + **kwargs, +): + import numpy as np + + dtype = _dtype_to_type_num(dtype) + return np.asarray( + _cdist( + queries, + choices, + scorer=scorer, + processor=processor, + score_cutoff=score_cutoff, + score_hint=score_hint, + score_multiplier=score_multiplier, + dtype=dtype, + workers=workers, + **kwargs, + ) + ) + + +cdist.__doc__ = _cdist.__doc__ + + +def cpdist( + queries, + choices, + *, + scorer=ratio, + processor=None, + score_cutoff=None, + score_hint=None, + score_multiplier=1, + dtype=None, + workers=1, + **kwargs, +): + import numpy as np + + dtype = _dtype_to_type_num(dtype) + distance_matrix = _cpdist( + queries, + choices, + scorer=scorer, + processor=processor, + score_cutoff=score_cutoff, + score_hint=score_hint, + score_multiplier=score_multiplier, + dtype=dtype, + workers=workers, + **kwargs, + ) + return np.asarray(distance_matrix) + + +cpdist.__doc__ = _cpdist.__doc__ diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/process_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/process_py.py new file mode 100644 index 0000000000000000000000000000000000000000..dcee33aa81f46681b1f3d4a7bb3afc7d334c71c1 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/process_py.py @@ -0,0 +1,679 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann +from __future__ import annotations + +import heapq + +from rapidfuzz._utils import ScorerFlag, is_none, setupPandas +from rapidfuzz.fuzz import WRatio, ratio + +__all__ = ["cdist", "cpdist", "extract", "extractOne", "extract_iter"] + + +def _get_scorer_flags_py(scorer, scorer_kwargs): + params = getattr(scorer, "_RF_ScorerPy", None) + if params is not None: + flags = params["get_scorer_flags"](**scorer_kwargs) + return (flags["worst_score"], flags["optimal_score"]) + return (0, 100) + + +def extract_iter( + query, + choices, + *, + scorer=WRatio, + processor=None, + 
score_cutoff=None, + score_hint=None, + scorer_kwargs=None, +): + """ + Find the best match in a list of choices + + Parameters + ---------- + query : Sequence[Hashable] + string we want to find + choices : Iterable[Sequence[Hashable]] | Mapping[Sequence[Hashable]] + list of all strings the query should be compared with or dict with a mapping + {<result>: <string to compare>} + scorer : Callable, optional + Optional callable that is used to calculate the matching score between + the query and each choice. This can be any of the scorers included in RapidFuzz + (both scorers that calculate the edit distance or the normalized edit distance), or + a custom function, which returns a normalized edit distance. + fuzz.WRatio is used by default. + processor : Callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : Any, optional + Optional argument for a score threshold. When an edit distance is used this represents the maximum + edit distance and matches with a `distance > score_cutoff` are ignored. When a + normalized edit distance is used this represents the minimal similarity + and matches with a `similarity < score_cutoff` are ignored. Default is None, which deactivates this behaviour. + score_hint : Any, optional + Optional argument for an expected score to be passed to the scorer. + This is used to select a faster implementation. Default is None, + which deactivates this behaviour. + scorer_kwargs : dict[str, Any], optional + any other named parameters are passed to the scorer. This can be used to pass + e.g. weights to `Levenshtein.distance` + + Yields + ------- + tuple[Sequence[Hashable], Any, Any] + Yields similarity between the query and each choice in form of a Tuple with 3 elements. + The values stored in the tuple depend on the types of the input arguments. + + * The first element is always the current `choice`, which is the value that's compared to the query. + + * The second value represents the similarity calculated by the scorer. This can be: + + * An edit distance (distance is 0 for a perfect match and > 0 for non perfect matches). + In this case only choices which have a `distance <= score_cutoff` are yielded. + An example of a scorer with this behavior is `Levenshtein.distance`. + * A normalized edit distance (similarity is a score between 0 and 100, with 100 being a perfect match). + In this case only choices which have a `similarity >= score_cutoff` are yielded. + An example of a scorer with this behavior is `Levenshtein.normalized_similarity`. + + Note, that for all scorers, which are not provided by RapidFuzz, only normalized edit distances are supported. 
+ + * The third parameter depends on the type of the `choices` argument it is: + + * The `index of choice` when choices is a simple iterable like a list + * The `key of choice` when choices is a mapping like a dict, or a pandas Series + + """ + _ = score_hint + scorer_kwargs = scorer_kwargs or {} + worst_score, optimal_score = _get_scorer_flags_py(scorer, scorer_kwargs) + lowest_score_worst = optimal_score > worst_score + + setupPandas() + + if is_none(query): + return + + if score_cutoff is None: + score_cutoff = worst_score + + # preprocess the query + if processor is not None: + query = processor(query) + + choices_iter = choices.items() if hasattr(choices, "items") else enumerate(choices) + for key, choice in choices_iter: + if is_none(choice): + continue + + if processor is None: + score = scorer(query, choice, score_cutoff=score_cutoff, **scorer_kwargs) + else: + score = scorer( + query, + processor(choice), + score_cutoff=score_cutoff, + **scorer_kwargs, + ) + + if lowest_score_worst: + if score >= score_cutoff: + yield (choice, score, key) + else: + if score <= score_cutoff: + yield (choice, score, key) + + +def extractOne( + query, + choices, + *, + scorer=WRatio, + processor=None, + score_cutoff=None, + score_hint=None, + scorer_kwargs=None, +): + """ + Find the best match in a list of choices. When multiple elements have the same similarity, + the first element is returned. + + Parameters + ---------- + query : Sequence[Hashable] + string we want to find + choices : Iterable[Sequence[Hashable]] | Mapping[Sequence[Hashable]] + list of all strings the query should be compared with or dict with a mapping + {<result>: <string to compare>} + scorer : Callable, optional + Optional callable that is used to calculate the matching score between + the query and each choice. This can be any of the scorers included in RapidFuzz + (both scorers that calculate the edit distance or the normalized edit distance), or + a custom function, which returns a normalized edit distance. + fuzz.WRatio is used by default. + processor : Callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : Any, optional + Optional argument for a score threshold. When an edit distance is used this represents the maximum + edit distance and matches with a `distance > score_cutoff` are ignored. When a + normalized edit distance is used this represents the minimal similarity + and matches with a `similarity < score_cutoff` are ignored. Default is None, which deactivates this behaviour. + score_hint : Any, optional + Optional argument for an expected score to be passed to the scorer. + This is used to select a faster implementation. Default is None, + which deactivates this behaviour. + scorer_kwargs : dict[str, Any], optional + any other named parameters are passed to the scorer. This can be used to pass + e.g. weights to `Levenshtein.distance` + + Returns + ------- + tuple[Sequence[Hashable], Any, Any] + Returns the best match in form of a Tuple with 3 elements. The values stored in the + tuple depend on the types of the input arguments. + + * The first element is always the `choice`, which is the value that's compared to the query. + + * The second value represents the similarity calculated by the scorer. This can be: + + * An edit distance (distance is 0 for a perfect match and > 0 for non perfect matches). + In this case only choices which have a `distance <= score_cutoff` are returned. 
+ An example of a scorer with this behavior is `Levenshtein.distance`. + * A normalized edit distance (similarity is a score between 0 and 100, with 100 being a perfect match). + In this case only choices which have a `similarity >= score_cutoff` are returned. + An example of a scorer with this behavior is `Levenshtein.normalized_similarity`. + + Note, that for all scorers, which are not provided by RapidFuzz, only normalized edit distances are supported. + + * The third parameter depends on the type of the `choices` argument it is: + + * The `index of choice` when choices is a simple iterable like a list + * The `key of choice` when choices is a mapping like a dict, or a pandas Series + + None + When no choice has a `similarity >= score_cutoff`/`distance <= score_cutoff` None is returned + + Examples + -------- + + >>> from rapidfuzz.process import extractOne + >>> from rapidfuzz.distance import Levenshtein + >>> from rapidfuzz.fuzz import ratio + + extractOne can be used with normalized edit distances. + + >>> extractOne("abcd", ["abce"], scorer=ratio) + ("abce", 75.0, 0) + >>> extractOne("abcd", ["abce"], scorer=Levenshtein.normalized_similarity) + ("abce", 0.75, 0) + + extractOne can be used with edit distances as well. + + >>> extractOne("abcd", ["abce"], scorer=Levenshtein.distance) + ("abce", 1, 0) + + additional settings of the scorer can be passed via the scorer_kwargs argument to extractOne + + >>> extractOne("abcd", ["abce"], scorer=Levenshtein.distance, scorer_kwargs={"weights":(1,1,2)}) + ("abce", 2, 0) + + when a mapping is used for the choices the key of the choice is returned instead of the List index + + >>> extractOne("abcd", {"key": "abce"}, scorer=ratio) + ("abce", 75.0, "key") + + It is possible to specify a processor function which is used to preprocess the strings before comparing them. + + >>> extractOne("abcd", ["abcD"], scorer=ratio) + ("abcD", 75.0, 0) + >>> extractOne("abcd", ["abcD"], scorer=ratio, processor=utils.default_process) + ("abcD", 100.0, 0) + >>> extractOne("abcd", ["abcD"], scorer=ratio, processor=lambda s: s.upper()) + ("abcD", 100.0, 0) + + When only results with a similarity above a certain threshold are relevant, the parameter score_cutoff can be + used to filter out results with a lower similarity. This threshold is used by some of the scorers to exit early, + when they are sure, that the similarity is below the threshold. 
+ For normalized edit distances all results with a similarity below score_cutoff are filtered out + + >>> extractOne("abcd", ["abce"], scorer=ratio) + ("abce", 75.0, 0) + >>> extractOne("abcd", ["abce"], scorer=ratio, score_cutoff=80) + None + + For edit distances all results with an edit distance above the score_cutoff are filtered out + + >>> extractOne("abcd", ["abce"], scorer=Levenshtein.distance, scorer_kwargs={"weights":(1,1,2)}) + ("abce", 2, 0) + >>> extractOne("abcd", ["abce"], scorer=Levenshtein.distance, scorer_kwargs={"weights":(1,1,2)}, score_cutoff=1) + None + + """ + _ = score_hint + scorer_kwargs = scorer_kwargs or {} + worst_score, optimal_score = _get_scorer_flags_py(scorer, scorer_kwargs) + lowest_score_worst = optimal_score > worst_score + + setupPandas() + + if is_none(query): + return None + + if score_cutoff is None: + score_cutoff = worst_score + + # preprocess the query + if processor is not None: + query = processor(query) + + result = None + + choices_iter = choices.items() if hasattr(choices, "items") else enumerate(choices) + for key, choice in choices_iter: + if is_none(choice): + continue + + if processor is None: + score = scorer(query, choice, score_cutoff=score_cutoff, **scorer_kwargs) + else: + score = scorer( + query, + processor(choice), + score_cutoff=score_cutoff, + **scorer_kwargs, + ) + + if lowest_score_worst: + if score >= score_cutoff and (result is None or score > result[1]): + score_cutoff = score + result = (choice, score, key) + else: + if score <= score_cutoff and (result is None or score < result[1]): + score_cutoff = score + result = (choice, score, key) + + if score == optimal_score: + break + + return result + + +def extract( + query, + choices, + *, + scorer=WRatio, + processor=None, + limit=5, + score_cutoff=None, + score_hint=None, + scorer_kwargs=None, +): + """ + Find the best matches in a list of choices. The list is sorted by the similarity. + When multiple choices have the same similarity, they are sorted by their index + + Parameters + ---------- + query : Sequence[Hashable] + string we want to find + choices : Collection[Sequence[Hashable]] | Mapping[Sequence[Hashable]] + list of all strings the query should be compared with or dict with a mapping + {<result>: <string to compare>} + scorer : Callable, optional + Optional callable that is used to calculate the matching score between + the query and each choice. This can be any of the scorers included in RapidFuzz + (both scorers that calculate the edit distance or the normalized edit distance), or + a custom function, which returns a normalized edit distance. + fuzz.WRatio is used by default. + processor : Callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + limit : int, optional + maximum amount of results to return. None can be passed to disable this behavior. + Default is 5. + score_cutoff : Any, optional + Optional argument for a score threshold. When an edit distance is used this represents the maximum + edit distance and matches with a `distance > score_cutoff` are ignored. When a + normalized edit distance is used this represents the minimal similarity + and matches with a `similarity < score_cutoff` are ignored. Default is None, which deactivates this behaviour. + score_hint : Any, optional + Optional argument for an expected score to be passed to the scorer. + This is used to select a faster implementation. Default is None, + which deactivates this behaviour. 
+ scorer_kwargs : dict[str, Any], optional + any other named parameters are passed to the scorer. This can be used to pass + e.g. weights to `Levenshtein.distance` + + Returns + ------- + list[tuple[Sequence[Hashable], Any, Any]] + The return type is always a List of Tuples with 3 elements. However the values stored in the + tuple depend on the types of the input arguments. + + * The first element is always the `choice`, which is the value that's compared to the query. + + * The second value represents the similarity calculated by the scorer. This can be: + + * An edit distance (distance is 0 for a perfect match and > 0 for non perfect matches). + In this case only choices which have a `distance <= score_cutoff` are returned. + An example of a scorer with this behavior is `Levenshtein.distance`. + * A normalized edit distance (similarity is a score between 0 and 100, with 100 being a perfect match). + In this case only choices which have a `similarity >= score_cutoff` are returned. + An example of a scorer with this behavior is `Levenshtein.normalized_similarity`. + + Note, that for all scorers, which are not provided by RapidFuzz, only normalized edit distances are supported. + + * The third parameter depends on the type of the `choices` argument it is: + + * The `index of choice` when choices is a simple iterable like a list + * The `key of choice` when choices is a mapping like a dict, or a pandas Series + + The list is sorted by similarity or distance depending on the scorer used. The first element in the list + has the `highest similarity`/`smallest distance`. + + """ + scorer_kwargs = scorer_kwargs or {} + worst_score, optimal_score = _get_scorer_flags_py(scorer, scorer_kwargs) + lowest_score_worst = optimal_score > worst_score + + if limit == 1: + res = extractOne( + query, + choices, + processor=processor, + scorer=scorer, + score_cutoff=score_cutoff, + score_hint=score_hint, + scorer_kwargs=scorer_kwargs, + ) + if res is None: + return [] + return [res] + + result_iter = extract_iter( + query, + choices, + processor=processor, + scorer=scorer, + score_cutoff=score_cutoff, + score_hint=score_hint, + scorer_kwargs=scorer_kwargs, + ) + + if limit is None: + return sorted(result_iter, key=lambda i: i[1], reverse=lowest_score_worst) + + if lowest_score_worst: + return heapq.nlargest(limit, result_iter, key=lambda i: i[1]) + return heapq.nsmallest(limit, result_iter, key=lambda i: i[1]) + + +def _dtype_to_type_num( + dtype, + scorer, + scorer_kwargs, +): + import numpy as np + + if dtype is not None: + return np.dtype(dtype) + + params = getattr(scorer, "_RF_ScorerPy", None) + if params is not None: + flags = params["get_scorer_flags"](**scorer_kwargs) + if flags["flags"] & ScorerFlag.RESULT_I64: + return np.int32 + if flags["flags"] & ScorerFlag.RESULT_SIZE_T: + return np.uint32 + return np.float32 + + return np.float32 + + +def _is_symmetric(scorer, scorer_kwargs): + params = getattr(scorer, "_RF_ScorerPy", None) + if params is not None: + flags = params["get_scorer_flags"](**scorer_kwargs) + if flags["flags"] & ScorerFlag.SYMMETRIC: + return True + + return False + + +def cdist( + queries, + choices, + *, + scorer=ratio, + processor=None, + score_cutoff=None, + score_hint=None, + score_multiplier=1, + dtype=None, + workers=1, + scorer_kwargs=None, +): + """ + Compute distance/similarity between each pair of the two collections of inputs. 
+ + Parameters + ---------- + queries : Collection[Sequence[Hashable]] + list of all strings the queries + choices : Collection[Sequence[Hashable]] + list of all strings the query should be compared + scorer : Callable, optional + Optional callable that is used to calculate the matching score between + the query and each choice. This can be any of the scorers included in RapidFuzz + (both scorers that calculate the edit distance or the normalized edit distance), or + a custom function, which returns a normalized edit distance. + fuzz.ratio is used by default. + processor : Callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : Any, optional + Optional argument for a score threshold to be passed to the scorer. + Default is None, which deactivates this behaviour. + score_hint : Any, optional + Optional argument for an expected score to be passed to the scorer. + This is used to select a faster implementation. Default is None, + which deactivates this behaviour. + score_multiplier: Any, optional + Optional argument to multiply the calculated score with. This is applied as the final step, + so e.g. score_cutoff is applied on the unmodified score. This is mostly useful to map from + a floating point range to an integer to reduce the memory usage. Default is 1, + which deactivates this behaviour. + dtype : data-type, optional + The desired data-type for the result array. Depending on the scorer type the following + dtypes are supported: + + - similarity: + - np.float32, np.float64 + - np.uint8 -> stores fixed point representation of the result scaled to a range 0-100 + - distance: + - np.int8, np.int16, np.int32, np.int64 + + If not given, then the type will be np.float32 for similarities and np.int32 for distances. + workers : int, optional + The calculation is subdivided into workers sections and evaluated in parallel. + Supply -1 to use all available CPU cores. + This argument is only available for scorers using the RapidFuzz C-API so far, since it + releases the Python GIL. + scorer_kwargs : dict[str, Any], optional + any other named parameters are passed to the scorer. This can be used to pass + e.g. weights to `Levenshtein.distance` + + Returns + ------- + ndarray + Returns a matrix of dtype with the distance/similarity between each pair + of the two collections of inputs. 
+ """ + import numpy as np + + _ = workers, score_hint + scorer_kwargs = scorer_kwargs or {} + dtype = _dtype_to_type_num(dtype, scorer, scorer_kwargs) + results = np.zeros((len(queries), len(choices)), dtype=dtype) + + setupPandas() + + if processor is None: + proc_choices = list(choices) + else: + proc_choices = [x if is_none(x) else processor(x) for x in choices] + + if queries is choices and _is_symmetric(scorer, scorer_kwargs): + for i, proc_query in enumerate(proc_choices): + score = scorer(proc_query, proc_query, score_cutoff=score_cutoff, **scorer_kwargs) * score_multiplier + + if np.issubdtype(dtype, np.integer): + score = round(score) + + results[i, i] = score + for j in range(i + 1, len(proc_choices)): + score = ( + scorer( + proc_query, + proc_choices[j], + score_cutoff=score_cutoff, + **scorer_kwargs, + ) + * score_multiplier + ) + + if np.issubdtype(dtype, np.integer): + score = round(score) + + results[i, j] = results[j, i] = score + else: + for i, query in enumerate(queries): + proc_query = processor(query) if (processor and not is_none(query)) else query + for j, choice in enumerate(proc_choices): + score = ( + scorer( + proc_query, + choice, + score_cutoff=score_cutoff, + **scorer_kwargs, + ) + * score_multiplier + ) + + if np.issubdtype(dtype, np.integer): + score = round(score) + + results[i, j] = score + + return results + + +def cpdist( + queries, + choices, + *, + scorer=ratio, + processor=None, + score_cutoff=None, + score_hint=None, + score_multiplier=1, + dtype=None, + workers=1, + scorer_kwargs=None, +): + """ + Compute the pairwise distance/similarity between corresponding elements of the queries & choices. + + Parameters + ---------- + queries : Collection[Sequence[Hashable]] + list of strings used to compute the distance/similarity. + choices : Collection[Sequence[Hashable]] + list of strings the queries should be compared with. Must be the same length as the queries. + scorer : Callable, optional + Optional callable that is used to calculate the matching score between + the query and each choice. This can be any of the scorers included in RapidFuzz + (both scorers that calculate the edit distance or the normalized edit distance), or + a custom function, which returns a normalized edit distance. + fuzz.ratio is used by default. + processor : Callable, optional + Optional callable that is used to preprocess the strings before + comparing them. Default is None, which deactivates this behaviour. + score_cutoff : Any, optional + Optional argument for a score threshold to be passed to the scorer. + Default is None, which deactivates this behaviour. + score_hint : Any, optional + Optional argument for an expected score to be passed to the scorer. + This is used to select a faster implementation. Default is None, + which deactivates this behaviour. + score_multiplier: Any, optional + Optional argument to multiply the calculated score with. This is applied as the final step, + so e.g. score_cutoff is applied on the unmodified score. This is mostly useful to map from + a floating point range to an integer to reduce the memory usage. Default is 1, + which deactivates this behaviour. + dtype : data-type, optional + The desired data-type for the result array. 
Depending on the scorer type the following + dtypes are supported: + + - similarity: + - np.float32, np.float64 + - np.uint8 -> stores fixed point representation of the result scaled to a range 0-100 + - distance: + - np.int8, np.int16, np.int32, np.int64 + + If not given, then the type will be np.float32 for similarities and np.int32 for distances. + workers : int, optional + The calculation is subdivided into workers sections and evaluated in parallel. + Supply -1 to use all available CPU cores. + This argument is only available for scorers using the RapidFuzz C-API so far, since it + releases the Python GIL. + scorer_kwargs : dict[str, Any], optional + any other named parameters are passed to the scorer. This can be used to pass + e.g. weights to `Levenshtein.distance` + + Returns + ------- + ndarray + Returns a matrix of size (n x 1) of dtype with the distance/similarity between each pair + of the two collections of inputs. + """ + import numpy as np + + len_queries = len(queries) + len_choices = len(choices) + + if len_queries != len_choices: + error_message = "Length of queries and choices must be the same!" + raise ValueError(error_message) + + _ = workers, score_hint + scorer_kwargs = scorer_kwargs or {} + dtype = _dtype_to_type_num(dtype, scorer, scorer_kwargs) + results = np.zeros((len_queries,), dtype=dtype) + + setupPandas() + + for i, (query, choice) in enumerate(zip(queries, choices)): + proc_query = processor(query) if (processor and not is_none(query)) else query + proc_choice = processor(choice) if (processor and not is_none(choice)) else choice + score = scorer( + proc_query, + proc_choice, + score_cutoff=score_cutoff, + **scorer_kwargs, + ) + + # Apply score multiplier + score *= score_multiplier + + # Round the result if dtype is integral + if np.issubdtype(dtype, np.integer): + score = round(score) + + # Store the score in the results matrix + results[i] = score + + return results diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/py.typed b/venv/lib/python3.10/site-packages/rapidfuzz/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/utils.py b/venv/lib/python3.10/site-packages/rapidfuzz/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5ff801bacc8628caa7cb97a702876a06068200ed --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/utils.py @@ -0,0 +1,65 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2025 Max Bachmann +# This file is generated by tools/generate_python.py +from __future__ import annotations + +import contextlib +import os + +from rapidfuzz._feature_detector import AVX2, SSE2, supports + +__all__ = ["default_process"] + +_impl = os.environ.get("RAPIDFUZZ_IMPLEMENTATION") +if _impl == "cpp": + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from rapidfuzz.utils_cpp_avx2 import ( + default_process, # pyright: ignore[reportMissingImports] + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.utils_cpp_sse2 import ( + default_process, # pyright: ignore[reportMissingImports] + ) + + imported = True + + if not imported: + from rapidfuzz.utils_cpp import ( + default_process, # pyright: ignore[reportMissingImports] + ) +elif _impl == "python": + from rapidfuzz.utils_py import default_process +else: + imported = False + if supports(AVX2): + with contextlib.suppress(ImportError): + from 
rapidfuzz.utils_cpp_avx2 import ( + default_process, # pyright: ignore[reportMissingImports] + ) + + imported = True + + if not imported and supports(SSE2): + with contextlib.suppress(ImportError): + from rapidfuzz.utils_cpp_sse2 import ( + default_process, # pyright: ignore[reportMissingImports] + ) + + imported = True + + if not imported: + with contextlib.suppress(ImportError): + from rapidfuzz.utils_cpp import ( + default_process, # pyright: ignore[reportMissingImports] + ) + + imported = True + + if not imported: + from rapidfuzz.utils_py import default_process diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/utils.pyi b/venv/lib/python3.10/site-packages/rapidfuzz/utils.pyi new file mode 100644 index 0000000000000000000000000000000000000000..94b0062b58c7c0a7b2f2a8fb378801dbfe6d9d38 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/utils.pyi @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +from collections.abc import Hashable, Sequence +from typing import TypeVar + +_StringType = TypeVar("_StringType", bound=Sequence[Hashable]) + +def default_process(sentence: _StringType) -> _StringType: ... diff --git a/venv/lib/python3.10/site-packages/rapidfuzz/utils_py.py b/venv/lib/python3.10/site-packages/rapidfuzz/utils_py.py new file mode 100644 index 0000000000000000000000000000000000000000..cafa67791f0737c313394925a9b1bd147911a80a --- /dev/null +++ b/venv/lib/python3.10/site-packages/rapidfuzz/utils_py.py @@ -0,0 +1,32 @@ +# SPDX-License-Identifier: MIT +# Copyright (C) 2022 Max Bachmann + +from __future__ import annotations + +import re + +_alnum_regex = re.compile(r"(?ui)\W") + + +def default_process(sentence: str) -> str: + """ + This function preprocesses a string by: + + * removing all non alphanumeric characters + + * trimming whitespaces + + * converting all characters to lower case + + Parameters + ---------- + sentence : str + String to preprocess + + Returns + ------- + processed_string : str + processed string + """ + string_out = _alnum_regex.sub(" ", sentence) + return string_out.strip().lower() diff --git a/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..09bbbd03bcbd7134ec611d7263f62fca8b3ab190 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: rouge-score +Version: 0.1.2 +Summary: Pure python implementation of ROUGE-1.5.5. 
+Home-page: https://github.com/google-research/google-research/tree/master/rouge +Author: Google LLC +Author-email: rouge-opensource@google.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 3 +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Requires-Dist: absl-py +Requires-Dist: nltk +Requires-Dist: numpy +Requires-Dist: six>=1.14.0 + +# Python ROUGE Implementation + +## Overview + +This is a native python implementation of ROUGE, designed to replicate results +from the original perl package. + +Maintainers may be contacted at rouge-opensource@google.com. + +ROUGE was originally introduced in the paper: + +Lin, Chin-Yew. ROUGE: a Package for Automatic Evaluation of Summaries. In +Proceedings of the Workshop on Text Summarization Branches Out (WAS 2004), +Barcelona, Spain, July 25 - 26, 2004. + +## ROUGE for Python + +There are ROUGE implementations available for Python, however some are not +native python due to their dependency on the perl script, and others provide +differing results when compared with the original implementation. This makes it +difficult to directly compare with known results. + +This package is designed to replicate perl results. It implements: + +* ROUGE-N (N-gram) scoring +* ROUGE-L (Longest Common Subsequence) scoring +* Text normalization +* Bootstrap resampling for confidence interval calculation +* Optional Porter stemming to remove plurals and word suffixes such as (ing, + ion, ment). + +Note that not all options provided by the original perl ROUGE script are +supported, but the subset of options that are implemented should replicate the +original functionality. + +## Stopword removal + +The original ROUGE perl script implemented optional stopword removal (using the +-s parameter). However, there were ~600 stopwords used by ROUGE, borrowed from +another now defunct package. This word list contained many words that may not be +suited to some tasks, such as day and month names and numbers. It also has no +clear license for redistribution. Since we are unable to replicate this +functionality precisely we do not include stopword removal. + +## Two flavors of ROUGE-L +In the ROUGE paper, two flavors of ROUGE are described: + +1. sentence-level: Compute longest common subsequence (LCS) between two pieces of +text. Newlines are ignored. This is called `rougeL` in this package. +2. summary-level: Newlines in the text are interpreted as sentence boundaries, +and the LCS is computed between each pair of reference and candidate sentences, +and something called union-LCS is computed. This is called `rougeLsum` in this +package. This is the ROUGE-L reported in *[Get To The Point: Summarization with +Pointer-Generator Networks](https://arxiv.org/abs/1704.04368)*, for example. +If your references/candidates do not have newline delimiters, you can use the +--split_summaries flag (or optional argument in RougeScorer). + +## How to run + +This package compares target files (containing one example per line) with +prediction files in the same format. 
It can be launched as follows (from +google-research/): + +```shell +python -m rouge.rouge \ + --target_filepattern=*.targets \ + --prediction_filepattern=*.decodes \ + --output_filename=scores.csv \ + --use_stemmer=true \ + --split_summaries=true +``` + +## Using pip +``` +pip install -r rouge/requirements.txt +pip install rouge-score +``` + +Then in python: + +```python +from rouge_score import rouge_scorer + +scorer = rouge_scorer.RougeScorer(['rouge1', 'rougeL'], use_stemmer=True) +scores = scorer.score('The quick brown fox jumps over the lazy dog', + 'The quick brown dog jumps on the log.') +``` + +## License + +Licensed under the +[Apache 2.0](https://github.com/google-research/google-research/blob/master/LICENSE) +License. + +## Disclaimer + +This is not an official Google product. + + diff --git a/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..1ca757ad04d8bc6e41ba488a56437a3fed1ea5fb --- /dev/null +++ b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/RECORD @@ -0,0 +1,33 @@ +rouge_score-0.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +rouge_score-0.1.2.dist-info/METADATA,sha256=iuQfFE6kORQyUsHz-2WO7RtFWl3tpEml1fx7xyfqLuo,4026 +rouge_score-0.1.2.dist-info/RECORD,, +rouge_score-0.1.2.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92 +rouge_score-0.1.2.dist-info/top_level.txt,sha256=xXbfZGc99vE5G15chRRzRXSrJ1Mc065jCgs8XWnnOKk,12 +rouge_score/__init__.py,sha256=fQB5BePjj8ELQYIc9PSYB3MiCOaD3siy_0isHEssvJE,590 +rouge_score/__pycache__/__init__.cpython-310.pyc,, +rouge_score/__pycache__/create_pyrouge_files.cpython-310.pyc,, +rouge_score/__pycache__/io.cpython-310.pyc,, +rouge_score/__pycache__/io_test.cpython-310.pyc,, +rouge_score/__pycache__/rouge.cpython-310.pyc,, +rouge_score/__pycache__/rouge_scorer.cpython-310.pyc,, +rouge_score/__pycache__/rouge_scorer_test.cpython-310.pyc,, +rouge_score/__pycache__/scoring.cpython-310.pyc,, +rouge_score/__pycache__/scoring_test.cpython-310.pyc,, +rouge_score/__pycache__/test_util.cpython-310.pyc,, +rouge_score/__pycache__/tokenize.cpython-310.pyc,, +rouge_score/__pycache__/tokenize_test.cpython-310.pyc,, +rouge_score/__pycache__/tokenizers.cpython-310.pyc,, +rouge_score/__pycache__/tokenizers_test.cpython-310.pyc,, +rouge_score/create_pyrouge_files.py,sha256=_GtmIniVeVjSWtBMoZIBbeH6XMunz1a3bjsn1DmGboA,2492 +rouge_score/io.py,sha256=tQdxk0ICl0gV3BeXp0-lQuQX-CpIPUU__ZdCDrUs8F8,6892 +rouge_score/io_test.py,sha256=-NKUTtsUR0aSE6nxPcAW707a2YIJf2FcwOn48l_Va0k,3637 +rouge_score/rouge.py,sha256=1KJx8Yrw7qQRNpmK_XCtAfNhMOvtZkOFVCRznTuIqtg,3101 +rouge_score/rouge_scorer.py,sha256=lITF_QXiLNKLUFO_neWGs2IM9lsMOAUrhpC63VHzHRo,10707 +rouge_score/rouge_scorer_test.py,sha256=ODscYxBy3M05gyH1AeOn39id1RqCmbQuYh_SyyTtkNo,13589 +rouge_score/scoring.py,sha256=s_wVNJnkhGZSlN2664dkUrQKj6ISDoJLgEkoyW4_LBo,5712 +rouge_score/scoring_test.py,sha256=ZOdRwJnaDT_0k0xfGQovIBr1X-1G0u-vxAMFTOZOIjY,7850 +rouge_score/test_util.py,sha256=ka6AzXzzCWOEBUb4nfKQZgG3k7ygJ6z2Xkm0bHSutTw,1291 +rouge_score/tokenize.py,sha256=3JHOqPCVB_dEVJFgxFgDHQlW41-iMMgNAfmQ66IKdAM,1882 +rouge_score/tokenize_test.py,sha256=mkwTMrvsnE08T7P8PnzS_Skl2aYUpEKmA37wSTXRwLI,1137 +rouge_score/tokenizers.py,sha256=K3udrlBc6HOQZLqOR5G4cbE6ofZbxU4laS8am6hgBQg,1648 +rouge_score/tokenizers_test.py,sha256=if3Xx_--SdJywmYbYQEWi3NMigQrPpzjQ7qneNINcbM,1375 diff --git 
a/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..79d5c89a71989389294854aa34e329701325f8b0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.45.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec47eb53957702d6fba3decef23ddecf6061f7ec --- /dev/null +++ b/venv/lib/python3.10/site-packages/rouge_score-0.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +rouge_score diff --git a/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/METADATA b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..b657b3e8e50fcdd2a71c159582feb430cda3089d --- /dev/null +++ b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/METADATA @@ -0,0 +1,176 @@ +Metadata-Version: 2.4 +Name: starlette +Version: 0.47.2 +Summary: The little ASGI library that shines. +Project-URL: Homepage, https://github.com/encode/starlette +Project-URL: Documentation, https://www.starlette.io/ +Project-URL: Changelog, https://www.starlette.io/release-notes/ +Project-URL: Funding, https://github.com/sponsors/encode +Project-URL: Source, https://github.com/encode/starlette +Author-email: Tom Christie +License-Expression: BSD-3-Clause +License-File: LICENSE.md +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Web Environment +Classifier: Framework :: AnyIO +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet :: WWW/HTTP +Requires-Python: >=3.9 +Requires-Dist: anyio<5,>=3.6.2 +Requires-Dist: typing-extensions>=4.10.0; python_version < '3.13' +Provides-Extra: full +Requires-Dist: httpx<0.29.0,>=0.27.0; extra == 'full' +Requires-Dist: itsdangerous; extra == 'full' +Requires-Dist: jinja2; extra == 'full' +Requires-Dist: python-multipart>=0.0.18; extra == 'full' +Requires-Dist: pyyaml; extra == 'full' +Description-Content-Type: text/markdown + +

+[starlette-logo]
+
+✨ The little ASGI framework that shines. ✨

+ +--- + +[![Build Status](https://github.com/encode/starlette/workflows/Test%20Suite/badge.svg)](https://github.com/encode/starlette/actions) +[![Package version](https://badge.fury.io/py/starlette.svg)](https://pypi.python.org/pypi/starlette) +[![Supported Python Version](https://img.shields.io/pypi/pyversions/starlette.svg?color=%2334D058)](https://pypi.org/project/starlette) + +--- + +**Documentation**: https://www.starlette.io + +**Source Code**: https://github.com/encode/starlette + +--- + +# Starlette + +Starlette is a lightweight [ASGI][asgi] framework/toolkit, +which is ideal for building async web services in Python. + +It is production-ready, and gives you the following: + +* A lightweight, low-complexity HTTP web framework. +* WebSocket support. +* In-process background tasks. +* Startup and shutdown events. +* Test client built on `httpx`. +* CORS, GZip, Static Files, Streaming responses. +* Session and Cookie support. +* 100% test coverage. +* 100% type annotated codebase. +* Few hard dependencies. +* Compatible with `asyncio` and `trio` backends. +* Great overall performance [against independent benchmarks][techempower]. + +## Installation + +```shell +$ pip install starlette +``` + +You'll also want to install an ASGI server, such as [uvicorn](https://www.uvicorn.org/), [daphne](https://github.com/django/daphne/), or [hypercorn](https://hypercorn.readthedocs.io/en/latest/). + +```shell +$ pip install uvicorn +``` + +## Example + +```python title="main.py" +from starlette.applications import Starlette +from starlette.responses import JSONResponse +from starlette.routing import Route + + +async def homepage(request): + return JSONResponse({'hello': 'world'}) + +routes = [ + Route("/", endpoint=homepage) +] + +app = Starlette(debug=True, routes=routes) +``` + +Then run the application using Uvicorn: + +```shell +$ uvicorn main:app +``` + +## Dependencies + +Starlette only requires `anyio`, and the following are optional: + +* [`httpx`][httpx] - Required if you want to use the `TestClient`. +* [`jinja2`][jinja2] - Required if you want to use `Jinja2Templates`. +* [`python-multipart`][python-multipart] - Required if you want to support form parsing, with `request.form()`. +* [`itsdangerous`][itsdangerous] - Required for `SessionMiddleware` support. +* [`pyyaml`][pyyaml] - Required for `SchemaGenerator` support. + +You can install all of these with `pip install starlette[full]`. + +## Framework or Toolkit + +Starlette is designed to be used either as a complete framework, or as +an ASGI toolkit. You can use any of its components independently. + +```python +from starlette.responses import PlainTextResponse + + +async def app(scope, receive, send): + assert scope['type'] == 'http' + response = PlainTextResponse('Hello, world!') + await response(scope, receive, send) +``` + +Run the `app` application in `example.py`: + +```shell +$ uvicorn example:app +INFO: Started server process [11509] +INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) +``` + +Run uvicorn with `--reload` to enable auto-reloading on code changes. + +## Modularity + +The modularity that Starlette is designed on promotes building re-usable +components that can be shared between any ASGI framework. This should enable +an ecosystem of shared middleware and mountable applications. + +The clean API separation also means it's easier to understand each component +in isolation. + +--- + +
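+As an example of this modularity, middleware written once against the plain
+ASGI interface can be reused with Starlette or with any other ASGI framework.
+Here is a minimal sketch (the `TimingMiddleware` name is ours, purely
+illustrative, and not part of Starlette):
+
+```python
+import time
+
+
+class TimingMiddleware:
+    """Wrap any ASGI app and print how long each HTTP request takes."""
+
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        if scope["type"] != "http":
+            # Pass non-HTTP traffic (lifespan, websocket) straight through.
+            await self.app(scope, receive, send)
+            return
+        start = time.perf_counter()
+        try:
+            await self.app(scope, receive, send)
+        finally:
+            elapsed = time.perf_counter() - start
+            print(f"{scope['method']} {scope['path']} took {elapsed:.4f}s")
+
+
+# Wraps the Starlette app from the example above, or any other ASGI app:
+# app = TimingMiddleware(Starlette(debug=True, routes=routes))
+```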

+Starlette is BSD licensed code.
+Designed & crafted with care.
+
+— ⭐️ —

+ +[asgi]: https://asgi.readthedocs.io/en/latest/ +[httpx]: https://www.python-httpx.org/ +[jinja2]: https://jinja.palletsprojects.com/ +[python-multipart]: https://multipart.fastapiexpert.com/ +[itsdangerous]: https://itsdangerous.palletsprojects.com/ +[sqlalchemy]: https://www.sqlalchemy.org +[pyyaml]: https://pyyaml.org/wiki/PyYAMLDocumentation +[techempower]: https://www.techempower.com/benchmarks/#hw=ph&test=fortune&l=zijzen-sf diff --git a/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/RECORD b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..a2413457f18dbae1eaa4048013cecccd41b7b17b --- /dev/null +++ b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/RECORD @@ -0,0 +1,74 @@ +starlette-0.47.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +starlette-0.47.2.dist-info/METADATA,sha256=Gp3DONrBsRQXXFPCP0uRUVEad70_v_yipv2vN2IDtQI,6167 +starlette-0.47.2.dist-info/RECORD,, +starlette-0.47.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +starlette-0.47.2.dist-info/licenses/LICENSE.md,sha256=3LlWd6AiQCQxh-lk-UGEfRmxeCHPmeWvrmhPqzKMGb8,1518 +starlette/__init__.py,sha256=bu6mRFVuc26fcL23zWvQhYit3xF405gXkXUboXH80AM,23 +starlette/__pycache__/__init__.cpython-310.pyc,, +starlette/__pycache__/_exception_handler.cpython-310.pyc,, +starlette/__pycache__/_utils.cpython-310.pyc,, +starlette/__pycache__/applications.cpython-310.pyc,, +starlette/__pycache__/authentication.cpython-310.pyc,, +starlette/__pycache__/background.cpython-310.pyc,, +starlette/__pycache__/concurrency.cpython-310.pyc,, +starlette/__pycache__/config.cpython-310.pyc,, +starlette/__pycache__/convertors.cpython-310.pyc,, +starlette/__pycache__/datastructures.cpython-310.pyc,, +starlette/__pycache__/endpoints.cpython-310.pyc,, +starlette/__pycache__/exceptions.cpython-310.pyc,, +starlette/__pycache__/formparsers.cpython-310.pyc,, +starlette/__pycache__/requests.cpython-310.pyc,, +starlette/__pycache__/responses.cpython-310.pyc,, +starlette/__pycache__/routing.cpython-310.pyc,, +starlette/__pycache__/schemas.cpython-310.pyc,, +starlette/__pycache__/staticfiles.cpython-310.pyc,, +starlette/__pycache__/status.cpython-310.pyc,, +starlette/__pycache__/templating.cpython-310.pyc,, +starlette/__pycache__/testclient.cpython-310.pyc,, +starlette/__pycache__/types.cpython-310.pyc,, +starlette/__pycache__/websockets.cpython-310.pyc,, +starlette/_exception_handler.py,sha256=izcMiP2VuVbIvwTUQjhMlchcaA5795-Ra1SCn5KWPTM,2205 +starlette/_utils.py,sha256=OxDWH1nVIsFBBvFwzCGZkQT6bUXfvjjHdc1XcLtSksg,2748 +starlette/applications.py,sha256=AJjz1iDAGxTOYqO5VAibLMQVuHCQtZFz5wbUt14BvnY,10515 +starlette/authentication.py,sha256=By_wHye1Ok3ntrMmzfznHwgeffGmjDvA7eg6rOQrFK4,4906 +starlette/background.py,sha256=0xdn_QTncyx9vX6MFdPcYbv87X-bhZjcAWy2OLdVnOU,1278 +starlette/concurrency.py,sha256=wWoZThL3krwtqWckvjqWSHIJ_E66qwa2l1G7y2oLllM,1786 +starlette/config.py,sha256=felVr3EXGBUe52dqXIk3Sl-eHFpHBHFr9OnGf0eZh1I,4349 +starlette/convertors.py,sha256=F1rse3AacN9rsfJnTeuDnjbN51r_ouHc3WLyYkjkX_o,2304 +starlette/datastructures.py,sha256=zhbGGcmeRVB6Ouvt9HwoB8gSK9k5biH3zUhjb5cV-ow,22465 +starlette/endpoints.py,sha256=ZHBYN1M2xE05qoB0-0wv0aAzEqXZcv42nwcuqrYmQEE,5099 +starlette/exceptions.py,sha256=tIphlZa8EsQfKw3-xw5J3ZN1GjaR4UcxfJK69Ad2hG8,1066 +starlette/formparsers.py,sha256=Ndl5dGXZtopzJUjM04M5zYhS8sT33e_5JwK2T7Md4zA,11086 
+starlette/middleware/__init__.py,sha256=3WljcfADnSltJrVUuFgpvJiZKcjsjC1Ih9aqYUvSknk,1224 +starlette/middleware/__pycache__/__init__.cpython-310.pyc,, +starlette/middleware/__pycache__/authentication.cpython-310.pyc,, +starlette/middleware/__pycache__/base.cpython-310.pyc,, +starlette/middleware/__pycache__/cors.cpython-310.pyc,, +starlette/middleware/__pycache__/errors.cpython-310.pyc,, +starlette/middleware/__pycache__/exceptions.cpython-310.pyc,, +starlette/middleware/__pycache__/gzip.cpython-310.pyc,, +starlette/middleware/__pycache__/httpsredirect.cpython-310.pyc,, +starlette/middleware/__pycache__/sessions.cpython-310.pyc,, +starlette/middleware/__pycache__/trustedhost.cpython-310.pyc,, +starlette/middleware/__pycache__/wsgi.cpython-310.pyc,, +starlette/middleware/authentication.py,sha256=d6CbLD_IP19bAH7-WpAgM8qaEJmW4s8tJ3QznSshGNs,1791 +starlette/middleware/base.py,sha256=4w2r5PK51HY2PdMYKMvUxlCg9Zeupq58k0dnGLMAV98,9631 +starlette/middleware/cors.py,sha256=Hp1OBFB1OQbYGRa6hfTzBqkkJHOhUpjrsRryrHItHFQ,7046 +starlette/middleware/errors.py,sha256=h76TfVDrdYSvpBAEWgZ91VvPZQe3vRpAl6ChoiXG-Tk,8037 +starlette/middleware/exceptions.py,sha256=7OgSUiBgwHS4VMmpaWlw21uDKNDmOMzLVZrWDLDUqWo,2784 +starlette/middleware/gzip.py,sha256=_thpCRctguw0tMM6J2iDlAj5vZlol9T673IHtfvfxQE,5899 +starlette/middleware/httpsredirect.py,sha256=SNTleaYALGoITV7xwbic4gB6VYdM8Ylea_ykciUz31g,848 +starlette/middleware/sessions.py,sha256=IgZkTkgbOhU9tQceQV0KjLAiNp-dKhngcHpu4VYaDXQ,3572 +starlette/middleware/trustedhost.py,sha256=byKCUyPge54Z4MznyunD_2DsMfJc2UsfV4b2Du-WYTc,2219 +starlette/middleware/wsgi.py,sha256=yNQho3FVK0BcDTVT2NGmYLOxtrxPFUox9aU3cUqGDgc,5350 +starlette/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +starlette/requests.py,sha256=jis4sBbEaZ1mEVTbnGWOHDxs1nm3qrHZ5yi3J0zVQdo,11683 +starlette/responses.py,sha256=Smxc4Zum-x7LGzk6rg7qpu0YPHFjBh02EXMX3ZF2BJs,20731 +starlette/routing.py,sha256=IffJH6R54duDQtZcu3FeiGY_9Booi_Ag-XiMxHUMsGg,34213 +starlette/schemas.py,sha256=AxKqw3Q-XL2fU1ryUPn-ye1j1VVeWpgSukJ_8EPJwkg,5142 +starlette/staticfiles.py,sha256=3ej5_KoxEvGejo5GoIkmsk9r43JryCTQEzZgLcQ6ZIc,8478 +starlette/status.py,sha256=e70xV6wYFR5bdmkYkgYCSwZk1L2FdKDwA6u4zCjmypQ,2820 +starlette/templating.py,sha256=k0R875jbaR9vXlCg-5kGYkYr6UJHxyFeiRgt6m2kZp8,8293 +starlette/testclient.py,sha256=PR_UiimFBSKlHJhbo9CEOtvJ00oBzxBmwNSy49toc0o,28011 +starlette/types.py,sha256=vLpBwFPqy_q87U8eX5R0nJP67kYImNyvcsjOI7KN7NM,1060 +starlette/websockets.py,sha256=phsWgpXclYreVhg-wAyUWpgBWJTibNF5Pi-tNxbmQFY,8336 diff --git a/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/WHEEL b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/licenses/LICENSE.md b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..d16a60ec5b9963ef86b35a52ac92227014618e6c --- /dev/null +++ b/venv/lib/python3.10/site-packages/starlette-0.47.2.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2018, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..2410f166f31a21271d0728d19ba083cd37d24206
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/METADATA
@@ -0,0 +1,400 @@
+Metadata-Version: 2.4
+Name: threadpoolctl
+Version: 3.6.0
+Summary: threadpoolctl
+Home-page: https://github.com/joblib/threadpoolctl
+Author: Thomas Moreau
+Author-email: thomas.moreau.2010@gmail.com
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License: BSD-3-Clause
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+License-File: LICENSE
+
+# Thread-pool Controls [![Build Status](https://github.com/joblib/threadpoolctl/actions/workflows/test.yml/badge.svg?branch=master)](https://github.com/joblib/threadpoolctl/actions?query=branch%3Amaster) [![codecov](https://codecov.io/gh/joblib/threadpoolctl/branch/master/graph/badge.svg)](https://codecov.io/gh/joblib/threadpoolctl)
+
+Python helpers to limit the number of threads used in the thread pools of
+common native libraries used for scientific computing and data science
+(e.g. BLAS and OpenMP).
+ +Fine control of the underlying thread-pool size can be useful in +workloads that involve nested parallelism so as to mitigate +oversubscription issues. + +## Installation + +- For users, install the last published version from PyPI: + + ```bash + pip install threadpoolctl + ``` + +- For contributors, install from the source repository in developer + mode: + + ```bash + pip install -r dev-requirements.txt + flit install --symlink + ``` + + then you run the tests with pytest: + + ```bash + pytest + ``` + +## Usage + +### Command Line Interface + +Get a JSON description of thread-pools initialized when importing python +packages such as numpy or scipy for instance: + +``` +python -m threadpoolctl -i numpy scipy.linalg +[ + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so", + "prefix": "libmkl_rt", + "user_api": "blas", + "internal_api": "mkl", + "version": "2019.0.4", + "num_threads": 2, + "threading_layer": "intel" + }, + { + "filepath": "/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so", + "prefix": "libiomp", + "user_api": "openmp", + "internal_api": "openmp", + "version": null, + "num_threads": 4 + } +] +``` + +The JSON information is written on STDOUT. If some of the packages are missing, +a warning message is displayed on STDERR. + +### Python Runtime Programmatic Introspection + +Introspect the current state of the threadpool-enabled runtime libraries +that are loaded when importing Python packages: + +```python +>>> from threadpoolctl import threadpool_info +>>> from pprint import pprint +>>> pprint(threadpool_info()) +[] + +>>> import numpy +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}] + +>>> import xgboost +>>> pprint(threadpool_info()) +[{'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libmkl_rt.so', + 'internal_api': 'mkl', + 'num_threads': 2, + 'prefix': 'libmkl_rt', + 'threading_layer': 'intel', + 'user_api': 'blas', + 'version': '2019.0.4'}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libiomp5.so', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libiomp', + 'user_api': 'openmp', + 'version': None}, + {'filepath': '/home/ogrisel/miniconda3/envs/tmp/lib/libgomp.so.1.0.0', + 'internal_api': 'openmp', + 'num_threads': 4, + 'prefix': 'libgomp', + 'user_api': 'openmp', + 'version': None}] +``` + +In the above example, `numpy` was installed from the default anaconda channel and comes +with MKL and its Intel OpenMP (`libiomp5`) implementation while `xgboost` was installed +from pypi.org and links against GNU OpenMP (`libgomp`) so both OpenMP runtimes are +loaded in the same Python program. 
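+
+Since `threadpool_info()` returns plain dictionaries, this information is easy
+to act on programmatically. For instance, a small helper (a sketch, not part of
+threadpoolctl's API) can warn when more than one OpenMP runtime is loaded:
+
+```python
+from threadpoolctl import threadpool_info
+
+
+def loaded_openmp_runtimes():
+    # Collect the distinct OpenMP runtimes (libgomp, libiomp, libomp, ...)
+    # currently loaded in this process, identified by their library prefix.
+    return sorted({info["prefix"] for info in threadpool_info()
+                   if info["user_api"] == "openmp"})
+
+
+runtimes = loaded_openmp_runtimes()
+if len(runtimes) > 1:
+    print(f"warning: multiple OpenMP runtimes loaded: {runtimes}")
+```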
+ +The state of these libraries is also accessible through the object oriented API: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_info +>>> from pprint import pprint +>>> import numpy +>>> controller = ThreadpoolController() +>>> pprint(controller.info()) +[{'architecture': 'Haswell', + 'filepath': '/home/jeremie/miniconda/envs/dev/lib/libopenblasp-r0.3.17.so', + 'internal_api': 'openblas', + 'num_threads': 4, + 'prefix': 'libopenblas', + 'threading_layer': 'pthreads', + 'user_api': 'blas', + 'version': '0.3.17'}] + +>>> controller.info() == threadpool_info() +True +``` + +### Setting the Maximum Size of Thread-Pools + +Control the number of threads used by the underlying runtime libraries +in specific sections of your Python program: + +```python +>>> from threadpoolctl import threadpool_limits +>>> import numpy as np + +>>> with threadpool_limits(limits=1, user_api='blas'): +... # In this block, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. They can thus be used jointly +... # with thread-parallelism. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +The threadpools can also be controlled via the object oriented API, which is especially +useful to avoid searching through all the loaded shared libraries each time. It will +however not act on libraries loaded after the instantiation of the +`ThreadpoolController`: + +```python +>>> from threadpoolctl import ThreadpoolController +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> with controller.limit(limits=1, user_api='blas'): +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +``` + +### Restricting the limits to the scope of a function + +`threadpool_limits` and `ThreadpoolController` can also be used as decorators to set +the maximum number of threads used by the supported libraries at a function level. The +decorators are accessible through their `wrap` method: + +```python +>>> from threadpoolctl import ThreadpoolController, threadpool_limits +>>> import numpy as np +>>> controller = ThreadpoolController() + +>>> @controller.wrap(limits=1, user_api='blas') +... # or @threadpool_limits.wrap(limits=1, user_api='blas') +... def my_func(): +... # Inside this function, calls to blas implementation (like openblas or MKL) +... # will be limited to use only one thread. +... a = np.random.randn(1000, 1000) +... a_squared = a @ a +... +``` + +### Switching the FlexiBLAS backend + +`FlexiBLAS` is a BLAS wrapper for which the BLAS backend can be switched at runtime. +`threadpoolctl` exposes python bindings for this feature. 
Here's an example; note
+that this part of the API is experimental and may change without a deprecation cycle:
+
+```python
+>>> from threadpoolctl import ThreadpoolController
+>>> import numpy as np
+>>> controller = ThreadpoolController()
+
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 1,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB'],
+  'current_backend': 'NETLIB'}]
+
+# Retrieve the flexiblas controller
+>>> flexiblas_ct = controller.select(internal_api="flexiblas").lib_controllers[0]
+
+# Switch the backend with one predefined at build time (listed in "available_backends")
+>>> flexiblas_ct.switch_backend("OPENBLASPTHREAD")
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 4,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB', 'OPENBLASPTHREAD'],
+  'current_backend': 'OPENBLASPTHREAD'},
+ {'user_api': 'blas',
+  'internal_api': 'openblas',
+  'num_threads': 4,
+  'prefix': 'libopenblas',
+  'filepath': '/usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.8.so',
+  'version': '0.3.8',
+  'threading_layer': 'pthreads',
+  'architecture': 'Haswell'}]
+
+# It's also possible to directly give the path to a shared library
+>>> flexiblas_ct.switch_backend("/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so")
+>>> controller.info()
+[{'user_api': 'blas',
+  'internal_api': 'flexiblas',
+  'num_threads': 2,
+  'prefix': 'libflexiblas',
+  'filepath': '/usr/local/lib/libflexiblas.so.3.3',
+  'version': '3.3.1',
+  'available_backends': ['NETLIB', 'OPENBLASPTHREAD', 'ATLAS'],
+  'loaded_backends': ['NETLIB',
+                      'OPENBLASPTHREAD',
+                      '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so'],
+  'current_backend': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so'},
+ {'user_api': 'openmp',
+  'internal_api': 'openmp',
+  'num_threads': 4,
+  'prefix': 'libomp',
+  'filepath': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libomp.so',
+  'version': None},
+ {'user_api': 'blas',
+  'internal_api': 'openblas',
+  'num_threads': 4,
+  'prefix': 'libopenblas',
+  'filepath': '/usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.8.so',
+  'version': '0.3.8',
+  'threading_layer': 'pthreads',
+  'architecture': 'Haswell'},
+ {'user_api': 'blas',
+  'internal_api': 'mkl',
+  'num_threads': 2,
+  'prefix': 'libmkl_rt',
+  'filepath': '/home/jeremie/miniforge/envs/flexiblas_threadpoolctl/lib/libmkl_rt.so.2',
+  'version': '2024.0-Product',
+  'threading_layer': 'gnu'}]
+```
+
+You can observe that the previously linked OpenBLAS shared object stays loaded by
+the Python program indefinitely, but FlexiBLAS itself no longer delegates BLAS calls
+to OpenBLAS, as indicated by the `current_backend` attribute.
+
+### Writing a custom library controller
+
+Currently, `threadpoolctl` has support for `OpenMP` and the main `BLAS` libraries.
+However, it can also be used to control the threadpool of other native libraries,
+provided that they expose an API to get and set the limit on the number of threads.
+For that, one must implement a controller for this library and register it to
+`threadpoolctl`.
+
+A custom controller must be a subclass of the `LibController` class and implement
+the attributes and methods described in the docstring of `LibController`. Then this
+new controller class must be registered using the `threadpoolctl.register` function.
+A complete example can be found [here](
+https://github.com/joblib/threadpoolctl/blob/master/tests/_pyMylib/__init__.py).
+
+### Sequential BLAS within OpenMP parallel region
+
+When one wants to have sequential BLAS calls within an OpenMP parallel region, it's
+safer to set `limits="sequential_blas_under_openmp"` since setting `limits=1` and
+`user_api="blas"` might not lead to the expected behavior in some configurations
+(e.g. OpenBLAS with the OpenMP threading layer,
+https://github.com/xianyi/OpenBLAS/issues/2985).
+
+### Known Limitations
+
+- `threadpool_limits` can fail to limit the number of inner threads when nesting
+  parallel loops managed by distinct OpenMP runtime implementations (for instance
+  libgomp from GCC and libomp from clang/llvm or libiomp from ICC).
+
+  See the `test_openmp_nesting` function in [tests/test_threadpoolctl.py](
+  https://github.com/joblib/threadpoolctl/blob/master/tests/test_threadpoolctl.py)
+  for an example. More information can be found at:
+  https://github.com/jeremiedbb/Nested_OpenMP
+
+  Note, however, that this problem does not happen when `threadpool_limits` is
+  used to limit the number of threads used internally by BLAS calls that are
+  themselves nested under OpenMP parallel loops. `threadpool_limits` works as
+  expected, even if the inner BLAS implementation relies on a distinct OpenMP
+  implementation.
+
+- Using Intel OpenMP (ICC) and LLVM OpenMP (clang) in the same Python program
+  under Linux is known to cause problems. See the following guide for more details
+  and workarounds:
+  https://github.com/joblib/threadpoolctl/blob/master/multiple_openmp.md
+
+- Setting the maximum number of threads of the OpenMP and BLAS libraries has a global
+  effect and impacts the whole Python process. There is no thread-level isolation, as
+  these libraries do not offer thread-local APIs to configure the number of threads to
+  use in nested parallel calls.
+
+
+## Maintainers
+
+To make a release:
+
+- Bump the version number (`__version__`) in `threadpoolctl.py` and update the
+  release date in `CHANGES.md`.
+
+- Build the distribution archives:
+
+  ```bash
+  pip install flit
+  flit build
+  ```
+
+  and check the contents of `dist/`.
+
+- If everything is fine, make a commit for the release, tag it, and push the
+  tag to GitHub:
+
+  ```bash
+  git tag -a X.Y.Z
+  git push git@github.com:joblib/threadpoolctl.git X.Y.Z
+  ```
+
+- Upload the wheels and source distribution to PyPI using flit. Since PyPI doesn't
+  allow password authentication anymore, the username needs to be changed to the
+  generic name `__token__`:
+
+  ```bash
+  FLIT_USERNAME=__token__ flit publish
+  ```
+
+  and a PyPI token has to be passed in place of the password.
+
+- Create a PR for the release on the [conda-forge feedstock](https://github.com/conda-forge/threadpoolctl-feedstock) (or wait for the bot to make it).
+
+- Publish the release on GitHub.
+
+### Credits
+
+The initial dynamic library introspection code was written by @anton-malakhov
+for the smp package available at https://github.com/IntelPython/smp.
+
+threadpoolctl extends this for other operating systems.
Contrary to smp, +threadpoolctl does not attempt to limit the size of Python multiprocessing +pools (threads or processes) or set operating system-level CPU affinity +constraints: threadpoolctl only interacts with native libraries via their +public runtime APIs. + diff --git a/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..f8697dae6e04d8c1c68479174dbce9e117abe5f7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/RECORD @@ -0,0 +1,7 @@ +__pycache__/threadpoolctl.cpython-310.pyc,, +threadpoolctl-3.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +threadpoolctl-3.6.0.dist-info/METADATA,sha256=pF340H6hiD13IYOlAdfVJgdqpw38_dsnaiy9wE3vU0E,13843 +threadpoolctl-3.6.0.dist-info/RECORD,, +threadpoolctl-3.6.0.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82 +threadpoolctl-3.6.0.dist-info/licenses/LICENSE,sha256=gaxhkHUkiwblNmC2UtEOSF9GdfXQrg-X6iI3DaH34js,1507 +threadpoolctl.py,sha256=EvuVJranTS5oa37BSNwWXDWHmZsU-oaYSqGA4QgCQAs,50722 diff --git a/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..23d2d7e9a5d381ef8a375db09f82052144d1fd96 --- /dev/null +++ b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.11.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f2927f5f8147f137783bb5072794999e04655cfd --- /dev/null +++ b/venv/lib/python3.10/site-packages/threadpoolctl-3.6.0.dist-info/licenses/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2019, threadpoolctl contributors + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..ae66753ea68149eab62fd524cd6c5da066098700 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/METADATA @@ -0,0 +1,269 @@ +Metadata-Version: 2.4 +Name: tomli +Version: 2.3.0 +Summary: A lil' TOML parser +Keywords: toml +Author-email: Taneli Hukkinen +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-Expression: MIT +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +License-File: LICENSE +Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md +Project-URL: Homepage, https://github.com/hukkin/tomli + +[![Build Status](https://github.com/hukkin/tomli/actions/workflows/tests.yaml/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli) +[![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli) + +# Tomli + +> A lil' TOML parser + +**Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* + + + +- [Intro](#intro) +- [Installation](#installation) +- [Usage](#usage) + - [Parse a TOML string](#parse-a-toml-string) + - [Parse a TOML file](#parse-a-toml-file) + - [Handle invalid TOML](#handle-invalid-toml) + - [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) + - [Building a `tomli`/`tomllib` compatibility layer](#building-a-tomlitomllib-compatibility-layer) +- [FAQ](#faq) + - [Why this parser?](#why-this-parser) + - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) + - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) + - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) +- [Performance](#performance) + - [Pure Python](#pure-python) + - [Mypyc generated wheel](#mypyc-generated-wheel) + + + +## Intro + +Tomli is a Python library for parsing [TOML](https://toml.io). +It is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). + +A version of Tomli, the `tomllib` module, +was added to the standard library in Python 3.11 +via [PEP 680](https://www.python.org/dev/peps/pep-0680/). +Tomli continues to provide a backport on PyPI for Python versions +where the standard library module is not available +and that have not yet reached their end-of-life. 
+ +Tomli uses [mypyc](https://github.com/mypyc/mypyc) +to generate binary wheels for most of the widely used platforms, +so Python 3.11+ users may prefer it over `tomllib` for improved performance. +Pure Python wheels are available on any platform and should perform the same as `tomllib`. + +## Installation + +```bash +pip install tomli +``` + +## Usage + +### Parse a TOML string + +```python +import tomli + +toml_str = """ +[[players]] +name = "Lehtinen" +number = 26 + +[[players]] +name = "Numminen" +number = 27 +""" + +toml_dict = tomli.loads(toml_str) +assert toml_dict == { + "players": [{"name": "Lehtinen", "number": 26}, {"name": "Numminen", "number": 27}] +} +``` + +### Parse a TOML file + +```python +import tomli + +with open("path_to_file/conf.toml", "rb") as f: + toml_dict = tomli.load(f) +``` + +The file must be opened in binary mode (with the `"rb"` flag). +Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, +both of which are required to correctly parse TOML. + +### Handle invalid TOML + +```python +import tomli + +try: + toml_dict = tomli.loads("]] this is invalid TOML [[") +except tomli.TOMLDecodeError: + print("Yep, definitely not valid.") +``` + +Note that error messages are considered informational only. +They should not be assumed to stay constant across Tomli versions. + +### Construct `decimal.Decimal`s from TOML floats + +```python +from decimal import Decimal +import tomli + +toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) +assert isinstance(toml_dict["precision-matters"], Decimal) +assert toml_dict["precision-matters"] == Decimal("0.982492") +``` + +Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. +The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. + +Illegal types are `dict` and `list`, and their subtypes. +A `ValueError` will be raised if `parse_float` produces illegal types. + +### Building a `tomli`/`tomllib` compatibility layer + +Python versions 3.11+ ship with a version of Tomli: +the `tomllib` standard library module. +To build code that uses the standard library if available, +but still works seamlessly with Python 3.6+, +do the following. + +Instead of a hard Tomli dependency, use the following +[dependency specifier](https://packaging.python.org/en/latest/specifications/dependency-specifiers/) +to only require Tomli when the standard library module is not available: + +``` +tomli >= 1.1.0 ; python_version < "3.11" +``` + +Then, in your code, import a TOML parser using the following fallback mechanism: + +```python +import sys + +if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + +tomllib.loads("['This parses fine with Python 3.6+']") +``` + +## FAQ + +### Why this parser? + +- it's lil' +- pure Python with zero dependencies +- the fastest pure Python parser [\*](#pure-python): + 18x as fast as [tomlkit](https://pypi.org/project/tomlkit/), + 2.1x as fast as [toml](https://pypi.org/project/toml/) +- outputs [basic data types](#how-do-toml-types-map-into-python-types) only +- 100% spec compliant: passes all tests in + [BurntSushi/toml-test](https://github.com/BurntSushi/toml-test) + test suite +- thoroughly tested: 100% branch coverage + +### Is comment preserving round-trip parsing supported? + +No. 
+ +The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. +Preserving comments requires a custom type to be returned so will not be supported, +at least not by the `tomli.loads` and `tomli.load` functions. + +Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. + +### Is there a `dumps`, `write` or `encode` function? + +[Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. + +The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. + +### How do TOML types map into Python types? + +| TOML type | Python type | Details | +| ---------------- | ------------------- | ------------------------------------------------------------ | +| Document Root | `dict` | | +| Key | `str` | | +| String | `str` | | +| Integer | `int` | | +| Float | `float` | | +| Boolean | `bool` | | +| Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | +| Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | +| Local Date | `datetime.date` | | +| Local Time | `datetime.time` | | +| Array | `list` | | +| Table | `dict` | | +| Inline Table | `dict` | | + +## Performance + +The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. + +Below are the results for commit [0724e2a](https://github.com/hukkin/tomli/tree/0724e2ab1858da7f5e05a9bffdb24c33589d951c). + +### Pure Python + +```console +foo@bar:~/dev/tomli$ python --version +Python 3.12.7 +foo@bar:~/dev/tomli$ pip freeze +attrs==21.4.0 +click==8.1.7 +pytomlpp==1.0.13 +qtoml==0.3.1 +rtoml==0.11.0 +toml==0.10.2 +tomli @ file:///home/foo/dev/tomli +tomlkit==0.13.2 +foo@bar:~/dev/tomli$ python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.647 s | baseline (100%) + pytomlpp | 0.891 s | 72.62% + tomli | 3.14 s | 20.56% + toml | 6.69 s | 9.67% + qtoml | 8.27 s | 7.82% + tomlkit | 56.1 s | 1.15% +``` + +### Mypyc generated wheel + +```console +foo@bar:~/dev/tomli$ python benchmark/run.py +Parsing data.toml 5000 times: +------------------------------------------------------ + parser | exec time | performance (more is better) +-----------+------------+----------------------------- + rtoml | 0.668 s | baseline (100%) + pytomlpp | 0.893 s | 74.81% + tomli | 1.96 s | 34.18% + toml | 6.64 s | 10.07% + qtoml | 8.26 s | 8.09% + tomlkit | 52.9 s | 1.26% +``` + diff --git a/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..7f109d843033ec7b6c9fc0a68104ea5c421a70cf --- /dev/null +++ b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/RECORD @@ -0,0 +1,14 @@ +tomli-2.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tomli-2.3.0.dist-info/METADATA,sha256=Vt9iQpR7WMdlapqzHELHOQz5NS2pAjsM3GEW8JwxKhw,10481 +tomli-2.3.0.dist-info/RECORD,, +tomli-2.3.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +tomli-2.3.0.dist-info/licenses/LICENSE,sha256=uAgWsNUwuKzLTCIReDeQmEpuO2GSLCte6S8zcqsnQv4,1072 
+tomli/__init__.py,sha256=qzEGl8QHhqgQPCuLzfKyPIuH3KKPspf-UVPbZ0ppBD4,314 +tomli/__pycache__/__init__.cpython-310.pyc,, +tomli/__pycache__/_parser.cpython-310.pyc,, +tomli/__pycache__/_re.cpython-310.pyc,, +tomli/__pycache__/_types.cpython-310.pyc,, +tomli/_parser.py,sha256=bO8tUYmnyA2K6m4TnbQbfUqmIFcDv7mG1KuC9gqRVmA,25778 +tomli/_re.py,sha256=n8-Io8ZK1U-F6jzlg7Pabc40hLFJsawE2uNLKH9w7iU,3235 +tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254 +tomli/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26 diff --git a/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e859590f886cd78344206af1a8ccb3080d4385e0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tomli-2.3.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/torchaudio/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ddb156c3276f82ba0c1ce740827c94c376db0a6c Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/__pycache__/kaldi_io.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/kaldi_io.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e5b5fb95c9f64c8ac44765f6042764291972b3c0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/kaldi_io.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..adc15a3da510becf4fd03b05bb8d2e83b22a65f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..27337013ff12edbb9a6b18608a555e5c33031499 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/__init__.py @@ -0,0 +1,61 @@ +from typing import List, Optional + +from torchaudio._internal.module_utils import deprecated + +from . import utils +from .common import AudioMetaData + +__all__ = [ + "AudioMetaData", + "load", + "info", + "save", + "list_audio_backends", + "get_audio_backend", + "set_audio_backend", +] + + +info = utils.get_info_func() +load = utils.get_load_func() +save = utils.get_save_func() + + +def list_audio_backends() -> List[str]: + """List available backends + + Returns: + list of str: The list of available backends. + + The possible values are; ``"ffmpeg"``, ``"sox"`` and ``"soundfile"``. + """ + + return list(utils.get_available_backends().keys()) + + +# Temporary until global backend is removed +@deprecated("With dispatcher enabled, this function is no-op. You can remove the function call.") +def get_audio_backend() -> Optional[str]: + """Get the name of the current global backend + + Returns: + str or None: + If dispatcher mode is enabled, returns ``None`` otherwise, + the name of current backend or ``None`` (no backend is set). + """ + return None + + +# Temporary until global backend is removed +@deprecated("With dispatcher enabled, this function is no-op. You can remove the function call.") +def set_audio_backend(backend: Optional[str]): # noqa + """Set the global backend. + + This is a no-op when dispatcher mode is enabled. + + Args: + backend (str or None): Name of the backend. + One of ``"sox_io"`` or ``"soundfile"`` based on availability + of the system. If ``None`` is provided the current backend is unassigned. 
+ """ + pass diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64a17e00d1cfb944d77aab54fa58d4028beaabf6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fcd535d38a28ab62875a9c76fce9401dce9dd35f Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/common.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..337601c6bee99c88813bbc8b040539715fca0683 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/common.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89e725d2f0967b38717f79e28168f2f34408a509 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/ffmpeg.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a06647306a9f30df64c4612041b4c626e4a65a9b Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..343ab43f45dd5d0ab1a92bdf7f849dcc0cbc3c72 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/soundfile_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/sox.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/sox.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..23c7fe8c800c2717726350a5d2ee8efc535d4e83 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/sox.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..15eb83fd75754e2c387b8f2a43af0361aaf3e774 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_backend/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/backend.py 
b/venv/lib/python3.10/site-packages/torchaudio/_backend/backend.py new file mode 100644 index 0000000000000000000000000000000000000000..579340962c42a2210d5d7a5a41a1886b5fb62045 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/backend.py @@ -0,0 +1,53 @@ +import os +from abc import ABC, abstractmethod +from typing import BinaryIO, Optional, Tuple, Union + +from torch import Tensor +from torchaudio.io import CodecConfig + +from .common import AudioMetaData + + +class Backend(ABC): + @staticmethod + @abstractmethod + def info(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], buffer_size: int = 4096) -> AudioMetaData: + raise NotImplementedError + + @staticmethod + @abstractmethod + def load( + uri: Union[BinaryIO, str, os.PathLike], + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, + ) -> Tuple[Tensor, int]: + raise NotImplementedError + + @staticmethod + @abstractmethod + def save( + uri: Union[BinaryIO, str, os.PathLike], + src: Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + compression: Optional[Union[CodecConfig, float, int]] = None, + ) -> None: + raise NotImplementedError + + @staticmethod + @abstractmethod + def can_decode(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str]) -> bool: + raise NotImplementedError + + @staticmethod + @abstractmethod + def can_encode(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str]) -> bool: + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/common.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/common.py new file mode 100644 index 0000000000000000000000000000000000000000..804b18d461b93d4a371e02d9cde902b59aba3111 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/common.py @@ -0,0 +1,52 @@ +class AudioMetaData: + """AudioMetaData() + + Return type of ``torchaudio.info`` function. + + :ivar int sample_rate: Sample rate + :ivar int num_frames: The number of frames + :ivar int num_channels: The number of channels + :ivar int bits_per_sample: The number of bits per sample. This is 0 for lossy formats, + or when it cannot be accurately inferred. 
+ :ivar str encoding: Audio encoding + The values encoding can take are one of the following: + + * ``PCM_S``: Signed integer linear PCM + * ``PCM_U``: Unsigned integer linear PCM + * ``PCM_F``: Floating point linear PCM + * ``FLAC``: Flac, Free Lossless Audio Codec + * ``ULAW``: Mu-law + * ``ALAW``: A-law + * ``MP3`` : MP3, MPEG-1 Audio Layer III + * ``VORBIS``: OGG Vorbis + * ``AMR_WB``: Adaptive Multi-Rate Wideband + * ``AMR_NB``: Adaptive Multi-Rate Narrowband + * ``OPUS``: Opus + * ``HTK``: Single channel 16-bit PCM + * ``UNKNOWN`` : None of above + """ + + def __init__( + self, + sample_rate: int, + num_frames: int, + num_channels: int, + bits_per_sample: int, + encoding: str, + ): + self.sample_rate = sample_rate + self.num_frames = num_frames + self.num_channels = num_channels + self.bits_per_sample = bits_per_sample + self.encoding = encoding + + def __str__(self): + return ( + f"AudioMetaData(" + f"sample_rate={self.sample_rate}, " + f"num_frames={self.num_frames}, " + f"num_channels={self.num_channels}, " + f"bits_per_sample={self.bits_per_sample}, " + f"encoding={self.encoding}" + f")" + ) diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/ffmpeg.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/ffmpeg.py new file mode 100644 index 0000000000000000000000000000000000000000..ca8374ea07de9c9c06615f773d62b1ca910efb95 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/ffmpeg.py @@ -0,0 +1,334 @@ +import os +import re +import sys +from typing import BinaryIO, Optional, Tuple, Union + +import torch +import torchaudio + +from .backend import Backend +from .common import AudioMetaData + +InputType = Union[BinaryIO, str, os.PathLike] + + +def info_audio( + src: InputType, + format: Optional[str], + buffer_size: int = 4096, +) -> AudioMetaData: + s = torchaudio.io.StreamReader(src, format, None, buffer_size) + sinfo = s.get_src_stream_info(s.default_audio_stream) + if sinfo.num_frames == 0: + waveform = _load_audio(s) + num_frames = waveform.size(1) + else: + num_frames = sinfo.num_frames + return AudioMetaData( + int(sinfo.sample_rate), + num_frames, + sinfo.num_channels, + sinfo.bits_per_sample, + sinfo.codec.upper(), + ) + + +def _get_load_filter( + frame_offset: int = 0, + num_frames: int = -1, + convert: bool = True, +) -> Optional[str]: + if frame_offset < 0: + raise RuntimeError("Invalid argument: frame_offset must be non-negative. Found: {}".format(frame_offset)) + if num_frames == 0 or num_frames < -1: + raise RuntimeError("Invalid argument: num_frames must be -1 or greater than 0. 
Found: {}".format(num_frames)) + + # All default values -> no filter + if frame_offset == 0 and num_frames == -1 and not convert: + return None + # Only convert + aformat = "aformat=sample_fmts=fltp" + if frame_offset == 0 and num_frames == -1 and convert: + return aformat + # At least one of frame_offset or num_frames has non-default value + if num_frames > 0: + atrim = "atrim=start_sample={}:end_sample={}".format(frame_offset, frame_offset + num_frames) + else: + atrim = "atrim=start_sample={}".format(frame_offset) + if not convert: + return atrim + return "{},{}".format(atrim, aformat) + + +def _load_audio( + s: "torchaudio.io.StreamReader", + filter: Optional[str] = None, + channels_first: bool = True, +) -> torch.Tensor: + s.add_audio_stream(-1, -1, filter_desc=filter) + s.process_all_packets() + chunk = s.pop_chunks()[0] + if chunk is None: + raise RuntimeError("Failed to decode audio.") + waveform = chunk._elem + return waveform.T if channels_first else waveform + + +def load_audio( + src: InputType, + frame_offset: int = 0, + num_frames: int = -1, + convert: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, +) -> Tuple[torch.Tensor, int]: + if hasattr(src, "read") and format == "vorbis": + format = "ogg" + s = torchaudio.io.StreamReader(src, format, None, buffer_size) + sample_rate = int(s.get_src_stream_info(s.default_audio_stream).sample_rate) + filter = _get_load_filter(frame_offset, num_frames, convert) + waveform = _load_audio(s, filter, channels_first) + return waveform, sample_rate + + +def _get_sample_format(dtype: torch.dtype) -> str: + dtype_to_format = { + torch.uint8: "u8", + torch.int16: "s16", + torch.int32: "s32", + torch.int64: "s64", + torch.float32: "flt", + torch.float64: "dbl", + } + format = dtype_to_format.get(dtype) + if format is None: + raise ValueError(f"No format found for dtype {dtype}; dtype must be one of {list(dtype_to_format.keys())}.") + return format + + +def _native_endianness() -> str: + if sys.byteorder == "little": + return "le" + else: + return "be" + + +def _get_encoder_for_wav(encoding: str, bits_per_sample: int) -> str: + if bits_per_sample not in {None, 8, 16, 24, 32, 64}: + raise ValueError(f"Invalid bits_per_sample {bits_per_sample} for WAV encoding.") + endianness = _native_endianness() + if not encoding: + if not bits_per_sample: + # default to PCM S16 + return f"pcm_s16{endianness}" + if bits_per_sample == 8: + return "pcm_u8" + return f"pcm_s{bits_per_sample}{endianness}" + if encoding == "PCM_S": + if not bits_per_sample: + bits_per_sample = 16 + if bits_per_sample == 8: + raise ValueError("For WAV signed PCM, 8-bit encoding is not supported.") + return f"pcm_s{bits_per_sample}{endianness}" + if encoding == "PCM_U": + if bits_per_sample in (None, 8): + return "pcm_u8" + raise ValueError("For WAV unsigned PCM, only 8-bit encoding is supported.") + if encoding == "PCM_F": + if not bits_per_sample: + bits_per_sample = 32 + if bits_per_sample in (32, 64): + return f"pcm_f{bits_per_sample}{endianness}" + raise ValueError("For WAV float PCM, only 32- and 64-bit encodings are supported.") + if encoding == "ULAW": + if bits_per_sample in (None, 8): + return "pcm_mulaw" + raise ValueError("For WAV PCM mu-law, only 8-bit encoding is supported.") + if encoding == "ALAW": + if bits_per_sample in (None, 8): + return "pcm_alaw" + raise ValueError("For WAV PCM A-law, only 8-bit encoding is supported.") + raise ValueError(f"WAV encoding {encoding} is not supported.") + + +def 
_get_flac_sample_fmt(bps): + if bps is None or bps == 16: + return "s16" + if bps == 24: + return "s32" + raise ValueError(f"FLAC only supports bits_per_sample values of 16 and 24 ({bps} specified).") + + +def _parse_save_args( + ext: Optional[str], + format: Optional[str], + encoding: Optional[str], + bps: Optional[int], +): + # torchaudio's save function accepts the following, which do not map one-to-one + # to FFmpeg. + # + # - format: audio format + # - bits_per_sample: encoder sample format + # - encoding: such as PCM_U8. + # + # In FFmpeg, the format is specified with the following three (and more): + # + # - muxer: could be an audio format or a container format; + # the one we pass to the constructor of StreamWriter + # - encoder: the audio encoder used to encode audio + # - encoder sample format: the format used by the encoder to encode audio. + # + # If the encoder sample format is different from the source sample format, StreamWriter + # will insert a filter automatically. + # + def _type(spec): + # either format is exactly the specified one, + # or the extension matches the spec AND there is no format override. + return format == spec or (format is None and ext == spec) + + if _type("wav") or _type("amb"): + # wav is special because it supports different encodings through encoders; + # each encoder supports only one encoder format. + # + # amb format is a special case originating from libsox. + # It is basically a WAV format, with slight modification. + # https://github.com/chirlu/sox/commit/4a4ea33edbca5972a1ed8933cc3512c7302fa67a#diff-39171191a858add9df87f5f210a34a776ac2c026842ae6db6ce97f5e68836795 + # It is a distinct format so that decoders will recognize it as ambisonic. + # https://www.ambisonia.com/Members/mleese/file-format-for-b-format/ + # FFmpeg does not recognize amb because it is basically a WAV format. + muxer = "wav" + encoder = _get_encoder_for_wav(encoding, bps) + sample_fmt = None + elif _type("vorbis"): + # FFmpeg does not recognize the vorbis extension, while libsox used to.
+ # For the sake of backward compatibility (and simplicity), + # we support the case where users want to do save("foo.vorbis"). + muxer = "ogg" + encoder = "vorbis" + sample_fmt = None + else: + muxer = format + encoder = None + sample_fmt = None + if _type("flac"): + sample_fmt = _get_flac_sample_fmt(bps) + if _type("ogg"): + sample_fmt = _get_flac_sample_fmt(bps) + return muxer, encoder, sample_fmt + + +def save_audio( + uri: InputType, + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + compression: Optional[torchaudio.io.CodecConfig] = None, +) -> None: + ext = None + if hasattr(uri, "write"): + if format is None: + raise RuntimeError("'format' is required when saving to file object.") + else: + uri = os.path.normpath(uri) + if tokens := str(uri).split(".")[1:]: + ext = tokens[-1].lower() + + muxer, encoder, enc_fmt = _parse_save_args(ext, format, encoding, bits_per_sample) + + if channels_first: + src = src.T + + s = torchaudio.io.StreamWriter(uri, format=muxer, buffer_size=buffer_size) + s.add_audio_stream( + sample_rate, + num_channels=src.size(-1), + format=_get_sample_format(src.dtype), + encoder=encoder, + encoder_format=enc_fmt, + codec_config=compression, + ) + with s.open(): + s.write_audio_chunk(0, src) + + +def _map_encoding(encoding: str) -> str: + for dst in ["PCM_S", "PCM_U", "PCM_F"]: + if dst in encoding: + return dst + if encoding == "PCM_MULAW": + return "ULAW" + elif encoding == "PCM_ALAW": + return "ALAW" + return encoding + + +def _get_bits_per_sample(encoding: str, bits_per_sample: int) -> int: + if m := re.search(r"PCM_\w(\d+)\w*", encoding): + return int(m.group(1)) + elif encoding in ["PCM_ALAW", "PCM_MULAW"]: + return 8 + return bits_per_sample + + +class FFmpegBackend(Backend): + @staticmethod + def info(uri: InputType, format: Optional[str], buffer_size: int = 4096) -> AudioMetaData: + metadata = info_audio(uri, format, buffer_size) + metadata.bits_per_sample = _get_bits_per_sample(metadata.encoding, metadata.bits_per_sample) + metadata.encoding = _map_encoding(metadata.encoding) + return metadata + + @staticmethod + def load( + uri: InputType, + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, + ) -> Tuple[torch.Tensor, int]: + return load_audio(uri, frame_offset, num_frames, normalize, channels_first, format, buffer_size) + + @staticmethod + def save( + uri: InputType, + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + compression: Optional[Union[torchaudio.io.CodecConfig, float, int]] = None, + ) -> None: + if not isinstance(compression, (torchaudio.io.CodecConfig, type(None))): + raise ValueError( + "FFmpeg backend expects non-`None` value for argument `compression` to be of " + f"type `torchaudio.io.CodecConfig`, but received value of type {type(compression)}" + ) + save_audio( + uri, + src, + sample_rate, + channels_first, + format, + encoding, + bits_per_sample, + buffer_size, + compression, + ) + + @staticmethod + def can_decode(uri: InputType, format: Optional[str]) -> bool: + return True + + @staticmethod + def can_encode(uri: InputType, format: Optional[str]) -> bool: + return True diff --git
a/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile.py new file mode 100644 index 0000000000000000000000000000000000000000..f4be1f70999db86e4ff70b6b703d5784a891a84c --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile.py @@ -0,0 +1,54 @@ +import os +from typing import BinaryIO, Optional, Tuple, Union + +import torch +from torchaudio.io import CodecConfig + +from . import soundfile_backend +from .backend import Backend +from .common import AudioMetaData + + +class SoundfileBackend(Backend): + @staticmethod + def info(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], buffer_size: int = 4096) -> AudioMetaData: + return soundfile_backend.info(uri, format) + + @staticmethod + def load( + uri: Union[BinaryIO, str, os.PathLike], + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, + ) -> Tuple[torch.Tensor, int]: + return soundfile_backend.load(uri, frame_offset, num_frames, normalize, channels_first, format) + + @staticmethod + def save( + uri: Union[BinaryIO, str, os.PathLike], + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + compression: Optional[Union[CodecConfig, float, int]] = None, + ) -> None: + if compression: + raise ValueError("soundfile backend does not support argument `compression`.") + + soundfile_backend.save( + uri, src, sample_rate, channels_first, format=format, encoding=encoding, bits_per_sample=bits_per_sample + ) + + @staticmethod + def can_decode(uri, format) -> bool: + return True + + @staticmethod + def can_encode(uri, format) -> bool: + return True diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile_backend.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..9e7b0b13cd9adf2106ef4d6885f89341822add13 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/soundfile_backend.py @@ -0,0 +1,457 @@ +"""The new soundfile backend, which will become the default in 0.8.0 onward.""" +import warnings +from typing import Optional, Tuple + +import torch +from torchaudio._internal import module_utils as _mod_utils + +from .common import AudioMetaData + + +_IS_SOUNDFILE_AVAILABLE = False + +# TODO: import soundfile only when it is used. +if _mod_utils.is_module_available("soundfile"): + try: + import soundfile + + _requires_soundfile = _mod_utils.no_op + _IS_SOUNDFILE_AVAILABLE = True + except Exception: + _requires_soundfile = _mod_utils.fail_with_message( + "requires soundfile, but we failed to import it. Please check the installation of soundfile." + ) +else: + _requires_soundfile = _mod_utils.fail_with_message( + "requires soundfile, but it is not installed. Please install soundfile." + ) + + +# Mapping from soundfile subtype to number of bits per sample. +# This is mostly heuristic and the value is set to 0 when it is irrelevant +# (lossy formats) or when it can't be inferred. +# For ADPCM (and G72X) subtypes, it's hard to infer the bit depth because it's not part of the standard: +# According to https://en.wikipedia.org/wiki/Adaptive_differential_pulse-code_modulation#In_telephony, +# the default seems to be 8 bits but it can be compressed further to 4 bits.
+# The dict is inspired by + # https://github.com/bastibe/python-soundfile/blob/744efb4b01abc72498a96b09115b42a4cabd85e4/soundfile.py#L66-L94 +_SUBTYPE_TO_BITS_PER_SAMPLE = { + "PCM_S8": 8, # Signed 8 bit data + "PCM_16": 16, # Signed 16 bit data + "PCM_24": 24, # Signed 24 bit data + "PCM_32": 32, # Signed 32 bit data + "PCM_U8": 8, # Unsigned 8 bit data (WAV and RAW only) + "FLOAT": 32, # 32 bit float data + "DOUBLE": 64, # 64 bit float data + "ULAW": 8, # U-Law encoded. See https://en.wikipedia.org/wiki/G.711#Types + "ALAW": 8, # A-Law encoded. See https://en.wikipedia.org/wiki/G.711#Types + "IMA_ADPCM": 0, # IMA ADPCM. + "MS_ADPCM": 0, # Microsoft ADPCM. + "GSM610": 0, # GSM 6.10 encoding. (Wikipedia says 1.625 bit depth?? https://en.wikipedia.org/wiki/Full_Rate) + "VOX_ADPCM": 0, # OKI / Dialogix ADPCM + "G721_32": 0, # 32 kbps G721 ADPCM encoding. + "G723_24": 0, # 24 kbps G723 ADPCM encoding. + "G723_40": 0, # 40 kbps G723 ADPCM encoding. + "DWVW_12": 12, # 12 bit Delta Width Variable Word encoding. + "DWVW_16": 16, # 16 bit Delta Width Variable Word encoding. + "DWVW_24": 24, # 24 bit Delta Width Variable Word encoding. + "DWVW_N": 0, # N bit Delta Width Variable Word encoding. + "DPCM_8": 8, # 8 bit differential PCM (XI only) + "DPCM_16": 16, # 16 bit differential PCM (XI only) + "VORBIS": 0, # Xiph Vorbis encoding. (lossy) + "ALAC_16": 16, # Apple Lossless Audio Codec (16 bit). + "ALAC_20": 20, # Apple Lossless Audio Codec (20 bit). + "ALAC_24": 24, # Apple Lossless Audio Codec (24 bit). + "ALAC_32": 32, # Apple Lossless Audio Codec (32 bit). +} + + +def _get_bit_depth(subtype): + if subtype not in _SUBTYPE_TO_BITS_PER_SAMPLE: + warnings.warn( + f"The {subtype} subtype is unknown to TorchAudio. As a result, the bits_per_sample " + "attribute will be set to 0. If you are seeing this warning, please " + "report by opening an issue on github (after checking for existing/closed ones). " + "You may otherwise ignore this warning." + ) + return _SUBTYPE_TO_BITS_PER_SAMPLE.get(subtype, 0) + + +_SUBTYPE_TO_ENCODING = { + "PCM_S8": "PCM_S", + "PCM_16": "PCM_S", + "PCM_24": "PCM_S", + "PCM_32": "PCM_S", + "PCM_U8": "PCM_U", + "FLOAT": "PCM_F", + "DOUBLE": "PCM_F", + "ULAW": "ULAW", + "ALAW": "ALAW", + "VORBIS": "VORBIS", +} + + +def _get_encoding(format: str, subtype: str): + if format == "FLAC": + return "FLAC" + return _SUBTYPE_TO_ENCODING.get(subtype, "UNKNOWN") + + +@_requires_soundfile +def info(filepath: str, format: Optional[str] = None) -> AudioMetaData: + """Get signal information of an audio file. + + Note: + ``filepath`` argument is intentionally annotated as ``str`` only, even though it accepts + ``pathlib.Path`` object as well. This is for consistency with the ``"sox_io"`` backend, + which has a restriction on type annotation due to TorchScript compiler compatibility. + + Args: + filepath (path-like object or file-like object): + Source of audio data. + format (str or None, optional): + Not used. PySoundFile does not accept a format hint. + + Returns: + AudioMetaData: metadata of the given audio.
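+ + Example - a minimal sketch of typical usage (``"foo.wav"`` is a hypothetical + path, used here only for illustration):: + + >>> meta = info("foo.wav") + >>> meta.sample_rate # e.g. 16000 + >>> meta.encoding # e.g. "PCM_S" for a 16-bit signed integer WAV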
+ + """ + sinfo = soundfile.info(filepath) + return AudioMetaData( + sinfo.samplerate, + sinfo.frames, + sinfo.channels, + bits_per_sample=_get_bit_depth(sinfo.subtype), + encoding=_get_encoding(sinfo.format, sinfo.subtype), + ) + + +_SUBTYPE2DTYPE = { + "PCM_S8": "int8", + "PCM_U8": "uint8", + "PCM_16": "int16", + "PCM_32": "int32", + "FLOAT": "float32", + "DOUBLE": "float64", +} + + +@_requires_soundfile +def load( + filepath: str, + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, +) -> Tuple[torch.Tensor, int]: + """Load audio data from file. + + Note: + The formats this function can handle depend on the soundfile installation. + This function is tested on the following formats; + + * WAV + + * 32-bit floating-point + * 32-bit signed integer + * 16-bit signed integer + * 8-bit unsigned integer + + * FLAC + * OGG/VORBIS + * SPHERE + + By default (``normalize=True``, ``channels_first=True``), this function returns Tensor with + ``float32`` dtype, and the shape of `[channel, time]`. + + .. warning:: + + ``normalize`` argument does not perform volume normalization. + It only converts the sample type to `torch.float32` from the native sample + type. + + When the input format is WAV with integer type, such as 32-bit signed integer, 16-bit + signed integer, 24-bit signed integer, and 8-bit unsigned integer, by providing ``normalize=False``, + this function can return integer Tensor, where the samples are expressed within the whole range + of the corresponding dtype, that is, ``int32`` tensor for 32-bit signed PCM, + ``int16`` for 16-bit signed PCM and ``uint8`` for 8-bit unsigned PCM. Since torch does not + support ``int24`` dtype, 24-bit signed PCM are converted to ``int32`` tensors. + + ``normalize`` argument has no effect on 32-bit floating-point WAV and other formats, such as + ``flac`` and ``mp3``. + + For these formats, this function always returns ``float32`` Tensor with values. + + Note: + ``filepath`` argument is intentionally annotated as ``str`` only, even though it accepts + ``pathlib.Path`` object as well. This is for the consistency with ``"sox_io"`` backend, + which has a restriction on type annotation due to TorchScript compiler compatiblity. + + Args: + filepath (path-like object or file-like object): + Source of audio data. + frame_offset (int, optional): + Number of frames to skip before start reading data. + num_frames (int, optional): + Maximum number of frames to read. ``-1`` reads all the remaining samples, + starting from ``frame_offset``. + This function may return the less number of frames if there is not enough + frames in the given file. + normalize (bool, optional): + When ``True``, this function converts the native sample type to ``float32``. + Default: ``True``. + + If input file is integer WAV, giving ``False`` will change the resulting Tensor type to + integer type. + This argument has no effect for formats other than integer WAV type. + + channels_first (bool, optional): + When True, the returned Tensor has dimension `[channel, time]`. + Otherwise, the returned Tensor's dimension is `[time, channel]`. + format (str or None, optional): + Not used. PySoundFile does not accept format hint. + + Returns: + (torch.Tensor, int): Resulting Tensor and sample rate. + If the input file has integer wav format and normalization is off, then it has + integer type, else ``float32`` type. If ``channels_first=True``, it has + `[channel, time]` else `[time, channel]`. 
+ """ + with soundfile.SoundFile(filepath, "r") as file_: + if file_.format != "WAV" or normalize: + dtype = "float32" + elif file_.subtype not in _SUBTYPE2DTYPE: + raise ValueError(f"Unsupported subtype: {file_.subtype}") + else: + dtype = _SUBTYPE2DTYPE[file_.subtype] + + frames = file_._prepare_read(frame_offset, None, num_frames) + waveform = file_.read(frames, dtype, always_2d=True) + sample_rate = file_.samplerate + + waveform = torch.from_numpy(waveform) + if channels_first: + waveform = waveform.t() + return waveform, sample_rate + + +def _get_subtype_for_wav(dtype: torch.dtype, encoding: str, bits_per_sample: int): + if not encoding: + if not bits_per_sample: + subtype = { + torch.uint8: "PCM_U8", + torch.int16: "PCM_16", + torch.int32: "PCM_32", + torch.float32: "FLOAT", + torch.float64: "DOUBLE", + }.get(dtype) + if not subtype: + raise ValueError(f"Unsupported dtype for wav: {dtype}") + return subtype + if bits_per_sample == 8: + return "PCM_U8" + return f"PCM_{bits_per_sample}" + if encoding == "PCM_S": + if not bits_per_sample: + return "PCM_32" + if bits_per_sample == 8: + raise ValueError("wav does not support 8-bit signed PCM encoding.") + return f"PCM_{bits_per_sample}" + if encoding == "PCM_U": + if bits_per_sample in (None, 8): + return "PCM_U8" + raise ValueError("wav only supports 8-bit unsigned PCM encoding.") + if encoding == "PCM_F": + if bits_per_sample in (None, 32): + return "FLOAT" + if bits_per_sample == 64: + return "DOUBLE" + raise ValueError("wav only supports 32/64-bit float PCM encoding.") + if encoding == "ULAW": + if bits_per_sample in (None, 8): + return "ULAW" + raise ValueError("wav only supports 8-bit mu-law encoding.") + if encoding == "ALAW": + if bits_per_sample in (None, 8): + return "ALAW" + raise ValueError("wav only supports 8-bit a-law encoding.") + raise ValueError(f"wav does not support {encoding}.") + + +def _get_subtype_for_sphere(encoding: str, bits_per_sample: int): + if encoding in (None, "PCM_S"): + return f"PCM_{bits_per_sample}" if bits_per_sample else "PCM_32" + if encoding in ("PCM_U", "PCM_F"): + raise ValueError(f"sph does not support {encoding} encoding.") + if encoding == "ULAW": + if bits_per_sample in (None, 8): + return "ULAW" + raise ValueError("sph only supports 8-bit for mu-law encoding.") + if encoding == "ALAW": + return "ALAW" + raise ValueError(f"sph does not support {encoding}.") + + +def _get_subtype(dtype: torch.dtype, format: str, encoding: str, bits_per_sample: int): + if format == "wav": + return _get_subtype_for_wav(dtype, encoding, bits_per_sample) + if format == "flac": + if encoding: + raise ValueError("flac does not support encoding.") + if not bits_per_sample: + return "PCM_16" + if bits_per_sample > 24: + raise ValueError("flac does not support bits_per_sample > 24.") + return "PCM_S8" if bits_per_sample == 8 else f"PCM_{bits_per_sample}" + if format in ("ogg", "vorbis"): + if bits_per_sample: + raise ValueError("ogg/vorbis does not support bits_per_sample.") + if encoding is None or encoding == "vorbis": + return "VORBIS" + if encoding == "opus": + return "OPUS" + raise ValueError(f"Unexpected encoding: {encoding}") + if format == "mp3": + return "MPEG_LAYER_III" + if format == "sph": + return _get_subtype_for_sphere(encoding, bits_per_sample) + if format in ("nis", "nist"): + return "PCM_16" + raise ValueError(f"Unsupported format: {format}") + + +@_requires_soundfile +def save( + filepath: str, + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + compression: Optional[float] = 
None, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, +): + """Save audio data to file. + + Note: + The formats this function can handle depend on the soundfile installation. + This function is tested on the following formats: + + * WAV + + * 32-bit floating-point + * 32-bit signed integer + * 16-bit signed integer + * 8-bit unsigned integer + + * FLAC + * OGG/VORBIS + * SPHERE + + Note: + ``filepath`` argument is intentionally annotated as ``str`` only, even though it accepts + ``pathlib.Path`` object as well. This is for consistency with the ``"sox_io"`` backend, + which has a restriction on type annotation due to TorchScript compiler compatibility. + + Args: + filepath (str or pathlib.Path): Path to audio file. + src (torch.Tensor): Audio data to save. Must be a 2D tensor. + sample_rate (int): Sampling rate. + channels_first (bool, optional): If ``True``, the given tensor is interpreted as `[channel, time]`, + otherwise `[time, channel]`. + compression (float or None, optional): Not used. + It is here only for interface compatibility reasons with the "sox_io" backend. + format (str or None, optional): Override the audio format. + When the ``filepath`` argument is a path-like object, the audio format is + inferred from the file extension. If the file extension is missing or + different, you can specify the correct format with this argument. + + When the ``filepath`` argument is a file-like object, + this argument is required. + + Valid values are ``"wav"``, ``"ogg"``, ``"vorbis"``, + ``"flac"`` and ``"sph"``. + encoding (str or None, optional): Changes the encoding for supported formats. + This argument is effective only for supported formats, such as + ``"wav"``, ``"flac"`` and ``"sph"``. Valid values are: + + - ``"PCM_S"`` (signed integer Linear PCM) + - ``"PCM_U"`` (unsigned integer Linear PCM) + - ``"PCM_F"`` (floating point PCM) + - ``"ULAW"`` (mu-law) + - ``"ALAW"`` (a-law) + + bits_per_sample (int or None, optional): Changes the bit depth for the + supported formats. + When ``format`` is one of ``"wav"``, ``"flac"`` or ``"sph"``, + you can change the bit depth. + Valid values are ``8``, ``16``, ``24``, ``32`` and ``64``. + + Supported formats/encodings/bit depths/compressions are: + + ``"wav"`` + - 32-bit floating-point PCM + - 32-bit signed integer PCM + - 24-bit signed integer PCM + - 16-bit signed integer PCM + - 8-bit unsigned integer PCM + - 8-bit mu-law + - 8-bit a-law + + Note: + Default encoding/bit depth is determined by the dtype of + the input Tensor. + + ``"flac"`` + - 8-bit + - 16-bit (default) + - 24-bit + + ``"ogg"``, ``"vorbis"`` + - Doesn't accept changing configuration. + + ``"sph"`` + - 8-bit signed integer PCM + - 16-bit signed integer PCM + - 24-bit signed integer PCM + - 32-bit signed integer PCM (default) + - 8-bit mu-law + - 8-bit a-law + - 16-bit a-law + - 24-bit a-law + - 32-bit a-law + + """ + if src.ndim != 2: + raise ValueError(f"Expected 2D Tensor, got {src.ndim}D.") + if compression is not None: + warnings.warn( + '`save` function of "soundfile" backend does not support "compression" parameter. ' + "The argument is silently ignored."
+ ) + if hasattr(filepath, "write"): + if format is None: + raise RuntimeError("`format` is required when saving to file object.") + ext = format.lower() + else: + ext = str(filepath).split(".")[-1].lower() + + if bits_per_sample not in (None, 8, 16, 24, 32, 64): + raise ValueError("Invalid bits_per_sample.") + if bits_per_sample == 24: + warnings.warn( + "Saving audio with 24 bits per sample might warp samples near -1. " + "Using 16 bits per sample might be able to avoid this." + ) + subtype = _get_subtype(src.dtype, ext, encoding, bits_per_sample) + + # sph is an extension used in TED-LIUM, but soundfile does not recognize it as the NIST format, + # so we extend the set of extensions manually here. + if ext in ["nis", "nist", "sph"] and format is None: + format = "NIST" + + if channels_first: + src = src.t() + + soundfile.write(file=filepath, data=src, samplerate=sample_rate, subtype=subtype, format=format) diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/sox.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/sox.py new file mode 100644 index 0000000000000000000000000000000000000000..bfcd8a4f8beadcd80787a354d47219f9edfb98e8 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/sox.py @@ -0,0 +1,91 @@ +import os +from typing import BinaryIO, Optional, Tuple, Union + +import torch +import torchaudio + +from .backend import Backend +from .common import AudioMetaData + +sox_ext = torchaudio._extension.lazy_import_sox_ext() + + +class SoXBackend(Backend): + @staticmethod + def info(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], buffer_size: int = 4096) -> AudioMetaData: + if hasattr(uri, "read"): + raise ValueError( + "SoX backend does not support reading from file-like objects. " + "Please use an alternative backend that does support reading from file-like objects, e.g. FFmpeg." + ) + else: + sinfo = sox_ext.get_info(uri, format) + if sinfo: + return AudioMetaData(*sinfo) + else: + raise RuntimeError(f"Failed to fetch metadata for {uri}.") + + @staticmethod + def load( + uri: Union[BinaryIO, str, os.PathLike], + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, + ) -> Tuple[torch.Tensor, int]: + if hasattr(uri, "read"): + raise ValueError( + "SoX backend does not support loading from file-like objects. " + "Please use an alternative backend that does support loading from file-like objects, e.g. FFmpeg." + ) + else: + ret = sox_ext.load_audio_file(uri, frame_offset, num_frames, normalize, channels_first, format) + if not ret: + raise RuntimeError(f"Failed to load audio from {uri}.") + return ret + + @staticmethod + def save( + uri: Union[BinaryIO, str, os.PathLike], + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + compression: Optional[Union[torchaudio.io.CodecConfig, float, int]] = None, + ) -> None: + if not isinstance(compression, (float, int, type(None))): + raise ValueError( + "SoX backend expects non-`None` value for argument `compression` to be of " + f"type `float` or `int`, but received value of type {type(compression)}" + ) + if hasattr(uri, "write"): + raise ValueError( + "SoX backend does not support writing to file-like objects. " + "Please use an alternative backend that does support writing to file-like objects, e.g. FFmpeg." + ) + else: + sox_ext.save_audio_file( + uri, + src, + sample_rate, + channels_first, + compression, + format, + encoding, + bits_per_sample, + ) + + @staticmethod + def can_decode(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str]) -> bool: + # i.e. not a file-like object. + return not hasattr(uri, "read") + + @staticmethod + def can_encode(uri: Union[BinaryIO, str, os.PathLike], format: Optional[str]) -> bool: + # i.e. not a file-like object. + return not hasattr(uri, "write") diff --git a/venv/lib/python3.10/site-packages/torchaudio/_backend/utils.py b/venv/lib/python3.10/site-packages/torchaudio/_backend/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..0cde6b1927d2b35e34d4ab0e99124fde146e724d --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_backend/utils.py @@ -0,0 +1,317 @@ +import os +from functools import lru_cache +from typing import BinaryIO, Dict, Optional, Tuple, Type, Union + +import torch + +from torchaudio._extension import lazy_import_sox_ext +from torchaudio.io import CodecConfig +from torio._extension import lazy_import_ffmpeg_ext + +from . import soundfile_backend + +from .backend import Backend +from .common import AudioMetaData +from .ffmpeg import FFmpegBackend +from .soundfile import SoundfileBackend +from .sox import SoXBackend + + +@lru_cache(None) +def get_available_backends() -> Dict[str, Type[Backend]]: + backend_specs: Dict[str, Type[Backend]] = {} + if lazy_import_ffmpeg_ext().is_available(): + backend_specs["ffmpeg"] = FFmpegBackend + if lazy_import_sox_ext().is_available(): + backend_specs["sox"] = SoXBackend + if soundfile_backend._IS_SOUNDFILE_AVAILABLE: + backend_specs["soundfile"] = SoundfileBackend + return backend_specs + + +def get_backend(backend_name, backends) -> Backend: + if backend := backends.get(backend_name): + return backend + else: + raise ValueError( + f"Unsupported backend '{backend_name}' specified; " + f"please select one of {list(backends.keys())} instead." + ) + + +def get_info_func(): + backends = get_available_backends() + + def dispatcher( + uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], backend_name: Optional[str] + ) -> Backend: + if backend_name is not None: + return get_backend(backend_name, backends) + + for backend in backends.values(): + if backend.can_decode(uri, format): + return backend + raise RuntimeError(f"Couldn't find appropriate backend to handle uri {uri} and format {format}.") + + def info( + uri: Union[BinaryIO, str, os.PathLike], + format: Optional[str] = None, + buffer_size: int = 4096, + backend: Optional[str] = None, + ) -> AudioMetaData: + """Get signal information of an audio file. + + Note: + When the input type is a file-like object, this function cannot + get the correct length (``num_samples``) for certain formats, + such as ``vorbis``. + In this case, the value of ``num_samples`` is ``0``. + + Args: + uri (path-like object or file-like object): + Source of audio data. The following types are accepted: + + * ``path-like``: File path or URL. + * ``file-like``: Object with ``read(size: int) -> bytes`` method, + which returns a byte string of at most ``size`` length. + + format (str or None, optional): + If not ``None``, interpreted as a hint that may allow the backend to override the detected format. + (Default: ``None``) + + buffer_size (int, optional): + Size of buffer to use when processing file-like objects, in bytes. (Default: ``4096``) + + backend (str or None, optional): + I/O backend to use.
+ If ``None``, the function selects a backend given the input and the available backends. + Otherwise, must be one of [``"ffmpeg"``, ``"sox"``, ``"soundfile"``], + with the corresponding backend available. + (Default: ``None``) + + .. seealso:: + :ref:`backend` + + Returns: + AudioMetaData + """ + backend = dispatcher(uri, format, backend) + return backend.info(uri, format, buffer_size) + + return info + + +def get_load_func(): + backends = get_available_backends() + + def dispatcher( + uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], backend_name: Optional[str] + ) -> Backend: + if backend_name is not None: + return get_backend(backend_name, backends) + + for backend in backends.values(): + if backend.can_decode(uri, format): + return backend + raise RuntimeError(f"Couldn't find appropriate backend to handle uri {uri} and format {format}.") + + def load( + uri: Union[BinaryIO, str, os.PathLike], + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, + buffer_size: int = 4096, + backend: Optional[str] = None, + ) -> Tuple[torch.Tensor, int]: + """Load audio data from source. + + By default (``normalize=True``, ``channels_first=True``), this function returns a Tensor with + ``float32`` dtype, and the shape of `[channel, time]`. + + Note: + The formats this function can handle depend on the availability of backends. + Please use the following functions to fetch the supported formats. + + - FFmpeg: :py:func:`torchaudio.utils.ffmpeg_utils.get_audio_decoders` + - Sox: :py:func:`torchaudio.utils.sox_utils.list_read_formats` + - SoundFile: Refer to `the official document `__. + + .. warning:: + + The ``normalize`` argument does not perform volume normalization. + It only converts the sample type to `torch.float32` from the native sample + type. + + When the input format is WAV with integer type, such as 32-bit signed integer, 16-bit + signed integer, 24-bit signed integer, and 8-bit unsigned integer, by providing ``normalize=False``, + this function can return an integer Tensor, where the samples are expressed within the whole range + of the corresponding dtype, that is, ``int32`` tensor for 32-bit signed PCM, + ``int16`` for 16-bit signed PCM and ``uint8`` for 8-bit unsigned PCM. Since torch does not + support the ``int24`` dtype, 24-bit signed PCM is converted to ``int32`` tensors. + + The ``normalize`` argument has no effect on 32-bit floating-point WAV and other formats, such as + ``flac`` and ``mp3``. + + For these formats, this function always returns a ``float32`` Tensor. + + + Args: + uri (path-like object or file-like object): + Source of audio data. + frame_offset (int, optional): + Number of frames to skip before starting to read data. + num_frames (int, optional): + Maximum number of frames to read. ``-1`` reads all the remaining samples, + starting from ``frame_offset``. + This function may return fewer frames than requested if there are not enough + frames in the given file. + normalize (bool, optional): + When ``True``, this function converts the native sample type to ``float32``. + Default: ``True``. + + If the input file is integer WAV, giving ``False`` will change the resulting Tensor type to + integer type. + This argument has no effect for formats other than integer WAV type. + + channels_first (bool, optional): + When True, the returned Tensor has dimension `[channel, time]`. + Otherwise, the returned Tensor's dimension is `[time, channel]`.
+ + format (str or None, optional): + If not ``None``, interpreted as a hint that may allow the backend to override the detected format. + (Default: ``None``) + + buffer_size (int, optional): + Size of buffer to use when processing file-like objects, in bytes. (Default: ``4096``) + + backend (str or None, optional): + I/O backend to use. + If ``None``, the function selects a backend given the input and the available backends. + Otherwise, must be one of [``"ffmpeg"``, ``"sox"``, ``"soundfile"``], + with the corresponding backend being available. (Default: ``None``) + + .. seealso:: + :ref:`backend` + + Returns: + (torch.Tensor, int): Resulting Tensor and sample rate. + If the input file has integer wav format and normalization is off, then it has + integer type, else ``float32`` type. If ``channels_first=True``, it has + `[channel, time]` else `[time, channel]`. + """ + backend = dispatcher(uri, format, backend) + return backend.load(uri, frame_offset, num_frames, normalize, channels_first, format, buffer_size) + + return load + + +def get_save_func(): + backends = get_available_backends() + + def dispatcher( + uri: Union[BinaryIO, str, os.PathLike], format: Optional[str], backend_name: Optional[str] + ) -> Backend: + if backend_name is not None: + return get_backend(backend_name, backends) + + for backend in backends.values(): + if backend.can_encode(uri, format): + return backend + raise RuntimeError(f"Couldn't find appropriate backend to handle uri {uri} and format {format}.") + + def save( + uri: Union[BinaryIO, str, os.PathLike], + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, + buffer_size: int = 4096, + backend: Optional[str] = None, + compression: Optional[Union[CodecConfig, float, int]] = None, + ): + """Save audio data to file. + + Note: + The formats this function can handle depend on the availability of backends. + Please use the following functions to fetch the supported formats. + + - FFmpeg: :py:func:`torchaudio.utils.ffmpeg_utils.get_audio_encoders` + - Sox: :py:func:`torchaudio.utils.sox_utils.list_write_formats` + - SoundFile: Refer to `the official document `__. + + Args: + uri (str or pathlib.Path): Path to audio file. + src (torch.Tensor): Audio data to save. Must be a 2D tensor. + sample_rate (int): Sampling rate. + channels_first (bool, optional): If ``True``, the given tensor is interpreted as `[channel, time]`, + otherwise `[time, channel]`. + format (str or None, optional): Override the audio format. + When the ``uri`` argument is a path-like object, the audio format is + inferred from the file extension. If the file extension is missing or + different, you can specify the correct format with this argument. + + When the ``uri`` argument is a file-like object, + this argument is required. + + Valid values are ``"wav"``, ``"ogg"``, and ``"flac"``. + encoding (str or None, optional): Changes the encoding for supported formats. + This argument is effective only for supported formats, i.e. + ``"wav"`` and ``"flac"``. Valid values are: + + - ``"PCM_S"`` (signed integer Linear PCM) + - ``"PCM_U"`` (unsigned integer Linear PCM) + - ``"PCM_F"`` (floating point PCM) + - ``"ULAW"`` (mu-law) + - ``"ALAW"`` (a-law) + + bits_per_sample (int or None, optional): Changes the bit depth for the + supported formats. + When ``format`` is one of ``"wav"`` and ``"flac"``, + you can change the bit depth. + Valid values are ``8``, ``16``, ``24``, ``32`` and ``64``.
+ + buffer_size (int, optional): + Size of buffer to use when processing file-like objects, in bytes. (Default: ``4096``) + + backend (str or None, optional): + I/O backend to use. + If ``None``, function selects backend given input and available backends. + Otherwise, must be one of [``"ffmpeg"``, ``"sox"``, ``"soundfile"``], + with the corresponding backend being available. + (Default: ``None``) + + .. seealso:: + :ref:`backend` + + compression (CodecConfig, float, int, or None, optional): + Compression configuration to apply. + + If the selected backend is FFmpeg, an instance of :py:class:`CodecConfig` must be provided. + + Otherwise, if the selected backend is SoX, a float or int value corresponding to option ``-C`` of the + ``sox`` command line interface must be provided. For instance: + + ``"mp3"`` + Either bitrate (in ``kbps``) with quality factor, such as ``128.2``, or + VBR encoding with quality factor such as ``-4.2``. Default: ``-4.5``. + + ``"flac"`` + Whole number from ``0`` to ``8``. ``8`` is default and highest compression. + + ``"ogg"``, ``"vorbis"`` + Number from ``-1`` to ``10``; ``-1`` is the highest compression + and lowest quality. Default: ``3``. + + Refer to http://sox.sourceforge.net/soxformat.html for more details. + + """ + backend = dispatcher(uri, format, backend) + return backend.save( + uri, src, sample_rate, channels_first, format, encoding, bits_per_sample, buffer_size, compression + ) + + return save diff --git a/venv/lib/python3.10/site-packages/torchaudio/_extension/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/_extension/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5c2ff55583ba48aa5859e26bfceaba9a78ee23e2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_extension/__init__.py @@ -0,0 +1,74 @@ +import logging +import os +import sys + +from torchaudio._internal.module_utils import fail_with_message, is_module_available, no_op + +from .utils import _check_cuda_version, _init_dll_path, _init_sox, _LazyImporter, _load_lib + +_LG = logging.getLogger(__name__) + + +# Note: +# `_check_cuda_version` is not meant to be used by regular users. +# Builder uses it for debugging purpose, so we export it. +# https://github.com/pytorch/builder/blob/e2e4542b8eb0bdf491214451a1a4128bd606cce2/test/smoke_test/smoke_test.py#L80 +__all__ = [ + "_check_cuda_version", + "_IS_TORCHAUDIO_EXT_AVAILABLE", + "_IS_RIR_AVAILABLE", + "lazy_import_sox_ext", +] + + +if os.name == "nt" and (3, 8) <= sys.version_info < (3, 9): + _init_dll_path() + + +# When the extension module is built, we initialize it. +# In case of an error, we do not catch the failure as it suggests there is something +# wrong with the installation. +_IS_TORCHAUDIO_EXT_AVAILABLE = is_module_available("torchaudio.lib._torchaudio") +# RIR features are implemented in _torchaudio extension, but they can be individually +# turned on/off at build time. Available means that _torchaudio is loaded properly, and +# RIR features are found there. 
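+# A sketch of how the availability flags below are typically consumed: +# decorating a function with ``fail_if_no_rir`` (defined at the end of this +# module) makes it raise a RuntimeError with a descriptive message when the +# RIR extension is missing. The decorated function name is hypothetical: +# +# @fail_if_no_rir +# def simulate_rir(...): +# ...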
+_IS_RIR_AVAILABLE = False +_IS_ALIGN_AVAILABLE = False +if _IS_TORCHAUDIO_EXT_AVAILABLE: + _load_lib("libtorchaudio") + + import torchaudio.lib._torchaudio # noqa + + _check_cuda_version() + _IS_RIR_AVAILABLE = torchaudio.lib._torchaudio.is_rir_available() + _IS_ALIGN_AVAILABLE = torchaudio.lib._torchaudio.is_align_available() + + +_SOX_EXT = None + + +def lazy_import_sox_ext(): + """Load SoX integration in a lazy manner, based on availability.""" + + global _SOX_EXT + if _SOX_EXT is None: + _SOX_EXT = _LazyImporter("_torchaudio_sox", _init_sox) + return _SOX_EXT + + +fail_if_no_rir = ( + no_op + if _IS_RIR_AVAILABLE + else fail_with_message( + "requires RIR extension, but TorchAudio is not compiled with it. Please build TorchAudio with RIR support." + ) +) + + +fail_if_no_align = ( + no_op + if _IS_ALIGN_AVAILABLE + else fail_with_message( + "Requires alignment extension, but TorchAudio is not compiled with it. \ + Please build TorchAudio with alignment support." + ) +) diff --git a/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..39a666262da82adbc5ee0017a4050d1b9aa78e3e Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..057f50850467f8e1546640aa158278d0130b49f1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_extension/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_extension/utils.py b/venv/lib/python3.10/site-packages/torchaudio/_extension/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c5660a1e22c2b7f73ee6f351fb8b37cde4c3260f --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_extension/utils.py @@ -0,0 +1,180 @@ +"""Module to implement logic used for initializing extensions. + +The implementations here should be stateless. +They should not depend on external state. +Anything that depends on external state should happen in __init__.py. +""" +import importlib +import logging +import os +import types +from pathlib import Path + +import torch +from torchaudio._internal.module_utils import eval_env + +_LG = logging.getLogger(__name__) +_LIB_DIR = Path(__file__).parent.parent / "lib" + + +def _get_lib_path(lib: str): + suffix = "pyd" if os.name == "nt" else "so" + path = _LIB_DIR / f"{lib}.{suffix}" + return path + + +def _load_lib(lib: str) -> bool: + """Load extension module. + + Note: + In case `torchaudio` is deployed in `pex` format, the library file + is not in a standard location. + In this case, we expect that `libtorchaudio` is available somewhere + in the search path of the dynamic loading mechanism, so that importing + `_torchaudio` will have the library loader find and load `libtorchaudio`. + This is the reason why the function should not raise an error when the library + file is not found. + + Returns: + bool: + True if the library file is found AND the library loaded without failure.
False if the library file is not found (like in the case where torchaudio + is deployed in pex format, thus the shared library file is + in a non-standard location). + If the library file is found but there is an issue loading the library + (such as a missing dependency), then this function raises the exception as-is. + + Raises: + Exception: + If the library file is found, but there is an issue loading the library file + (when the underlying `ctypes.CDLL` throws an exception), this function will pass + the exception as-is, instead of catching it and returning bool. + The expected case is `OSError` thrown by `ctypes.CDLL` when a dynamic dependency + is not found. + This behavior was chosen because the expected failure case is not recoverable. + If a dependency is missing, then users have to install it. + """ + path = _get_lib_path(lib) + if not path.exists(): + return False + torch.ops.load_library(path) + return True + + +def _import_sox_ext(): + if os.name == "nt": + raise RuntimeError("sox extension is not supported on Windows") + if not eval_env("TORCHAUDIO_USE_SOX", True): + raise RuntimeError("sox extension is disabled. (TORCHAUDIO_USE_SOX=0)") + + ext = "torchaudio.lib._torchaudio_sox" + + if not importlib.util.find_spec(ext): + raise RuntimeError( + # fmt: off + "TorchAudio is not built with sox extension. " + "Please build TorchAudio with libsox support. (BUILD_SOX=1)" + # fmt: on + ) + + _load_lib("libtorchaudio_sox") + return importlib.import_module(ext) + + +def _init_sox(): + ext = _import_sox_ext() + ext.set_verbosity(0) + + import atexit + + torch.ops.torchaudio_sox.initialize_sox_effects() + atexit.register(torch.ops.torchaudio_sox.shutdown_sox_effects) + + # Bundle functions registered with TORCH_LIBRARY into extension + # so that they can also be accessed in the same (lazy) manner + # from the extension. + keys = [ + "get_info", + "load_audio_file", + "save_audio_file", + "apply_effects_tensor", + "apply_effects_file", + ] + for key in keys: + setattr(ext, key, getattr(torch.ops.torchaudio_sox, key)) + + return ext + + +class _LazyImporter(types.ModuleType): + """Lazily import module/extension.""" + + def __init__(self, name, import_func): + super().__init__(name) + self.import_func = import_func + self.module = None + + # Note: + # Python caches what was retrieved with `__getattr__`, so this method will not be + # called again for the same item. + def __getattr__(self, item): + self._import_once() + return getattr(self.module, item) + + def __repr__(self): + if self.module is None: + return f"<module '{self.__name__}' (not yet imported)>" + return repr(self.module) + + def __dir__(self): + self._import_once() + return dir(self.module) + + def _import_once(self): + if self.module is None: + self.module = self.import_func() + # Note: + # By attaching the module attributes to self, + # module attributes are directly accessible. + # This makes it possible to avoid calling __getattr__ for every attribute access. + self.__dict__.update(self.module.__dict__) + + def is_available(self): + try: + self._import_once() + except Exception: + return False + return True + + +def _init_dll_path(): + # On Windows Python-3.8+ has `os.add_dll_directory` call, + # which is called to configure dll search path.
# To find cuda related dlls we need to make sure the + # conda environment/bin path is configured. Please take a look: + # https://stackoverflow.com/questions/59330863/cant-import-dll-module-in-python + # Please note: if some path can't be added using add_dll_directory we simply ignore this path + for path in os.environ.get("PATH", "").split(";"): + if os.path.exists(path): + try: + os.add_dll_directory(path) + except Exception: + pass + + +def _check_cuda_version(): + import torchaudio.lib._torchaudio + + version = torchaudio.lib._torchaudio.cuda_version() + if version is not None and torch.version.cuda is not None: + version_str = str(version) + ta_version = f"{version_str[:-3]}.{version_str[-2]}" + t_version = torch.version.cuda.split(".") + t_version = f"{t_version[0]}.{t_version[1]}" + if ta_version != t_version: + raise RuntimeError( + "Detected that PyTorch and TorchAudio were compiled with different CUDA versions. " + f"PyTorch has CUDA version {t_version} whereas TorchAudio has CUDA version {ta_version}. " + "Please install the TorchAudio version that matches your PyTorch version." + ) + return version diff --git a/venv/lib/python3.10/site-packages/torchaudio/_internal/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/_internal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..363e94f13bb5059ab6888af2fb60314699f1ab1e --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_internal/__init__.py @@ -0,0 +1,10 @@ +try: + from .fb import download_url_to_file, load_state_dict_from_url +except ImportError: + from torch.hub import download_url_to_file, load_state_dict_from_url + + +__all__ = [ + "load_state_dict_from_url", + "download_url_to_file", +] diff --git a/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d481b2ffd62f2e59ca161299e90dc39b84753ef0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e3412b1184e6ae0b6910de14840b74f321531edc Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/_internal/__pycache__/module_utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/_internal/module_utils.py b/venv/lib/python3.10/site-packages/torchaudio/_internal/module_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bc484104bfd86460c57b2f773146ea1a14e47984 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/_internal/module_utils.py @@ -0,0 +1,113 @@ +import importlib.util +import os +import warnings +from functools import wraps +from typing import Optional + + +def eval_env(var, default): + """Check if an environment variable has a truthy value""" + if var not in os.environ: + return default + + val = os.environ.get(var, "0") + trues = ["1", "true", "TRUE", "on", "ON", "yes", "YES"] + falses = ["0", "false", "FALSE", "off", "OFF", "no", "NO"] + if val in trues: + return True + if val not in falses: + # fmt: off + raise RuntimeError( + f"Unexpected environment variable value `{var}={val}`. " + f"Expected one of {trues + falses}") + # fmt: on + return False + + +def is_module_available(*modules: str) -> bool: + r"""Returns whether a top-level module with :attr:`name` exists *without* + importing it. This is generally safer than a try-except block around an + `import X`. It avoids third party libraries breaking assumptions of some of + our tests, e.g., setting multiprocessing start method when imported + (see librosa/#747, torchvision/#544). + """ + return all(importlib.util.find_spec(m) is not None for m in modules) + + +def requires_module(*modules: str): + """Decorate function to give error message if invoked without required optional modules. + + This decorator is to give a better error message to users rather + than raising ``NameError: name 'module' is not defined`` at random places. + """ + missing = [m for m in modules if not is_module_available(m)] + + if not missing: + # fall through. If all the modules are available, no need to decorate + def decorator(func): + return func + + else: + req = f"module: {missing[0]}" if len(missing) == 1 else f"modules: {missing}" + + def decorator(func): + @wraps(func) + def wrapped(*args, **kwargs): + raise RuntimeError(f"{func.__module__}.{func.__name__} requires {req}") + + return wrapped + + return decorator + + +def deprecated(direction: str, version: Optional[str] = None, remove: bool = False): + """Decorator to add deprecation message + + Args: + direction (str): Migration steps to be given to users. + version (str or int): The version when the object will be removed + remove (bool): If enabled, append future removal message. + """ + + def decorator(func): + @wraps(func) + def wrapped(*args, **kwargs): + message = f"{func.__module__}.{func.__name__} has been deprecated. {direction}" + if remove: + message += f' It will be removed from {"future" if version is None else version} release. ' + warnings.warn(message, stacklevel=2) + return func(*args, **kwargs) + + message = "This function has been deprecated. " + if remove: + message += f'It will be removed from {"future" if version is None else version} release. ' + + wrapped.__doc__ = f"""DEPRECATED: {func.__doc__} + + .. warning:: + + {message} + {direction} + """ + + return wrapped + + return decorator + + +def fail_with_message(message): + """Generate decorator to give users a message about a missing TorchAudio extension.""" + + def decorator(func): + @wraps(func) + def wrapped(*args, **kwargs): + raise RuntimeError(f"{func.__module__}.{func.__name__} {message}") + + return wrapped + + return decorator + + +def no_op(func): + """No-op decorator. Used in place of fail_with_message when a functionality that requires an extension works fine.""" + return func diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/backend/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..84df7e7d697616076d549dc4163b55cd34335a25 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/__init__.py @@ -0,0 +1,8 @@ +# NOTE: +# The entire `torchaudio.backend` module is deprecated. +# New things should be added to `torchaudio._backend`. +# Only things related to backward compatibility should be placed here. + +from .
import common, no_backend, soundfile_backend, sox_io_backend # noqa + +__all__ = [] diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fb79579b79d632b2afad7f2ca84168de00ae0ed Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_no_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_no_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db129d6b3369ff61fce0cbc7d94a9d16287fef78 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_no_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_sox_io_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_sox_io_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0a9749f1440bd981e0a61c8a1b3798adaec215d Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/_sox_io_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/common.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/common.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f132f0f7a0b448c72b726712f6951b5a1102586e Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/common.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4f3f401091c2c18e8f84066ab11648c4335413ee Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/no_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7ddae0b2610da3f92d2bca5a4f3f4b040d6c9cc7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/soundfile_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06cf1c6fa175a29611310e1cf1d626ecf964195a Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/backend/__pycache__/sox_io_backend.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/_no_backend.py b/venv/lib/python3.10/site-packages/torchaudio/backend/_no_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..fcbb2ad84aefcf33b181b686ee1105e532a8661d --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/_no_backend.py @@ -0,0 +1,25 @@ +from 
pathlib import Path +from typing import Callable, Optional, Tuple, Union + +from torch import Tensor +from torchaudio import AudioMetaData + + +def load( + filepath: Union[str, Path], + out: Optional[Tensor] = None, + normalization: Union[bool, float, Callable] = True, + channels_first: bool = True, + num_frames: int = 0, + offset: int = 0, + filetype: Optional[str] = None, +) -> Tuple[Tensor, int]: + raise RuntimeError("No audio I/O backend is available.") + + +def save(filepath: str, src: Tensor, sample_rate: int, precision: int = 16, channels_first: bool = True) -> None: + raise RuntimeError("No audio I/O backend is available.") + + +def info(filepath: str) -> AudioMetaData: + raise RuntimeError("No audio I/O backend is available.") diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/_sox_io_backend.py b/venv/lib/python3.10/site-packages/torchaudio/backend/_sox_io_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..6af267b17a48d330c699e72dd3e31bc336a7d3da --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/_sox_io_backend.py @@ -0,0 +1,294 @@ +import os +from typing import Optional, Tuple + +import torch +import torchaudio +from torchaudio import AudioMetaData + +sox_ext = torchaudio._extension.lazy_import_sox_ext() + + +def info( + filepath: str, + format: Optional[str] = None, +) -> AudioMetaData: + """Get signal information of an audio file. + + Args: + filepath (str): + Source of audio data. + + format (str or None, optional): + Override the format detection with the given format. + Providing the argument might help when libsox can not infer the format + from header or extension. + + Returns: + AudioMetaData: Metadata of the given audio. + """ + if not torch.jit.is_scripting(): + if hasattr(filepath, "read"): + raise RuntimeError("sox_io backend does not support file-like object.") + filepath = os.fspath(filepath) + sinfo = sox_ext.get_info(filepath, format) + return AudioMetaData(*sinfo) + + +def load( + filepath: str, + frame_offset: int = 0, + num_frames: int = -1, + normalize: bool = True, + channels_first: bool = True, + format: Optional[str] = None, +) -> Tuple[torch.Tensor, int]: + """Load audio data from file. + + Note: + This function can handle all the codecs that the underlying libsox can handle; + however, it is tested on the following formats: + + * WAV, AMB + + * 32-bit floating-point + * 32-bit signed integer + * 24-bit signed integer + * 16-bit signed integer + * 8-bit unsigned integer (WAV only) + + * MP3 + * FLAC + * OGG/VORBIS + * OPUS + * SPHERE + * AMR-NB + + To load ``MP3``, ``FLAC``, ``OGG/VORBIS``, ``OPUS`` and other codecs ``libsox`` does not + handle natively, your installation of ``torchaudio`` has to be linked to ``libsox`` + and corresponding codec libraries such as ``libmad`` or ``libmp3lame`` etc. + + By default (``normalize=True``, ``channels_first=True``), this function returns Tensor with + ``float32`` dtype, and the shape of `[channel, time]`. + + .. warning:: + + ``normalize`` argument does not perform volume normalization. + It only converts the sample type to `torch.float32` from the native sample + type. 
+ + When the input format is WAV with integer type, such as 32-bit signed integer, 16-bit + signed integer, 24-bit signed integer, and 8-bit unsigned integer, by providing ``normalize=False``, + this function can return integer Tensor, where the samples are expressed within the whole range + of the corresponding dtype, that is, ``int32`` tensor for 32-bit signed PCM, + ``int16`` for 16-bit signed PCM and ``uint8`` for 8-bit unsigned PCM. Since torch does not + support ``int24`` dtype, 24-bit signed PCM is converted to ``int32`` tensors. + + ``normalize`` argument has no effect on 32-bit floating-point WAV and other formats, such as + ``flac`` and ``mp3``. + + For these formats, this function always returns ``float32`` Tensor with values normalized to ``[-1.0, 1.0]``. + + Args: + filepath (path-like object): Source of audio data. + frame_offset (int): + Number of frames to skip before starting to read data. + num_frames (int, optional): + Maximum number of frames to read. ``-1`` reads all the remaining samples, + starting from ``frame_offset``. + This function may return fewer frames if there are not enough + frames in the given file. + normalize (bool, optional): + When ``True``, this function converts the native sample type to ``float32``. + Default: ``True``. + + If the input file is integer WAV, giving ``False`` will change the resulting Tensor type to + integer type. + This argument has no effect for formats other than integer WAV type. + + channels_first (bool, optional): + When True, the returned Tensor has dimension `[channel, time]`. + Otherwise, the returned Tensor's dimension is `[time, channel]`. + format (str or None, optional): + Override the format detection with the given format. + Providing the argument might help when libsox can not infer the format + from header or extension. + + Returns: + (torch.Tensor, int): Resulting Tensor and sample rate. + If the input file has integer WAV format and ``normalize=False``, then it has + integer type, else ``float32`` type. If ``channels_first=True``, it has + `[channel, time]` else `[time, channel]`. + """ + if not torch.jit.is_scripting(): + if hasattr(filepath, "read"): + raise RuntimeError("sox_io backend does not support file-like object.") + filepath = os.fspath(filepath) + return sox_ext.load_audio_file(filepath, frame_offset, num_frames, normalize, channels_first, format) + + +def save( + filepath: str, + src: torch.Tensor, + sample_rate: int, + channels_first: bool = True, + compression: Optional[float] = None, + format: Optional[str] = None, + encoding: Optional[str] = None, + bits_per_sample: Optional[int] = None, +): + """Save audio data to file. + + Args: + filepath (path-like object): Path to save file. + src (torch.Tensor): Audio data to save. Must be a 2D tensor. + sample_rate (int): Sampling rate. + channels_first (bool, optional): If ``True``, the given tensor is interpreted as `[channel, time]`, + otherwise `[time, channel]`. + compression (float or None, optional): Used for formats other than WAV. + This corresponds to ``-C`` option of ``sox`` command. + + ``"mp3"`` + Either bitrate (in ``kbps``) with quality factor, such as ``128.2``, or + VBR encoding with quality factor such as ``-4.2``. Default: ``-4.5``. + + ``"flac"`` + Whole number from ``0`` to ``8``. ``8`` is default and highest compression. + + ``"ogg"``, ``"vorbis"`` + Number from ``-1`` to ``10``; ``-1`` is the highest compression + and lowest quality. Default: ``3``. + + See the detail at http://sox.sourceforge.net/soxformat.html. 
+ format (str or None, optional): Override the audio format. + When the ``filepath`` argument is a path-like object, audio format is inferred from + file extension. If file extension is missing or different, you can specify the + correct format with this argument. + + When the ``filepath`` argument is a file-like object, this argument is required. + + Valid values are ``"wav"``, ``"mp3"``, ``"ogg"``, ``"vorbis"``, ``"amr-nb"``, + ``"amb"``, ``"flac"``, ``"sph"``, ``"gsm"``, and ``"htk"``. + + encoding (str or None, optional): Changes the encoding for the supported formats. + This argument is effective only for supported formats, such as ``"wav"``, ``"amb"`` + and ``"sph"``. Valid values are; + + - ``"PCM_S"`` (signed integer Linear PCM) + - ``"PCM_U"`` (unsigned integer Linear PCM) + - ``"PCM_F"`` (floating point PCM) + - ``"ULAW"`` (mu-law) + - ``"ALAW"`` (a-law) + + Default values + If not provided, the default value is picked based on ``format`` and ``bits_per_sample``. + + ``"wav"``, ``"amb"`` + - | If both ``encoding`` and ``bits_per_sample`` are not provided, the ``dtype`` of the + | Tensor is used to determine the default value. + + - ``"PCM_U"`` if dtype is ``uint8`` + - ``"PCM_S"`` if dtype is ``int16`` or ``int32`` + - ``"PCM_F"`` if dtype is ``float32`` + + - ``"PCM_U"`` if ``bits_per_sample=8`` + - ``"PCM_S"`` otherwise + + ``"sph"`` format; + - the default value is ``"PCM_S"`` + + bits_per_sample (int or None, optional): Changes the bit depth for the supported formats. + When ``format`` is one of ``"wav"``, ``"flac"``, ``"sph"``, or ``"amb"``, you can change the + bit depth. Valid values are ``8``, ``16``, ``32`` and ``64``. + + Default Value; + If not provided, the default values are picked based on ``format`` and ``encoding``; + + ``"wav"``, ``"amb"``; + - | If both ``encoding`` and ``bits_per_sample`` are not provided, the ``dtype`` of the + | Tensor is used. + + - ``8`` if dtype is ``uint8`` + - ``16`` if dtype is ``int16`` + - ``32`` if dtype is ``int32`` or ``float32`` + + - ``8`` if ``encoding`` is ``"PCM_U"``, ``"ULAW"`` or ``"ALAW"`` + - ``16`` if ``encoding`` is ``"PCM_S"`` + - ``32`` if ``encoding`` is ``"PCM_F"`` + + ``"flac"`` format; + - the default value is ``24`` + + ``"sph"`` format; + - ``16`` if ``encoding`` is ``"PCM_U"``, ``"PCM_S"``, ``"PCM_F"`` or not provided. + - ``8`` if ``encoding`` is ``"ULAW"`` or ``"ALAW"`` + + ``"amb"`` format; + - ``8`` if ``encoding`` is ``"PCM_U"``, ``"ULAW"`` or ``"ALAW"`` + - ``16`` if ``encoding`` is ``"PCM_S"`` or not provided. + - ``32`` if ``encoding`` is ``"PCM_F"`` + + Supported formats/encodings/bit depth/compression are; + + ``"wav"``, ``"amb"`` + - 32-bit floating-point PCM + - 32-bit signed integer PCM + - 24-bit signed integer PCM + - 16-bit signed integer PCM + - 8-bit unsigned integer PCM + - 8-bit mu-law + - 8-bit a-law + + Note: Default encoding/bit depth is determined by the dtype of the input Tensor. + + ``"mp3"`` + Fixed bit rate (such as 128 kbps) and variable bit rate compression. + Default: VBR with high quality. + + ``"flac"`` + - 8-bit + - 16-bit + - 24-bit (default) + + ``"ogg"``, ``"vorbis"`` + - Different quality level. Default: approx. 112kbps + + ``"sph"`` + - 8-bit signed integer PCM + - 16-bit signed integer PCM + - 24-bit signed integer PCM + - 32-bit signed integer PCM (default) + - 8-bit mu-law + - 8-bit a-law + - 16-bit a-law + - 24-bit a-law + - 32-bit a-law + + ``"amr-nb"`` + Bitrate ranging from 4.75 kbit/s to 12.2 kbit/s. Default: 4.75 kbit/s + + ``"gsm"`` + Lossy Speech Compression, CPU intensive. 
+ + ``"htk"`` + Uses a default single-channel 16-bit PCM format. + + Note: + To save into formats that ``libsox`` does not handle natively, (such as ``"mp3"``, + ``"flac"``, ``"ogg"`` and ``"vorbis"``), your installation of ``torchaudio`` has + to be linked to ``libsox`` and corresponding codec libraries such as ``libmad`` + or ``libmp3lame`` etc. + """ + if not torch.jit.is_scripting(): + if hasattr(filepath, "write"): + raise RuntimeError("sox_io backend does not handle file-like object.") + filepath = os.fspath(filepath) + sox_ext.save_audio_file( + filepath, + src, + sample_rate, + channels_first, + compression, + format, + encoding, + bits_per_sample, + ) diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/common.py b/venv/lib/python3.10/site-packages/torchaudio/backend/common.py new file mode 100644 index 0000000000000000000000000000000000000000..3f736bf4017c952c850dcbb3cc5fe1fe14f2715f --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/common.py @@ -0,0 +1,13 @@ +def __getattr__(name: str): + if name == "AudioMetaData": + import warnings + + warnings.warn( + "`torchaudio.backend.common.AudioMetaData` has been moved to " + "`torchaudio.AudioMetaData`. Please update the import path.", + stacklevel=2, + ) + from torchaudio import AudioMetaData + + return AudioMetaData + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/no_backend.py b/venv/lib/python3.10/site-packages/torchaudio/backend/no_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..2645a86bc80538fa0522f5eb80e304881f30acc7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/no_backend.py @@ -0,0 +1,14 @@ +def __getattr__(name: str): + import warnings + + warnings.warn( + "Torchaudio's I/O functions now support par-call bakcend dispatch. " + "Importing backend implementation directly is no longer guaranteed to work. " + "Please use `backend` keyword with load/save/info function, instead of " + "calling the udnerlying implementation directly.", + stacklevel=2, + ) + + from . import _no_backend + + return getattr(_no_backend, name) diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/soundfile_backend.py b/venv/lib/python3.10/site-packages/torchaudio/backend/soundfile_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..5e81db372a12800a869f4a48291a77739c4f07e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/soundfile_backend.py @@ -0,0 +1,14 @@ +def __getattr__(name: str): + import warnings + + warnings.warn( + "Torchaudio's I/O functions now support par-call bakcend dispatch. " + "Importing backend implementation directly is no longer guaranteed to work. " + "Please use `backend` keyword with load/save/info function, instead of " + "calling the udnerlying implementation directly.", + stacklevel=2, + ) + + from torchaudio._backend import soundfile_backend + + return getattr(soundfile_backend, name) diff --git a/venv/lib/python3.10/site-packages/torchaudio/backend/sox_io_backend.py b/venv/lib/python3.10/site-packages/torchaudio/backend/sox_io_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..a361ab87a5dba694e247f7f2205c0f2a25b19686 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/backend/sox_io_backend.py @@ -0,0 +1,14 @@ +def __getattr__(name: str): + import warnings + + warnings.warn( + "Torchaudio's I/O functions now support par-call bakcend dispatch. 
" + "Importing backend implementation directly is no longer guaranteed to work. " + "Please use `backend` keyword with load/save/info function, instead of " + "calling the udnerlying implementation directly.", + stacklevel=2, + ) + + from . import _sox_io_backend + + return getattr(_sox_io_backend, name) diff --git a/venv/lib/python3.10/site-packages/torchaudio/compliance/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/compliance/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..65579b4f01ba09695860717f1e6cd90d6e42b631 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/compliance/__init__.py @@ -0,0 +1,5 @@ +from . import kaldi + +__all__ = [ + "kaldi", +] diff --git a/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13ffca4b60859163914f26b91ea1f0468526a091 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6732a721f54583841eff246fb79bbb7f0843642f Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/compliance/__pycache__/kaldi.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/compliance/kaldi.py b/venv/lib/python3.10/site-packages/torchaudio/compliance/kaldi.py new file mode 100644 index 0000000000000000000000000000000000000000..98358f40b522facc0abdfbaceec45f5887e00e54 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/compliance/kaldi.py @@ -0,0 +1,813 @@ +import math +from typing import Tuple + +import torch +import torchaudio +from torch import Tensor + +__all__ = [ + "get_mel_banks", + "inverse_mel_scale", + "inverse_mel_scale_scalar", + "mel_scale", + "mel_scale_scalar", + "spectrogram", + "fbank", + "mfcc", + "vtln_warp_freq", + "vtln_warp_mel_freq", +] + +# numeric_limits::epsilon() 1.1920928955078125e-07 +EPSILON = torch.tensor(torch.finfo(torch.float).eps) +# 1 milliseconds = 0.001 seconds +MILLISECONDS_TO_SECONDS = 0.001 + +# window types +HAMMING = "hamming" +HANNING = "hanning" +POVEY = "povey" +RECTANGULAR = "rectangular" +BLACKMAN = "blackman" +WINDOWS = [HAMMING, HANNING, POVEY, RECTANGULAR, BLACKMAN] + + +def _get_epsilon(device, dtype): + return EPSILON.to(device=device, dtype=dtype) + + +def _next_power_of_2(x: int) -> int: + r"""Returns the smallest power of 2 that is greater than x""" + return 1 if x == 0 else 2 ** (x - 1).bit_length() + + +def _get_strided(waveform: Tensor, window_size: int, window_shift: int, snip_edges: bool) -> Tensor: + r"""Given a waveform (1D tensor of size ``num_samples``), it returns a 2D tensor (m, ``window_size``) + representing how the window is shifted along the waveform. Each row is a frame. + + Args: + waveform (Tensor): Tensor of size ``num_samples`` + window_size (int): Frame length + window_shift (int): Frame shift + snip_edges (bool): If True, end effects will be handled by outputting only frames that completely fit + in the file, and the number of frames depends on the frame_length. 
If False, the number of frames + depends only on the frame_shift, and we reflect the data at the ends. + + Returns: + Tensor: 2D tensor of size (m, ``window_size``) where each row is a frame + """ + assert waveform.dim() == 1 + num_samples = waveform.size(0) + strides = (window_shift * waveform.stride(0), waveform.stride(0)) + + if snip_edges: + if num_samples < window_size: + return torch.empty((0, 0), dtype=waveform.dtype, device=waveform.device) + else: + m = 1 + (num_samples - window_size) // window_shift + else: + reversed_waveform = torch.flip(waveform, [0]) + m = (num_samples + (window_shift // 2)) // window_shift + pad = window_size // 2 - window_shift // 2 + pad_right = reversed_waveform + if pad > 0: + # torch.nn.functional.pad returns [2,1,0,1,2] for 'reflect' + # but we want [2, 1, 0, 0, 1, 2] + pad_left = reversed_waveform[-pad:] + waveform = torch.cat((pad_left, waveform, pad_right), dim=0) + else: + # pad is negative so we want to trim the waveform at the front + waveform = torch.cat((waveform[-pad:], pad_right), dim=0) + + sizes = (m, window_size) + return waveform.as_strided(sizes, strides) + + +def _feature_window_function( + window_type: str, + window_size: int, + blackman_coeff: float, + device: torch.device, + dtype: int, +) -> Tensor: + r"""Returns a window function with the given type and size""" + if window_type == HANNING: + return torch.hann_window(window_size, periodic=False, device=device, dtype=dtype) + elif window_type == HAMMING: + return torch.hamming_window(window_size, periodic=False, alpha=0.54, beta=0.46, device=device, dtype=dtype) + elif window_type == POVEY: + # like hanning but goes to zero at edges + return torch.hann_window(window_size, periodic=False, device=device, dtype=dtype).pow(0.85) + elif window_type == RECTANGULAR: + return torch.ones(window_size, device=device, dtype=dtype) + elif window_type == BLACKMAN: + a = 2 * math.pi / (window_size - 1) + window_function = torch.arange(window_size, device=device, dtype=dtype) + # can't use torch.blackman_window as they use different coefficients + return ( + blackman_coeff + - 0.5 * torch.cos(a * window_function) + + (0.5 - blackman_coeff) * torch.cos(2 * a * window_function) + ).to(device=device, dtype=dtype) + else: + raise Exception("Invalid window type " + window_type) + + +def _get_log_energy(strided_input: Tensor, epsilon: Tensor, energy_floor: float) -> Tensor: + r"""Returns the log energy of size (m) for a strided_input (m,*)""" + device, dtype = strided_input.device, strided_input.dtype + log_energy = torch.max(strided_input.pow(2).sum(1), epsilon).log() # size (m) + if energy_floor == 0.0: + return log_energy + return torch.max(log_energy, torch.tensor(math.log(energy_floor), device=device, dtype=dtype)) + + +def _get_waveform_and_window_properties( + waveform: Tensor, + channel: int, + sample_frequency: float, + frame_shift: float, + frame_length: float, + round_to_power_of_two: bool, + preemphasis_coefficient: float, +) -> Tuple[Tensor, int, int, int]: + r"""Gets the waveform and window properties""" + channel = max(channel, 0) + assert channel < waveform.size(0), "Invalid channel {} for size {}".format(channel, waveform.size(0)) + waveform = waveform[channel, :] # size (n) + window_shift = int(sample_frequency * frame_shift * MILLISECONDS_TO_SECONDS) + window_size = int(sample_frequency * frame_length * MILLISECONDS_TO_SECONDS) + padded_window_size = _next_power_of_2(window_size) if round_to_power_of_two else window_size + + assert 2 <= window_size <= len(waveform), "choose a window 
size {} that is [2, {}]".format( + window_size, len(waveform) + ) + assert 0 < window_shift, "`window_shift` must be greater than 0" + assert padded_window_size % 2 == 0, ( + "the padded `window_size` must be divisible by two." " use `round_to_power_of_two` or change `frame_length`" + ) + assert 0.0 <= preemphasis_coefficient <= 1.0, "`preemphasis_coefficient` must be between [0,1]" + assert sample_frequency > 0, "`sample_frequency` must be greater than zero" + return waveform, window_shift, window_size, padded_window_size + + +def _get_window( + waveform: Tensor, + padded_window_size: int, + window_size: int, + window_shift: int, + window_type: str, + blackman_coeff: float, + snip_edges: bool, + raw_energy: bool, + energy_floor: float, + dither: float, + remove_dc_offset: bool, + preemphasis_coefficient: float, +) -> Tuple[Tensor, Tensor]: + r"""Gets a window and its log energy + + Returns: + (Tensor, Tensor): strided_input of size (m, ``padded_window_size``) and signal_log_energy of size (m) + """ + device, dtype = waveform.device, waveform.dtype + epsilon = _get_epsilon(device, dtype) + + # size (m, window_size) + strided_input = _get_strided(waveform, window_size, window_shift, snip_edges) + + if dither != 0.0: + rand_gauss = torch.randn(strided_input.shape, device=device, dtype=dtype) + strided_input = strided_input + rand_gauss * dither + + if remove_dc_offset: + # Subtract each row/frame by its mean + row_means = torch.mean(strided_input, dim=1).unsqueeze(1) # size (m, 1) + strided_input = strided_input - row_means + + if raw_energy: + # Compute the log energy of each row/frame before applying preemphasis and + # window function + signal_log_energy = _get_log_energy(strided_input, epsilon, energy_floor) # size (m) + + if preemphasis_coefficient != 0.0: + # strided_input[i,j] -= preemphasis_coefficient * strided_input[i, max(0, j-1)] for all i,j + offset_strided_input = torch.nn.functional.pad(strided_input.unsqueeze(0), (1, 0), mode="replicate").squeeze( + 0 + ) # size (m, window_size + 1) + strided_input = strided_input - preemphasis_coefficient * offset_strided_input[:, :-1] + + # Apply window_function to each row/frame + window_function = _feature_window_function(window_type, window_size, blackman_coeff, device, dtype).unsqueeze( + 0 + ) # size (1, window_size) + strided_input = strided_input * window_function # size (m, window_size) + + # Pad columns with zero until we reach size (m, padded_window_size) + if padded_window_size != window_size: + padding_right = padded_window_size - window_size + strided_input = torch.nn.functional.pad( + strided_input.unsqueeze(0), (0, padding_right), mode="constant", value=0 + ).squeeze(0) + + # Compute energy after window function (not the raw one) + if not raw_energy: + signal_log_energy = _get_log_energy(strided_input, epsilon, energy_floor) # size (m) + + return strided_input, signal_log_energy + + +def _subtract_column_mean(tensor: Tensor, subtract_mean: bool) -> Tensor: + # subtracts the column mean of the tensor size (m, n) if subtract_mean=True + # it returns size (m, n) + if subtract_mean: + col_means = torch.mean(tensor, dim=0).unsqueeze(0) + tensor = tensor - col_means + return tensor + + +def spectrogram( + waveform: Tensor, + blackman_coeff: float = 0.42, + channel: int = -1, + dither: float = 0.0, + energy_floor: float = 1.0, + frame_length: float = 25.0, + frame_shift: float = 10.0, + min_duration: float = 0.0, + preemphasis_coefficient: float = 0.97, + raw_energy: bool = True, + remove_dc_offset: bool = True, + 
round_to_power_of_two: bool = True, + sample_frequency: float = 16000.0, + snip_edges: bool = True, + subtract_mean: bool = False, + window_type: str = POVEY, +) -> Tensor: + r"""Create a spectrogram from a raw audio signal. This matches the input/output of Kaldi's + compute-spectrogram-feats. + + Args: + waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2) + blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``) + channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``) + dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set + the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``) + energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution: + this floor is applied to the zeroth component, representing the total signal energy. The floor on the + individual spectrogram elements is fixed at std::numeric_limits::epsilon(). (Default: ``1.0``) + frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``) + frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``) + min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``) + preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``) + raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``) + remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``) + round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input + to FFT. (Default: ``True``) + sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if + specified there) (Default: ``16000.0``) + snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit + in the file, and the number of frames depends on the frame_length. If False, the number of frames + depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``) + subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do + it this way. (Default: ``False``) + window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman') + (Default: ``'povey'``) + + Returns: + Tensor: A spectrogram identical to what Kaldi would output. 
The shape is + (m, ``padded_window_size // 2 + 1``) where m is calculated in _get_strided + """ + device, dtype = waveform.device, waveform.dtype + epsilon = _get_epsilon(device, dtype) + + waveform, window_shift, window_size, padded_window_size = _get_waveform_and_window_properties( + waveform, channel, sample_frequency, frame_shift, frame_length, round_to_power_of_two, preemphasis_coefficient + ) + + if len(waveform) < min_duration * sample_frequency: + # signal is too short + return torch.empty(0) + + strided_input, signal_log_energy = _get_window( + waveform, + padded_window_size, + window_size, + window_shift, + window_type, + blackman_coeff, + snip_edges, + raw_energy, + energy_floor, + dither, + remove_dc_offset, + preemphasis_coefficient, + ) + + # size (m, padded_window_size // 2 + 1) + fft = torch.fft.rfft(strided_input) + + # Convert the FFT into a log power spectrum + power_spectrum = torch.max(fft.abs().pow(2.0), epsilon).log() # size (m, padded_window_size // 2 + 1) + power_spectrum[:, 0] = signal_log_energy + + power_spectrum = _subtract_column_mean(power_spectrum, subtract_mean) + return power_spectrum + + +def inverse_mel_scale_scalar(mel_freq: float) -> float: + return 700.0 * (math.exp(mel_freq / 1127.0) - 1.0) + + +def inverse_mel_scale(mel_freq: Tensor) -> Tensor: + return 700.0 * ((mel_freq / 1127.0).exp() - 1.0) + + +def mel_scale_scalar(freq: float) -> float: + return 1127.0 * math.log(1.0 + freq / 700.0) + + +def mel_scale(freq: Tensor) -> Tensor: + return 1127.0 * (1.0 + freq / 700.0).log() + + +def vtln_warp_freq( + vtln_low_cutoff: float, + vtln_high_cutoff: float, + low_freq: float, + high_freq: float, + vtln_warp_factor: float, + freq: Tensor, +) -> Tensor: + r"""This computes a VTLN warping function that is not the same as HTK's, + but has similar inputs (this function has the advantage of never producing + empty bins). + + This function computes a warp function F(freq), defined between low_freq + and high_freq inclusive, with the following properties: + F(low_freq) == low_freq + F(high_freq) == high_freq + The function is continuous and piecewise linear with two inflection + points. + The lower inflection point (measured in terms of the unwarped + frequency) is at frequency l, determined as described below. + The higher inflection point is at a frequency h, determined as + described below. + If l <= f <= h, then F(f) = f/vtln_warp_factor. + If the higher inflection point (measured in terms of the unwarped + frequency) is at h, then max(h, F(h)) == vtln_high_cutoff. + Since (by the last point) F(h) == h/vtln_warp_factor, then + max(h, h/vtln_warp_factor) == vtln_high_cutoff, so + h = vtln_high_cutoff / max(1, 1/vtln_warp_factor). + = vtln_high_cutoff * min(1, vtln_warp_factor). 
+ If the lower inflection point (measured in terms of the unwarped + frequency) is at l, then min(l, F(l)) == vtln_low_cutoff + This implies that l = vtln_low_cutoff / min(1, 1/vtln_warp_factor) + = vtln_low_cutoff * max(1, vtln_warp_factor) + Args: + vtln_low_cutoff (float): Lower frequency cutoff for VTLN + vtln_high_cutoff (float): Upper frequency cutoff for VTLN + low_freq (float): Lower frequency cutoff in mel computation + high_freq (float): Upper frequency cutoff in mel computation + vtln_warp_factor (float): Vtln warp factor + freq (Tensor): Given frequency in Hz + + Returns: + Tensor: Freq after vtln warp + """ + assert vtln_low_cutoff > low_freq, "be sure to set the vtln_low option higher than low_freq" + assert vtln_high_cutoff < high_freq, "be sure to set the vtln_high option lower than high_freq [or negative]" + l = vtln_low_cutoff * max(1.0, vtln_warp_factor) + h = vtln_high_cutoff * min(1.0, vtln_warp_factor) + scale = 1.0 / vtln_warp_factor + Fl = scale * l # F(l) + Fh = scale * h # F(h) + assert l > low_freq and h < high_freq + # slope of left part of the 3-piece linear function + scale_left = (Fl - low_freq) / (l - low_freq) + # [slope of center part is just "scale"] + + # slope of right part of the 3-piece linear function + scale_right = (high_freq - Fh) / (high_freq - h) + + res = torch.empty_like(freq) + + outside_low_high_freq = torch.lt(freq, low_freq) | torch.gt(freq, high_freq) # freq < low_freq || freq > high_freq + before_l = torch.lt(freq, l) # freq < l + before_h = torch.lt(freq, h) # freq < h + after_h = torch.ge(freq, h) # freq >= h + + # order of operations matters here (since there are overlapping frequency regions) + res[after_h] = high_freq + scale_right * (freq[after_h] - high_freq) + res[before_h] = scale * freq[before_h] + res[before_l] = low_freq + scale_left * (freq[before_l] - low_freq) + res[outside_low_high_freq] = freq[outside_low_high_freq] + + return res + + +def vtln_warp_mel_freq( + vtln_low_cutoff: float, + vtln_high_cutoff: float, + low_freq: float, + high_freq: float, + vtln_warp_factor: float, + mel_freq: Tensor, +) -> Tensor: + r""" + Args: + vtln_low_cutoff (float): Lower frequency cutoff for VTLN + vtln_high_cutoff (float): Upper frequency cutoff for VTLN + low_freq (float): Lower frequency cutoff in mel computation + high_freq (float): Upper frequency cutoff in mel computation + vtln_warp_factor (float): Vtln warp factor + mel_freq (Tensor): Given frequency in Mel + + Returns: + Tensor: ``mel_freq`` after vtln warp + """ + return mel_scale( + vtln_warp_freq( + vtln_low_cutoff, vtln_high_cutoff, low_freq, high_freq, vtln_warp_factor, inverse_mel_scale(mel_freq) + ) + ) + + +def get_mel_banks( + num_bins: int, + window_length_padded: int, + sample_freq: float, + low_freq: float, + high_freq: float, + vtln_low: float, + vtln_high: float, + vtln_warp_factor: float, +) -> Tuple[Tensor, Tensor]: + """ + Returns: + (Tensor, Tensor): The tuple consists of ``bins`` (which is + melbank of size (``num_bins``, ``num_fft_bins``)) and ``center_freqs`` (which is + center frequencies of bins of size (``num_bins``)). + """ + assert num_bins > 3, "Must have more than 3 mel bins" + assert window_length_padded % 2 == 0 + num_fft_bins = window_length_padded / 2 + nyquist = 0.5 * sample_freq + + if high_freq <= 0.0: + high_freq += nyquist + + assert ( + (0.0 <= low_freq < nyquist) and (0.0 < high_freq <= nyquist) and (low_freq < high_freq) + ), "Bad values in options: low-freq {} and high-freq {} vs. 
nyquist {}".format(low_freq, high_freq, nyquist) + + # fft-bin width [think of it as Nyquist-freq / half-window-length] + fft_bin_width = sample_freq / window_length_padded + mel_low_freq = mel_scale_scalar(low_freq) + mel_high_freq = mel_scale_scalar(high_freq) + + # divide by num_bins+1 in next line because of end-effects where the bins + # spread out to the sides. + mel_freq_delta = (mel_high_freq - mel_low_freq) / (num_bins + 1) + + if vtln_high < 0.0: + vtln_high += nyquist + + assert vtln_warp_factor == 1.0 or ( + (low_freq < vtln_low < high_freq) and (0.0 < vtln_high < high_freq) and (vtln_low < vtln_high) + ), "Bad values in options: vtln-low {} and vtln-high {}, versus " "low-freq {} and high-freq {}".format( + vtln_low, vtln_high, low_freq, high_freq + ) + + bin = torch.arange(num_bins).unsqueeze(1) + left_mel = mel_low_freq + bin * mel_freq_delta # size(num_bins, 1) + center_mel = mel_low_freq + (bin + 1.0) * mel_freq_delta # size(num_bins, 1) + right_mel = mel_low_freq + (bin + 2.0) * mel_freq_delta # size(num_bins, 1) + + if vtln_warp_factor != 1.0: + left_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, left_mel) + center_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, center_mel) + right_mel = vtln_warp_mel_freq(vtln_low, vtln_high, low_freq, high_freq, vtln_warp_factor, right_mel) + + center_freqs = inverse_mel_scale(center_mel) # size (num_bins) + # size(1, num_fft_bins) + mel = mel_scale(fft_bin_width * torch.arange(num_fft_bins)).unsqueeze(0) + + # size (num_bins, num_fft_bins) + up_slope = (mel - left_mel) / (center_mel - left_mel) + down_slope = (right_mel - mel) / (right_mel - center_mel) + + if vtln_warp_factor == 1.0: + # left_mel < center_mel < right_mel so we can min the two slopes and clamp negative values + bins = torch.max(torch.zeros(1), torch.min(up_slope, down_slope)) + else: + # warping can move the order of left_mel, center_mel, right_mel anywhere + bins = torch.zeros_like(up_slope) + up_idx = torch.gt(mel, left_mel) & torch.le(mel, center_mel) # left_mel < mel <= center_mel + down_idx = torch.gt(mel, center_mel) & torch.lt(mel, right_mel) # center_mel < mel < right_mel + bins[up_idx] = up_slope[up_idx] + bins[down_idx] = down_slope[down_idx] + + return bins, center_freqs + + +def fbank( + waveform: Tensor, + blackman_coeff: float = 0.42, + channel: int = -1, + dither: float = 0.0, + energy_floor: float = 1.0, + frame_length: float = 25.0, + frame_shift: float = 10.0, + high_freq: float = 0.0, + htk_compat: bool = False, + low_freq: float = 20.0, + min_duration: float = 0.0, + num_mel_bins: int = 23, + preemphasis_coefficient: float = 0.97, + raw_energy: bool = True, + remove_dc_offset: bool = True, + round_to_power_of_two: bool = True, + sample_frequency: float = 16000.0, + snip_edges: bool = True, + subtract_mean: bool = False, + use_energy: bool = False, + use_log_fbank: bool = True, + use_power: bool = True, + vtln_high: float = -500.0, + vtln_low: float = 100.0, + vtln_warp: float = 1.0, + window_type: str = POVEY, +) -> Tensor: + r"""Create a fbank from a raw audio signal. This matches the input/output of Kaldi's + compute-fbank-feats. + + Args: + waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2) + blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. 
(Default: ``0.42``) + channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``) + dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set + the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``) + energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution: + this floor is applied to the zeroth component, representing the total signal energy. The floor on the + individual spectrogram elements is fixed at std::numeric_limits::epsilon(). (Default: ``1.0``) + frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``) + frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``) + high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist) + (Default: ``0.0``) + htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible features + (need to change other parameters). (Default: ``False``) + low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``) + min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``) + num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``) + preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``) + raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``) + remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``) + round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input + to FFT. (Default: ``True``) + sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if + specified there) (Default: ``16000.0``) + snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit + in the file, and the number of frames depends on the frame_length. If False, the number of frames + depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``) + subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do + it this way. (Default: ``False``) + use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``) + use_log_fbank (bool, optional): If true, produce log-filterbank, else produce linear. (Default: ``True``) + use_power (bool, optional): If true, use power, else use magnitude. (Default: ``True``) + vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if + negative, offset from high-mel-freq) (Default: ``-500.0``) + vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``) + vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``) + window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman') + (Default: ``'povey'``) + + Returns: + Tensor: A fbank identical to what Kaldi would output. 
The shape is (m, ``num_mel_bins + use_energy``) + where m is calculated in _get_strided + """ + device, dtype = waveform.device, waveform.dtype + + waveform, window_shift, window_size, padded_window_size = _get_waveform_and_window_properties( + waveform, channel, sample_frequency, frame_shift, frame_length, round_to_power_of_two, preemphasis_coefficient + ) + + if len(waveform) < min_duration * sample_frequency: + # signal is too short + return torch.empty(0, device=device, dtype=dtype) + + # strided_input, size (m, padded_window_size) and signal_log_energy, size (m) + strided_input, signal_log_energy = _get_window( + waveform, + padded_window_size, + window_size, + window_shift, + window_type, + blackman_coeff, + snip_edges, + raw_energy, + energy_floor, + dither, + remove_dc_offset, + preemphasis_coefficient, + ) + + # size (m, padded_window_size // 2 + 1) + spectrum = torch.fft.rfft(strided_input).abs() + if use_power: + spectrum = spectrum.pow(2.0) + + # size (num_mel_bins, padded_window_size // 2) + mel_energies, _ = get_mel_banks( + num_mel_bins, padded_window_size, sample_frequency, low_freq, high_freq, vtln_low, vtln_high, vtln_warp + ) + mel_energies = mel_energies.to(device=device, dtype=dtype) + + # pad right column with zeros, size (num_mel_bins, padded_window_size // 2 + 1) + mel_energies = torch.nn.functional.pad(mel_energies, (0, 1), mode="constant", value=0) + + # sum with mel filterbanks over the power spectrum, size (m, num_mel_bins) + mel_energies = torch.mm(spectrum, mel_energies.T) + if use_log_fbank: + # avoid log of zero (which should be prevented anyway by dithering) + mel_energies = torch.max(mel_energies, _get_epsilon(device, dtype)).log() + + # if use_energy then add it as the last column for htk_compat == true else first column + if use_energy: + signal_log_energy = signal_log_energy.unsqueeze(1) # size (m, 1) + # returns size (m, num_mel_bins + 1) + if htk_compat: + mel_energies = torch.cat((mel_energies, signal_log_energy), dim=1) + else: + mel_energies = torch.cat((signal_log_energy, mel_energies), dim=1) + + mel_energies = _subtract_column_mean(mel_energies, subtract_mean) + return mel_energies + + +def _get_dct_matrix(num_ceps: int, num_mel_bins: int) -> Tensor: + # returns a dct matrix of size (num_mel_bins, num_ceps) + # size (num_mel_bins, num_mel_bins) + dct_matrix = torchaudio.functional.create_dct(num_mel_bins, num_mel_bins, "ortho") + # kaldi expects the first cepstral to be weighted sum of factor sqrt(1/num_mel_bins) + # this would be the first column in the dct_matrix for torchaudio as it expects a + # right multiply (which would be the first column of Kaldi's dct_matrix as kaldi + # expects a left multiply e.g. dct_matrix * vector). + dct_matrix[:, 0] = math.sqrt(1 / float(num_mel_bins)) + dct_matrix = dct_matrix[:, :num_ceps] + return dct_matrix + + +def _get_lifter_coeffs(num_ceps: int, cepstral_lifter: float) -> Tensor: + # returns size (num_ceps) + # Compute liftering coefficients (scaling on cepstral coeffs) + # coeffs are numbered slightly differently from HTK: the zeroth index is C0, which is not affected. 
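+    # The curve implemented below is lifter(i) = 1 + (Q / 2) * sin(pi * i / Q), with Q = cepstral_lifter + # (22.0 by default); at i = 0 the sine term vanishes, so C0 passes through with scale 1.0.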
+ i = torch.arange(num_ceps) + return 1.0 + 0.5 * cepstral_lifter * torch.sin(math.pi * i / cepstral_lifter) + + +def mfcc( + waveform: Tensor, + blackman_coeff: float = 0.42, + cepstral_lifter: float = 22.0, + channel: int = -1, + dither: float = 0.0, + energy_floor: float = 1.0, + frame_length: float = 25.0, + frame_shift: float = 10.0, + high_freq: float = 0.0, + htk_compat: bool = False, + low_freq: float = 20.0, + num_ceps: int = 13, + min_duration: float = 0.0, + num_mel_bins: int = 23, + preemphasis_coefficient: float = 0.97, + raw_energy: bool = True, + remove_dc_offset: bool = True, + round_to_power_of_two: bool = True, + sample_frequency: float = 16000.0, + snip_edges: bool = True, + subtract_mean: bool = False, + use_energy: bool = False, + vtln_high: float = -500.0, + vtln_low: float = 100.0, + vtln_warp: float = 1.0, + window_type: str = POVEY, +) -> Tensor: + r"""Create a mfcc from a raw audio signal. This matches the input/output of Kaldi's + compute-mfcc-feats. + + Args: + waveform (Tensor): Tensor of audio of size (c, n) where c is in the range [0,2) + blackman_coeff (float, optional): Constant coefficient for generalized Blackman window. (Default: ``0.42``) + cepstral_lifter (float, optional): Constant that controls scaling of MFCCs (Default: ``22.0``) + channel (int, optional): Channel to extract (-1 -> expect mono, 0 -> left, 1 -> right) (Default: ``-1``) + dither (float, optional): Dithering constant (0.0 means no dither). If you turn this off, you should set + the energy_floor option, e.g. to 1.0 or 0.1 (Default: ``0.0``) + energy_floor (float, optional): Floor on energy (absolute, not relative) in Spectrogram computation. Caution: + this floor is applied to the zeroth component, representing the total signal energy. The floor on the + individual spectrogram elements is fixed at std::numeric_limits::epsilon(). (Default: ``1.0``) + frame_length (float, optional): Frame length in milliseconds (Default: ``25.0``) + frame_shift (float, optional): Frame shift in milliseconds (Default: ``10.0``) + high_freq (float, optional): High cutoff frequency for mel bins (if <= 0, offset from Nyquist) + (Default: ``0.0``) + htk_compat (bool, optional): If true, put energy last. Warning: not sufficient to get HTK compatible + features (need to change other parameters). (Default: ``False``) + low_freq (float, optional): Low cutoff frequency for mel bins (Default: ``20.0``) + num_ceps (int, optional): Number of cepstra in MFCC computation (including C0) (Default: ``13``) + min_duration (float, optional): Minimum duration of segments to process (in seconds). (Default: ``0.0``) + num_mel_bins (int, optional): Number of triangular mel-frequency bins (Default: ``23``) + preemphasis_coefficient (float, optional): Coefficient for use in signal preemphasis (Default: ``0.97``) + raw_energy (bool, optional): If True, compute energy before preemphasis and windowing (Default: ``True``) + remove_dc_offset (bool, optional): Subtract mean from waveform on each frame (Default: ``True``) + round_to_power_of_two (bool, optional): If True, round window size to power of two by zero-padding input + to FFT. (Default: ``True``) + sample_frequency (float, optional): Waveform data sample frequency (must match the waveform file, if + specified there) (Default: ``16000.0``) + snip_edges (bool, optional): If True, end effects will be handled by outputting only frames that completely fit + in the file, and the number of frames depends on the frame_length. 
If False, the number of frames + depends only on the frame_shift, and we reflect the data at the ends. (Default: ``True``) + subtract_mean (bool, optional): Subtract mean of each feature file [CMS]; not recommended to do + it this way. (Default: ``False``) + use_energy (bool, optional): Add an extra dimension with energy to the FBANK output. (Default: ``False``) + vtln_high (float, optional): High inflection point in piecewise linear VTLN warping function (if + negative, offset from high-mel-freq) (Default: ``-500.0``) + vtln_low (float, optional): Low inflection point in piecewise linear VTLN warping function (Default: ``100.0``) + vtln_warp (float, optional): Vtln warp factor (only applicable if vtln_map not specified) (Default: ``1.0``) + window_type (str, optional): Type of window ('hamming'|'hanning'|'povey'|'rectangular'|'blackman') + (Default: ``"povey"``) + + Returns: + Tensor: A mfcc identical to what Kaldi would output. The shape is (m, ``num_ceps``) + where m is calculated in _get_strided + """ + assert num_ceps <= num_mel_bins, "num_ceps cannot be larger than num_mel_bins: %d vs %d" % (num_ceps, num_mel_bins) + + device, dtype = waveform.device, waveform.dtype + + # The mel_energies should not be squared (use_power=True), not have mean subtracted + # (subtract_mean=False), and use log (use_log_fbank=True). + # size (m, num_mel_bins + use_energy) + feature = fbank( + waveform=waveform, + blackman_coeff=blackman_coeff, + channel=channel, + dither=dither, + energy_floor=energy_floor, + frame_length=frame_length, + frame_shift=frame_shift, + high_freq=high_freq, + htk_compat=htk_compat, + low_freq=low_freq, + min_duration=min_duration, + num_mel_bins=num_mel_bins, + preemphasis_coefficient=preemphasis_coefficient, + raw_energy=raw_energy, + remove_dc_offset=remove_dc_offset, + round_to_power_of_two=round_to_power_of_two, + sample_frequency=sample_frequency, + snip_edges=snip_edges, + subtract_mean=False, + use_energy=use_energy, + use_log_fbank=True, + use_power=True, + vtln_high=vtln_high, + vtln_low=vtln_low, + vtln_warp=vtln_warp, + window_type=window_type, + ) + + if use_energy: + # size (m) + signal_log_energy = feature[:, num_mel_bins if htk_compat else 0] + # offset is 0 if htk_compat==True else 1 + mel_offset = int(not htk_compat) + feature = feature[:, mel_offset : (num_mel_bins + mel_offset)] + + # size (num_mel_bins, num_ceps) + dct_matrix = _get_dct_matrix(num_ceps, num_mel_bins).to(dtype=dtype, device=device) + + # size (m, num_ceps) + feature = feature.matmul(dct_matrix) + + if cepstral_lifter != 0.0: + # size (1, num_ceps) + lifter_coeffs = _get_lifter_coeffs(num_ceps, cepstral_lifter).unsqueeze(0) + feature *= lifter_coeffs.to(device=device, dtype=dtype) + + # if use_energy then replace the last column for htk_compat == true else first column + if use_energy: + feature[:, 0] = signal_log_energy + + if htk_compat: + energy = feature[:, 0].unsqueeze(1) # size (m, 1) + feature = feature[:, 1:] # size (m, num_ceps - 1) + if not use_energy: + # scale on C0 (actually removing a scale we previously added that's + # part of one common definition of the cosine transform.) 
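+            # _get_dct_matrix set the C0 column to sqrt(1/num_mel_bins); multiplying by sqrt(2) undoes + # the extra 1/sqrt(2) that the orthonormal DCT puts on its zeroth basis, leaving an + # unnormalized C0 as the HTK-style energy term.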
+ energy *= math.sqrt(2) + + feature = torch.cat((feature, energy), dim=1) + + feature = _subtract_column_mean(feature, subtract_mean) + return feature diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..609cb14fdcc38c48270acd5803f4bfe603c39e71 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/__init__.py @@ -0,0 +1,47 @@ +from .cmuarctic import CMUARCTIC +from .cmudict import CMUDict +from .commonvoice import COMMONVOICE +from .dr_vctk import DR_VCTK +from .fluentcommands import FluentSpeechCommands +from .gtzan import GTZAN +from .iemocap import IEMOCAP +from .librilight_limited import LibriLightLimited +from .librimix import LibriMix +from .librispeech import LIBRISPEECH +from .librispeech_biasing import LibriSpeechBiasing +from .libritts import LIBRITTS +from .ljspeech import LJSPEECH +from .musdb_hq import MUSDB_HQ +from .quesst14 import QUESST14 +from .snips import Snips +from .speechcommands import SPEECHCOMMANDS +from .tedlium import TEDLIUM +from .vctk import VCTK_092 +from .voxceleb1 import VoxCeleb1Identification, VoxCeleb1Verification +from .yesno import YESNO + + +__all__ = [ + "COMMONVOICE", + "LIBRISPEECH", + "LibriSpeechBiasing", + "LibriLightLimited", + "SPEECHCOMMANDS", + "VCTK_092", + "DR_VCTK", + "YESNO", + "LJSPEECH", + "GTZAN", + "CMUARCTIC", + "CMUDict", + "LibriMix", + "LIBRITTS", + "TEDLIUM", + "QUESST14", + "MUSDB_HQ", + "FluentSpeechCommands", + "VoxCeleb1Identification", + "VoxCeleb1Verification", + "IEMOCAP", + "Snips", +] diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..21202156b306ca7c573e8d9b70e20e7eeaad8dd5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2dfff6264b301615f1142d2b9d8b4d803e07af3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmuarctic.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a177e37a6622d0cc3761c5d4f53b6e34e22695d Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/cmudict.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a53a572a1cbd961f85627f7a112a4fe59b8ed6b Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/commonvoice.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3327a10d01fba25b77be6587de4b270dd884560 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/dr_vctk.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5d89cf9c1011f548dd7fd91f91489c445d2d588 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/fluentcommands.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1acbd280f8b4398ef4a056890fa9486891740a36 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/gtzan.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..49f132e3af05b3fec84922e18c5c88f20aedf5c9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/iemocap.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f640aa7017d42706ab1ecca5119642392fdebb6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librilight_limited.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..092dff03894348126e638110a0d47325fa6d096d Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librimix.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d373dfc18139be221d8fc2f23d809f84e84a92dc Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7cbabf65c1f05286250ace1b9c1a6cd2925037f Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/librispeech_biasing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aacea62feae7d88141888b00ea743a87b615c152 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/libritts.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e6ad2369e8a79c0d4168dd3cec049f04a836476 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/ljspeech.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78028fd376d7dce1eeb1dd6f7c31999b19d97c7e Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/musdb_hq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..75953dca1b0a86e598ba460f9548aaf87252bddc Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/quesst14.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/snips.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/snips.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a9b74ebbe127d459632a4ab5ae8528e1cc0e8f8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/snips.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de512d3d149619a6432368596bb10acbcc252fe8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/speechcommands.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e296ff7731241a298b5101fda37c40e69b9a1e7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/tedlium.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a8a8652659e9cd332f80961447b1f547d26410a Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-310.pyc 
new file mode 100644 index 0000000000000000000000000000000000000000..7929d68b82db7ba2bd66cff4dcaabe7d68464ee1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/vctk.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..20d6a1fa15b72fb12cb58af0901b91330d4b6a9f Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/voxceleb1.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50c5563c72f3ab9daa7b888b99ea34afb1bf7a44 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/datasets/__pycache__/yesno.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/cmuarctic.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/cmuarctic.py new file mode 100644 index 0000000000000000000000000000000000000000..96f498f00f04a2f1b6d7d3e33510eafcb9ffe6bc --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/cmuarctic.py @@ -0,0 +1,157 @@ +import csv +import os +from pathlib import Path +from typing import Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar + +URL = "aew" +FOLDER_IN_ARCHIVE = "ARCTIC" +_CHECKSUMS = { + "http://festvox.org/cmu_arctic/packed/cmu_us_aew_arctic.tar.bz2": "645cb33c0f0b2ce41384fdd8d3db2c3f5fc15c1e688baeb74d2e08cab18ab406", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_ahw_arctic.tar.bz2": "024664adeb892809d646a3efd043625b46b5bfa3e6189b3500b2d0d59dfab06c", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_aup_arctic.tar.bz2": "2c55bc3050caa996758869126ad10cf42e1441212111db034b3a45189c18b6fc", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_awb_arctic.tar.bz2": "d74a950c9739a65f7bfc4dfa6187f2730fa03de5b8eb3f2da97a51b74df64d3c", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_axb_arctic.tar.bz2": "dd65c3d2907d1ee52f86e44f578319159e60f4bf722a9142be01161d84e330ff", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_bdl_arctic.tar.bz2": "26b91aaf48b2799b2956792b4632c2f926cd0542f402b5452d5adecb60942904", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_clb_arctic.tar.bz2": "3f16dc3f3b97955ea22623efb33b444341013fc660677b2e170efdcc959fa7c6", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_eey_arctic.tar.bz2": "8a0ee4e5acbd4b2f61a4fb947c1730ab3adcc9dc50b195981d99391d29928e8a", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_fem_arctic.tar.bz2": "3fcff629412b57233589cdb058f730594a62c4f3a75c20de14afe06621ef45e2", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_gka_arctic.tar.bz2": "dc82e7967cbd5eddbed33074b0699128dbd4482b41711916d58103707e38c67f", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_jmk_arctic.tar.bz2": "3a37c0e1dfc91e734fdbc88b562d9e2ebca621772402cdc693bbc9b09b211d73", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_ksp_arctic.tar.bz2": 
"8029cafce8296f9bed3022c44ef1e7953332b6bf6943c14b929f468122532717", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_ljm_arctic.tar.bz2": "b23993765cbf2b9e7bbc3c85b6c56eaf292ac81ee4bb887b638a24d104f921a0", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_lnh_arctic.tar.bz2": "4faf34d71aa7112813252fb20c5433e2fdd9a9de55a00701ffcbf05f24a5991a", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_rms_arctic.tar.bz2": "c6dc11235629c58441c071a7ba8a2d067903dfefbaabc4056d87da35b72ecda4", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_rxr_arctic.tar.bz2": "1fa4271c393e5998d200e56c102ff46fcfea169aaa2148ad9e9469616fbfdd9b", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_slp_arctic.tar.bz2": "54345ed55e45c23d419e9a823eef427f1cc93c83a710735ec667d068c916abf1", # noqa: E501 + "http://festvox.org/cmu_arctic/packed/cmu_us_slt_arctic.tar.bz2": "7c173297916acf3cc7fcab2713be4c60b27312316765a90934651d367226b4ea", # noqa: E501 +} + + +def load_cmuarctic_item(line: str, path: str, folder_audio: str, ext_audio: str) -> Tuple[Tensor, int, str, str]: + + utterance_id, transcript = line[0].strip().split(" ", 2)[1:] + + # Remove space, double quote, and single parenthesis from transcript + transcript = transcript[1:-3] + + file_audio = os.path.join(path, folder_audio, utterance_id + ext_audio) + + # Load audio + waveform, sample_rate = torchaudio.load(file_audio) + + return (waveform, sample_rate, transcript, utterance_id.split("_")[1]) + + +class CMUARCTIC(Dataset): + """*CMU ARCTIC* :cite:`Kominek03cmuarctic` dataset. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + url (str, optional): + The URL to download the dataset from or the type of the dataset to download. + (default: ``"aew"``) + Allowed type values are ``"aew"``, ``"ahw"``, ``"aup"``, ``"awb"``, ``"axb"``, ``"bdl"``, + ``"clb"``, ``"eey"``, ``"fem"``, ``"gka"``, ``"jmk"``, ``"ksp"``, ``"ljm"``, ``"lnh"``, + ``"rms"``, ``"rxr"``, ``"slp"`` or ``"slt"``. + folder_in_archive (str, optional): + The top-level directory of the dataset. (default: ``"ARCTIC"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + """ + + _file_text = "txt.done.data" + _folder_text = "etc" + _ext_audio = ".wav" + _folder_audio = "wav" + + def __init__( + self, root: Union[str, Path], url: str = URL, folder_in_archive: str = FOLDER_IN_ARCHIVE, download: bool = False + ) -> None: + + if url in [ + "aew", + "ahw", + "aup", + "awb", + "axb", + "bdl", + "clb", + "eey", + "fem", + "gka", + "jmk", + "ksp", + "ljm", + "lnh", + "rms", + "rxr", + "slp", + "slt", + ]: + + url = "cmu_us_" + url + "_arctic" + ext_archive = ".tar.bz2" + base_url = "http://www.festvox.org/cmu_arctic/packed/" + + url = os.path.join(base_url, url + ext_archive) + + # Get string representation of 'root' in case Path object is passed + root = os.fspath(root) + + basename = os.path.basename(url) + root = os.path.join(root, folder_in_archive) + if not os.path.isdir(root): + os.mkdir(root) + archive = os.path.join(root, basename) + + basename = basename.split(".")[0] + + self._path = os.path.join(root, basename) + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_tar(archive) + else: + if not os.path.exists(self._path): + raise RuntimeError( + f"The path {self._path} doesn't exist. 
" + "Please check the ``root`` path or set `download=True` to download it" + ) + self._text = os.path.join(self._path, self._folder_text, self._file_text) + + with open(self._text, "r") as text: + walker = csv.reader(text, delimiter="\n") + self._walker = list(walker) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Transcript + str: + Utterance ID + """ + line = self._walker[n] + return load_cmuarctic_item(line, self._path, self._folder_audio, self._ext_audio) + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/cmudict.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/cmudict.py new file mode 100644 index 0000000000000000000000000000000000000000..d1038f48badde6f5db589691c5aee2ddf1f1d5de --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/cmudict.py @@ -0,0 +1,186 @@ +import os +import re +from pathlib import Path +from typing import Iterable, List, Tuple, Union + +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file + + +_CHECKSUMS = { + "http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b": "209a8b4cd265013e96f4658632a9878103b0c5abf62b50d4ef3ae1be226b29e4", # noqa: E501 + "http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b.symbols": "408ccaae803641c6d7b626b6299949320c2dbca96b2220fd3fb17887b023b027", # noqa: E501 +} +_PUNCTUATIONS = { + "!EXCLAMATION-POINT", + '"CLOSE-QUOTE', + '"DOUBLE-QUOTE', + '"END-OF-QUOTE', + '"END-QUOTE', + '"IN-QUOTES', + '"QUOTE', + '"UNQUOTE', + "#HASH-MARK", + "#POUND-SIGN", + "#SHARP-SIGN", + "%PERCENT", + "&ERSAND", + "'END-INNER-QUOTE", + "'END-QUOTE", + "'INNER-QUOTE", + "'QUOTE", + "'SINGLE-QUOTE", + "(BEGIN-PARENS", + "(IN-PARENTHESES", + "(LEFT-PAREN", + "(OPEN-PARENTHESES", + "(PAREN", + "(PARENS", + "(PARENTHESES", + ")CLOSE-PAREN", + ")CLOSE-PARENTHESES", + ")END-PAREN", + ")END-PARENS", + ")END-PARENTHESES", + ")END-THE-PAREN", + ")PAREN", + ")PARENS", + ")RIGHT-PAREN", + ")UN-PARENTHESES", + "+PLUS", + ",COMMA", + "--DASH", + "-DASH", + "-HYPHEN", + "...ELLIPSIS", + ".DECIMAL", + ".DOT", + ".FULL-STOP", + ".PERIOD", + ".POINT", + "/SLASH", + ":COLON", + ";SEMI-COLON", + ";SEMI-COLON(1)", + "?QUESTION-MARK", + "{BRACE", + "{LEFT-BRACE", + "{OPEN-BRACE", + "}CLOSE-BRACE", + "}RIGHT-BRACE", +} + + +def _parse_dictionary(lines: Iterable[str], exclude_punctuations: bool) -> List[str]: + _alt_re = re.compile(r"\([0-9]+\)") + cmudict: List[Tuple[str, List[str]]] = [] + for line in lines: + if not line or line.startswith(";;;"): # ignore comments + continue + + word, phones = line.strip().split(" ") + if word in _PUNCTUATIONS: + if exclude_punctuations: + continue + # !EXCLAMATION-POINT -> ! + # --DASH -> -- + # ...ELLIPSIS -> ... + if word.startswith("..."): + word = "..." 
+ elif word.startswith("--"): + word = "--" + else: + word = word[0] + + # if a word has multiple pronunciations, a (number) is appended to it, + # for example, DATAPOINTS and DATAPOINTS(1); + # the regular expression `_alt_re` removes the '(1)' and changes DATAPOINTS(1) back to DATAPOINTS + word = re.sub(_alt_re, "", word) + phones = phones.split(" ") + cmudict.append((word, phones)) + + return cmudict + + +class CMUDict(Dataset): + """*CMU Pronouncing Dictionary* :cite:`cmudict` (CMUDict) dataset. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + exclude_punctuations (bool, optional): + When enabled, exclude the pronunciation of punctuation marks, such as + `!EXCLAMATION-POINT` and `#HASH-MARK`. + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + url (str, optional): + The URL to download the dictionary from. + (default: ``"http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b"``) + url_symbols (str, optional): + The URL to download the list of symbols from. + (default: ``"http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b.symbols"``) + """ + + def __init__( + self, + root: Union[str, Path], + exclude_punctuations: bool = True, + *, + download: bool = False, + url: str = "http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b", + url_symbols: str = "http://svn.code.sf.net/p/cmusphinx/code/trunk/cmudict/cmudict-0.7b.symbols", + ) -> None: + + self.exclude_punctuations = exclude_punctuations + + self._root_path = Path(root) + if not os.path.isdir(self._root_path): + raise RuntimeError(f"The root directory does not exist; {root}") + + dict_file = self._root_path / os.path.basename(url) + symbol_file = self._root_path / os.path.basename(url_symbols) + if not os.path.exists(dict_file): + if not download: + raise RuntimeError( + "The dictionary file is not found in the following location. " + f"Set `download=True` to download it. {dict_file}" + ) + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, dict_file, checksum) + if not os.path.exists(symbol_file): + if not download: + raise RuntimeError( + "The symbol file is not found in the following location. " + f"Set `download=True` to download it. {symbol_file}" + ) + checksum = _CHECKSUMS.get(url_symbols, None) + download_url_to_file(url_symbols, symbol_file, checksum) + + with open(symbol_file, "r") as text: + self._symbols = [line.strip() for line in text.readlines()] + + with open(dict_file, "r", encoding="latin-1") as text: + self._dictionary = _parse_dictionary(text.readlines(), exclude_punctuations=self.exclude_punctuations) + + def __getitem__(self, n: int) -> Tuple[str, List[str]]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded.
+ + Returns: + Tuple of a word and its phonemes + + str: + Word + List[str]: + Phonemes + """ + return self._dictionary[n] + + def __len__(self) -> int: + return len(self._dictionary) + + @property + def symbols(self) -> List[str]: + """list[str]: A list of phoneme symbols, such as ``"AA"``, ``"AE"``, ``"AH"``.""" + return self._symbols.copy() diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/commonvoice.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/commonvoice.py new file mode 100644 index 0000000000000000000000000000000000000000..db0e035c6116487a87efcffaeea31a19212be458 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/commonvoice.py @@ -0,0 +1,86 @@ +import csv +import os +from pathlib import Path +from typing import Dict, List, Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset + + +def load_commonvoice_item( + line: List[str], header: List[str], path: str, folder_audio: str, ext_audio: str +) -> Tuple[Tensor, int, Dict[str, str]]: + # Each line has the following data: + # client_id, path, sentence, up_votes, down_votes, age, gender, accent + + if header[1] != "path": + raise ValueError(f"expect `header[1]` to be 'path', but got {header[1]}") + fileid = line[1] + filename = os.path.join(path, folder_audio, fileid) + if not filename.endswith(ext_audio): + filename += ext_audio + waveform, sample_rate = torchaudio.load(filename) + + dic = dict(zip(header, line)) + + return waveform, sample_rate, dic + + +class COMMONVOICE(Dataset): + """*CommonVoice* :cite:`ardila2020common` dataset. + + Args: + root (str or Path): Path to the directory where the dataset is located. + (Where the ``tsv`` file is present.) + tsv (str, optional): + The name of the tsv file used to construct the metadata, such as + ``"train.tsv"``, ``"test.tsv"``, ``"dev.tsv"``, ``"invalidated.tsv"``, + ``"validated.tsv"`` and ``"other.tsv"``. (default: ``"train.tsv"``) + """ + + _ext_txt = ".txt" + _ext_audio = ".mp3" + _folder_audio = "clips" + + def __init__(self, root: Union[str, Path], tsv: str = "train.tsv") -> None: + + # Get string representation of 'root' in case Path object is passed + self._path = os.fspath(root) + self._tsv = os.path.join(self._path, tsv) + + with open(self._tsv, "r") as tsv_: + walker = csv.reader(tsv_, delimiter="\t") + self._header = next(walker) + self._walker = list(walker) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, Dict[str, str]]: + """Load the n-th sample from the dataset.
+ + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + Dict[str, str]: + Dictionary containing the following items from the corresponding TSV file; + + * ``"client_id"`` + * ``"path"`` + * ``"sentence"`` + * ``"up_votes"`` + * ``"down_votes"`` + * ``"age"`` + * ``"gender"`` + * ``"accent"`` + """ + line = self._walker[n] + return load_commonvoice_item(line, self._header, self._path, self._folder_audio, self._ext_audio) + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/dr_vctk.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/dr_vctk.py new file mode 100644 index 0000000000000000000000000000000000000000..a634b968949480738eefef926d25b05846f0b67d --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/dr_vctk.py @@ -0,0 +1,121 @@ +from pathlib import Path +from typing import Dict, Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_zip + + +_URL = "https://datashare.ed.ac.uk/bitstream/handle/10283/3038/DR-VCTK.zip" +_CHECKSUM = "781f12f4406ed36ed27ae3bce55da47ba176e2d8bae67319e389e07b2c9bd769" +_SUPPORTED_SUBSETS = {"train", "test"} + + +class DR_VCTK(Dataset): + """*Device Recorded VCTK (Small subset version)* :cite:`Sarfjoo2018DeviceRV` dataset. + + Args: + root (str or Path): Root directory where the dataset's top level directory is found. + subset (str): The subset to use. Can be one of ``"train"`` and ``"test"``. (default: ``"train"``). + download (bool): + Whether to download the dataset if it is not found at root path. (default: ``False``). + url (str): The URL to download the dataset from. + (default: ``"https://datashare.ed.ac.uk/bitstream/handle/10283/3038/DR-VCTK.zip"``) + """ + + def __init__( + self, + root: Union[str, Path], + subset: str = "train", + *, + download: bool = False, + url: str = _URL, + ) -> None: + if subset not in _SUPPORTED_SUBSETS: + raise RuntimeError( + f"The subset '{subset}' does not match any of the supported subsets: {_SUPPORTED_SUBSETS}" + ) + + root = Path(root).expanduser() + archive = root / "DR-VCTK.zip" + + self._subset = subset + self._path = root / "DR-VCTK" / "DR-VCTK" + self._clean_audio_dir = self._path / f"clean_{self._subset}set_wav_16k" + self._noisy_audio_dir = self._path / f"device-recorded_{self._subset}set_wav_16k" + self._config_filepath = self._path / "configurations" / f"{self._subset}_ch_log.txt" + + if not self._path.is_dir(): + if not archive.is_file(): + if not download: + raise RuntimeError("Dataset not found. 
Please use `download=True` to download it.") + download_url_to_file(url, archive, hash_prefix=_CHECKSUM) + _extract_zip(archive, root) + + self._config = self._load_config(self._config_filepath) + self._filename_list = sorted(self._config) + + def _load_config(self, filepath: str) -> Dict[str, Tuple[str, int]]: + # Skip header + skip_rows = 2 if self._subset == "train" else 1 + + config = {} + with open(filepath) as f: + for i, line in enumerate(f): + if i < skip_rows or not line: + continue + filename, source, channel_id = line.strip().split("\t") + config[filename] = (source, int(channel_id)) + return config + + def _load_dr_vctk_item(self, filename: str) -> Tuple[Tensor, int, Tensor, int, str, str, str, int]: + speaker_id, utterance_id = filename.split(".")[0].split("_") + source, channel_id = self._config[filename] + file_clean_audio = self._clean_audio_dir / filename + file_noisy_audio = self._noisy_audio_dir / filename + waveform_clean, sample_rate_clean = torchaudio.load(file_clean_audio) + waveform_noisy, sample_rate_noisy = torchaudio.load(file_noisy_audio) + return ( + waveform_clean, + sample_rate_clean, + waveform_noisy, + sample_rate_noisy, + speaker_id, + utterance_id, + source, + channel_id, + ) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, Tensor, int, str, str, str, int]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Clean waveform + int: + Sample rate of the clean waveform + Tensor: + Noisy waveform + int: + Sample rate of the noisy waveform + str: + Speaker ID + str: + Utterance ID + str: + Source + int: + Channel ID + """ + filename = self._filename_list[n] + return self._load_dr_vctk_item(filename) + + def __len__(self) -> int: + return len(self._filename_list) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/fluentcommands.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/fluentcommands.py new file mode 100644 index 0000000000000000000000000000000000000000..5cdee398d6e31a5e622321d1f73177606d9c8640 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/fluentcommands.py @@ -0,0 +1,108 @@ +import csv +import os +from pathlib import Path +from typing import Tuple, Union + +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio.datasets.utils import _load_waveform + +SAMPLE_RATE = 16000 + + +class FluentSpeechCommands(Dataset): + """*Fluent Speech Commands* :cite:`fluent` dataset. + + Args: + root (str or Path): Path to the directory where the dataset is found. + subset (str, optional): Subset of the dataset to use. + Options: [``"train"``, ``"valid"``, ``"test"``]. + (Default: ``"train"``) + """ + + def __init__(self, root: Union[str, Path], subset: str = "train"): + if subset not in ["train", "valid", "test"]: + raise ValueError("`subset` must be one of ['train', 'valid', 'test']") + + root = os.fspath(root) + self._path = os.path.join(root, "fluent_speech_commands_dataset") + + if not os.path.isdir(self._path): + raise RuntimeError("Dataset not found.") + + subset_path = os.path.join(self._path, "data", f"{subset}_data.csv") + with open(subset_path) as subset_csv: + subset_reader = csv.reader(subset_csv) + data = list(subset_reader) + + self.header = data[0] + self.data = data[1:] + + def get_metadata(self, n: int) -> Tuple[str, int, str, int, str, str, str, str]: + """Get metadata for the n-th sample from the dataset.
Returns filepath instead of waveform, + but otherwise returns the same fields as :py:func:`__getitem__`. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + str: + Path to audio + int: + Sample rate + str: + File name + int: + Speaker ID + str: + Transcription + str: + Action + str: + Object + str: + Location + """ + sample = self.data[n] + + file_name = sample[self.header.index("path")].split("/")[-1] + file_name = file_name.split(".")[0] + speaker_id, transcription, action, obj, location = sample[2:] + file_path = os.path.join("wavs", "speakers", speaker_id, f"{file_name}.wav") + + return file_path, SAMPLE_RATE, file_name, speaker_id, transcription, action, obj, location + + def __len__(self) -> int: + return len(self.data) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, str, str, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + File name + int: + Speaker ID + str: + Transcription + str: + Action + str: + Object + str: + Location + """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._path, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/gtzan.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/gtzan.py new file mode 100644 index 0000000000000000000000000000000000000000..347e7e71831770f42d7fdaf0b3c63a09409f659d --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/gtzan.py @@ -0,0 +1,1118 @@ +import os +from pathlib import Path +from typing import Optional, Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar + +# The following lists prefixed with `filtered_` provide a filtered split +# that: +# +# a. Mitigate a known issue with GTZAN (duplication) +# +# b. Provide a standard split for testing it against other +# methods (e.g. the one in jordipons/sklearn-audio-transfer-learning). +# +# Those are used when GTZAN is initialised with the `filtered` keyword. +# The split was taken from (github) jordipons/sklearn-audio-transfer-learning. 
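The three ``filtered_test`` / ``filtered_train`` / ``filtered_valid`` lists that follow implement this split; they are selected through the ``subset`` argument of the GTZAN class defined further down in this file. A minimal usage sketch, assuming the archive is already extracted under ``./data/genres`` (the class docstring below notes the download URL stopped working):

    from torch.utils.data import DataLoader
    from torchaudio.datasets import GTZAN

    # "training" selects filtered_train; "validation" and "testing" select the other two lists.
    train_set = GTZAN("./data", subset="training")
    waveform, sample_rate, label = train_set[0]  # e.g. label == "blues"
    loader = DataLoader(train_set, batch_size=1, shuffle=True)  # batch_size=1: clips can differ in length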
+ +gtzan_genres = [ + "blues", + "classical", + "country", + "disco", + "hiphop", + "jazz", + "metal", + "pop", + "reggae", + "rock", +] + +filtered_test = [ + "blues.00012", + "blues.00013", + "blues.00014", + "blues.00015", + "blues.00016", + "blues.00017", + "blues.00018", + "blues.00019", + "blues.00020", + "blues.00021", + "blues.00022", + "blues.00023", + "blues.00024", + "blues.00025", + "blues.00026", + "blues.00027", + "blues.00028", + "blues.00061", + "blues.00062", + "blues.00063", + "blues.00064", + "blues.00065", + "blues.00066", + "blues.00067", + "blues.00068", + "blues.00069", + "blues.00070", + "blues.00071", + "blues.00072", + "blues.00098", + "blues.00099", + "classical.00011", + "classical.00012", + "classical.00013", + "classical.00014", + "classical.00015", + "classical.00016", + "classical.00017", + "classical.00018", + "classical.00019", + "classical.00020", + "classical.00021", + "classical.00022", + "classical.00023", + "classical.00024", + "classical.00025", + "classical.00026", + "classical.00027", + "classical.00028", + "classical.00029", + "classical.00034", + "classical.00035", + "classical.00036", + "classical.00037", + "classical.00038", + "classical.00039", + "classical.00040", + "classical.00041", + "classical.00049", + "classical.00077", + "classical.00078", + "classical.00079", + "country.00030", + "country.00031", + "country.00032", + "country.00033", + "country.00034", + "country.00035", + "country.00036", + "country.00037", + "country.00038", + "country.00039", + "country.00040", + "country.00043", + "country.00044", + "country.00046", + "country.00047", + "country.00048", + "country.00050", + "country.00051", + "country.00053", + "country.00054", + "country.00055", + "country.00056", + "country.00057", + "country.00058", + "country.00059", + "country.00060", + "country.00061", + "country.00062", + "country.00063", + "country.00064", + "disco.00001", + "disco.00021", + "disco.00058", + "disco.00062", + "disco.00063", + "disco.00064", + "disco.00065", + "disco.00066", + "disco.00069", + "disco.00076", + "disco.00077", + "disco.00078", + "disco.00079", + "disco.00080", + "disco.00081", + "disco.00082", + "disco.00083", + "disco.00084", + "disco.00085", + "disco.00086", + "disco.00087", + "disco.00088", + "disco.00091", + "disco.00092", + "disco.00093", + "disco.00094", + "disco.00096", + "disco.00097", + "disco.00099", + "hiphop.00000", + "hiphop.00026", + "hiphop.00027", + "hiphop.00030", + "hiphop.00040", + "hiphop.00043", + "hiphop.00044", + "hiphop.00045", + "hiphop.00051", + "hiphop.00052", + "hiphop.00053", + "hiphop.00054", + "hiphop.00062", + "hiphop.00063", + "hiphop.00064", + "hiphop.00065", + "hiphop.00066", + "hiphop.00067", + "hiphop.00068", + "hiphop.00069", + "hiphop.00070", + "hiphop.00071", + "hiphop.00072", + "hiphop.00073", + "hiphop.00074", + "hiphop.00075", + "hiphop.00099", + "jazz.00073", + "jazz.00074", + "jazz.00075", + "jazz.00076", + "jazz.00077", + "jazz.00078", + "jazz.00079", + "jazz.00080", + "jazz.00081", + "jazz.00082", + "jazz.00083", + "jazz.00084", + "jazz.00085", + "jazz.00086", + "jazz.00087", + "jazz.00088", + "jazz.00089", + "jazz.00090", + "jazz.00091", + "jazz.00092", + "jazz.00093", + "jazz.00094", + "jazz.00095", + "jazz.00096", + "jazz.00097", + "jazz.00098", + "jazz.00099", + "metal.00012", + "metal.00013", + "metal.00014", + "metal.00015", + "metal.00022", + "metal.00023", + "metal.00025", + "metal.00026", + "metal.00027", + "metal.00028", + "metal.00029", + "metal.00030", + "metal.00031", + "metal.00032", 
+ "metal.00033", + "metal.00038", + "metal.00039", + "metal.00067", + "metal.00070", + "metal.00073", + "metal.00074", + "metal.00075", + "metal.00078", + "metal.00083", + "metal.00085", + "metal.00087", + "metal.00088", + "pop.00000", + "pop.00001", + "pop.00013", + "pop.00014", + "pop.00043", + "pop.00063", + "pop.00064", + "pop.00065", + "pop.00066", + "pop.00069", + "pop.00070", + "pop.00071", + "pop.00072", + "pop.00073", + "pop.00074", + "pop.00075", + "pop.00076", + "pop.00077", + "pop.00078", + "pop.00079", + "pop.00082", + "pop.00088", + "pop.00089", + "pop.00090", + "pop.00091", + "pop.00092", + "pop.00093", + "pop.00094", + "pop.00095", + "pop.00096", + "reggae.00034", + "reggae.00035", + "reggae.00036", + "reggae.00037", + "reggae.00038", + "reggae.00039", + "reggae.00040", + "reggae.00046", + "reggae.00047", + "reggae.00048", + "reggae.00052", + "reggae.00053", + "reggae.00064", + "reggae.00065", + "reggae.00066", + "reggae.00067", + "reggae.00068", + "reggae.00071", + "reggae.00079", + "reggae.00082", + "reggae.00083", + "reggae.00084", + "reggae.00087", + "reggae.00088", + "reggae.00089", + "reggae.00090", + "rock.00010", + "rock.00011", + "rock.00012", + "rock.00013", + "rock.00014", + "rock.00015", + "rock.00027", + "rock.00028", + "rock.00029", + "rock.00030", + "rock.00031", + "rock.00032", + "rock.00033", + "rock.00034", + "rock.00035", + "rock.00036", + "rock.00037", + "rock.00039", + "rock.00040", + "rock.00041", + "rock.00042", + "rock.00043", + "rock.00044", + "rock.00045", + "rock.00046", + "rock.00047", + "rock.00048", + "rock.00086", + "rock.00087", + "rock.00088", + "rock.00089", + "rock.00090", +] + +filtered_train = [ + "blues.00029", + "blues.00030", + "blues.00031", + "blues.00032", + "blues.00033", + "blues.00034", + "blues.00035", + "blues.00036", + "blues.00037", + "blues.00038", + "blues.00039", + "blues.00040", + "blues.00041", + "blues.00042", + "blues.00043", + "blues.00044", + "blues.00045", + "blues.00046", + "blues.00047", + "blues.00048", + "blues.00049", + "blues.00073", + "blues.00074", + "blues.00075", + "blues.00076", + "blues.00077", + "blues.00078", + "blues.00079", + "blues.00080", + "blues.00081", + "blues.00082", + "blues.00083", + "blues.00084", + "blues.00085", + "blues.00086", + "blues.00087", + "blues.00088", + "blues.00089", + "blues.00090", + "blues.00091", + "blues.00092", + "blues.00093", + "blues.00094", + "blues.00095", + "blues.00096", + "blues.00097", + "classical.00030", + "classical.00031", + "classical.00032", + "classical.00033", + "classical.00043", + "classical.00044", + "classical.00045", + "classical.00046", + "classical.00047", + "classical.00048", + "classical.00050", + "classical.00051", + "classical.00052", + "classical.00053", + "classical.00054", + "classical.00055", + "classical.00056", + "classical.00057", + "classical.00058", + "classical.00059", + "classical.00060", + "classical.00061", + "classical.00062", + "classical.00063", + "classical.00064", + "classical.00065", + "classical.00066", + "classical.00067", + "classical.00080", + "classical.00081", + "classical.00082", + "classical.00083", + "classical.00084", + "classical.00085", + "classical.00086", + "classical.00087", + "classical.00088", + "classical.00089", + "classical.00090", + "classical.00091", + "classical.00092", + "classical.00093", + "classical.00094", + "classical.00095", + "classical.00096", + "classical.00097", + "classical.00098", + "classical.00099", + "country.00019", + "country.00020", + "country.00021", + "country.00022", + 
"country.00023", + "country.00024", + "country.00025", + "country.00026", + "country.00028", + "country.00029", + "country.00065", + "country.00066", + "country.00067", + "country.00068", + "country.00069", + "country.00070", + "country.00071", + "country.00072", + "country.00073", + "country.00074", + "country.00075", + "country.00076", + "country.00077", + "country.00078", + "country.00079", + "country.00080", + "country.00081", + "country.00082", + "country.00083", + "country.00084", + "country.00085", + "country.00086", + "country.00087", + "country.00088", + "country.00089", + "country.00090", + "country.00091", + "country.00092", + "country.00093", + "country.00094", + "country.00095", + "country.00096", + "country.00097", + "country.00098", + "country.00099", + "disco.00005", + "disco.00015", + "disco.00016", + "disco.00017", + "disco.00018", + "disco.00019", + "disco.00020", + "disco.00022", + "disco.00023", + "disco.00024", + "disco.00025", + "disco.00026", + "disco.00027", + "disco.00028", + "disco.00029", + "disco.00030", + "disco.00031", + "disco.00032", + "disco.00033", + "disco.00034", + "disco.00035", + "disco.00036", + "disco.00037", + "disco.00039", + "disco.00040", + "disco.00041", + "disco.00042", + "disco.00043", + "disco.00044", + "disco.00045", + "disco.00047", + "disco.00049", + "disco.00053", + "disco.00054", + "disco.00056", + "disco.00057", + "disco.00059", + "disco.00061", + "disco.00070", + "disco.00073", + "disco.00074", + "disco.00089", + "hiphop.00002", + "hiphop.00003", + "hiphop.00004", + "hiphop.00005", + "hiphop.00006", + "hiphop.00007", + "hiphop.00008", + "hiphop.00009", + "hiphop.00010", + "hiphop.00011", + "hiphop.00012", + "hiphop.00013", + "hiphop.00014", + "hiphop.00015", + "hiphop.00016", + "hiphop.00017", + "hiphop.00018", + "hiphop.00019", + "hiphop.00020", + "hiphop.00021", + "hiphop.00022", + "hiphop.00023", + "hiphop.00024", + "hiphop.00025", + "hiphop.00028", + "hiphop.00029", + "hiphop.00031", + "hiphop.00032", + "hiphop.00033", + "hiphop.00034", + "hiphop.00035", + "hiphop.00036", + "hiphop.00037", + "hiphop.00038", + "hiphop.00041", + "hiphop.00042", + "hiphop.00055", + "hiphop.00056", + "hiphop.00057", + "hiphop.00058", + "hiphop.00059", + "hiphop.00060", + "hiphop.00061", + "hiphop.00077", + "hiphop.00078", + "hiphop.00079", + "hiphop.00080", + "jazz.00000", + "jazz.00001", + "jazz.00011", + "jazz.00012", + "jazz.00013", + "jazz.00014", + "jazz.00015", + "jazz.00016", + "jazz.00017", + "jazz.00018", + "jazz.00019", + "jazz.00020", + "jazz.00021", + "jazz.00022", + "jazz.00023", + "jazz.00024", + "jazz.00041", + "jazz.00047", + "jazz.00048", + "jazz.00049", + "jazz.00050", + "jazz.00051", + "jazz.00052", + "jazz.00053", + "jazz.00054", + "jazz.00055", + "jazz.00056", + "jazz.00057", + "jazz.00058", + "jazz.00059", + "jazz.00060", + "jazz.00061", + "jazz.00062", + "jazz.00063", + "jazz.00064", + "jazz.00065", + "jazz.00066", + "jazz.00067", + "jazz.00068", + "jazz.00069", + "jazz.00070", + "jazz.00071", + "jazz.00072", + "metal.00002", + "metal.00003", + "metal.00005", + "metal.00021", + "metal.00024", + "metal.00035", + "metal.00046", + "metal.00047", + "metal.00048", + "metal.00049", + "metal.00050", + "metal.00051", + "metal.00052", + "metal.00053", + "metal.00054", + "metal.00055", + "metal.00056", + "metal.00057", + "metal.00059", + "metal.00060", + "metal.00061", + "metal.00062", + "metal.00063", + "metal.00064", + "metal.00065", + "metal.00066", + "metal.00069", + "metal.00071", + "metal.00072", + "metal.00079", + "metal.00080", + 
"metal.00084", + "metal.00086", + "metal.00089", + "metal.00090", + "metal.00091", + "metal.00092", + "metal.00093", + "metal.00094", + "metal.00095", + "metal.00096", + "metal.00097", + "metal.00098", + "metal.00099", + "pop.00002", + "pop.00003", + "pop.00004", + "pop.00005", + "pop.00006", + "pop.00007", + "pop.00008", + "pop.00009", + "pop.00011", + "pop.00012", + "pop.00016", + "pop.00017", + "pop.00018", + "pop.00019", + "pop.00020", + "pop.00023", + "pop.00024", + "pop.00025", + "pop.00026", + "pop.00027", + "pop.00028", + "pop.00029", + "pop.00031", + "pop.00032", + "pop.00033", + "pop.00034", + "pop.00035", + "pop.00036", + "pop.00038", + "pop.00039", + "pop.00040", + "pop.00041", + "pop.00042", + "pop.00044", + "pop.00046", + "pop.00049", + "pop.00050", + "pop.00080", + "pop.00097", + "pop.00098", + "pop.00099", + "reggae.00000", + "reggae.00001", + "reggae.00002", + "reggae.00004", + "reggae.00006", + "reggae.00009", + "reggae.00011", + "reggae.00012", + "reggae.00014", + "reggae.00015", + "reggae.00016", + "reggae.00017", + "reggae.00018", + "reggae.00019", + "reggae.00020", + "reggae.00021", + "reggae.00022", + "reggae.00023", + "reggae.00024", + "reggae.00025", + "reggae.00026", + "reggae.00027", + "reggae.00028", + "reggae.00029", + "reggae.00030", + "reggae.00031", + "reggae.00032", + "reggae.00042", + "reggae.00043", + "reggae.00044", + "reggae.00045", + "reggae.00049", + "reggae.00050", + "reggae.00051", + "reggae.00054", + "reggae.00055", + "reggae.00056", + "reggae.00057", + "reggae.00058", + "reggae.00059", + "reggae.00060", + "reggae.00063", + "reggae.00069", + "rock.00000", + "rock.00001", + "rock.00002", + "rock.00003", + "rock.00004", + "rock.00005", + "rock.00006", + "rock.00007", + "rock.00008", + "rock.00009", + "rock.00016", + "rock.00017", + "rock.00018", + "rock.00019", + "rock.00020", + "rock.00021", + "rock.00022", + "rock.00023", + "rock.00024", + "rock.00025", + "rock.00026", + "rock.00057", + "rock.00058", + "rock.00059", + "rock.00060", + "rock.00061", + "rock.00062", + "rock.00063", + "rock.00064", + "rock.00065", + "rock.00066", + "rock.00067", + "rock.00068", + "rock.00069", + "rock.00070", + "rock.00091", + "rock.00092", + "rock.00093", + "rock.00094", + "rock.00095", + "rock.00096", + "rock.00097", + "rock.00098", + "rock.00099", +] + +filtered_valid = [ + "blues.00000", + "blues.00001", + "blues.00002", + "blues.00003", + "blues.00004", + "blues.00005", + "blues.00006", + "blues.00007", + "blues.00008", + "blues.00009", + "blues.00010", + "blues.00011", + "blues.00050", + "blues.00051", + "blues.00052", + "blues.00053", + "blues.00054", + "blues.00055", + "blues.00056", + "blues.00057", + "blues.00058", + "blues.00059", + "blues.00060", + "classical.00000", + "classical.00001", + "classical.00002", + "classical.00003", + "classical.00004", + "classical.00005", + "classical.00006", + "classical.00007", + "classical.00008", + "classical.00009", + "classical.00010", + "classical.00068", + "classical.00069", + "classical.00070", + "classical.00071", + "classical.00072", + "classical.00073", + "classical.00074", + "classical.00075", + "classical.00076", + "country.00000", + "country.00001", + "country.00002", + "country.00003", + "country.00004", + "country.00005", + "country.00006", + "country.00007", + "country.00009", + "country.00010", + "country.00011", + "country.00012", + "country.00013", + "country.00014", + "country.00015", + "country.00016", + "country.00017", + "country.00018", + "country.00027", + "country.00041", + "country.00042", + 
"country.00045", + "country.00049", + "disco.00000", + "disco.00002", + "disco.00003", + "disco.00004", + "disco.00006", + "disco.00007", + "disco.00008", + "disco.00009", + "disco.00010", + "disco.00011", + "disco.00012", + "disco.00013", + "disco.00014", + "disco.00046", + "disco.00048", + "disco.00052", + "disco.00067", + "disco.00068", + "disco.00072", + "disco.00075", + "disco.00090", + "disco.00095", + "hiphop.00081", + "hiphop.00082", + "hiphop.00083", + "hiphop.00084", + "hiphop.00085", + "hiphop.00086", + "hiphop.00087", + "hiphop.00088", + "hiphop.00089", + "hiphop.00090", + "hiphop.00091", + "hiphop.00092", + "hiphop.00093", + "hiphop.00094", + "hiphop.00095", + "hiphop.00096", + "hiphop.00097", + "hiphop.00098", + "jazz.00002", + "jazz.00003", + "jazz.00004", + "jazz.00005", + "jazz.00006", + "jazz.00007", + "jazz.00008", + "jazz.00009", + "jazz.00010", + "jazz.00025", + "jazz.00026", + "jazz.00027", + "jazz.00028", + "jazz.00029", + "jazz.00030", + "jazz.00031", + "jazz.00032", + "metal.00000", + "metal.00001", + "metal.00006", + "metal.00007", + "metal.00008", + "metal.00009", + "metal.00010", + "metal.00011", + "metal.00016", + "metal.00017", + "metal.00018", + "metal.00019", + "metal.00020", + "metal.00036", + "metal.00037", + "metal.00068", + "metal.00076", + "metal.00077", + "metal.00081", + "metal.00082", + "pop.00010", + "pop.00053", + "pop.00055", + "pop.00058", + "pop.00059", + "pop.00060", + "pop.00061", + "pop.00062", + "pop.00081", + "pop.00083", + "pop.00084", + "pop.00085", + "pop.00086", + "reggae.00061", + "reggae.00062", + "reggae.00070", + "reggae.00072", + "reggae.00074", + "reggae.00076", + "reggae.00077", + "reggae.00078", + "reggae.00085", + "reggae.00092", + "reggae.00093", + "reggae.00094", + "reggae.00095", + "reggae.00096", + "reggae.00097", + "reggae.00098", + "reggae.00099", + "rock.00038", + "rock.00049", + "rock.00050", + "rock.00051", + "rock.00052", + "rock.00053", + "rock.00054", + "rock.00055", + "rock.00056", + "rock.00071", + "rock.00072", + "rock.00073", + "rock.00074", + "rock.00075", + "rock.00076", + "rock.00077", + "rock.00078", + "rock.00079", + "rock.00080", + "rock.00081", + "rock.00082", + "rock.00083", + "rock.00084", + "rock.00085", +] + + +URL = "http://opihi.cs.uvic.ca/sound/genres.tar.gz" +FOLDER_IN_ARCHIVE = "genres" +_CHECKSUMS = { + "http://opihi.cs.uvic.ca/sound/genres.tar.gz": "24347e0223d2ba798e0a558c4c172d9d4a19c00bb7963fe055d183dadb4ef2c6" +} + + +def load_gtzan_item(fileid: str, path: str, ext_audio: str) -> Tuple[Tensor, str]: + """ + Loads a file from the dataset and returns the raw waveform + as a Torch Tensor, its sample rate as an integer, and its + genre as a string. + """ + # Filenames are of the form label.id, e.g. blues.00078 + label, _ = fileid.split(".") + + # Read wav + file_audio = os.path.join(path, label, fileid + ext_audio) + waveform, sample_rate = torchaudio.load(file_audio) + + return waveform, sample_rate, label + + +class GTZAN(Dataset): + """*GTZAN* :cite:`tzanetakis_essl_cook_2001` dataset. + + Note: + Please see http://marsyas.info/downloads/datasets.html if you are planning to use + this dataset to publish results. + + Note: + As of October 2022, the download link is not currently working. Setting ``download=True`` + in GTZAN dataset will result in a URL connection error. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + url (str, optional): The URL to download the dataset from. 
+ (default: ``"http://opihi.cs.uvic.ca/sound/genres.tar.gz"``) + folder_in_archive (str, optional): The top-level directory of the dataset. + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + subset (str or None, optional): Which subset of the dataset to use. + One of ``"training"``, ``"validation"``, ``"testing"`` or ``None``. + If ``None``, the entire dataset is used. (default: ``None``). + """ + + _ext_audio = ".wav" + + def __init__( + self, + root: Union[str, Path], + url: str = URL, + folder_in_archive: str = FOLDER_IN_ARCHIVE, + download: bool = False, + subset: Optional[str] = None, + ) -> None: + + # super(GTZAN, self).__init__() + + # Get string representation of 'root' in case Path object is passed + root = os.fspath(root) + + self.root = root + self.url = url + self.folder_in_archive = folder_in_archive + self.download = download + self.subset = subset + + if subset is not None and subset not in ["training", "validation", "testing"]: + raise ValueError("When `subset` is not None, it must be one of ['training', 'validation', 'testing'].") + + archive = os.path.basename(url) + archive = os.path.join(root, archive) + self._path = os.path.join(root, folder_in_archive) + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_tar(archive) + + if not os.path.isdir(self._path): + raise RuntimeError("Dataset not found. Please use `download=True` to download it.") + + if self.subset is None: + # Check every subdirectory under dataset root + # which has the same name as the genres in + # GTZAN (e.g. `root_dir'/blues/, `root_dir'/rock, etc.) + # This lets users remove or move around song files, + # useful when e.g. they want to use only some of the files + # in a genre or want to label other files with a different + # genre. + self._walker = [] + + root = os.path.expanduser(self._path) + + for directory in gtzan_genres: + fulldir = os.path.join(root, directory) + + if not os.path.exists(fulldir): + continue + + songs_in_genre = os.listdir(fulldir) + songs_in_genre.sort() + for fname in songs_in_genre: + name, ext = os.path.splitext(fname) + if ext.lower() == ".wav" and "." in name: + # Check whether the file is of the form + # `gtzan_genre`.`5 digit number`.wav + genre, num = name.split(".") + if genre in gtzan_genres and len(num) == 5 and num.isdigit(): + self._walker.append(name) + else: + if self.subset == "training": + self._walker = filtered_train + elif self.subset == "validation": + self._walker = filtered_valid + elif self.subset == "testing": + self._walker = filtered_test + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str]: + """Load the n-th sample from the dataset. 
+ + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Label + """ + fileid = self._walker[n] + item = load_gtzan_item(fileid, self._path, self._ext_audio) + waveform, sample_rate, label = item + return waveform, sample_rate, label + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/iemocap.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/iemocap.py new file mode 100644 index 0000000000000000000000000000000000000000..224300a84f5ec3ae217f030783c825fc3db56c8a --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/iemocap.py @@ -0,0 +1,147 @@ +import os +import re +from pathlib import Path +from typing import Optional, Tuple, Union + +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio.datasets.utils import _load_waveform + + +_SAMPLE_RATE = 16000 + + +def _get_wavs_paths(data_dir): + wav_dir = data_dir / "sentences" / "wav" + wav_paths = sorted(str(p) for p in wav_dir.glob("*/*.wav")) + relative_paths = [] + for wav_path in wav_paths: + start = wav_path.find("Session") + wav_path = wav_path[start:] + relative_paths.append(wav_path) + return relative_paths + + +class IEMOCAP(Dataset): + """*IEMOCAP* :cite:`iemocap` dataset. + + Args: + root (str or Path): Root directory where the dataset's top level directory is found. + sessions (Tuple[int]): Tuple of sessions (1-5) to use. (Default: ``(1, 2, 3, 4, 5)``) + utterance_type (str or None, optional): Which type(s) of utterances to include in the dataset. + Options: ("scripted", "improvised", ``None``). If ``None``, both scripted and improvised + data are used. + """ + + def __init__( + self, + root: Union[str, Path], + sessions: Tuple[int, ...] = (1, 2, 3, 4, 5), + utterance_type: Optional[str] = None, + ): + root = Path(root) + self._path = root / "IEMOCAP" + + if not os.path.isdir(self._path): + raise RuntimeError("Dataset not found.") + + if utterance_type not in ["scripted", "improvised", None]: + raise ValueError("utterance_type must be one of 'scripted', 'improvised', or None") + + all_data = [] + self.data = [] + self.mapping = {} + + for session in sessions: + session_name = f"Session{session}" + session_dir = self._path / session_name + + # get wav paths + wav_paths = _get_wavs_paths(session_dir) + for wav_path in wav_paths: + wav_stem = str(Path(wav_path).stem) + all_data.append(wav_stem) + + # add labels + label_dir = session_dir / "dialog" / "EmoEvaluation" + query = "*.txt" + if utterance_type == "scripted": + query = "*script*.txt" + elif utterance_type == "improvised": + query = "*impro*.txt" + label_paths = label_dir.glob(query) + + for label_path in label_paths: + with open(label_path, "r") as f: + for line in f: + if not line.startswith("["): + continue + line = re.split("[\t\n]", line) + wav_stem = line[1] + label = line[2] + if wav_stem not in all_data: + continue + if label not in ["neu", "hap", "ang", "sad", "exc", "fru"]: + continue + self.mapping[wav_stem] = {} + self.mapping[wav_stem]["label"] = label + + for wav_path in wav_paths: + wav_stem = str(Path(wav_path).stem) + if wav_stem in self.mapping: + self.data.append(wav_stem) + self.mapping[wav_stem]["path"] = wav_path + + def get_metadata(self, n: int) -> Tuple[str, int, str, str, str]: + """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform, + but otherwise returns the same fields as :py:meth:`__getitem__`.
+ + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + str: + Path to audio + int: + Sample rate + str: + File name + str: + Label (one of ``"neu"``, ``"hap"``, ``"ang"``, ``"sad"``, ``"exc"``, ``"fru"``) + str: + Speaker + """ + wav_stem = self.data[n] + wav_path = self.mapping[wav_stem]["path"] + label = self.mapping[wav_stem]["label"] + speaker = wav_stem.split("_")[0] + return (wav_path, _SAMPLE_RATE, wav_stem, label, speaker) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + File name + str: + Label (one of ``"neu"``, ``"hap"``, ``"ang"``, ``"sad"``, ``"exc"``, ``"fru"``) + str: + Speaker + """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._path, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] + + def __len__(self): + return len(self.data) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/librilight_limited.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/librilight_limited.py new file mode 100644 index 0000000000000000000000000000000000000000..f0cb3100f7c4ad2e488c20bdfaac3833e0a136dd --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/librilight_limited.py @@ -0,0 +1,111 @@ +import os +from pathlib import Path +from typing import List, Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.librispeech import _get_librispeech_metadata +from torchaudio.datasets.utils import _extract_tar + + +_ARCHIVE_NAME = "librispeech_finetuning" +_URL = "https://dl.fbaipublicfiles.com/librilight/data/librispeech_finetuning.tgz" +_CHECKSUM = "5d1efdc777b548194d7e09ba89126e2188026df9fd57aa57eb14408d2b2342af" +_SUBSET_MAP = {"10min": ["1h/0"], "1h": ["1h/*"], "10h": ["1h/*", "9h"]} + + +def _get_fileids_paths(path: Path, folders: List[str], _ext_audio: str) -> List[Tuple[str, str]]: + """Get the file names and the corresponding file paths without `speaker_id` + and `chapter_id` directories. + The format of path is like: + {root}/{_ARCHIVE_NAME}/1h/[0-5]/[clean, other] or + {root}/{_ARCHIVE_NAME}/9h/[clean, other] + + Args: + path (Path): Root path to the dataset. + folders (List[str]): Folders that contain the desired audio files. + _ext_audio (str): Extension of audio files. + + Returns: + List[Tuple[str, str]]: + List of tuples where the first element is the relative path to the audio file. + The format of relative path is like: + 1h/[0-5]/[clean, other] or 9h/[clean, other] + The second element is the file name without audio extension. + """ + + path = Path(path) + files_paths = [] + for folder in folders: + paths = [p.relative_to(path) for p in path.glob(f"{folder}/*/*/*/*{_ext_audio}")] + files_paths += [(str(p.parent.parent.parent), str(p.stem)) for p in paths] # get subset folder and file name + files_paths.sort(key=lambda x: x[0] + x[1]) + return files_paths + + +class LibriLightLimited(Dataset): + """Subset of Libri-light :cite:`librilight` dataset, + which was used in HuBERT :cite:`hsu2021hubert` for supervised fine-tuning. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + subset (str, optional): The subset to use. 
Options: [``"10min"``, ``"1h"``, ``"10h"``] + (Default: ``"10min"``). + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + """ + + _ext_txt = ".trans.txt" + _ext_audio = ".flac" + + def __init__( + self, + root: Union[str, Path], + subset: str = "10min", + download: bool = False, + ) -> None: + if subset not in _SUBSET_MAP: + raise ValueError(f"`subset` must be one of {_SUBSET_MAP.keys()}. Found: {subset}") + folders = _SUBSET_MAP[subset] + + root = os.fspath(root) + self._path = os.path.join(root, _ARCHIVE_NAME) + archive = os.path.join(root, f"{_ARCHIVE_NAME}.tgz") + if not os.path.isdir(self._path): + if not download: + raise RuntimeError("Dataset not found. Please use `download=True` to download it.") + if not os.path.isfile(archive): + download_url_to_file(_URL, archive, hash_prefix=_CHECKSUM) + _extract_tar(archive) + self._fileids_paths = _get_fileids_paths(self._path, folders, self._ext_audio) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Transcript + int: + Speaker ID + int: + Chapter ID + int: + Utterance ID + """ + file_path, fileid = self._fileids_paths[n] + metadata = _get_librispeech_metadata(fileid, self._path, file_path, self._ext_audio, self._ext_txt) + waveform, _ = torchaudio.load(os.path.join(self._path, metadata[0])) + return (waveform,) + metadata[1:] + + def __len__(self) -> int: + return len(self._fileids_paths) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/librimix.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/librimix.py new file mode 100644 index 0000000000000000000000000000000000000000..2c6c6f18600ab35f037dda11f9f5bc32c8a5cbf5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/librimix.py @@ -0,0 +1,133 @@ +import os +from pathlib import Path +from typing import List, Tuple, Union + +import torch +from torch.utils.data import Dataset +from torchaudio.datasets.utils import _load_waveform + +_TASKS_TO_MIXTURE = { + "sep_clean": "mix_clean", + "enh_single": "mix_single", + "enh_both": "mix_both", + "sep_noisy": "mix_both", +} + + +class LibriMix(Dataset): + r"""*LibriMix* :cite:`cosentino2020librimix` dataset. + + Args: + root (str or Path): The path where the directory ``Libri2Mix`` or + ``Libri3Mix`` is stored. Not the path of those directories. + subset (str, optional): The subset to use. Options: [``"train-360"``, ``"train-100"``, + ``"dev"``, ``"test"``] (Default: ``"train-360"``). + num_speakers (int, optional): The number of speakers, which determines the directories + to traverse. The Dataset will traverse ``s1`` to ``sN`` directories to collect + N source audios. (Default: 2) + sample_rate (int, optional): Sample rate of audio files. The ``sample_rate`` determines + from which subdirectory the audio files are fetched. If any audio file has a different sample + rate, a ``ValueError`` is raised. Options: [8000, 16000] (Default: 8000) + task (str, optional): The task of LibriMix. + Options: [``"enh_single"``, ``"enh_both"``, ``"sep_clean"``, ``"sep_noisy"``] + (Default: ``"sep_clean"``) + mode (str, optional): The mode when creating the mixture. If set to ``"min"``, the lengths of mixture + and sources are the minimum length of all sources.
If set to ``"max"``, the lengths of mixture and + sources are zero padded to the maximum length of all sources. + Options: [``"min"``, ``"max"``] + (Default: ``"min"``) + + Note: + The LibriMix dataset needs to be manually generated. Please check https://github.com/JorisCos/LibriMix + """ + + def __init__( + self, + root: Union[str, Path], + subset: str = "train-360", + num_speakers: int = 2, + sample_rate: int = 8000, + task: str = "sep_clean", + mode: str = "min", + ): + self.root = Path(root) / f"Libri{num_speakers}Mix" + if not os.path.exists(self.root): + raise RuntimeError( + f"The path {self.root} doesn't exist. " + "Please check the ``root`` path and ``num_speakers`` or download the dataset manually." + ) + if mode not in ["max", "min"]: + raise ValueError(f'Expect ``mode`` to be one in ["min", "max"]. Found {mode}.') + if sample_rate == 8000: + mix_dir = self.root / "wav8k" / mode / subset + elif sample_rate == 16000: + mix_dir = self.root / "wav16k" / mode / subset + else: + raise ValueError(f"Unsupported sample rate. Found {sample_rate}.") + self.sample_rate = sample_rate + self.task = task + + self.mix_dir = mix_dir / _TASKS_TO_MIXTURE[task] + if task == "enh_both": + self.src_dirs = [(mix_dir / "mix_clean")] + else: + self.src_dirs = [(mix_dir / f"s{i+1}") for i in range(num_speakers)] + + self.files = [p.name for p in self.mix_dir.glob("*.wav")] + self.files.sort() + + def _load_sample(self, key) -> Tuple[int, torch.Tensor, List[torch.Tensor]]: + metadata = self.get_metadata(key) + mixed = _load_waveform(self.root, metadata[1], metadata[0]) + srcs = [] + for i, path_ in enumerate(metadata[2]): + src = _load_waveform(self.root, path_, metadata[0]) + if mixed.shape != src.shape: + raise ValueError(f"Different waveform shapes. mixed: {mixed.shape}, src[{i}]: {src.shape}") + srcs.append(src) + return self.sample_rate, mixed, srcs + + def get_metadata(self, key: int) -> Tuple[int, str, List[str]]: + """Get metadata for the n-th sample from the dataset. + + Args: + key (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + int: + Sample rate + str: + Path to mixed audio + List of str: + List of paths to source audios + """ + filename = self.files[key] + mixed_path = os.path.relpath(self.mix_dir / filename, self.root) + srcs_paths = [] + for dir_ in self.src_dirs: + src = os.path.relpath(dir_ / filename, self.root) + srcs_paths.append(src) + return self.sample_rate, mixed_path, srcs_paths + + def __len__(self) -> int: + return len(self.files) + + def __getitem__(self, key: int) -> Tuple[int, torch.Tensor, List[torch.Tensor]]: + """Load the n-th sample from the dataset. 
+ + Args: + key (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + int: + Sample rate + Tensor: + Mixture waveform + List of Tensors: + List of source waveforms + """ + return self._load_sample(key) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech.py new file mode 100644 index 0000000000000000000000000000000000000000..7cf05dbecb5cce24c91e3bbcf232935e1f6d8cd9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech.py @@ -0,0 +1,174 @@ +import os +from pathlib import Path +from typing import Tuple, Union + +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar, _load_waveform + +URL = "train-clean-100" +FOLDER_IN_ARCHIVE = "LibriSpeech" +SAMPLE_RATE = 16000 +_DATA_SUBSETS = [ + "dev-clean", + "dev-other", + "test-clean", + "test-other", + "train-clean-100", + "train-clean-360", + "train-other-500", +] +_CHECKSUMS = { + "http://www.openslr.org/resources/12/dev-clean.tar.gz": "76f87d090650617fca0cac8f88b9416e0ebf80350acb97b343a85fa903728ab3", # noqa: E501 + "http://www.openslr.org/resources/12/dev-other.tar.gz": "12661c48e8c3fe1de2c1caa4c3e135193bfb1811584f11f569dd12645aa84365", # noqa: E501 + "http://www.openslr.org/resources/12/test-clean.tar.gz": "39fde525e59672dc6d1551919b1478f724438a95aa55f874b576be21967e6c23", # noqa: E501 + "http://www.openslr.org/resources/12/test-other.tar.gz": "d09c181bba5cf717b3dee7d4d592af11a3ee3a09e08ae025c5506f6ebe961c29", # noqa: E501 + "http://www.openslr.org/resources/12/train-clean-100.tar.gz": "d4ddd1d5a6ab303066f14971d768ee43278a5f2a0aa43dc716b0e64ecbbbf6e2", # noqa: E501 + "http://www.openslr.org/resources/12/train-clean-360.tar.gz": "146a56496217e96c14334a160df97fffedd6e0a04e66b9c5af0d40be3c792ecf", # noqa: E501 + "http://www.openslr.org/resources/12/train-other-500.tar.gz": "ddb22f27f96ec163645d53215559df6aa36515f26e01dd70798188350adcb6d2", # noqa: E501 +} + + +def _download_librispeech(root, url): + base_url = "http://www.openslr.org/resources/12/" + ext_archive = ".tar.gz" + + filename = url + ext_archive + archive = os.path.join(root, filename) + download_url = os.path.join(base_url, filename) + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(download_url, None) + download_url_to_file(download_url, archive, hash_prefix=checksum) + _extract_tar(archive) + + +def _get_librispeech_metadata( + fileid: str, root: str, folder: str, ext_audio: str, ext_txt: str +) -> Tuple[str, int, str, int, int, int]: + speaker_id, chapter_id, utterance_id = fileid.split("-") + + # Get audio path and sample rate + fileid_audio = f"{speaker_id}-{chapter_id}-{utterance_id}" + filepath = os.path.join(folder, speaker_id, chapter_id, f"{fileid_audio}{ext_audio}") + + # Load text + file_text = f"{speaker_id}-{chapter_id}{ext_txt}" + file_text = os.path.join(root, folder, speaker_id, chapter_id, file_text) + with open(file_text) as ft: + for line in ft: + fileid_text, transcript = line.strip().split(" ", 1) + if fileid_audio == fileid_text: + break + else: + # Translation not found + raise FileNotFoundError(f"Translation not found for {fileid_audio}") + + return ( + filepath, + SAMPLE_RATE, + transcript, + int(speaker_id), + int(chapter_id), + int(utterance_id), + ) + + +class LIBRISPEECH(Dataset): + """*LibriSpeech* :cite:`7178964` dataset. 
+
+    Args:
+        root (str or Path): Path to the directory where the dataset is found or downloaded.
+        url (str, optional): The URL to download the dataset from,
+            or the type of the dataset to download.
+            Allowed type values are ``"dev-clean"``, ``"dev-other"``, ``"test-clean"``,
+            ``"test-other"``, ``"train-clean-100"``, ``"train-clean-360"`` and
+            ``"train-other-500"``. (default: ``"train-clean-100"``)
+        folder_in_archive (str, optional):
+            The top-level directory of the dataset. (default: ``"LibriSpeech"``)
+        download (bool, optional):
+            Whether to download the dataset if it is not found at root path. (default: ``False``).
+    """
+
+    _ext_txt = ".trans.txt"
+    _ext_audio = ".flac"
+
+    def __init__(
+        self,
+        root: Union[str, Path],
+        url: str = URL,
+        folder_in_archive: str = FOLDER_IN_ARCHIVE,
+        download: bool = False,
+    ) -> None:
+        self._url = url
+        if url not in _DATA_SUBSETS:
+            raise ValueError(f"Invalid url '{url}' given; please provide one of {_DATA_SUBSETS}.")
+
+        root = os.fspath(root)
+        self._archive = os.path.join(root, folder_in_archive)
+        self._path = os.path.join(root, folder_in_archive, url)
+
+        if not os.path.isdir(self._path):
+            if download:
+                _download_librispeech(root, url)
+            else:
+                raise RuntimeError(
+                    f"Dataset not found at {self._path}. Please set `download=True` to download the dataset."
+                )
+
+        self._walker = sorted(str(p.stem) for p in Path(self._path).glob("*/*/*" + self._ext_audio))
+
+    def get_metadata(self, n: int) -> Tuple[str, int, str, int, int, int]:
+        """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform,
+        but otherwise returns the same fields as :py:func:`__getitem__`.
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            str:
+                Path to audio
+            int:
+                Sample rate
+            str:
+                Transcript
+            int:
+                Speaker ID
+            int:
+                Chapter ID
+            int:
+                Utterance ID
+        """
+        fileid = self._walker[n]
+        return _get_librispeech_metadata(fileid, self._archive, self._url, self._ext_audio, self._ext_txt)
+
+    def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int]:
+        """Load the n-th sample from the dataset.
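+
+        Example (an illustrative sketch, not part of the upstream docstring; assumes the
+        ``test-clean`` subset is available under a hypothetical ``./data`` directory)::
+
+            >>> dataset = LIBRISPEECH("./data", url="test-clean", download=True)
+            >>> waveform, sample_rate, transcript, speaker_id, chapter_id, utterance_id = dataset[0]
+            >>> sample_rate
+            16000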
+ + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Transcript + int: + Speaker ID + int: + Chapter ID + int: + Utterance ID + """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._archive, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech_biasing.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech_biasing.py new file mode 100644 index 0000000000000000000000000000000000000000..bd518cf2b69094728f8693fe2cb8a2a535bd7d3c --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/librispeech_biasing.py @@ -0,0 +1,189 @@ +import os +from pathlib import Path +from typing import List, Tuple, Union + +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar, _load_waveform + +URL = "train-clean-100" +FOLDER_IN_ARCHIVE = "LibriSpeech" +SAMPLE_RATE = 16000 +_DATA_SUBSETS = [ + "dev-clean", + "dev-other", + "test-clean", + "test-other", + "train-clean-100", + "train-clean-360", + "train-other-500", +] +_CHECKSUMS = { + "http://www.openslr.org/resources/12/dev-clean.tar.gz": "76f87d090650617fca0cac8f88b9416e0ebf80350acb97b343a85fa903728ab3", # noqa: E501 + "http://www.openslr.org/resources/12/dev-other.tar.gz": "12661c48e8c3fe1de2c1caa4c3e135193bfb1811584f11f569dd12645aa84365", # noqa: E501 + "http://www.openslr.org/resources/12/test-clean.tar.gz": "39fde525e59672dc6d1551919b1478f724438a95aa55f874b576be21967e6c23", # noqa: E501 + "http://www.openslr.org/resources/12/test-other.tar.gz": "d09c181bba5cf717b3dee7d4d592af11a3ee3a09e08ae025c5506f6ebe961c29", # noqa: E501 + "http://www.openslr.org/resources/12/train-clean-100.tar.gz": "d4ddd1d5a6ab303066f14971d768ee43278a5f2a0aa43dc716b0e64ecbbbf6e2", # noqa: E501 + "http://www.openslr.org/resources/12/train-clean-360.tar.gz": "146a56496217e96c14334a160df97fffedd6e0a04e66b9c5af0d40be3c792ecf", # noqa: E501 + "http://www.openslr.org/resources/12/train-other-500.tar.gz": "ddb22f27f96ec163645d53215559df6aa36515f26e01dd70798188350adcb6d2", # noqa: E501 +} + + +def _download_librispeech(root, url): + base_url = "http://www.openslr.org/resources/12/" + ext_archive = ".tar.gz" + + filename = url + ext_archive + archive = os.path.join(root, filename) + download_url = os.path.join(base_url, filename) + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(download_url, None) + download_url_to_file(download_url, archive, hash_prefix=checksum) + _extract_tar(archive) + + +def _get_librispeech_metadata( + fileid: str, root: str, folder: str, ext_audio: str, ext_txt: str, blist: List[str] +) -> Tuple[str, int, str, int, int, int]: + blist = blist or [] + speaker_id, chapter_id, utterance_id = fileid.split("-") + + # Get audio path and sample rate + fileid_audio = f"{speaker_id}-{chapter_id}-{utterance_id}" + filepath = os.path.join(folder, speaker_id, chapter_id, f"{fileid_audio}{ext_audio}") + + # Load text + file_text = f"{speaker_id}-{chapter_id}{ext_txt}" + file_text = os.path.join(root, folder, speaker_id, chapter_id, file_text) + uttblist = [] + with open(file_text) as ft: + for line in ft: + fileid_text, transcript = line.strip().split(" ", 1) + if fileid_audio == fileid_text: + # get utterance biasing list + for word in transcript.split(): + if word 
in blist and word not in uttblist:
+                        uttblist.append(word)
+                break
+        else:
+            # Translation not found
+            raise FileNotFoundError(f"Translation not found for {fileid_audio}")
+
+    return (
+        filepath,
+        SAMPLE_RATE,
+        transcript,
+        int(speaker_id),
+        int(chapter_id),
+        int(utterance_id),
+        uttblist,
+    )
+
+
+class LibriSpeechBiasing(Dataset):
+    """*LibriSpeech* :cite:`7178964` dataset with prefix-tree construction and biasing support.
+
+    Args:
+        root (str or Path): Path to the directory where the dataset is found or downloaded.
+        url (str, optional): The URL to download the dataset from,
+            or the type of the dataset to download.
+            Allowed type values are ``"dev-clean"``, ``"dev-other"``, ``"test-clean"``,
+            ``"test-other"``, ``"train-clean-100"``, ``"train-clean-360"`` and
+            ``"train-other-500"``. (default: ``"train-clean-100"``)
+        folder_in_archive (str, optional):
+            The top-level directory of the dataset. (default: ``"LibriSpeech"``)
+        download (bool, optional):
+            Whether to download the dataset if it is not found at root path. (default: ``False``).
+        blist (list, optional):
+            The list of biasing words (default: ``[]``).
+    """
+
+    _ext_txt = ".trans.txt"
+    _ext_audio = ".flac"
+
+    def __init__(
+        self,
+        root: Union[str, Path],
+        url: str = URL,
+        folder_in_archive: str = FOLDER_IN_ARCHIVE,
+        download: bool = False,
+        blist: List[str] = None,
+    ) -> None:
+        self._url = url
+        if url not in _DATA_SUBSETS:
+            raise ValueError(f"Invalid url '{url}' given; please provide one of {_DATA_SUBSETS}.")
+
+        root = os.fspath(root)
+        self._archive = os.path.join(root, folder_in_archive)
+        self._path = os.path.join(root, folder_in_archive, url)
+
+        if not os.path.isdir(self._path):
+            if download:
+                _download_librispeech(root, url)
+            else:
+                raise RuntimeError(
+                    f"Dataset not found at {self._path}. Please set `download=True` to download the dataset."
+                )
+
+        self._walker = sorted(str(p.stem) for p in Path(self._path).glob("*/*/*" + self._ext_audio))
+        self.blist = blist
+
+    def get_metadata(self, n: int) -> Tuple[str, int, str, int, int, int, List[str]]:
+        """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform,
+        but otherwise returns the same fields as :py:func:`__getitem__`.
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            str:
+                Path to audio
+            int:
+                Sample rate
+            str:
+                Transcript
+            int:
+                Speaker ID
+            int:
+                Chapter ID
+            int:
+                Utterance ID
+            list:
+                List of biasing words in the utterance
+        """
+        fileid = self._walker[n]
+        return _get_librispeech_metadata(fileid, self._archive, self._url, self._ext_audio, self._ext_txt, self.blist)
+
+    def __getitem__(self, n: int) -> Tuple[Tensor, int, str, int, int, int, List[str]]:
+        """Load the n-th sample from the dataset.
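+
+        Example (an illustrative sketch, not part of the upstream docstring; assumes the
+        ``test-clean`` subset under a hypothetical ``./data`` directory and a toy biasing list)::
+
+            >>> dataset = LibriSpeechBiasing("./data", url="test-clean", blist=["THE", "AND"])
+            >>> waveform, sample_rate, transcript, speaker, chapter, utterance, uttblist = dataset[0]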
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            Tensor:
+                Waveform
+            int:
+                Sample rate
+            str:
+                Transcript
+            int:
+                Speaker ID
+            int:
+                Chapter ID
+            int:
+                Utterance ID
+            list:
+                List of biasing words in the utterance
+        """
+        metadata = self.get_metadata(n)
+        waveform = _load_waveform(self._archive, metadata[0], metadata[1])
+        return (waveform,) + metadata[1:]
+
+    def __len__(self) -> int:
+        return len(self._walker)
diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/libritts.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/libritts.py
new file mode 100644
index 0000000000000000000000000000000000000000..829ce9572920c31ec7a4b393379f779a7df14ea9
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/libritts.py
@@ -0,0 +1,168 @@
+import os
+from pathlib import Path
+from typing import Tuple, Union
+
+import torchaudio
+from torch import Tensor
+from torch.utils.data import Dataset
+from torchaudio._internal import download_url_to_file
+from torchaudio.datasets.utils import _extract_tar
+
+URL = "train-clean-100"
+FOLDER_IN_ARCHIVE = "LibriTTS"
+_CHECKSUMS = {
+    "http://www.openslr.org/resources/60/dev-clean.tar.gz": "da0864e1bd26debed35da8a869dd5c04dfc27682921936de7cff9c8a254dbe1a",  # noqa: E501
+    "http://www.openslr.org/resources/60/dev-other.tar.gz": "d413eda26f3a152ac7c9cf3658ef85504dfb1b625296e5fa83727f5186cca79c",  # noqa: E501
+    "http://www.openslr.org/resources/60/test-clean.tar.gz": "234ea5b25859102a87024a4b9b86641f5b5aaaf1197335c95090cde04fe9a4f5",  # noqa: E501
+    "http://www.openslr.org/resources/60/test-other.tar.gz": "33a5342094f3bba7ccc2e0500b9e72d558f72eb99328ac8debe1d9080402f10d",  # noqa: E501
+    "http://www.openslr.org/resources/60/train-clean-100.tar.gz": "c5608bf1ef74bb621935382b8399c5cdd51cd3ee47cec51f00f885a64c6c7f6b",  # noqa: E501
+    "http://www.openslr.org/resources/60/train-clean-360.tar.gz": "ce7cff44dcac46009d18379f37ef36551123a1dc4e5c8e4eb73ae57260de4886",  # noqa: E501
+    "http://www.openslr.org/resources/60/train-other-500.tar.gz": "e35f7e34deeb2e2bdfe4403d88c8fdd5fbf64865cae41f027a185a6965f0a5df",  # noqa: E501
+}
+
+
+def load_libritts_item(
+    fileid: str,
+    path: str,
+    ext_audio: str,
+    ext_original_txt: str,
+    ext_normalized_txt: str,
+) -> Tuple[Tensor, int, str, str, int, int, str]:
+    speaker_id, chapter_id, segment_id, utterance_id = fileid.split("_")
+    utterance_id = fileid
+
+    normalized_text = utterance_id + ext_normalized_txt
+    normalized_text = os.path.join(path, speaker_id, chapter_id, normalized_text)
+
+    original_text = utterance_id + ext_original_txt
+    original_text = os.path.join(path, speaker_id, chapter_id, original_text)
+
+    file_audio = utterance_id + ext_audio
+    file_audio = os.path.join(path, speaker_id, chapter_id, file_audio)
+
+    # Load audio
+    waveform, sample_rate = torchaudio.load(file_audio)
+
+    # Load original text
+    with open(original_text) as ft:
+        original_text = ft.readline()
+
+    # Load normalized text
+    with open(normalized_text, "r") as ft:
+        normalized_text = ft.readline()
+
+    return (
+        waveform,
+        sample_rate,
+        original_text,
+        normalized_text,
+        int(speaker_id),
+        int(chapter_id),
+        utterance_id,
+    )
+
+
+class LIBRITTS(Dataset):
+    """*LibriTTS* :cite:`Zen2019LibriTTSAC` dataset.
+
+    Args:
+        root (str or Path): Path to the directory where the dataset is found or downloaded.
+        url (str, optional): The URL to download the dataset from,
+            or the type of the dataset to download.
+ Allowed type values are ``"dev-clean"``, ``"dev-other"``, ``"test-clean"``, + ``"test-other"``, ``"train-clean-100"``, ``"train-clean-360"`` and + ``"train-other-500"``. (default: ``"train-clean-100"``) + folder_in_archive (str, optional): + The top-level directory of the dataset. (default: ``"LibriTTS"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + """ + + _ext_original_txt = ".original.txt" + _ext_normalized_txt = ".normalized.txt" + _ext_audio = ".wav" + + def __init__( + self, + root: Union[str, Path], + url: str = URL, + folder_in_archive: str = FOLDER_IN_ARCHIVE, + download: bool = False, + ) -> None: + + if url in [ + "dev-clean", + "dev-other", + "test-clean", + "test-other", + "train-clean-100", + "train-clean-360", + "train-other-500", + ]: + + ext_archive = ".tar.gz" + base_url = "http://www.openslr.org/resources/60/" + + url = os.path.join(base_url, url + ext_archive) + + # Get string representation of 'root' in case Path object is passed + root = os.fspath(root) + + basename = os.path.basename(url) + archive = os.path.join(root, basename) + + basename = basename.split(".")[0] + folder_in_archive = os.path.join(folder_in_archive, basename) + + self._path = os.path.join(root, folder_in_archive) + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_tar(archive) + else: + if not os.path.exists(self._path): + raise RuntimeError( + f"The path {self._path} doesn't exist. " + "Please check the ``root`` path or set `download=True` to download it" + ) + + self._walker = sorted(str(p.stem) for p in Path(self._path).glob("*/*/*" + self._ext_audio)) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str, int, int, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Original text + str: + Normalized text + int: + Speaker ID + int: + Chapter ID + str: + Utterance ID + """ + fileid = self._walker[n] + return load_libritts_item( + fileid, + self._path, + self._ext_audio, + self._ext_original_txt, + self._ext_normalized_txt, + ) + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/ljspeech.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/ljspeech.py new file mode 100644 index 0000000000000000000000000000000000000000..9cdaeeb0f3e67a29fc57e9d0e9ed3056d98c24df --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/ljspeech.py @@ -0,0 +1,107 @@ +import csv +import os +from pathlib import Path +from typing import Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar + + +_RELEASE_CONFIGS = { + "release1": { + "folder_in_archive": "wavs", + "url": "https://data.keithito.com/data/speech/LJSpeech-1.1.tar.bz2", + "checksum": "be1a30453f28eb8dd26af4101ae40cbf2c50413b1bb21936cbcdc6fae3de8aa5", + } +} + + +class LJSPEECH(Dataset): + """*LJSpeech-1.1* :cite:`ljspeech17` dataset. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + url (str, optional): The URL to download the dataset from. 
+ (default: ``"https://data.keithito.com/data/speech/LJSpeech-1.1.tar.bz2"``) + folder_in_archive (str, optional): + The top-level directory of the dataset. (default: ``"wavs"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + """ + + def __init__( + self, + root: Union[str, Path], + url: str = _RELEASE_CONFIGS["release1"]["url"], + folder_in_archive: str = _RELEASE_CONFIGS["release1"]["folder_in_archive"], + download: bool = False, + ) -> None: + + self._parse_filesystem(root, url, folder_in_archive, download) + + def _parse_filesystem(self, root: str, url: str, folder_in_archive: str, download: bool) -> None: + root = Path(root) + + basename = os.path.basename(url) + archive = root / basename + + basename = Path(basename.split(".tar.bz2")[0]) + folder_in_archive = basename / folder_in_archive + + self._path = root / folder_in_archive + self._metadata_path = root / basename / "metadata.csv" + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _RELEASE_CONFIGS["release1"]["checksum"] + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_tar(archive) + else: + if not os.path.exists(self._path): + raise RuntimeError( + f"The path {self._path} doesn't exist. " + "Please check the ``root`` path or set `download=True` to download it" + ) + + with open(self._metadata_path, "r", newline="") as metadata: + flist = csv.reader(metadata, delimiter="|", quoting=csv.QUOTE_NONE) + self._flist = list(flist) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Transcript + str: + Normalized Transcript + """ + line = self._flist[n] + fileid, transcript, normalized_transcript = line + fileid_audio = self._path / (fileid + ".wav") + + # Load audio + waveform, sample_rate = torchaudio.load(fileid_audio) + + return ( + waveform, + sample_rate, + transcript, + normalized_transcript, + ) + + def __len__(self) -> int: + return len(self._flist) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/musdb_hq.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/musdb_hq.py new file mode 100644 index 0000000000000000000000000000000000000000..dd4bc9f340f3fde076ea31a683a7b41b7b3741d7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/musdb_hq.py @@ -0,0 +1,139 @@ +import os +from pathlib import Path +from typing import List, Optional, Tuple, Union + +import torch +import torchaudio +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_zip + +_URL = "https://zenodo.org/record/3338373/files/musdb18hq.zip" +_CHECKSUM = "baac80d0483c61d74b2e5f3be75fa557eec52898339e6aa45c1fa48833c5d21d" +_EXT = ".wav" +_SAMPLE_RATE = 44100 +_VALIDATION_SET = [ + "Actions - One Minute Smile", + "Clara Berry And Wooldog - Waltz For My Victims", + "Johnny Lokke - Promises & Lies", + "Patrick Talbot - A Reason To Leave", + "Triviul - Angelsaint", + "Alexander Ross - Goodbye Bolero", + "Fergessen - Nos Palpitants", + "Leaf - Summerghost", + "Skelpolu - Human Mistakes", + "Young Griffo - Pennies", + "ANiMAL - Rockshow", + "James May - On The Line", + "Meaxic - Take A Step", + "Traffic Experiment - Sirens", +] + + +class MUSDB_HQ(Dataset): + """*MUSDB_HQ* :cite:`MUSDB18HQ` 
dataset.
+
+    Args:
+        root (str or Path): Root directory where the dataset's top level directory is found
+        subset (str): Subset of the dataset to use. Options: [``"train"``, ``"test"``].
+        sources (List[str] or None, optional): Sources to extract data from.
+            The list can contain the following options: [``"bass"``, ``"drums"``, ``"other"``, ``"mixture"``, ``"vocals"``].
+            If ``None``, the dataset consists of all tracks except the mixture.
+            (default: ``None``)
+        split (str or None, optional): Whether to split the training set into train and validation set.
+            If ``None``, no splitting occurs. If ``train`` or ``validation``, returns the respective set.
+            (default: ``None``)
+        download (bool, optional): Whether to download the dataset if it is not found at root path.
+            (default: ``False``)
+    """
+
+    def __init__(
+        self,
+        root: Union[str, Path],
+        subset: str,
+        sources: Optional[List[str]] = None,
+        split: Optional[str] = None,
+        download: bool = False,
+    ) -> None:
+        self.sources = ["bass", "drums", "other", "vocals"] if not sources else sources
+        self.split = split
+
+        basename = os.path.basename(_URL)
+        archive = os.path.join(root, basename)
+        basename = basename.rsplit(".", 2)[0]
+
+        if subset not in ["test", "train"]:
+            raise ValueError("`subset` must be one of ['test', 'train']")
+        if self.split is not None and self.split not in ["train", "validation"]:
+            raise ValueError("`split` must be one of ['train', 'validation']")
+        base_path = os.path.join(root, basename)
+        self._path = os.path.join(base_path, subset)
+        if not os.path.isdir(self._path):
+            if not os.path.isfile(archive):
+                if not download:
+                    raise RuntimeError("Dataset not found. Please use `download=True` to download")
+                download_url_to_file(_URL, archive, hash_prefix=_CHECKSUM)
+            os.makedirs(base_path, exist_ok=True)
+            _extract_zip(archive, base_path)
+
+        self.names = self._collect_songs()
+
+    def _get_track(self, name, source):
+        return Path(self._path) / name / f"{source}{_EXT}"
+
+    def _load_sample(self, n: int) -> Tuple[torch.Tensor, int, int, str]:
+        name = self.names[n]
+        wavs = []
+        num_frames = None
+        for source in self.sources:
+            track = self._get_track(name, source)
+            wav, sr = torchaudio.load(str(track))
+            if sr != _SAMPLE_RATE:
+                raise ValueError(f"expected sample rate {_SAMPLE_RATE}, but got {sr}")
+            if num_frames is None:
+                num_frames = wav.shape[-1]
+            else:
+                if wav.shape[-1] != num_frames:
+                    raise ValueError("num_frames do not match across sources")
+            wavs.append(wav)
+
+        stacked = torch.stack(wavs)
+
+        return stacked, _SAMPLE_RATE, num_frames, name
+
+    def _collect_songs(self):
+        if self.split == "validation":
+            return _VALIDATION_SET
+        path = Path(self._path)
+        names = []
+        for root, folders, _ in os.walk(path, followlinks=True):
+            root = Path(root)
+            if root.name.startswith(".") or folders or root == path:
+                continue
+            name = str(root.relative_to(path))
+            if self.split and name in _VALIDATION_SET:
+                continue
+            names.append(name)
+        return sorted(names)
+
+    def __getitem__(self, n: int) -> Tuple[torch.Tensor, int, int, str]:
+        """Load the n-th sample from the dataset.
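+
+        Example (an illustrative sketch, not part of the upstream docstring; assumes the
+        archive was downloaded and extracted under a hypothetical ``./data`` directory)::
+
+            >>> dataset = MUSDB_HQ("./data", subset="train", download=True)
+            >>> waveform, sample_rate, num_frames, name = dataset[0]
+            >>> waveform.shape[0]  # one entry per requested source (default: 4)
+            4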
+ + Args: + n (int): The index of the sample to be loaded + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + int: + Num frames + str: + Track name + """ + return self._load_sample(n) + + def __len__(self) -> int: + return len(self.names) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/quesst14.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/quesst14.py new file mode 100644 index 0000000000000000000000000000000000000000..064423c4494850f2ad8f43fb00a956be21fcb95e --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/quesst14.py @@ -0,0 +1,136 @@ +import os +import re +from pathlib import Path +from typing import Optional, Tuple, Union + +import torch +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar, _load_waveform + + +URL = "https://speech.fit.vutbr.cz/files/quesst14Database.tgz" +SAMPLE_RATE = 8000 +_CHECKSUM = "4f869e06bc066bbe9c5dde31dbd3909a0870d70291110ebbb38878dcbc2fc5e4" +_LANGUAGES = [ + "albanian", + "basque", + "czech", + "nnenglish", + "romanian", + "slovak", +] + + +class QUESST14(Dataset): + """*QUESST14* :cite:`Mir2015QUESST2014EQ` dataset. + + Args: + root (str or Path): Root directory where the dataset's top level directory is found + subset (str): Subset of the dataset to use. Options: [``"docs"``, ``"dev"``, ``"eval"``]. + language (str or None, optional): Language to get dataset for. + Options: [``None``, ``albanian``, ``basque``, ``czech``, ``nnenglish``, ``romanian``, ``slovak``]. + If ``None``, dataset consists of all languages. (default: ``"nnenglish"``) + download (bool, optional): Whether to download the dataset if it is not found at root path. + (default: ``False``) + """ + + def __init__( + self, + root: Union[str, Path], + subset: str, + language: Optional[str] = "nnenglish", + download: bool = False, + ) -> None: + if subset not in ["docs", "dev", "eval"]: + raise ValueError("`subset` must be one of ['docs', 'dev', 'eval']") + + if language is not None and language not in _LANGUAGES: + raise ValueError(f"`language` must be None or one of {str(_LANGUAGES)}") + + # Get string representation of 'root' + root = os.fspath(root) + + basename = os.path.basename(URL) + archive = os.path.join(root, basename) + + basename = basename.rsplit(".", 2)[0] + self._path = os.path.join(root, basename) + + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + if not download: + raise RuntimeError("Dataset not found. Please use `download=True` to download") + download_url_to_file(URL, archive, hash_prefix=_CHECKSUM) + _extract_tar(archive, root) + + if subset == "docs": + self.data = filter_audio_paths(self._path, language, "language_key_utterances.lst") + elif subset == "dev": + self.data = filter_audio_paths(self._path, language, "language_key_dev.lst") + elif subset == "eval": + self.data = filter_audio_paths(self._path, language, "language_key_eval.lst") + + def get_metadata(self, n: int) -> Tuple[str, int, str]: + """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform, + but otherwise returns the same fields as :py:func:`__getitem__`. 
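+
+        Example (an illustrative sketch, not part of the upstream docstring; assumes the
+        archive was downloaded and extracted under a hypothetical ``./data`` directory)::
+
+            >>> dataset = QUESST14("./data", subset="docs", language="nnenglish", download=True)
+            >>> relpath, sample_rate, file_name = dataset.get_metadata(0)
+            >>> sample_rate
+            8000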
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            str:
+                Path to audio
+            int:
+                Sample rate
+            str:
+                File name
+        """
+        audio_path = self.data[n]
+        relpath = os.path.relpath(audio_path, self._path)
+        return relpath, SAMPLE_RATE, audio_path.with_suffix("").name
+
+    def __getitem__(self, n: int) -> Tuple[torch.Tensor, int, str]:
+        """Load the n-th sample from the dataset.
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            Tensor:
+                Waveform
+            int:
+                Sample rate
+            str:
+                File name
+        """
+        metadata = self.get_metadata(n)
+        waveform = _load_waveform(self._path, metadata[0], metadata[1])
+        return (waveform,) + metadata[1:]
+
+    def __len__(self) -> int:
+        return len(self.data)
+
+
+def filter_audio_paths(
+    path: str,
+    language: str,
+    lst_name: str,
+):
+    """Extract audio paths for the given language."""
+    audio_paths = []
+
+    path = Path(path)
+    with open(path / "scoring" / lst_name) as f:
+        for line in f:
+            audio_path, lang = line.strip().split()
+            if language is not None and lang != language:
+                continue
+            audio_path = re.sub(r"^.*?\/", "", audio_path)
+            audio_paths.append(path / audio_path)
+
+    return audio_paths
diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/snips.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/snips.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b15d677f7fa1f9c1baccad7625a6fa14c73d70f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/snips.py
@@ -0,0 +1,157 @@
+import os
+from pathlib import Path
+from typing import List, Optional, Tuple, Union
+
+import torch
+from torch.utils.data import Dataset
+from torchaudio.datasets.utils import _load_waveform
+
+
+_SAMPLE_RATE = 16000
+_SPEAKERS = [
+    "Aditi",
+    "Amy",
+    "Brian",
+    "Emma",
+    "Geraint",
+    "Ivy",
+    "Joanna",
+    "Joey",
+    "Justin",
+    "Kendra",
+    "Kimberly",
+    "Matthew",
+    "Nicole",
+    "Raveena",
+    "Russell",
+    "Salli",
+]
+
+
+def _load_labels(file: Path, subset: str):
+    """Load transcript, IOB, and intent labels for all utterances.
+
+    Args:
+        file (Path): The path to the label file.
+        subset (str): Subset of the dataset to use. Options: [``"train"``, ``"valid"``, ``"test"``].
+
+    Returns:
+        Dictionary of labels, where the key is the filename of the audio,
+        and the label is a Tuple of transcript, Inside–outside–beginning (IOB) label, and intention label.
+    """
+    labels = {}
+    with open(file, "r") as f:
+        for line in f:
+            line = line.strip().split(" ")
+            index = line[0]
+            trans, iob_intent = " ".join(line[1:]).split("\t")
+            trans = " ".join(trans.split(" ")[1:-1])
+            iob = " ".join(iob_intent.split(" ")[1:-1])
+            intent = iob_intent.split(" ")[-1]
+            if subset in index:
+                labels[index] = (trans, iob, intent)
+    return labels
+
+
+class Snips(Dataset):
+    """*Snips* :cite:`coucke2018snips` dataset.
+
+    Args:
+        root (str or Path): Root directory where the dataset's top level directory is found.
+        subset (str): Subset of the dataset to use. Options: [``"train"``, ``"valid"``, ``"test"``].
+        speakers (List[str] or None, optional): The speaker list to include in the dataset. If ``None``,
+            include all speakers in the subset. (Default: ``None``)
+        audio_format (str, optional): The extension of the audio files. Options: [``"mp3"``, ``"wav"``].
+ (Default: ``"mp3"``) + """ + + _trans_file = "all.iob.snips.txt" + + def __init__( + self, + root: Union[str, Path], + subset: str, + speakers: Optional[List[str]] = None, + audio_format: str = "mp3", + ) -> None: + if subset not in ["train", "valid", "test"]: + raise ValueError('`subset` must be one of ["train", "valid", "test"].') + if audio_format not in ["mp3", "wav"]: + raise ValueError('`audio_format` must be one of ["mp3", "wav].') + + root = Path(root) + self._path = root / "SNIPS" + self.audio_path = self._path / subset + if speakers is None: + speakers = _SPEAKERS + + if not os.path.isdir(self._path): + raise RuntimeError("Dataset not found.") + + self.audio_paths = self.audio_path.glob(f"*.{audio_format}") + self.data = [] + for audio_path in sorted(self.audio_paths): + audio_name = str(audio_path.name) + speaker = audio_name.split("-")[0] + if speaker in speakers: + self.data.append(audio_path) + transcript_path = self._path / self._trans_file + self.labels = _load_labels(transcript_path, subset) + + def get_metadata(self, n: int) -> Tuple[str, int, str, str, str]: + """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform, + but otherwise returns the same fields as :py:func:`__getitem__`. + + Args: + n (int): The index of the sample to be loaded. + + Returns: + Tuple of the following items: + + str: + Path to audio + int: + Sample rate + str: + File name + str: + Transcription of audio + str: + Inside–outside–beginning (IOB) label of transcription + str: + Intention label of the audio. + """ + audio_path = self.data[n] + relpath = os.path.relpath(audio_path, self._path) + file_name = audio_path.with_suffix("").name + transcript, iob, intent = self.labels[file_name] + return relpath, _SAMPLE_RATE, file_name, transcript, iob, intent + + def __getitem__(self, n: int) -> Tuple[torch.Tensor, int, str, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items: + + Tensor: + Waveform + int: + Sample rate + str: + File name + str: + Transcription of audio + str: + Inside–outside–beginning (IOB) label of transcription + str: + Intention label of the audio. 
+ """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._path, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] + + def __len__(self) -> int: + return len(self.data) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/speechcommands.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/speechcommands.py new file mode 100644 index 0000000000000000000000000000000000000000..1945fc75c18b474404b733e43d50156f3c3d6652 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/speechcommands.py @@ -0,0 +1,183 @@ +import os +from pathlib import Path +from typing import Optional, Tuple, Union + +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar, _load_waveform + +FOLDER_IN_ARCHIVE = "SpeechCommands" +URL = "speech_commands_v0.02" +HASH_DIVIDER = "_nohash_" +EXCEPT_FOLDER = "_background_noise_" +SAMPLE_RATE = 16000 +_CHECKSUMS = { + "http://download.tensorflow.org/data/speech_commands_v0.01.tar.gz": "743935421bb51cccdb6bdd152e04c5c70274e935c82119ad7faeec31780d811d", # noqa: E501 + "http://download.tensorflow.org/data/speech_commands_v0.02.tar.gz": "af14739ee7dc311471de98f5f9d2c9191b18aedfe957f4a6ff791c709868ff58", # noqa: E501 +} + + +def _load_list(root, *filenames): + output = [] + for filename in filenames: + filepath = os.path.join(root, filename) + with open(filepath) as fileobj: + output += [os.path.normpath(os.path.join(root, line.strip())) for line in fileobj] + return output + + +def _get_speechcommands_metadata(filepath: str, path: str) -> Tuple[str, int, str, str, int]: + relpath = os.path.relpath(filepath, path) + reldir, filename = os.path.split(relpath) + _, label = os.path.split(reldir) + # Besides the officially supported split method for datasets defined by "validation_list.txt" + # and "testing_list.txt" over "speech_commands_v0.0x.tar.gz" archives, an alternative split + # method referred to in paragraph 2-3 of Section 7.1, references 13 and 14 of the original + # paper, and the checksums file from the tensorflow_datasets package [1] is also supported. + # Some filenames in those "speech_commands_test_set_v0.0x.tar.gz" archives have the form + # "xxx.wav.wav", so file extensions twice needs to be stripped twice. + # [1] https://github.com/tensorflow/datasets/blob/master/tensorflow_datasets/url_checksums/speech_commands.txt + speaker, _ = os.path.splitext(filename) + speaker, _ = os.path.splitext(speaker) + + speaker_id, utterance_number = speaker.split(HASH_DIVIDER) + utterance_number = int(utterance_number) + + return relpath, SAMPLE_RATE, label, speaker_id, utterance_number + + +class SPEECHCOMMANDS(Dataset): + """*Speech Commands* :cite:`speechcommandsv2` dataset. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + url (str, optional): The URL to download the dataset from, + or the type of the dataset to dowload. + Allowed type values are ``"speech_commands_v0.01"`` and ``"speech_commands_v0.02"`` + (default: ``"speech_commands_v0.02"``) + folder_in_archive (str, optional): + The top-level directory of the dataset. (default: ``"SpeechCommands"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + subset (str or None, optional): + Select a subset of the dataset [None, "training", "validation", "testing"]. None means + the whole dataset. 
"validation" and "testing" are defined in "validation_list.txt" and + "testing_list.txt", respectively, and "training" is the rest. Details for the files + "validation_list.txt" and "testing_list.txt" are explained in the README of the dataset + and in the introduction of Section 7 of the original paper and its reference 12. The + original paper can be found `here `_. (Default: ``None``) + """ + + def __init__( + self, + root: Union[str, Path], + url: str = URL, + folder_in_archive: str = FOLDER_IN_ARCHIVE, + download: bool = False, + subset: Optional[str] = None, + ) -> None: + + if subset is not None and subset not in ["training", "validation", "testing"]: + raise ValueError("When `subset` is not None, it must be one of ['training', 'validation', 'testing'].") + + if url in [ + "speech_commands_v0.01", + "speech_commands_v0.02", + ]: + base_url = "http://download.tensorflow.org/data/" + ext_archive = ".tar.gz" + + url = os.path.join(base_url, url + ext_archive) + + # Get string representation of 'root' in case Path object is passed + root = os.fspath(root) + self._archive = os.path.join(root, folder_in_archive) + + basename = os.path.basename(url) + archive = os.path.join(root, basename) + + basename = basename.rsplit(".", 2)[0] + folder_in_archive = os.path.join(folder_in_archive, basename) + + self._path = os.path.join(root, folder_in_archive) + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_tar(archive, self._path) + else: + if not os.path.exists(self._path): + raise RuntimeError( + f"The path {self._path} doesn't exist. " + "Please check the ``root`` path or set `download=True` to download it" + ) + + if subset == "validation": + self._walker = _load_list(self._path, "validation_list.txt") + elif subset == "testing": + self._walker = _load_list(self._path, "testing_list.txt") + elif subset == "training": + excludes = set(_load_list(self._path, "validation_list.txt", "testing_list.txt")) + walker = sorted(str(p) for p in Path(self._path).glob("*/*.wav")) + self._walker = [ + w + for w in walker + if HASH_DIVIDER in w and EXCEPT_FOLDER not in w and os.path.normpath(w) not in excludes + ] + else: + walker = sorted(str(p) for p in Path(self._path).glob("*/*.wav")) + self._walker = [w for w in walker if HASH_DIVIDER in w and EXCEPT_FOLDER not in w] + + def get_metadata(self, n: int) -> Tuple[str, int, str, str, int]: + """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform, + but otherwise returns the same fields as :py:func:`__getitem__`. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + str: + Path to the audio + int: + Sample rate + str: + Label + str: + Speaker ID + int: + Utterance number + """ + fileid = self._walker[n] + return _get_speechcommands_metadata(fileid, self._archive) + + def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str, int]: + """Load the n-th sample from the dataset. 
+ + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + str: + Label + str: + Speaker ID + int: + Utterance number + """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._archive, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] + + def __len__(self) -> int: + return len(self._walker) diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/tedlium.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/tedlium.py new file mode 100644 index 0000000000000000000000000000000000000000..7e7d22195a772d18770f6db3253d83672743c81c --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/tedlium.py @@ -0,0 +1,218 @@ +import os +from pathlib import Path +from typing import Tuple, Union + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_tar + + +_RELEASE_CONFIGS = { + "release1": { + "folder_in_archive": "TEDLIUM_release1", + "url": "http://www.openslr.org/resources/7/TEDLIUM_release1.tar.gz", + "checksum": "30301975fd8c5cac4040c261c0852f57cfa8adbbad2ce78e77e4986957445f27", + "data_path": "", + "subset": "train", + "supported_subsets": ["train", "test", "dev"], + "dict": "TEDLIUM.150K.dic", + }, + "release2": { + "folder_in_archive": "TEDLIUM_release2", + "url": "http://www.openslr.org/resources/19/TEDLIUM_release2.tar.gz", + "checksum": "93281b5fcaaae5c88671c9d000b443cb3c7ea3499ad12010b3934ca41a7b9c58", + "data_path": "", + "subset": "train", + "supported_subsets": ["train", "test", "dev"], + "dict": "TEDLIUM.152k.dic", + }, + "release3": { + "folder_in_archive": "TEDLIUM_release-3", + "url": "http://www.openslr.org/resources/51/TEDLIUM_release-3.tgz", + "checksum": "ad1e454d14d1ad550bc2564c462d87c7a7ec83d4dc2b9210f22ab4973b9eccdb", + "data_path": "data/", + "subset": "train", + "supported_subsets": ["train", "test", "dev"], + "dict": "TEDLIUM.152k.dic", + }, +} + + +class TEDLIUM(Dataset): + """*Tedlium* :cite:`rousseau2012tedlium` dataset (releases 1,2 and 3). + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + release (str, optional): Release version. + Allowed values are ``"release1"``, ``"release2"`` or ``"release3"``. + (default: ``"release1"``). + subset (str, optional): The subset of dataset to use. Valid options are ``"train"``, ``"dev"``, + and ``"test"``. Defaults to ``"train"``. + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). 
+        audio_ext (str, optional): extension for audio file (default: ``".sph"``)
+    """
+
+    def __init__(
+        self,
+        root: Union[str, Path],
+        release: str = "release1",
+        subset: str = "train",
+        download: bool = False,
+        audio_ext: str = ".sph",
+    ) -> None:
+        self._ext_audio = audio_ext
+        if release in _RELEASE_CONFIGS.keys():
+            folder_in_archive = _RELEASE_CONFIGS[release]["folder_in_archive"]
+            url = _RELEASE_CONFIGS[release]["url"]
+            subset = subset if subset else _RELEASE_CONFIGS[release]["subset"]
+        else:
+            # Raise error for unsupported release
+            raise RuntimeError(
+                "The release {} does not match any of the supported tedlium releases {}".format(
+                    release,
+                    _RELEASE_CONFIGS.keys(),
+                )
+            )
+        if subset not in _RELEASE_CONFIGS[release]["supported_subsets"]:
+            # Raise error for unsupported subset
+            raise RuntimeError(
+                "The subset {} does not match any of the supported tedlium subsets {}".format(
+                    subset,
+                    _RELEASE_CONFIGS[release]["supported_subsets"],
+                )
+            )
+
+        # Get string representation of 'root' in case Path object is passed
+        root = os.fspath(root)
+
+        basename = os.path.basename(url)
+        archive = os.path.join(root, basename)
+
+        basename = basename.split(".")[0]
+
+        if release == "release3":
+            if subset == "train":
+                self._path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release]["data_path"])
+            else:
+                self._path = os.path.join(root, folder_in_archive, "legacy", subset)
+        else:
+            self._path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release]["data_path"], subset)
+
+        if download:
+            if not os.path.isdir(self._path):
+                if not os.path.isfile(archive):
+                    checksum = _RELEASE_CONFIGS[release]["checksum"]
+                    download_url_to_file(url, archive, hash_prefix=checksum)
+                _extract_tar(archive)
+        else:
+            if not os.path.exists(self._path):
+                raise RuntimeError(
+                    f"The path {self._path} doesn't exist. "
+                    "Please check the ``root`` path or set `download=True` to download it"
+                )
+
+        # Create list for all samples
+        self._filelist = []
+        stm_path = os.path.join(self._path, "stm")
+        for file in sorted(os.listdir(stm_path)):
+            if file.endswith(".stm"):
+                stm_path = os.path.join(self._path, "stm", file)
+                with open(stm_path) as f:
+                    num_lines = len(f.readlines())
+                file = file.replace(".stm", "")
+                self._filelist.extend((file, line) for line in range(num_lines))
+        # Create dict path for later read
+        self._dict_path = os.path.join(root, folder_in_archive, _RELEASE_CONFIGS[release]["dict"])
+        self._phoneme_dict = None
+
+    def _load_tedlium_item(self, fileid: str, line: int, path: str) -> Tuple[Tensor, int, str, str, str, str]:
+        """Loads a TEDLIUM dataset sample given a file name and corresponding sentence name.
+
+        Args:
+            fileid (str): File id to identify both text and audio files corresponding to the sample
+            line (int): Line identifier for the sample inside the text file
+            path (str): Dataset root path
+
+        Returns:
+            (Tensor, int, str, str, str, str):
+            ``(waveform, sample_rate, transcript, talk_id, speaker_id, identifier)``
+        """
+        transcript_path = os.path.join(path, "stm", fileid)
+        with open(transcript_path + ".stm") as f:
+            transcript = f.readlines()[line]
+            talk_id, _, speaker_id, start_time, end_time, identifier, transcript = transcript.split(" ", 6)
+
+        wave_path = os.path.join(path, "sph", fileid)
+        waveform, sample_rate = self._load_audio(wave_path + self._ext_audio, start_time=start_time, end_time=end_time)
+
+        return (waveform, sample_rate, transcript, talk_id, speaker_id, identifier)
+
+    def _load_audio(self, path: str, start_time: float, end_time: float, sample_rate: int = 16000) -> Tuple[Tensor, int]:
+        """Default load function used in TEDLIUM dataset, you can override this function to customize functionality
+        and load individual sentences from a full TED audio talk file.
+
+        Args:
+            path (str): Path to audio file
+            start_time (float): Time in seconds where the sample sentence starts
+            end_time (float): Time in seconds where the sample sentence finishes
+            sample_rate (int, optional): Sampling rate
+
+        Returns:
+            (Tensor, int): Audio tensor representation and sample rate
+        """
+        start_time = int(float(start_time) * sample_rate)
+        end_time = int(float(end_time) * sample_rate)
+
+        kwargs = {"frame_offset": start_time, "num_frames": end_time - start_time}
+
+        return torchaudio.load(path, **kwargs)
+
+    def __getitem__(self, n: int) -> Tuple[Tensor, int, str, str, str, str]:
+        """Load the n-th sample from the dataset.
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            Tensor:
+                Waveform
+            int:
+                Sample rate
+            str:
+                Transcript
+            str:
+                Talk ID
+            str:
+                Speaker ID
+            str:
+                Identifier
+        """
+        fileid, line = self._filelist[n]
+        return self._load_tedlium_item(fileid, line, self._path)
+
+    def __len__(self) -> int:
+        """TEDLIUM dataset custom function overriding the default ``len`` behaviour.
+
+        Returns:
+            int: TEDLIUM dataset length
+        """
+        return len(self._filelist)
+
+    @property
+    def phoneme_dict(self):
+        """dict[str, tuple[str]]: Phonemes. Mapping from word to tuple of phonemes.
+        Note that some words have empty phonemes.
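+
+        Example (an illustrative sketch, not part of the upstream docstring; assumes
+        ``release1`` is available under a hypothetical ``./data`` directory)::
+
+            >>> dataset = TEDLIUM("./data", release="release1", subset="dev")
+            >>> phonemes = dataset.phoneme_dict
+            >>> isinstance(phonemes, dict)
+            True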
+ """ + # Read phoneme dictionary + if not self._phoneme_dict: + self._phoneme_dict = {} + with open(self._dict_path, "r", encoding="utf-8") as f: + for line in f.readlines(): + content = line.strip().split() + self._phoneme_dict[content[0]] = tuple(content[1:]) # content[1:] can be empty list + return self._phoneme_dict.copy() diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/utils.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b4599f83aae535d5c4126b5d0bab4ed325f494f3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/utils.py @@ -0,0 +1,54 @@ +import logging +import os +import tarfile +import zipfile +from typing import Any, List, Optional + +import torchaudio + +_LG = logging.getLogger(__name__) + + +def _extract_tar(from_path: str, to_path: Optional[str] = None, overwrite: bool = False) -> List[str]: + if to_path is None: + to_path = os.path.dirname(from_path) + with tarfile.open(from_path, "r") as tar: + files = [] + for file_ in tar: # type: Any + file_path = os.path.join(to_path, file_.name) + if file_.isfile(): + files.append(file_path) + if os.path.exists(file_path): + _LG.info("%s already extracted.", file_path) + if not overwrite: + continue + tar.extract(file_, to_path) + return files + + +def _extract_zip(from_path: str, to_path: Optional[str] = None, overwrite: bool = False) -> List[str]: + if to_path is None: + to_path = os.path.dirname(from_path) + + with zipfile.ZipFile(from_path, "r") as zfile: + files = zfile.namelist() + for file_ in files: + file_path = os.path.join(to_path, file_) + if os.path.exists(file_path): + _LG.info("%s already extracted.", file_path) + if not overwrite: + continue + zfile.extract(file_, to_path) + return files + + +def _load_waveform( + root: str, + filename: str, + exp_sample_rate: int, +): + path = os.path.join(root, filename) + waveform, sample_rate = torchaudio.load(path) + if exp_sample_rate != sample_rate: + raise ValueError(f"sample rate should be {exp_sample_rate}, but got {sample_rate}") + return waveform diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/vctk.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/vctk.py new file mode 100644 index 0000000000000000000000000000000000000000..3195b9b4276b643e934baadc26c872fc690383df --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/vctk.py @@ -0,0 +1,143 @@ +import os +from typing import Tuple + +import torchaudio +from torch import Tensor +from torch.utils.data import Dataset +from torchaudio._internal import download_url_to_file +from torchaudio.datasets.utils import _extract_zip + +URL = "https://datashare.is.ed.ac.uk/bitstream/handle/10283/3443/VCTK-Corpus-0.92.zip" +_CHECKSUMS = { + "https://datashare.is.ed.ac.uk/bitstream/handle/10283/3443/VCTK-Corpus-0.92.zip": "f96258be9fdc2cbff6559541aae7ea4f59df3fcaf5cf963aae5ca647357e359c" # noqa: E501 +} + + +SampleType = Tuple[Tensor, int, str, str, str] + + +class VCTK_092(Dataset): + """*VCTK 0.92* :cite:`yamagishi2019vctk` dataset + + Args: + root (str): Root directory where the dataset's top level directory is found. + mic_id (str, optional): Microphone ID. Either ``"mic1"`` or ``"mic2"``. (default: ``"mic2"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (default: ``False``). + url (str, optional): The URL to download the dataset from. 
+ (default: ``"https://datashare.is.ed.ac.uk/bitstream/handle/10283/3443/VCTK-Corpus-0.92.zip"``) + audio_ext (str, optional): Custom audio extension if dataset is converted to non-default audio format. + + Note: + * All the speeches from speaker ``p315`` will be skipped due to the lack of the corresponding text files. + * All the speeches from ``p280`` will be skipped for ``mic_id="mic2"`` due to the lack of the audio files. + * Some of the speeches from speaker ``p362`` will be skipped due to the lack of the audio files. + * See Also: https://datashare.is.ed.ac.uk/handle/10283/3443 + """ + + def __init__( + self, + root: str, + mic_id: str = "mic2", + download: bool = False, + url: str = URL, + audio_ext=".flac", + ): + if mic_id not in ["mic1", "mic2"]: + raise RuntimeError(f'`mic_id` has to be either "mic1" or "mic2". Found: {mic_id}') + + archive = os.path.join(root, "VCTK-Corpus-0.92.zip") + + self._path = os.path.join(root, "VCTK-Corpus-0.92") + self._txt_dir = os.path.join(self._path, "txt") + self._audio_dir = os.path.join(self._path, "wav48_silence_trimmed") + self._mic_id = mic_id + self._audio_ext = audio_ext + + if download: + if not os.path.isdir(self._path): + if not os.path.isfile(archive): + checksum = _CHECKSUMS.get(url, None) + download_url_to_file(url, archive, hash_prefix=checksum) + _extract_zip(archive, self._path) + + if not os.path.isdir(self._path): + raise RuntimeError("Dataset not found. Please use `download=True` to download it.") + + # Extracting speaker IDs from the folder structure + self._speaker_ids = sorted(os.listdir(self._txt_dir)) + self._sample_ids = [] + + """ + Due to some insufficient data complexity in the 0.92 version of this dataset, + we start traversing the audio folder structure in accordance with the text folder. + As some of the audio files are missing of either ``mic_1`` or ``mic_2`` but the + text is present for the same, we first check for the existence of the audio file + before adding it to the ``sample_ids`` list. + + Once the ``audio_ids`` are loaded into memory we can quickly access the list for + different parameters required by the user. + """ + for speaker_id in self._speaker_ids: + if speaker_id == "p280" and mic_id == "mic2": + continue + utterance_dir = os.path.join(self._txt_dir, speaker_id) + for utterance_file in sorted(f for f in os.listdir(utterance_dir) if f.endswith(".txt")): + utterance_id = os.path.splitext(utterance_file)[0] + audio_path_mic = os.path.join( + self._audio_dir, + speaker_id, + f"{utterance_id}_{mic_id}{self._audio_ext}", + ) + if speaker_id == "p362" and not os.path.isfile(audio_path_mic): + continue + self._sample_ids.append(utterance_id.split("_")) + + def _load_text(self, file_path) -> str: + with open(file_path) as file_path: + return file_path.readlines()[0] + + def _load_audio(self, file_path) -> Tuple[Tensor, int]: + return torchaudio.load(file_path) + + def _load_sample(self, speaker_id: str, utterance_id: str, mic_id: str) -> SampleType: + transcript_path = os.path.join(self._txt_dir, speaker_id, f"{speaker_id}_{utterance_id}.txt") + audio_path = os.path.join( + self._audio_dir, + speaker_id, + f"{speaker_id}_{utterance_id}_{mic_id}{self._audio_ext}", + ) + + # Reading text + transcript = self._load_text(transcript_path) + + # Reading FLAC + waveform, sample_rate = self._load_audio(audio_path) + + return (waveform, sample_rate, transcript, speaker_id, utterance_id) + + def __getitem__(self, n: int) -> SampleType: + """Load the n-th sample from the dataset. 
+
+        Args:
+            n (int): The index of the sample to be loaded
+
+        Returns:
+            Tuple of the following items;
+
+            Tensor:
+                Waveform
+            int:
+                Sample rate
+            str:
+                Transcript
+            str:
+                Speaker ID
+            str:
+                Utterance ID
+        """
+        speaker_id, utterance_id = self._sample_ids[n]
+        return self._load_sample(speaker_id, utterance_id, self._mic_id)
+
+    def __len__(self) -> int:
+        return len(self._sample_ids)
diff --git a/venv/lib/python3.10/site-packages/torchaudio/datasets/voxceleb1.py b/venv/lib/python3.10/site-packages/torchaudio/datasets/voxceleb1.py
new file mode 100644
index 0000000000000000000000000000000000000000..5112fff0898a88adb1d2c33acf9bdd905ca883f3
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/datasets/voxceleb1.py
@@ -0,0 +1,309 @@
+import os
+from pathlib import Path
+from typing import List, Tuple, Union
+
+from torch import Tensor
+from torch.utils.data import Dataset
+from torchaudio._internal import download_url_to_file
+from torchaudio.datasets.utils import _extract_zip, _load_waveform
+
+
+SAMPLE_RATE = 16000
+_ARCHIVE_CONFIGS = {
+    "dev": {
+        "archive_name": "vox1_dev_wav.zip",
+        "urls": [
+            "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partaa",
+            "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partab",
+            "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partac",
+            "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_dev_wav_partad",
+        ],
+        "checksums": [
+            "21ec6ca843659ebc2fdbe04b530baa4f191ad4b0971912672d92c158f32226a0",
+            "311d21e0c8cbf33573a4fce6c80e5a279d80736274b381c394319fc557159a04",
+            "92b64465f2b2a3dc0e4196ae8dd6828cbe9ddd1f089419a11e4cbfe2e1750df0",
+            "00e6190c770b27f27d2a3dd26ee15596b17066b715ac111906861a7d09a211a5",
+        ],
+    },
+    "test": {
+        "archive_name": "vox1_test_wav.zip",
+        "url": "https://thor.robots.ox.ac.uk/~vgg/data/voxceleb/vox1a/vox1_test_wav.zip",
+        "checksum": "8de57f347fe22b2c24526e9f444f689ecf5096fc2a92018cf420ff6b5b15eaea",
+    },
+}
+_IDEN_SPLIT_URL = "https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/iden_split.txt"
+_VERI_TEST_URL = "https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/veri_test.txt"
+
+
+def _download_extract_wavs(root: str):
+    for archive in ["dev", "test"]:
+        archive_name = _ARCHIVE_CONFIGS[archive]["archive_name"]
+        archive_path = os.path.join(root, archive_name)
+        # The zip file of dev data is split into 4 chunks.
+        # Download and combine them into one file before extraction.
+        if archive == "dev":
+            urls = _ARCHIVE_CONFIGS[archive]["urls"]
+            checksums = _ARCHIVE_CONFIGS[archive]["checksums"]
+            with open(archive_path, "wb") as f:
+                for url, checksum in zip(urls, checksums):
+                    file_path = os.path.join(root, os.path.basename(url))
+                    download_url_to_file(url, file_path, hash_prefix=checksum)
+                    with open(file_path, "rb") as f_split:
+                        f.write(f_split.read())
+        else:
+            url = _ARCHIVE_CONFIGS[archive]["url"]
+            checksum = _ARCHIVE_CONFIGS[archive]["checksum"]
+            download_url_to_file(url, archive_path, hash_prefix=checksum)
+        _extract_zip(archive_path)
+
+
+def _get_flist(root: str, file_path: str, subset: str) -> List[str]:
+    f_list = []
+    if subset == "train":
+        index = 1
+    elif subset == "dev":
+        index = 2
+    else:
+        index = 3
+    with open(file_path, "r") as f:
+        for line in f:
+            subset_id, path = line.split()
+            if int(subset_id) == index:
+                f_list.append(path)
+    return sorted(f_list)
+
+
+def _get_paired_flist(root: str, veri_test_path: str) -> List[Tuple[str, str, str]]:
+    f_list = []
+    with open(veri_test_path, "r") as f:
+        for line in f:
+            label, path1, path2 = line.split()
+            f_list.append((label, path1, path2))
+    return f_list
+
+
+def _get_file_id(file_path: str, _ext_audio: str) -> str:
+    speaker_id, youtube_id, utterance_id = file_path.split("/")[-3:]
+    utterance_id = utterance_id.replace(_ext_audio, "")
+    file_id = "-".join([speaker_id, youtube_id, utterance_id])
+    return file_id
+
+
+class VoxCeleb1(Dataset):
+    """*VoxCeleb1* :cite:`nagrani2017voxceleb` dataset.
+
+    Args:
+        root (str or Path): Path to the directory where the dataset is found or downloaded.
+        download (bool, optional):
+            Whether to download the dataset if it is not found at root path. (Default: ``False``).
+    """
+
+    _ext_audio = ".wav"
+
+    def __init__(self, root: Union[str, Path], download: bool = False) -> None:
+        # Get string representation of 'root' in case Path object is passed
+        root = os.fspath(root)
+        self._path = os.path.join(root, "wav")
+        if not os.path.isdir(self._path):
+            if not download:
+                raise RuntimeError(
+                    f"Dataset not found at {self._path}. Please set `download=True` to download the dataset."
+                )
+            _download_extract_wavs(root)
+
+    def get_metadata(self, n: int):
+        raise NotImplementedError
+
+    def __getitem__(self, n: int):
+        raise NotImplementedError
+
+    def __len__(self) -> int:
+        raise NotImplementedError
+
+
+class VoxCeleb1Identification(VoxCeleb1):
+    """*VoxCeleb1* :cite:`nagrani2017voxceleb` dataset for speaker identification task.
+
+    Each data sample contains the waveform, sample rate, speaker id, and the file id.
+
+    Args:
+        root (str or Path): Path to the directory where the dataset is found or downloaded.
+        subset (str, optional): Subset of the dataset to use. Options: ["train", "dev", "test"]. (Default: ``"train"``)
+        meta_url (str, optional): The url of the meta file that contains the list of subset labels and file paths.
+            The format of each row is ``subset file_path``. For example: ``1 id10006/nLEBBc9oIFs/00003.wav``.
+            ``1``, ``2``, ``3`` mean the ``train``, ``dev``, and ``test`` subset, respectively.
+            (Default: ``"https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/iden_split.txt"``)
+        download (bool, optional):
+            Whether to download the dataset if it is not found at root path. (Default: ``False``).
+
+    Note:
+        The file structure of `VoxCeleb1Identification` dataset is as follows:
+
+        └─ root/
+
+         └─ wav/
+
+          └─ speaker_id folders
+
+        Users who pre-downloaded the ``"vox1_dev_wav.zip"`` and ``"vox1_test_wav.zip"`` files need to move
+        the extracted files into the same ``root`` directory.
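+
+    Example:
+        A minimal usage sketch; the ``root`` path is illustrative, and the data is
+        assumed to be available locally (otherwise pass ``download=True``)::
+
+            >>> dataset = VoxCeleb1Identification("./VoxCeleb1", subset="train")
+            >>> waveform, sample_rate, speaker_id, file_id = dataset[0]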
+ """ + + def __init__( + self, root: Union[str, Path], subset: str = "train", meta_url: str = _IDEN_SPLIT_URL, download: bool = False + ) -> None: + super().__init__(root, download) + if subset not in ["train", "dev", "test"]: + raise ValueError("`subset` must be one of ['train', 'dev', 'test']") + # download the iden_split.txt to get the train, dev, test lists. + meta_list_path = os.path.join(root, os.path.basename(meta_url)) + if not os.path.exists(meta_list_path): + download_url_to_file(meta_url, meta_list_path) + self._flist = _get_flist(self._path, meta_list_path, subset) + + def get_metadata(self, n: int) -> Tuple[str, int, int, str]: + """Get metadata for the n-th sample from the dataset. Returns filepath instead of waveform, + but otherwise returns the same fields as :py:func:`__getitem__`. + + Args: + n (int): The index of the sample + + Returns: + Tuple of the following items; + + str: + Path to audio + int: + Sample rate + int: + Speaker ID + str: + File ID + """ + file_path = self._flist[n] + file_id = _get_file_id(file_path, self._ext_audio) + speaker_id = file_id.split("-")[0] + speaker_id = int(speaker_id[3:]) + return file_path, SAMPLE_RATE, speaker_id, file_id + + def __getitem__(self, n: int) -> Tuple[Tensor, int, int, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded + + Returns: + Tuple of the following items; + + Tensor: + Waveform + int: + Sample rate + int: + Speaker ID + str: + File ID + """ + metadata = self.get_metadata(n) + waveform = _load_waveform(self._path, metadata[0], metadata[1]) + return (waveform,) + metadata[1:] + + def __len__(self) -> int: + return len(self._flist) + + +class VoxCeleb1Verification(VoxCeleb1): + """*VoxCeleb1* :cite:`nagrani2017voxceleb` dataset for speaker verification task. + + Each data sample contains a pair of waveforms, sample rate, the label indicating if they are + from the same speaker, and the file ids. + + Args: + root (str or Path): Path to the directory where the dataset is found or downloaded. + meta_url (str, optional): The url of meta file that contains a list of utterance pairs + and the corresponding labels. The format of each row is ``label file_path1 file_path2". + For example: ``1 id10270/x6uYqmx31kE/00001.wav id10270/8jEAjG6SegY/00008.wav``. + ``1`` means the two utterances are from the same speaker, ``0`` means not. + (Default: ``"https://www.robots.ox.ac.uk/~vgg/data/voxceleb/meta/veri_test.txt"``) + download (bool, optional): + Whether to download the dataset if it is not found at root path. (Default: ``False``). + + Note: + The file structure of `VoxCeleb1Verification` dataset is as follows: + + └─ root/ + + └─ wav/ + + └─ speaker_id folders + + Users who pre-downloaded the ``"vox1_dev_wav.zip"`` and ``"vox1_test_wav.zip"`` files need to move + the extracted files into the same ``root`` directory. + """ + + def __init__(self, root: Union[str, Path], meta_url: str = _VERI_TEST_URL, download: bool = False) -> None: + super().__init__(root, download) + # download the veri_test.txt to get the list of training pairs and labels. + meta_list_path = os.path.join(root, os.path.basename(meta_url)) + if not os.path.exists(meta_list_path): + download_url_to_file(meta_url, meta_list_path) + self._flist = _get_paired_flist(self._path, meta_list_path) + + def get_metadata(self, n: int) -> Tuple[str, str, int, int, str, str]: + """Get metadata for the n-th sample from the dataset. 
Returns filepaths instead of waveforms, + but otherwise returns the same fields as :py:func:`__getitem__`. + + Args: + n (int): The index of the sample + + Returns: + Tuple of the following items; + + str: + Path to audio file of speaker 1 + str: + Path to audio file of speaker 2 + int: + Sample rate + int: + Label + str: + File ID of speaker 1 + str: + File ID of speaker 2 + """ + label, file_path_spk1, file_path_spk2 = self._flist[n] + label = int(label) + file_id_spk1 = _get_file_id(file_path_spk1, self._ext_audio) + file_id_spk2 = _get_file_id(file_path_spk2, self._ext_audio) + return file_path_spk1, file_path_spk2, SAMPLE_RATE, label, file_id_spk1, file_id_spk2 + + def __getitem__(self, n: int) -> Tuple[Tensor, Tensor, int, int, str, str]: + """Load the n-th sample from the dataset. + + Args: + n (int): The index of the sample to be loaded. + + Returns: + Tuple of the following items; + + Tensor: + Waveform of speaker 1 + Tensor: + Waveform of speaker 2 + int: + Sample rate + int: + Label + str: + File ID of speaker 1 + str: + File ID of speaker 2 + """ + metadata = self.get_metadata(n) + waveform_spk1 = _load_waveform(self._path, metadata[0], metadata[2]) + waveform_spk2 = _load_waveform(self._path, metadata[1], metadata[2]) + return (waveform_spk1, waveform_spk2) + metadata[2:] + + def __len__(self) -> int: + return len(self._flist) diff --git a/venv/lib/python3.10/site-packages/torchaudio/functional/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/functional/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b866977c67c9cbcb6098c39ea5c26ed7b19d5979 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/functional/__init__.py @@ -0,0 +1,127 @@ +from ._alignment import forced_align, merge_tokens, TokenSpan +from .filtering import ( + allpass_biquad, + band_biquad, + bandpass_biquad, + bandreject_biquad, + bass_biquad, + biquad, + contrast, + dcshift, + deemph_biquad, + dither, + equalizer_biquad, + filtfilt, + flanger, + gain, + highpass_biquad, + lfilter, + lowpass_biquad, + overdrive, + phaser, + riaa_biquad, + treble_biquad, + vad, +) +from .functional import ( + add_noise, + amplitude_to_DB, + apply_beamforming, + apply_codec, + compute_deltas, + convolve, + create_dct, + DB_to_amplitude, + deemphasis, + detect_pitch_frequency, + edit_distance, + fftconvolve, + frechet_distance, + griffinlim, + inverse_spectrogram, + linear_fbanks, + loudness, + mask_along_axis, + mask_along_axis_iid, + melscale_fbanks, + mu_law_decoding, + mu_law_encoding, + mvdr_weights_rtf, + mvdr_weights_souden, + phase_vocoder, + pitch_shift, + preemphasis, + psd, + resample, + rnnt_loss, + rtf_evd, + rtf_power, + sliding_window_cmn, + spectral_centroid, + spectrogram, + speed, +) + +__all__ = [ + "amplitude_to_DB", + "compute_deltas", + "create_dct", + "melscale_fbanks", + "linear_fbanks", + "DB_to_amplitude", + "loudness", + "detect_pitch_frequency", + "griffinlim", + "mask_along_axis", + "mask_along_axis_iid", + "mu_law_encoding", + "mu_law_decoding", + "phase_vocoder", + "sliding_window_cmn", + "spectrogram", + "inverse_spectrogram", + "spectral_centroid", + "allpass_biquad", + "band_biquad", + "bandpass_biquad", + "bandreject_biquad", + "bass_biquad", + "biquad", + "contrast", + "dither", + "dcshift", + "deemph_biquad", + "equalizer_biquad", + "filtfilt", + "flanger", + "forced_align", + "merge_tokens", + "TokenSpan", + "gain", + "highpass_biquad", + "lfilter", + "lowpass_biquad", + "overdrive", + "phaser", + "riaa_biquad", + "treble_biquad", + "vad", + 
"apply_codec", + "resample", + "edit_distance", + "pitch_shift", + "rnnt_loss", + "psd", + "mvdr_weights_souden", + "mvdr_weights_rtf", + "rtf_evd", + "rtf_power", + "apply_beamforming", + "fftconvolve", + "convolve", + "add_noise", + "speed", + "preemphasis", + "deemphasis", + "frechet_distance", +] diff --git a/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/filtering.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/filtering.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13b31cde0318e1f848f6d96a8bdf18604bda3e65 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/filtering.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/functional.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/functional.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73abe636578579721543bf08f4c62aaca4530af6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/functional/__pycache__/functional.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/functional/_alignment.py b/venv/lib/python3.10/site-packages/torchaudio/functional/_alignment.py new file mode 100644 index 0000000000000000000000000000000000000000..70d1e995e41b8e6817b2dcc25f6a2d0ec3d83752 --- /dev/null +++ b/venv/lib/python3.10/site-packages/torchaudio/functional/_alignment.py @@ -0,0 +1,128 @@ +from dataclasses import dataclass +from typing import List, Optional, Tuple + +import torch +from torch import Tensor +from torchaudio._extension import fail_if_no_align + +__all__ = [] + + +@fail_if_no_align +def forced_align( + log_probs: Tensor, + targets: Tensor, + input_lengths: Optional[Tensor] = None, + target_lengths: Optional[Tensor] = None, + blank: int = 0, +) -> Tuple[Tensor, Tensor]: + r"""Align a CTC label sequence to an emission. + + .. devices:: CPU CUDA + + .. properties:: TorchScript + + Args: + log_probs (Tensor): log probability of CTC emission output. + Tensor of shape `(B, T, C)`. where `B` is the batch size, `T` is the input length, + `C` is the number of characters in alphabet including blank. + targets (Tensor): Target sequence. Tensor of shape `(B, L)`, + where `L` is the target length. + input_lengths (Tensor or None, optional): + Lengths of the inputs (max value must each be <= `T`). 1-D Tensor of shape `(B,)`. + target_lengths (Tensor or None, optional): + Lengths of the targets. 1-D Tensor of shape `(B,)`. + blank_id (int, optional): The index of blank symbol in CTC emission. (Default: 0) + + Returns: + Tuple(Tensor, Tensor): + Tensor: Label for each time step in the alignment path computed using forced alignment. + + Tensor: Log probability scores of the labels for each time step. + + Note: + The sequence length of `log_probs` must satisfy: + + + .. math:: + L_{\text{log\_probs}} \ge L_{\text{label}} + N_{\text{repeat}} + + where :math:`N_{\text{repeat}}` is the number of consecutively repeated tokens. + For example, in str `"aabbc"`, the number of repeats are `2`. + + Note: + The current version only supports ``batch_size==1``. + """ + if blank in targets: + raise ValueError(f"targets Tensor shouldn't contain blank index. 
Found {targets}.") + if torch.max(targets) >= log_probs.shape[-1]: + raise ValueError("targets values must be less than the CTC dimension") + + if input_lengths is None: + batch_size, length = log_probs.size(0), log_probs.size(1) + input_lengths = torch.full((batch_size,), length, dtype=torch.int64, device=log_probs.device) + if target_lengths is None: + batch_size, length = targets.size(0), targets.size(1) + target_lengths = torch.full((batch_size,), length, dtype=torch.int64, device=targets.device) + + # For TorchScript compatibility + assert input_lengths is not None + assert target_lengths is not None + + paths, scores = torch.ops.torchaudio.forced_align(log_probs, targets, input_lengths, target_lengths, blank) + return paths, scores + + +@dataclass +class TokenSpan: + """TokenSpan() + Token with time stamps and score. Returned by :py:func:`merge_tokens`. + """ + + token: int + """The token""" + start: int + """The start time (inclusive) in emission time axis.""" + end: int + """The end time (exclusive) in emission time axis.""" + score: float + """The score of the this token.""" + + def __len__(self) -> int: + """Returns the time span""" + return self.end - self.start + + +def merge_tokens(tokens: Tensor, scores: Tensor, blank: int = 0) -> List[TokenSpan]: + """Removes repeated tokens and blank tokens from the given CTC token sequence. + + Args: + tokens (Tensor): Alignment tokens (unbatched) returned from :py:func:`forced_align`. + Shape: `(time, )`. + scores (Tensor): Alignment scores (unbatched) returned from :py:func:`forced_align`. + Shape: `(time, )`. When computing the token-size score, the given score is averaged + across the corresponding time span. + + Returns: + list of TokenSpan + + Example: + >>> aligned_tokens, scores = forced_align(emission, targets, input_lengths, target_lengths) + >>> token_spans = merge_tokens(aligned_tokens[0], scores[0]) + """ + if tokens.ndim != 1 or scores.ndim != 1: + raise ValueError("`tokens` and `scores` must be 1D Tensor.") + if len(tokens) != len(scores): + raise ValueError("`tokens` and `scores` must be the same length.") + + diff = torch.diff( + tokens, prepend=torch.tensor([-1], device=tokens.device), append=torch.tensor([-1], device=tokens.device) + ) + changes_wo_blank = torch.nonzero((diff != 0)).squeeze().tolist() + tokens = tokens.tolist() + spans = [ + TokenSpan(token=token, start=start, end=end, score=scores[start:end].mean().item()) + for start, end in zip(changes_wo_blank[:-1], changes_wo_blank[1:]) + if (token := tokens[start]) != blank + ] + return spans diff --git a/venv/lib/python3.10/site-packages/torchaudio/lib/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/lib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9687650d602bad251200bd2b20a652c89d198af Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0fe97ee85631885206edf19074da32175468ca6 Binary files 
/dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/_hdemucs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef151c574b431b5a3d5e754cc3022b1d419fc241 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/conv_tasnet.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0331b1eee52a83fe21c444d52c8fabe343a687b8 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/deepspeech.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/emformer.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/emformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24cb5105f4b955a21da5d3231c933781a17d5d00 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/emformer.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c8e4b2de64af7244ddca0213d355cfb45a9027b Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9adea993386053d5ff0e4f56337d9cd8edd0a7f5 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/rnnt_decoder.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f289170930880b77459a042c1d4ad50ba95c4ffd Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/tacotron2.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1244af4a228c99ed27e6e79d0b4e91a9b4735055 Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wav2letter.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wavernn.cpython-310.pyc b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wavernn.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1904db4736d0a0500cf091456acf1559b17b286f Binary files /dev/null and b/venv/lib/python3.10/site-packages/torchaudio/models/__pycache__/wavernn.cpython-310.pyc 
differ
diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/decoder/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/models/decoder/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2e9b06d52ef7af302a000bb0f572b4c563e12bd
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/models/decoder/__init__.py
@@ -0,0 +1,46 @@
+_CTC_DECODERS = [
+    "CTCHypothesis",
+    "CTCDecoder",
+    "CTCDecoderLM",
+    "CTCDecoderLMState",
+    "ctc_decoder",
+    "download_pretrained_files",
+]
+_CUDA_CTC_DECODERS = [
+    "CUCTCDecoder",
+    "CUCTCHypothesis",
+    "cuda_ctc_decoder",
+]
+
+
+def __getattr__(name: str):
+    if name in _CTC_DECODERS:
+        try:
+            from . import _ctc_decoder
+        except Exception as err:
+            raise RuntimeError(
+                "The CTC decoder suite requires the flashlight-text package and, optionally, KenLM. Please install them."
+            ) from err
+
+        item = getattr(_ctc_decoder, name)
+        globals()[name] = item
+        return item
+    elif name in _CUDA_CTC_DECODERS:
+        try:
+            from . import _cuda_ctc_decoder
+        except AttributeError as err:
+            raise RuntimeError(
+                "To use CUCTC decoder, please set BUILD_CUDA_CTC_DECODER=1 when building from source."
+            ) from err
+
+        item = getattr(_cuda_ctc_decoder, name)
+        globals()[name] = item
+        return item
+    raise AttributeError(f"module {__name__} has no attribute {name}")
+
+
+def __dir__():
+    return sorted(__all__)
+
+
+__all__ = _CTC_DECODERS + _CUDA_CTC_DECODERS
diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/decoder/_ctc_decoder.py b/venv/lib/python3.10/site-packages/torchaudio/models/decoder/_ctc_decoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d45f12f523cc7748e1552ad410557fe9a1f6664
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/models/decoder/_ctc_decoder.py
@@ -0,0 +1,568 @@
+from __future__ import annotations
+
+import itertools as it
+
+from abc import abstractmethod
+from collections import namedtuple
+from typing import Dict, List, NamedTuple, Optional, Tuple, Union
+
+import torch
+
+from flashlight.lib.text.decoder import (
+    CriterionType as _CriterionType,
+    LexiconDecoder as _LexiconDecoder,
+    LexiconDecoderOptions as _LexiconDecoderOptions,
+    LexiconFreeDecoder as _LexiconFreeDecoder,
+    LexiconFreeDecoderOptions as _LexiconFreeDecoderOptions,
+    LM as _LM,
+    LMState as _LMState,
+    SmearingMode as _SmearingMode,
+    Trie as _Trie,
+    ZeroLM as _ZeroLM,
+)
+from flashlight.lib.text.dictionary import (
+    create_word_dict as _create_word_dict,
+    Dictionary as _Dictionary,
+    load_words as _load_words,
+)
+from torchaudio.utils import download_asset
+
+try:
+    from flashlight.lib.text.decoder.kenlm import KenLM as _KenLM
+except Exception:
+    try:
+        from flashlight.lib.text.decoder import KenLM as _KenLM
+    except Exception:
+        _KenLM = None
+
+__all__ = [
+    "CTCHypothesis",
+    "CTCDecoder",
+    "CTCDecoderLM",
+    "CTCDecoderLMState",
+    "ctc_decoder",
+    "download_pretrained_files",
+]
+
+_PretrainedFiles = namedtuple("PretrainedFiles", ["lexicon", "tokens", "lm"])
+
+
+def _construct_trie(tokens_dict, word_dict, lexicon, lm, silence):
+    vocab_size = tokens_dict.index_size()
+    trie = _Trie(vocab_size, silence)
+    start_state = lm.start(False)
+
+    for word, spellings in lexicon.items():
+        word_idx = word_dict.get_index(word)
+        _, score = lm.score(start_state, word_idx)
+        for spelling in spellings:
+            spelling_idx = [tokens_dict.get_index(token) for token in spelling]
+            trie.insert(spelling_idx, word_idx, score)
+    trie.smear(_SmearingMode.MAX)
+    return trie
+
+
+def _get_word_dict(lexicon, lm, lm_dict, tokens_dict, unk_word):
+    word_dict = None
+    if lm_dict is not None:
+        word_dict = _Dictionary(lm_dict)
+
+    if lexicon and word_dict is None:
+        word_dict = _create_word_dict(lexicon)
+    elif not lexicon and word_dict is None and isinstance(lm, str):
+        d = {tokens_dict.get_entry(i): [[tokens_dict.get_entry(i)]] for i in range(tokens_dict.index_size())}
+        d[unk_word] = [[unk_word]]
+        word_dict = _create_word_dict(d)
+
+    return word_dict
+
+
+class CTCHypothesis(NamedTuple):
+    r"""Represents a hypothesis generated by the CTC beam search decoder :class:`CTCDecoder`."""
+    tokens: torch.LongTensor
+    """Predicted sequence of token IDs. Shape `(L, )`, where `L` is the length of the output sequence"""
+
+    words: List[str]
+    """List of predicted words.
+
+    Note:
+        This attribute is only applicable if a lexicon is provided to the decoder. If
+        decoding without a lexicon, it will be blank. Please refer to :attr:`tokens` and
+        :func:`~torchaudio.models.decoder.CTCDecoder.idxs_to_tokens` instead.
+    """
+
+    score: float
+    """Score corresponding to hypothesis"""
+
+    timesteps: torch.IntTensor
+    """Timesteps corresponding to the tokens. Shape `(L, )`, where `L` is the length of the output sequence"""
+
+
+class CTCDecoderLMState(_LMState):
+    """Language model state."""
+
+    @property
+    def children(self) -> Dict[int, CTCDecoderLMState]:
+        """Map of indices to LM states"""
+        return super().children
+
+    def child(self, usr_index: int) -> CTCDecoderLMState:
+        """Returns the child corresponding to usr_index, or creates and returns a new state if the input index
+        is not found.
+
+        Args:
+            usr_index (int): index corresponding to child state
+
+        Returns:
+            CTCDecoderLMState: child state corresponding to usr_index
+        """
+        return super().child(usr_index)
+
+    def compare(self, state: CTCDecoderLMState) -> int:
+        """Compare two language model states.
+
+        Args:
+            state (CTCDecoderLMState): LM state to compare against
+
+        Returns:
+            int: 0 if the states are the same, -1 if self is less, +1 if self is greater.
+        """
+        pass
+
+
+class CTCDecoderLM(_LM):
+    """Language model base class for creating custom language models to use with the decoder."""
+
+    @abstractmethod
+    def start(self, start_with_nothing: bool) -> CTCDecoderLMState:
+        """Initialize or reset the language model.
+
+        Args:
+            start_with_nothing (bool): whether or not to start sentence with sil token.
+
+        Returns:
+            CTCDecoderLMState: starting state
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def score(self, state: CTCDecoderLMState, usr_token_idx: int) -> Tuple[CTCDecoderLMState, float]:
+        """Evaluate the language model based on the current LM state and new word.
+
+        Args:
+            state (CTCDecoderLMState): current LM state
+            usr_token_idx (int): index of the word
+
+        Returns:
+            (CTCDecoderLMState, float)
+            CTCDecoderLMState:
+                new LM state
+            float:
+                score
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def finish(self, state: CTCDecoderLMState) -> Tuple[CTCDecoderLMState, float]:
+        """Evaluate end for language model based on current LM state.
+
+        Args:
+            state (CTCDecoderLMState): current LM state
+
+        Returns:
+            (CTCDecoderLMState, float)
+            CTCDecoderLMState:
+                new LM state
+            float:
+                score
+        """
+        raise NotImplementedError
+
+
+class CTCDecoder:
+    """CTC beam search decoder from *Flashlight* :cite:`kahn2022flashlight`.
+
+    .. devices:: CPU
+
+    Note:
+        To build the decoder, please use the factory function :func:`ctc_decoder`.
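+
+    Example:
+        A minimal sketch; the lexicon/token file names and the ``emissions``
+        tensor are placeholders for the user's own data::
+
+            >>> decoder = ctc_decoder(lexicon="lexicon.txt", tokens="tokens.txt")
+            >>> hypos = decoder(emissions)  # float32 CPU tensor of shape (batch, frame, num_tokens)
+            >>> best = hypos[0][0]  # top hypothesis for the first sequence in the batch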
+ """ + + def __init__( + self, + nbest: int, + lexicon: Optional[Dict], + word_dict: _Dictionary, + tokens_dict: _Dictionary, + lm: CTCDecoderLM, + decoder_options: Union[_LexiconDecoderOptions, _LexiconFreeDecoderOptions], + blank_token: str, + sil_token: str, + unk_word: str, + ) -> None: + """ + Args: + nbest (int): number of best decodings to return + lexicon (Dict or None): lexicon mapping of words to spellings, or None for lexicon-free decoder + word_dict (_Dictionary): dictionary of words + tokens_dict (_Dictionary): dictionary of tokens + lm (CTCDecoderLM): language model. If using a lexicon, only word level LMs are currently supported + decoder_options (_LexiconDecoderOptions or _LexiconFreeDecoderOptions): + parameters used for beam search decoding + blank_token (str): token corresopnding to blank + sil_token (str): token corresponding to silence + unk_word (str): word corresponding to unknown + """ + + self.nbest = nbest + self.word_dict = word_dict + self.tokens_dict = tokens_dict + self.blank = self.tokens_dict.get_index(blank_token) + silence = self.tokens_dict.get_index(sil_token) + transitions = [] + + if lexicon: + trie = _construct_trie(tokens_dict, word_dict, lexicon, lm, silence) + unk_word = word_dict.get_index(unk_word) + token_lm = False # use word level LM + + self.decoder = _LexiconDecoder( + decoder_options, + trie, + lm, + silence, + self.blank, + unk_word, + transitions, + token_lm, + ) + else: + self.decoder = _LexiconFreeDecoder(decoder_options, lm, silence, self.blank, transitions) + # https://github.com/pytorch/audio/issues/3218 + # If lm is passed like rvalue reference, the lm object gets garbage collected, + # and later call to the lm fails. + # This ensures that lm object is not deleted as long as the decoder is alive. + # https://github.com/pybind/pybind11/discussions/4013 + self.lm = lm + + def _get_tokens(self, idxs: torch.IntTensor) -> torch.LongTensor: + idxs = (g[0] for g in it.groupby(idxs)) + idxs = filter(lambda x: x != self.blank, idxs) + return torch.LongTensor(list(idxs)) + + def _get_timesteps(self, idxs: torch.IntTensor) -> torch.IntTensor: + """Returns frame numbers corresponding to non-blank tokens.""" + + timesteps = [] + for i, idx in enumerate(idxs): + if idx == self.blank: + continue + if i == 0 or idx != idxs[i - 1]: + timesteps.append(i) + return torch.IntTensor(timesteps) + + def decode_begin(self): + """Initialize the internal state of the decoder. + + See :py:meth:`decode_step` for the usage. + + .. note:: + + This method is required only when performing online decoding. + It is not necessary when performing batch decoding with :py:meth:`__call__`. + """ + self.decoder.decode_begin() + + def decode_end(self): + """Finalize the internal state of the decoder. + + See :py:meth:`decode_step` for the usage. + + .. note:: + + This method is required only when performing online decoding. + It is not necessary when performing batch decoding with :py:meth:`__call__`. + """ + self.decoder.decode_end() + + def decode_step(self, emissions: torch.FloatTensor): + """Perform incremental decoding on top of the curent internal state. + + .. note:: + + This method is required only when performing online decoding. + It is not necessary when performing batch decoding with :py:meth:`__call__`. + + Args: + emissions (torch.FloatTensor): CPU tensor of shape `(frame, num_tokens)` storing sequences of + probability distribution over labels; output of acoustic model. + + Example: + >>> decoder = torchaudio.models.decoder.ctc_decoder(...) 
+            >>> decoder.decode_begin()
+            >>> decoder.decode_step(emission1)
+            >>> decoder.decode_step(emission2)
+            >>> decoder.decode_end()
+            >>> result = decoder.get_final_hypothesis()
+        """
+        if emissions.dtype != torch.float32:
+            raise ValueError("emissions must be float32.")
+
+        if not emissions.is_cpu:
+            raise RuntimeError("emissions must be a CPU tensor.")
+
+        if not emissions.is_contiguous():
+            raise RuntimeError("emissions must be contiguous.")
+
+        if emissions.ndim != 2:
+            raise RuntimeError(f"emissions must be 2D. Found {emissions.shape}")
+
+        T, N = emissions.size()
+        self.decoder.decode_step(emissions.data_ptr(), T, N)
+
+    def _to_hypo(self, results) -> List[CTCHypothesis]:
+        return [
+            CTCHypothesis(
+                tokens=self._get_tokens(result.tokens),
+                words=[self.word_dict.get_entry(x) for x in result.words if x >= 0],
+                score=result.score,
+                timesteps=self._get_timesteps(result.tokens),
+            )
+            for result in results
+        ]
+
+    def get_final_hypothesis(self) -> List[CTCHypothesis]:
+        """Get the final hypothesis
+
+        Returns:
+            List[CTCHypothesis]:
+                List of sorted best hypotheses.
+
+        .. note::
+
+           This method is required only when performing online decoding.
+           It is not necessary when performing batch decoding with :py:meth:`__call__`.
+        """
+        results = self.decoder.get_all_final_hypothesis()
+        return self._to_hypo(results[: self.nbest])
+
+    def __call__(
+        self, emissions: torch.FloatTensor, lengths: Optional[torch.Tensor] = None
+    ) -> List[List[CTCHypothesis]]:
+        """
+        Performs batched offline decoding.
+
+        .. note::
+
+           This method performs offline decoding in one go. To perform incremental decoding,
+           please refer to :py:meth:`decode_step`.
+
+        Args:
+            emissions (torch.FloatTensor): CPU tensor of shape `(batch, frame, num_tokens)` storing sequences of
+                probability distribution over labels; output of acoustic model.
+            lengths (Tensor or None, optional): CPU tensor of shape `(batch, )` storing the valid length in
+                the time axis of the output Tensor in each batch.
+
+        Returns:
+            List[List[CTCHypothesis]]:
+                List of sorted best hypotheses for each audio sequence in the batch.
+        """
+
+        if emissions.dtype != torch.float32:
+            raise ValueError("emissions must be float32.")
+
+        if not emissions.is_cpu:
+            raise RuntimeError("emissions must be a CPU tensor.")
+
+        if not emissions.is_contiguous():
+            raise RuntimeError("emissions must be contiguous.")
+
+        if emissions.ndim != 3:
+            raise RuntimeError(f"emissions must be 3D. Found {emissions.shape}")
+
+        if lengths is not None and not lengths.is_cpu:
+            raise RuntimeError("lengths must be a CPU tensor.")
+
+        B, T, N = emissions.size()
+        if lengths is None:
+            lengths = torch.full((B,), T)
+
+        float_bytes = 4
+        hypos = []
+
+        for b in range(B):
+            emissions_ptr = emissions.data_ptr() + float_bytes * b * emissions.stride(0)
+            results = self.decoder.decode(emissions_ptr, lengths[b], N)
+            hypos.append(self._to_hypo(results[: self.nbest]))
+        return hypos
+
+    def idxs_to_tokens(self, idxs: torch.LongTensor) -> List:
+        """
+        Map raw token IDs into corresponding tokens
+
+        Args:
+            idxs (LongTensor): raw token IDs generated from decoder
+
+        Returns:
+            List: tokens corresponding to the input IDs
+        """
+        return [self.tokens_dict.get_entry(idx.item()) for idx in idxs]
+
+
+def ctc_decoder(
+    lexicon: Optional[str],
+    tokens: Union[str, List[str]],
+    lm: Optional[Union[str, CTCDecoderLM]] = None,
+    lm_dict: Optional[str] = None,
+    nbest: int = 1,
+    beam_size: int = 50,
+    beam_size_token: Optional[int] = None,
+    beam_threshold: float = 50,
+    lm_weight: float = 2,
+    word_score: float = 0,
+    unk_score: float = float("-inf"),
+    sil_score: float = 0,
+    log_add: bool = False,
+    blank_token: str = "-",
+    sil_token: str = "|",
+    unk_word: str = "<unk>",
+) -> CTCDecoder:
+    """Builds an instance of :class:`CTCDecoder`.
+
+    Args:
+        lexicon (str or None): lexicon file containing the possible words and corresponding spellings.
+            Each line consists of a word and its space separated spelling. If `None`, uses lexicon-free
+            decoding.
+        tokens (str or List[str]): file or list containing valid tokens. If using a file, the expected
+            format is for tokens mapping to the same index to be on the same line
+        lm (str, CTCDecoderLM, or None, optional): either a path to a KenLM language model, a
+            custom language model of type `CTCDecoderLM`, or `None` if not using a language model
+        lm_dict (str or None, optional): file consisting of the dictionary used for the LM, with a word
+            per line sorted by LM index. If decoding with a lexicon, entries in lm_dict must also occur
+            in the lexicon file. If `None`, dictionary for LM is constructed using the lexicon file.
+            (Default: None)
+        nbest (int, optional): number of best decodings to return (Default: 1)
+        beam_size (int, optional): max number of hypos to hold after each decode step (Default: 50)
+        beam_size_token (int, optional): max number of tokens to consider at each decode step.
+            If `None`, it is set to the total number of tokens (Default: None)
+        beam_threshold (float, optional): threshold for pruning hypotheses (Default: 50)
+        lm_weight (float, optional): weight of language model (Default: 2)
+        word_score (float, optional): word insertion score (Default: 0)
+        unk_score (float, optional): unknown word insertion score (Default: -inf)
+        sil_score (float, optional): silence insertion score (Default: 0)
+        log_add (bool, optional): whether or not to use logadd when merging hypotheses (Default: False)
+        blank_token (str, optional): token corresponding to blank (Default: "-")
+        sil_token (str, optional): token corresponding to silence (Default: "|")
+        unk_word (str, optional): word corresponding to unknown (Default: "<unk>")
+
+    Returns:
+        CTCDecoder: decoder
+
+    Example:
+        >>> decoder = ctc_decoder(
+        >>>     lexicon="lexicon.txt",
+        >>>     tokens="tokens.txt",
+        >>>     lm="kenlm.bin",
+        >>> )
+        >>> results = decoder(emissions)  # List of shape (B, nbest) of Hypotheses
+    """
+    if lm_dict is not None and not isinstance(lm_dict, str):
+        raise ValueError("lm_dict must be None or str type.")
+
+    tokens_dict = _Dictionary(tokens)
+
+    # decoder options
+    if lexicon:
+        lexicon = _load_words(lexicon)
+        decoder_options = _LexiconDecoderOptions(
+            beam_size=beam_size,
+            beam_size_token=beam_size_token or tokens_dict.index_size(),
+            beam_threshold=beam_threshold,
+            lm_weight=lm_weight,
+            word_score=word_score,
+            unk_score=unk_score,
+            sil_score=sil_score,
+            log_add=log_add,
+            criterion_type=_CriterionType.CTC,
+        )
+    else:
+        decoder_options = _LexiconFreeDecoderOptions(
+            beam_size=beam_size,
+            beam_size_token=beam_size_token or tokens_dict.index_size(),
+            beam_threshold=beam_threshold,
+            lm_weight=lm_weight,
+            sil_score=sil_score,
+            log_add=log_add,
+            criterion_type=_CriterionType.CTC,
+        )
+
+    # construct word dict and language model
+    word_dict = _get_word_dict(lexicon, lm, lm_dict, tokens_dict, unk_word)
+
+    if isinstance(lm, str):
+        if _KenLM is None:
+            raise RuntimeError(
+                "flashlight-text is installed, but KenLM is not installed. "
+                "Please refer to https://github.com/kpu/kenlm#python-module for how to install it."
+            )
+        lm = _KenLM(lm, word_dict)
+    elif lm is None:
+        lm = _ZeroLM()
+
+    return CTCDecoder(
+        nbest=nbest,
+        lexicon=lexicon,
+        word_dict=word_dict,
+        tokens_dict=tokens_dict,
+        lm=lm,
+        decoder_options=decoder_options,
+        blank_token=blank_token,
+        sil_token=sil_token,
+        unk_word=unk_word,
+    )
+
+
+def _get_filenames(model: str) -> _PretrainedFiles:
+    if model not in ["librispeech", "librispeech-3-gram", "librispeech-4-gram"]:
+        raise ValueError(
+            f"{model} not supported. Must be one of ['librispeech-3-gram', 'librispeech-4-gram', 'librispeech']"
+        )
+
+    prefix = f"decoder-assets/{model}"
+    return _PretrainedFiles(
+        lexicon=f"{prefix}/lexicon.txt",
+        tokens=f"{prefix}/tokens.txt",
+        lm=f"{prefix}/lm.bin" if model != "librispeech" else None,
+    )
+
+
+def download_pretrained_files(model: str) -> _PretrainedFiles:
+    """
+    Retrieves pretrained data files used for :func:`ctc_decoder`.
+
+    Args:
+        model (str): pretrained language model to download.
+            Valid values are: ``"librispeech-3-gram"``, ``"librispeech-4-gram"`` and ``"librispeech"``.
+
+    Returns:
+        Object with the following attributes
+
+        * ``lm``: path corresponding to downloaded language model,
+          or ``None`` if the model is not associated with an LM
+        * ``lexicon``: path corresponding to downloaded lexicon file
+        * ``tokens``: path corresponding to downloaded tokens file
+    """
+
+    files = _get_filenames(model)
+    lexicon_file = download_asset(files.lexicon)
+    tokens_file = download_asset(files.tokens)
+    if files.lm is not None:
+        lm_file = download_asset(files.lm)
+    else:
+        lm_file = None
+
+    return _PretrainedFiles(
+        lexicon=lexicon_file,
+        tokens=tokens_file,
+        lm=lm_file,
+    )
diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/squim/__init__.py b/venv/lib/python3.10/site-packages/torchaudio/models/squim/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..092d6eb8e36e2329c78d21bf609a8458818995e6
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/models/squim/__init__.py
@@ -0,0 +1,11 @@
+from .objective import squim_objective_base, squim_objective_model, SquimObjective
+from .subjective import squim_subjective_base, squim_subjective_model, SquimSubjective
+
+__all__ = [
+    "squim_objective_base",
+    "squim_objective_model",
+    "squim_subjective_base",
+    "squim_subjective_model",
+    "SquimObjective",
+    "SquimSubjective",
+]
diff --git a/venv/lib/python3.10/site-packages/torchaudio/models/squim/objective.py b/venv/lib/python3.10/site-packages/torchaudio/models/squim/objective.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0a0671a4ec3828fa5dbe1613c7378fbc55d34dd
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/torchaudio/models/squim/objective.py
@@ -0,0 +1,326 @@
+import math
+from typing import List, Optional, Tuple
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+def transform_wb_pesq_range(x: float) -> float:
+    """The metric defined by ITU-T P.862 is often called the 'PESQ score'; it is defined
+    for narrow-band signals and has a value range of exactly [-0.5, 4.5]. Here, we use the metric
+    defined by ITU-T P.862.2, commonly known as 'wide-band PESQ', which is what "PESQ score" refers to below.
+
+    Args:
+        x (float): Narrow-band PESQ score.
+
+    Returns:
+        (float): Wide-band PESQ score.
+    """
+    return 0.999 + (4.999 - 0.999) / (1 + math.exp(-1.3669 * x + 3.8224))
+
+
+PESQRange: Tuple[float, float] = (
+    1.0,  # P.862.2 uses a different input filter than P.862, and the lower bound of
+    # the raw score is not -0.5 anymore. It's hard to figure out the true lower bound.
+    # We are using 1.0 as a reasonable approximation.
+    transform_wb_pesq_range(4.5),
+)
+
+
+class RangeSigmoid(nn.Module):
+    def __init__(self, val_range: Tuple[float, float] = (0.0, 1.0)) -> None:
+        super(RangeSigmoid, self).__init__()
+        assert isinstance(val_range, tuple) and len(val_range) == 2
+        self.val_range: Tuple[float, float] = val_range
+        self.sigmoid: nn.modules.Module = nn.Sigmoid()
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        out = self.sigmoid(x) * (self.val_range[1] - self.val_range[0]) + self.val_range[0]
+        return out
+
+
+class Encoder(nn.Module):
+    """Encoder module that transforms a 1D waveform into 2D representations.
+
+    Args:
+        feat_dim (int, optional): The feature dimension after the Encoder module. (Default: 512)
+        win_len (int, optional): kernel size in the Conv1D layer.
(Default: 32)
+    """
+
+    def __init__(self, feat_dim: int = 512, win_len: int = 32) -> None:
+        super(Encoder, self).__init__()
+
+        self.conv1d = nn.Conv1d(1, feat_dim, win_len, stride=win_len // 2, bias=False)
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        """Apply waveforms to the convolutional layer and ReLU layer.
+
+        Args:
+            x (torch.Tensor): Input waveforms. Tensor with dimensions `(batch, time)`.
+
+        Returns:
+            (torch.Tensor): Feature Tensor with dimensions `(batch, channel, frame)`.
+        """
+        out = x.unsqueeze(dim=1)
+        out = F.relu(self.conv1d(out))
+        return out
+
+
+class SingleRNN(nn.Module):
+    def __init__(self, rnn_type: str, input_size: int, hidden_size: int, dropout: float = 0.0) -> None:
+        super(SingleRNN, self).__init__()
+
+        self.rnn_type = rnn_type
+        self.input_size = input_size
+        self.hidden_size = hidden_size
+
+        self.rnn: nn.modules.Module = getattr(nn, rnn_type)(
+            input_size,
+            hidden_size,
+            1,
+            dropout=dropout,
+            batch_first=True,
+            bidirectional=True,
+        )
+
+        self.proj = nn.Linear(hidden_size * 2, input_size)
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        # input shape: batch, seq, dim
+        out, _ = self.rnn(x)
+        out = self.proj(out)
+        return out
+
+
+class DPRNN(nn.Module):
+    """*Dual-path recurrent neural networks (DPRNN)* :cite:`luo2020dual`.
+
+    Args:
+        feat_dim (int, optional): The feature dimension after Encoder module. (Default: 64)
+        hidden_dim (int, optional): Hidden dimension in the RNN layer of DPRNN. (Default: 128)
+        num_blocks (int, optional): Number of DPRNN layers. (Default: 6)
+        rnn_type (str, optional): Type of RNN in DPRNN. Valid options are ["RNN", "LSTM", "GRU"]. (Default: "LSTM")
+        d_model (int, optional): The number of expected features in the input. (Default: 256)
+        chunk_size (int, optional): Chunk size of input for DPRNN. (Default: 100)
+        chunk_stride (int, optional): Stride of chunk input for DPRNN.
(Default: 50) + """ + + def __init__( + self, + feat_dim: int = 64, + hidden_dim: int = 128, + num_blocks: int = 6, + rnn_type: str = "LSTM", + d_model: int = 256, + chunk_size: int = 100, + chunk_stride: int = 50, + ) -> None: + super(DPRNN, self).__init__() + + self.num_blocks = num_blocks + + self.row_rnn = nn.ModuleList([]) + self.col_rnn = nn.ModuleList([]) + self.row_norm = nn.ModuleList([]) + self.col_norm = nn.ModuleList([]) + for _ in range(num_blocks): + self.row_rnn.append(SingleRNN(rnn_type, feat_dim, hidden_dim)) + self.col_rnn.append(SingleRNN(rnn_type, feat_dim, hidden_dim)) + self.row_norm.append(nn.GroupNorm(1, feat_dim, eps=1e-8)) + self.col_norm.append(nn.GroupNorm(1, feat_dim, eps=1e-8)) + self.conv = nn.Sequential( + nn.Conv2d(feat_dim, d_model, 1), + nn.PReLU(), + ) + self.chunk_size = chunk_size + self.chunk_stride = chunk_stride + + def pad_chunk(self, x: torch.Tensor) -> Tuple[torch.Tensor, int]: + # input shape: (B, N, T) + seq_len = x.shape[-1] + + rest = self.chunk_size - (self.chunk_stride + seq_len % self.chunk_size) % self.chunk_size + out = F.pad(x, [self.chunk_stride, rest + self.chunk_stride]) + + return out, rest + + def chunking(self, x: torch.Tensor) -> Tuple[torch.Tensor, int]: + out, rest = self.pad_chunk(x) + batch_size, feat_dim, seq_len = out.shape + + segments1 = out[:, :, : -self.chunk_stride].contiguous().view(batch_size, feat_dim, -1, self.chunk_size) + segments2 = out[:, :, self.chunk_stride :].contiguous().view(batch_size, feat_dim, -1, self.chunk_size) + out = torch.cat([segments1, segments2], dim=3) + out = out.view(batch_size, feat_dim, -1, self.chunk_size).transpose(2, 3).contiguous() + + return out, rest + + def merging(self, x: torch.Tensor, rest: int) -> torch.Tensor: + batch_size, dim, _, _ = x.shape + out = x.transpose(2, 3).contiguous().view(batch_size, dim, -1, self.chunk_size * 2) + out1 = out[:, :, :, : self.chunk_size].contiguous().view(batch_size, dim, -1)[:, :, self.chunk_stride :] + out2 = out[:, :, :, self.chunk_size :].contiguous().view(batch_size, dim, -1)[:, :, : -self.chunk_stride] + out = out1 + out2 + if rest > 0: + out = out[:, :, :-rest] + out = out.contiguous() + return out + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x, rest = self.chunking(x) + batch_size, _, dim1, dim2 = x.shape + out = x + for row_rnn, row_norm, col_rnn, col_norm in zip(self.row_rnn, self.row_norm, self.col_rnn, self.col_norm): + row_in = out.permute(0, 3, 2, 1).contiguous().view(batch_size * dim2, dim1, -1).contiguous() + row_out = row_rnn(row_in) + row_out = row_out.view(batch_size, dim2, dim1, -1).permute(0, 3, 2, 1).contiguous() + row_out = row_norm(row_out) + out = out + row_out + + col_in = out.permute(0, 2, 3, 1).contiguous().view(batch_size * dim1, dim2, -1).contiguous() + col_out = col_rnn(col_in) + col_out = col_out.view(batch_size, dim1, dim2, -1).permute(0, 3, 1, 2).contiguous() + col_out = col_norm(col_out) + out = out + col_out + out = self.conv(out) + out = self.merging(out, rest) + out = out.transpose(1, 2).contiguous() + return out + + +class AutoPool(nn.Module): + def __init__(self, pool_dim: int = 1) -> None: + super(AutoPool, self).__init__() + self.pool_dim: int = pool_dim + self.softmax: nn.modules.Module = nn.Softmax(dim=pool_dim) + self.register_parameter("alpha", nn.Parameter(torch.ones(1))) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + weight = self.softmax(torch.mul(x, self.alpha)) + out = torch.sum(torch.mul(x, weight), dim=self.pool_dim) + return out + + +class SquimObjective(nn.Module): + 
"""Speech Quality and Intelligibility Measures (SQUIM) model that predicts **objective** metric scores + for speech enhancement (e.g., STOI, PESQ, and SI-SDR). + + Args: + encoder (torch.nn.Module): Encoder module to transform 1D waveform to 2D feature representation. + dprnn (torch.nn.Module): DPRNN module to model sequential feature. + branches (torch.nn.ModuleList): Transformer branches in which each branch estimate one objective metirc score. + """ + + def __init__( + self, + encoder: nn.Module, + dprnn: nn.Module, + branches: nn.ModuleList, + ): + super(SquimObjective, self).__init__() + self.encoder = encoder + self.dprnn = dprnn + self.branches = branches + + def forward(self, x: torch.Tensor) -> List[torch.Tensor]: + """ + Args: + x (torch.Tensor): Input waveforms. Tensor with dimensions `(batch, time)`. + + Returns: + List(torch.Tensor): List of score Tenosrs. Each Tensor is with dimension `(batch,)`. + """ + if x.ndim != 2: + raise ValueError(f"The input must be a 2D Tensor. Found dimension {x.ndim}.") + x = x / (torch.mean(x**2, dim=1, keepdim=True) ** 0.5 * 20) + out = self.encoder(x) + out = self.dprnn(out) + scores = [] + for branch in self.branches: + scores.append(branch(out).squeeze(dim=1)) + return scores + + +def _create_branch(d_model: int, nhead: int, metric: str) -> nn.modules.Module: + """Create branch module after DPRNN model for predicting metric score. + + Args: + d_model (int): The number of expected features in the input. + nhead (int): Number of heads in the multi-head attention model. + metric (str): The metric name to predict. + + Returns: + (nn.Module): Returned module to predict corresponding metric score. + """ + layer1 = nn.TransformerEncoderLayer(d_model, nhead, d_model * 4, dropout=0.0, batch_first=True) + layer2 = AutoPool() + if metric == "stoi": + layer3 = nn.Sequential( + nn.Linear(d_model, d_model), + nn.PReLU(), + nn.Linear(d_model, 1), + RangeSigmoid(), + ) + elif metric == "pesq": + layer3 = nn.Sequential( + nn.Linear(d_model, d_model), + nn.PReLU(), + nn.Linear(d_model, 1), + RangeSigmoid(val_range=PESQRange), + ) + else: + layer3: nn.modules.Module = nn.Sequential(nn.Linear(d_model, d_model), nn.PReLU(), nn.Linear(d_model, 1)) + return nn.Sequential(layer1, layer2, layer3) + + +def squim_objective_model( + feat_dim: int, + win_len: int, + d_model: int, + nhead: int, + hidden_dim: int, + num_blocks: int, + rnn_type: str, + chunk_size: int, + chunk_stride: Optional[int] = None, +) -> SquimObjective: + """Build a custome :class:`torchaudio.prototype.models.SquimObjective` model. + + Args: + feat_dim (int, optional): The feature dimension after Encoder module. + win_len (int): Kernel size in the Encoder module. + d_model (int): The number of expected features in the input. + nhead (int): Number of heads in the multi-head attention model. + hidden_dim (int): Hidden dimension in the RNN layer of DPRNN. + num_blocks (int): Number of DPRNN layers. + rnn_type (str): Type of RNN in DPRNN. Valid options are ["RNN", "LSTM", "GRU"]. + chunk_size (int): Chunk size of input for DPRNN. + chunk_stride (int or None, optional): Stride of chunk input for DPRNN. 
+ """ + if chunk_stride is None: + chunk_stride = chunk_size // 2 + encoder = Encoder(feat_dim, win_len) + dprnn = DPRNN(feat_dim, hidden_dim, num_blocks, rnn_type, d_model, chunk_size, chunk_stride) + branches = nn.ModuleList( + [ + _create_branch(d_model, nhead, "stoi"), + _create_branch(d_model, nhead, "pesq"), + _create_branch(d_model, nhead, "sisdr"), + ] + ) + return SquimObjective(encoder, dprnn, branches) + + +def squim_objective_base() -> SquimObjective: + """Build :class:`torchaudio.prototype.models.SquimObjective` model with default arguments.""" + return squim_objective_model( + feat_dim=256, + win_len=64, + d_model=256, + nhead=4, + hidden_dim=256, + num_blocks=2, + rnn_type="LSTM", + chunk_size=71, + ) diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/LICENCE b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/LICENCE new file mode 100644 index 0000000000000000000000000000000000000000..a8922b182e80d9bcb955e8b8ae2bd9a017d72977 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/LICENCE @@ -0,0 +1,49 @@ +`tqdm` is a product of collaborative work. +Unless otherwise stated, all authors (see commit logs) retain copyright +for their respective work, and release the work under the MIT licence +(text below). + +Exceptions or notable authors are listed below +in reverse chronological order: + +* files: * + MPL-2.0 2015-2024 (c) Casper da Costa-Luis + [casperdcl](https://github.com/casperdcl). +* files: tqdm/_tqdm.py + MIT 2016 (c) [PR #96] on behalf of Google Inc. +* files: tqdm/_tqdm.py README.rst .gitignore + MIT 2013 (c) Noam Yorav-Raphael, original author. + +[PR #96]: https://github.com/tqdm/tqdm/pull/96 + + +Mozilla Public Licence (MPL) v. 2.0 - Exhibit A +----------------------------------------------- + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. +If a copy of the MPL was not distributed with this project, +You can obtain one at https://mozilla.org/MPL/2.0/. + + +MIT License (MIT) +----------------- + +Copyright (c) 2013 noamraph + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..181b4dc8b2f8697d1c0374a612ffd8b2f2db346a --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/METADATA @@ -0,0 +1,1594 @@ +Metadata-Version: 2.1 +Name: tqdm +Version: 4.67.1 +Summary: Fast, Extensible Progress Meter +Maintainer-email: tqdm developers +License: MPL-2.0 AND MIT +Project-URL: homepage, https://tqdm.github.io +Project-URL: repository, https://github.com/tqdm/tqdm +Project-URL: changelog, https://tqdm.github.io/releases +Project-URL: wiki, https://github.com/tqdm/tqdm/wiki +Keywords: progressbar,progressmeter,progress,bar,meter,rate,eta,console,terminal,time +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Environment :: MacOS X +Classifier: Environment :: Other Environment +Classifier: Environment :: Win32 (MS Windows) +Classifier: Environment :: X11 Applications +Classifier: Framework :: IPython +Classifier: Framework :: Jupyter +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: End Users/Desktop +Classifier: Intended Audience :: Other Audience +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: MIT License +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Operating System :: MacOS +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft +Classifier: Operating System :: Microsoft :: MS-DOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: BSD :: FreeBSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: POSIX :: SunOS/Solaris +Classifier: Operating System :: Unix +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: IronPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Unix Shell +Classifier: Topic :: Desktop Environment +Classifier: Topic :: Education :: Computer Aided Instruction (CAI) +Classifier: Topic :: Education :: Testing +Classifier: Topic :: Office/Business +Classifier: Topic :: Other/Nonlisted Topic +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Pre-processors +Classifier: Topic :: Software Development :: User Interfaces +Classifier: Topic :: System :: Installation/Setup +Classifier: Topic :: System :: Logging +Classifier: Topic :: System :: Monitoring +Classifier: Topic :: System :: Shells +Classifier: Topic :: Terminals +Classifier: Topic :: Utilities 
+Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENCE +Requires-Dist: colorama; platform_system == "Windows" +Provides-Extra: dev +Requires-Dist: pytest>=6; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-timeout; extra == "dev" +Requires-Dist: pytest-asyncio>=0.24; extra == "dev" +Requires-Dist: nbval; extra == "dev" +Provides-Extra: discord +Requires-Dist: requests; extra == "discord" +Provides-Extra: slack +Requires-Dist: slack-sdk; extra == "slack" +Provides-Extra: telegram +Requires-Dist: requests; extra == "telegram" +Provides-Extra: notebook +Requires-Dist: ipywidgets>=6; extra == "notebook" + +|Logo| + +tqdm +==== + +|Py-Versions| |Versions| |Conda-Forge-Status| |Docker| |Snapcraft| + +|Build-Status| |Coverage-Status| |Branch-Coverage-Status| |Codacy-Grade| |Libraries-Rank| |PyPI-Downloads| + +|LICENCE| |OpenHub-Status| |binder-demo| |awesome-python| + +``tqdm`` derives from the Arabic word *taqaddum* (تقدّم) which can mean "progress," +and is an abbreviation for "I love you so much" in Spanish (*te quiero demasiado*). + +Instantly make your loops show a smart progress meter - just wrap any +iterable with ``tqdm(iterable)``, and you're done! + +.. code:: python + + from tqdm import tqdm + for i in tqdm(range(10000)): + ... + +``76%|████████████████████████        | 7568/10000 [00:33<00:10, 229.00it/s]`` + +``trange(N)`` can be also used as a convenient shortcut for +``tqdm(range(N))``. + +|Screenshot| + |Video| |Slides| |Merch| + +It can also be executed as a module with pipes: + +.. code:: sh + + $ seq 9999999 | tqdm --bytes | wc -l + 75.2MB [00:00, 217MB/s] + 9999999 + + $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \ + > backup.tgz + 32%|██████████▍ | 8.89G/27.9G [00:42<01:31, 223MB/s] + +Overhead is low -- about 60ns per iteration (80ns with ``tqdm.gui``), and is +unit tested against performance regression. +By comparison, the well-established +`ProgressBar `__ has +an 800ns/iter overhead. + +In addition to its low overhead, ``tqdm`` uses smart algorithms to predict +the remaining time and to skip unnecessary iteration displays, which allows +for a negligible overhead in most cases. + +``tqdm`` works on any platform +(Linux, Windows, Mac, FreeBSD, NetBSD, Solaris/SunOS), +in any console or in a GUI, and is also friendly with IPython/Jupyter notebooks. + +``tqdm`` does not require any dependencies (not even ``curses``!), just +Python and an environment supporting ``carriage return \r`` and +``line feed \n`` control characters. + +------------------------------------------ + +.. contents:: Table of contents + :backlinks: top + :local: + + +Installation +------------ + +Latest PyPI stable release +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|Versions| |PyPI-Downloads| |Libraries-Dependents| + +.. code:: sh + + pip install tqdm + +Latest development release on GitHub +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +|GitHub-Status| |GitHub-Stars| |GitHub-Commits| |GitHub-Forks| |GitHub-Updated| + +Pull and install pre-release ``devel`` branch: + +.. code:: sh + + pip install "git+https://github.com/tqdm/tqdm.git@devel#egg=tqdm" + +Latest Conda release +~~~~~~~~~~~~~~~~~~~~ + +|Conda-Forge-Status| + +.. code:: sh + + conda install -c conda-forge tqdm + +Latest Snapcraft release +~~~~~~~~~~~~~~~~~~~~~~~~ + +|Snapcraft| + +There are 3 channels to choose from: + +.. code:: sh + + snap install tqdm # implies --stable, i.e. 
latest tagged release + snap install tqdm --candidate # master branch + snap install tqdm --edge # devel branch + +Note that ``snap`` binaries are purely for CLI use (not ``import``-able), and +automatically set up ``bash`` tab-completion. + +Latest Docker release +~~~~~~~~~~~~~~~~~~~~~ + +|Docker| + +.. code:: sh + + docker pull tqdm/tqdm + docker run -i --rm tqdm/tqdm --help + +Other +~~~~~ + +There are other (unofficial) places where ``tqdm`` may be downloaded, particularly for CLI use: + +|Repology| + +.. |Repology| image:: https://repology.org/badge/tiny-repos/python:tqdm.svg + :target: https://repology.org/project/python:tqdm/versions + +Changelog +--------- + +The list of all changes is available either on GitHub's Releases: +|GitHub-Status|, on the +`wiki `__, or on the +`website `__. + + +Usage +----- + +``tqdm`` is very versatile and can be used in a number of ways. +The three main ones are given below. + +Iterable-based +~~~~~~~~~~~~~~ + +Wrap ``tqdm()`` around any iterable: + +.. code:: python + + from tqdm import tqdm + from time import sleep + + text = "" + for char in tqdm(["a", "b", "c", "d"]): + sleep(0.25) + text = text + char + +``trange(i)`` is a special optimised instance of ``tqdm(range(i))``: + +.. code:: python + + from tqdm import trange + + for i in trange(100): + sleep(0.01) + +Instantiation outside of the loop allows for manual control over ``tqdm()``: + +.. code:: python + + pbar = tqdm(["a", "b", "c", "d"]) + for char in pbar: + sleep(0.25) + pbar.set_description("Processing %s" % char) + +Manual +~~~~~~ + +Manual control of ``tqdm()`` updates using a ``with`` statement: + +.. code:: python + + with tqdm(total=100) as pbar: + for i in range(10): + sleep(0.1) + pbar.update(10) + +If the optional variable ``total`` (or an iterable with ``len()``) is +provided, predictive stats are displayed. + +``with`` is also optional (you can just assign ``tqdm()`` to a variable, +but in this case don't forget to ``del`` or ``close()`` at the end): + +.. code:: python + + pbar = tqdm(total=100) + for i in range(10): + sleep(0.1) + pbar.update(10) + pbar.close() + +Module +~~~~~~ + +Perhaps the most wonderful use of ``tqdm`` is in a script or on the command +line. Simply inserting ``tqdm`` (or ``python -m tqdm``) between pipes will pass +through all ``stdin`` to ``stdout`` while printing progress to ``stderr``. + +The example below demonstrates counting the number of lines in all Python files +in the current directory, with timing information included. + +.. code:: sh + + $ time find . -name '*.py' -type f -exec cat \{} \; | wc -l + 857365 + + real 0m3.458s + user 0m0.274s + sys 0m3.325s + + $ time find . -name '*.py' -type f -exec cat \{} \; | tqdm | wc -l + 857366it [00:03, 246471.31it/s] + 857365 + + real 0m3.585s + user 0m0.862s + sys 0m3.358s + +Note that the usual arguments for ``tqdm`` can also be specified. + +.. code:: sh + + $ find . -name '*.py' -type f -exec cat \{} \; | + tqdm --unit loc --unit_scale --total 857366 >> /dev/null + 100%|█████████████████████████████████| 857K/857K [00:04<00:00, 246Kloc/s] + +Backing up a large directory? + +.. code:: sh + + $ tar -zcf - docs/ | tqdm --bytes --total `du -sb docs/ | cut -f1` \ + > backup.tgz + 44%|██████████████▊ | 153M/352M [00:14<00:18, 11.0MB/s] + +This can be beautified further: + +..
code:: sh + + $ BYTES=$(du -sb docs/ | cut -f1) + $ tar -cf - docs/ \ + | tqdm --bytes --total "$BYTES" --desc Processing | gzip \ + | tqdm --bytes --total "$BYTES" --desc Compressed --position 1 \ + > ~/backup.tgz + Processing: 100%|██████████████████████| 352M/352M [00:14<00:00, 30.2MB/s] + Compressed: 42%|█████████▎ | 148M/352M [00:14<00:19, 10.9MB/s] + +Or done on a file level using 7-zip: + +.. code:: sh + + $ 7z a -bd -r backup.7z docs/ | grep Compressing \ + | tqdm --total $(find docs/ -type f | wc -l) --unit files \ + | grep -v Compressing + 100%|██████████████████████████▉| 15327/15327 [01:00<00:00, 712.96files/s] + +Pre-existing CLI programs already outputting basic progress information will +benefit from ``tqdm``'s ``--update`` and ``--update_to`` flags: + +.. code:: sh + + $ seq 3 0.1 5 | tqdm --total 5 --update_to --null + 100%|████████████████████████████████████| 5.0/5 [00:00<00:00, 9673.21it/s] + $ seq 10 | tqdm --update --null # 1 + 2 + ... + 10 = 55 iterations + 55it [00:00, 90006.52it/s] + +FAQ and Known Issues +-------------------- + +|GitHub-Issues| + +The most common issues relate to excessive output on multiple lines, instead +of a neat one-line progress bar. + +- Consoles in general: require support for carriage return (``CR``, ``\r``). + + * Some cloud logging consoles which don't support ``\r`` properly + (`cloudwatch `__, + `K8s `__) may benefit from + ``export TQDM_POSITION=-1``. + +- Nested progress bars: + + * Consoles in general: require support for moving cursors up to the + previous line. For example, + `IDLE `__, + `ConEmu `__ and + `PyCharm `__ (also + `here `__, + `here `__, and + `here `__) + lack full support. + * Windows: additionally may require the Python module ``colorama`` + to ensure nested bars stay within their respective lines. + +- Unicode: + + * Environments which report that they support unicode will have solid smooth + progressbars. The fallback is an ``ascii``-only bar. + * Windows consoles often only partially support unicode and thus + `often require explicit ascii=True `__ + (also `here `__). This is due to + either normal-width unicode characters being incorrectly displayed as + "wide", or some unicode characters not rendering. + +- Wrapping generators: + + * Generator wrapper functions tend to hide the length of iterables. + ``tqdm`` does not. + * Replace ``tqdm(enumerate(...))`` with ``enumerate(tqdm(...))`` or + ``tqdm(enumerate(x), total=len(x), ...)``. + The same applies to ``numpy.ndenumerate``. + * Replace ``tqdm(zip(a, b))`` with ``zip(tqdm(a), b)`` or even + ``zip(tqdm(a), tqdm(b))``. + * The same applies to ``itertools``. + * Some useful convenience functions can be found under ``tqdm.contrib``. + +- `No intermediate output in docker-compose `__: + use ``docker-compose run`` instead of ``docker-compose up`` and ``tty: true``. + +- Overriding defaults via environment variables: + e.g. in CI/cloud jobs, ``export TQDM_MININTERVAL=5`` to avoid log spam. + This override logic is handled by the ``tqdm.utils.envwrap`` decorator + (useful independent of ``tqdm``). + +If you come across any other difficulties, browse and file |GitHub-Issues|. + +Documentation +------------- + +|Py-Versions| |README-Hits| (Since 19 May 2016) + +.. code:: python + + class tqdm(): + """ + Decorate an iterable object, returning an iterator which acts exactly + like the original iterable, but prints a dynamically updating + progressbar every time a value is requested. 
+ """ + + @envwrap("TQDM_") # override defaults via env vars + def __init__(self, iterable=None, desc=None, total=None, leave=True, + file=None, ncols=None, mininterval=0.1, + maxinterval=10.0, miniters=None, ascii=None, disable=False, + unit='it', unit_scale=False, dynamic_ncols=False, + smoothing=0.3, bar_format=None, initial=0, position=None, + postfix=None, unit_divisor=1000, write_bytes=False, + lock_args=None, nrows=None, colour=None, delay=0): + +Parameters +~~~~~~~~~~ + +* iterable : iterable, optional + Iterable to decorate with a progressbar. + Leave blank to manually manage the updates. +* desc : str, optional + Prefix for the progressbar. +* total : int or float, optional + The number of expected iterations. If unspecified, + len(iterable) is used if possible. If float("inf") or as a last + resort, only basic progress statistics are displayed + (no ETA, no progressbar). + If ``gui`` is True and this parameter needs subsequent updating, + specify an initial arbitrary large positive number, + e.g. 9e9. +* leave : bool, optional + If [default: True], keeps all traces of the progressbar + upon termination of iteration. + If ``None``, will leave only if ``position`` is ``0``. +* file : ``io.TextIOWrapper`` or ``io.StringIO``, optional + Specifies where to output the progress messages + (default: sys.stderr). Uses ``file.write(str)`` and ``file.flush()`` + methods. For encoding, see ``write_bytes``. +* ncols : int, optional + The width of the entire output message. If specified, + dynamically resizes the progressbar to stay within this bound. + If unspecified, attempts to use environment width. The + fallback is a meter width of 10 and no limit for the counter and + statistics. If 0, will not print any meter (only stats). +* mininterval : float, optional + Minimum progress display update interval [default: 0.1] seconds. +* maxinterval : float, optional + Maximum progress display update interval [default: 10] seconds. + Automatically adjusts ``miniters`` to correspond to ``mininterval`` + after long display update lag. Only works if ``dynamic_miniters`` + or monitor thread is enabled. +* miniters : int or float, optional + Minimum progress display update interval, in iterations. + If 0 and ``dynamic_miniters``, will automatically adjust to equal + ``mininterval`` (more CPU efficient, good for tight loops). + If > 0, will skip display of specified number of iterations. + Tweak this and ``mininterval`` to get very efficient loops. + If your progress is erratic with both fast and slow iterations + (network, skipping items, etc) you should set miniters=1. +* ascii : bool or str, optional + If unspecified or False, use unicode (smooth blocks) to fill + the meter. The fallback is to use ASCII characters " 123456789#". +* disable : bool, optional + Whether to disable the entire progressbar wrapper + [default: False]. If set to None, disable on non-TTY. +* unit : str, optional + String that will be used to define the unit of each iteration + [default: it]. +* unit_scale : bool or int or float, optional + If 1 or True, the number of iterations will be reduced/scaled + automatically and a metric prefix following the + International System of Units standard will be added + (kilo, mega, etc.) [default: False]. If any other non-zero + number, will scale ``total`` and ``n``. +* dynamic_ncols : bool, optional + If set, constantly alters ``ncols`` and ``nrows`` to the + environment (allowing for window resizes) [default: False]. 
+* smoothing : float, optional + Exponential moving average smoothing factor for speed estimates + (ignored in GUI mode). Ranges from 0 (average speed) to 1 + (current/instantaneous speed) [default: 0.3]. +* bar_format : str, optional + Specify a custom bar string formatting. May impact performance. + [default: '{l_bar}{bar}{r_bar}'], where + l_bar='{desc}: {percentage:3.0f}%|' and + r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, ' + '{rate_fmt}{postfix}]' + Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, + percentage, elapsed, elapsed_s, ncols, nrows, desc, unit, + rate, rate_fmt, rate_noinv, rate_noinv_fmt, + rate_inv, rate_inv_fmt, postfix, unit_divisor, + remaining, remaining_s, eta. + Note that a trailing ": " is automatically removed after {desc} + if the latter is empty. +* initial : int or float, optional + The initial counter value. Useful when restarting a progress + bar [default: 0]. If using float, consider specifying ``{n:.3f}`` + or similar in ``bar_format``, or specifying ``unit_scale``. +* position : int, optional + Specify the line offset to print this bar (starting from 0) + Automatic if unspecified. + Useful to manage multiple bars at once (eg, from threads). +* postfix : dict or ``*``, optional + Specify additional stats to display at the end of the bar. + Calls ``set_postfix(**postfix)`` if possible (dict). +* unit_divisor : float, optional + [default: 1000], ignored unless ``unit_scale`` is True. +* write_bytes : bool, optional + Whether to write bytes. If (default: False) will write unicode. +* lock_args : tuple, optional + Passed to ``refresh`` for intermediate output + (initialisation, iterating, and updating). +* nrows : int, optional + The screen height. If specified, hides nested bars outside this + bound. If unspecified, attempts to use environment height. + The fallback is 20. +* colour : str, optional + Bar colour (e.g. 'green', '#00ff00'). +* delay : float, optional + Don't display until [default: 0] seconds have elapsed. + +Extra CLI Options +~~~~~~~~~~~~~~~~~ + +* delim : chr, optional + Delimiting character [default: '\n']. Use '\0' for null. + N.B.: on Windows systems, Python converts '\n' to '\r\n'. +* buf_size : int, optional + String buffer size in bytes [default: 256] + used when ``delim`` is specified. +* bytes : bool, optional + If true, will count bytes, ignore ``delim``, and default + ``unit_scale`` to True, ``unit_divisor`` to 1024, and ``unit`` to 'B'. +* tee : bool, optional + If true, passes ``stdin`` to both ``stderr`` and ``stdout``. +* update : bool, optional + If true, will treat input as newly elapsed iterations, + i.e. numbers to pass to ``update()``. Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. +* update_to : bool, optional + If true, will treat input as total elapsed iterations, + i.e. numbers to assign to ``self.n``. Note that this is slow + (~2e5 it/s) since every input must be decoded as a number. +* null : bool, optional + If true, will discard input (no stdout). +* manpath : str, optional + Directory in which to install tqdm man pages. +* comppath : str, optional + Directory in which to place tqdm completion. +* log : str, optional + CRITICAL|FATAL|ERROR|WARN(ING)|[default: 'INFO']|DEBUG|NOTSET. + +Returns +~~~~~~~ + +* out : decorated iterator. + +.. code:: python + + class tqdm(): + def update(self, n=1): + """ + Manually update the progress bar, useful for streams + such as reading files. 
+ E.g.: + >>> t = tqdm(total=filesize) # Initialise + >>> for current_buffer in stream: + ... ... + ... t.update(len(current_buffer)) + >>> t.close() + The last line is highly recommended, but possibly not necessary if + ``t.update()`` will be called in such a way that ``filesize`` will be + exactly reached and printed. + + Parameters + ---------- + n : int or float, optional + Increment to add to the internal counter of iterations + [default: 1]. If using float, consider specifying ``{n:.3f}`` + or similar in ``bar_format``, or specifying ``unit_scale``. + + Returns + ------- + out : bool or None + True if a ``display()`` was triggered. + """ + + def close(self): + """Cleanup and (if leave=False) close the progressbar.""" + + def clear(self, nomove=False): + """Clear current bar display.""" + + def refresh(self): + """ + Force refresh the display of this bar. + + Parameters + ---------- + nolock : bool, optional + If ``True``, does not lock. + If [default: ``False``]: calls ``acquire()`` on internal lock. + lock_args : tuple, optional + Passed to internal lock's ``acquire()``. + If specified, will only ``display()`` if ``acquire()`` returns ``True``. + """ + + def unpause(self): + """Restart tqdm timer from last print time.""" + + def reset(self, total=None): + """ + Resets to 0 iterations for repeated use. + + Consider combining with ``leave=True``. + + Parameters + ---------- + total : int or float, optional. Total to use for the new bar. + """ + + def set_description(self, desc=None, refresh=True): + """ + Set/modify description of the progress bar. + + Parameters + ---------- + desc : str, optional + refresh : bool, optional + Forces refresh [default: True]. + """ + + def set_postfix(self, ordered_dict=None, refresh=True, **tqdm_kwargs): + """ + Set/modify postfix (additional stats) + with automatic formatting based on datatype. + + Parameters + ---------- + ordered_dict : dict or OrderedDict, optional + refresh : bool, optional + Forces refresh [default: True]. + kwargs : dict, optional + """ + + @classmethod + def write(cls, s, file=sys.stdout, end="\n"): + """Print a message via tqdm (without overlap with bars).""" + + @property + def format_dict(self): + """Public API for read-only member access.""" + + def display(self, msg=None, pos=None): + """ + Use ``self.sp`` to display ``msg`` in the specified ``pos``. + + Consider overloading this function when inheriting to use e.g.: + ``self.some_frontend(**self.format_dict)`` instead of ``self.sp``. + + Parameters + ---------- + msg : str, optional. What to display (default: ``repr(self)``). + pos : int, optional. Position to ``moveto`` + (default: ``abs(self.pos)``). + """ + + @classmethod + @contextmanager + def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs): + """ + stream : file-like object. + method : str, "read" or "write". The result of ``read()`` and + the first argument of ``write()`` should have a ``len()``. + + >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj: + ... while True: + ... chunk = fobj.read(chunk_size) + ... if not chunk: + ... break + """ + + @classmethod + def pandas(cls, *targs, **tqdm_kwargs): + """Registers the current `tqdm` class with `pandas`.""" + + def trange(*args, **tqdm_kwargs): + """Shortcut for `tqdm(range(*args), **tqdm_kwargs)`.""" + +Convenience Functions +~~~~~~~~~~~~~~~~~~~~~ + +.. 
code:: python + + def tqdm.contrib.tenumerate(iterable, start=0, total=None, + tqdm_class=tqdm.auto.tqdm, **tqdm_kwargs): + """Equivalent of `numpy.ndenumerate` or builtin `enumerate`.""" + + def tqdm.contrib.tzip(iter1, *iter2plus, **tqdm_kwargs): + """Equivalent of builtin `zip`.""" + + def tqdm.contrib.tmap(function, *sequences, **tqdm_kwargs): + """Equivalent of builtin `map`.""" + +Submodules +~~~~~~~~~~ + +.. code:: python + + class tqdm.notebook.tqdm(tqdm.tqdm): + """IPython/Jupyter Notebook widget.""" + + class tqdm.auto.tqdm(tqdm.tqdm): + """Automatically chooses between `tqdm.notebook` and `tqdm.tqdm`.""" + + class tqdm.asyncio.tqdm(tqdm.tqdm): + """Asynchronous version.""" + @classmethod + def as_completed(cls, fs, *, loop=None, timeout=None, total=None, + **tqdm_kwargs): + """Wrapper for `asyncio.as_completed`.""" + + class tqdm.gui.tqdm(tqdm.tqdm): + """Matplotlib GUI version.""" + + class tqdm.tk.tqdm(tqdm.tqdm): + """Tkinter GUI version.""" + + class tqdm.rich.tqdm(tqdm.tqdm): + """`rich.progress` version.""" + + class tqdm.keras.TqdmCallback(keras.callbacks.Callback): + """Keras callback for epoch and batch progress.""" + + class tqdm.dask.TqdmCallback(dask.callbacks.Callback): + """Dask callback for task progress.""" + + +``contrib`` ++++++++++++ + +The ``tqdm.contrib`` package also contains experimental modules: + +- ``tqdm.contrib.itertools``: Thin wrappers around ``itertools`` +- ``tqdm.contrib.concurrent``: Thin wrappers around ``concurrent.futures`` +- ``tqdm.contrib.slack``: Posts to `Slack `__ bots +- ``tqdm.contrib.discord``: Posts to `Discord `__ bots +- ``tqdm.contrib.telegram``: Posts to `Telegram `__ bots +- ``tqdm.contrib.bells``: Automagically enables all optional features + + * ``auto``, ``pandas``, ``slack``, ``discord``, ``telegram`` + +Examples and Advanced Usage +--------------------------- + +- See the `examples `__ + folder; +- import the module and run ``help()``; +- consult the `wiki `__; + + * this has an + `excellent article `__ + on how to make a **great** progressbar; + +- check out the `slides from PyData London `__, or +- run the |binder-demo|. + +Description and additional stats +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Custom information can be displayed and updated dynamically on ``tqdm`` bars +with the ``desc`` and ``postfix`` arguments: + +.. code:: python + + from tqdm import tqdm, trange + from random import random, randint + from time import sleep + + with trange(10) as t: + for i in t: + # Description will be displayed on the left + t.set_description('GEN %i' % i) + # Postfix will be displayed on the right, + # formatted automatically based on argument's datatype + t.set_postfix(loss=random(), gen=randint(1,999), str='h', + lst=[1, 2]) + sleep(0.1) + + with tqdm(total=10, bar_format="{postfix[0]} {postfix[1][value]:>8.2g}", + postfix=["Batch", {"value": 0}]) as t: + for i in range(10): + sleep(0.1) + t.postfix[1]["value"] = i / 2 + t.update() + +Points to remember when using ``{postfix[...]}`` in the ``bar_format`` string: + +- ``postfix`` also needs to be passed as an initial argument in a compatible + format, and +- ``postfix`` will be auto-converted to a string if it is a ``dict``-like + object. To prevent this behaviour, insert an extra item into the dictionary + where the key is not a string. + +Additional ``bar_format`` parameters may also be defined by overriding +``format_dict``, and the bar itself may be modified using ``ascii``: + +..
code:: python + + from tqdm import tqdm + class TqdmExtraFormat(tqdm): + """Provides a `total_time` format parameter""" + @property + def format_dict(self): + d = super().format_dict + total_time = d["elapsed"] * (d["total"] or 0) / max(d["n"], 1) + d.update(total_time=self.format_interval(total_time) + " in total") + return d + + for i in TqdmExtraFormat( + range(9), ascii=" .oO0", + bar_format="{total_time}: {percentage:.0f}%|{bar}{r_bar}"): + if i == 4: + break + +.. code:: + + 00:00 in total: 44%|0000. | 4/9 [00:00<00:00, 962.93it/s] + +Note that ``{bar}`` also supports a format specifier ``[width][type]``. + +- ``width`` + + * unspecified (default): automatic to fill ``ncols`` + * ``int >= 0``: fixed width overriding ``ncols`` logic + * ``int < 0``: subtract from the automatic default + +- ``type`` + + * ``a``: ascii (``ascii=True`` override) + * ``u``: unicode (``ascii=False`` override) + * ``b``: blank (``ascii=" "`` override) + +This means a fixed bar with right-justified text may be created by using: +``bar_format="{l_bar}{bar:10}|{bar:-10b}right-justified"`` + +Nested progress bars +~~~~~~~~~~~~~~~~~~~~ + +``tqdm`` supports nested progress bars. Here's an example: + +.. code:: python + + from tqdm.auto import trange + from time import sleep + + for i in trange(4, desc='1st loop'): + for j in trange(5, desc='2nd loop'): + for k in trange(50, desc='3rd loop', leave=False): + sleep(0.01) + +For manual control over positioning (e.g. for multi-processing use), +you may specify ``position=n`` where ``n=0`` for the outermost bar, +``n=1`` for the next, and so on. +However, it's best to check if ``tqdm`` can work without manual ``position`` +first. + +.. code:: python + + from time import sleep + from tqdm import trange, tqdm + from multiprocessing import Pool, RLock, freeze_support + + L = list(range(9)) + + def progresser(n): + interval = 0.001 / (n + 2) + total = 5000 + text = f"#{n}, est. {interval * total:<04.2}s" + for _ in trange(total, desc=text, position=n): + sleep(interval) + + if __name__ == '__main__': + freeze_support() # for Windows support + tqdm.set_lock(RLock()) # for managing output contention + p = Pool(initializer=tqdm.set_lock, initargs=(tqdm.get_lock(),)) + p.map(progresser, L) + +Note that in Python 3, ``tqdm.write`` is thread-safe: + +.. code:: python + + from time import sleep + from tqdm import tqdm, trange + from concurrent.futures import ThreadPoolExecutor + + L = list(range(9)) + + def progresser(n): + interval = 0.001 / (n + 2) + total = 5000 + text = f"#{n}, est. {interval * total:<04.2}s" + for _ in trange(total, desc=text): + sleep(interval) + if n == 6: + tqdm.write("n == 6 completed.") + tqdm.write("`tqdm.write()` is thread-safe in py3!") + + if __name__ == '__main__': + with ThreadPoolExecutor() as p: + p.map(progresser, L) + +Hooks and callbacks +~~~~~~~~~~~~~~~~~~~ + +``tqdm`` can easily support callbacks/hooks and manual updates. +Here's an example with ``urllib``: + +**``urllib.urlretrieve`` documentation** + + | [...] + | If present, the hook function will be called once + | on establishment of the network connection and once after each block read + | thereafter. The hook will be passed three arguments; a count of blocks + | transferred so far, a block size in bytes, and the total size of the file. + | [...] + +.. 
code:: python + + import urllib, os + from tqdm import tqdm + urllib = getattr(urllib, 'request', urllib) + + class TqdmUpTo(tqdm): + """Provides `update_to(n)` which uses `tqdm.update(delta_n)`.""" + def update_to(self, b=1, bsize=1, tsize=None): + """ + b : int, optional + Number of blocks transferred so far [default: 1]. + bsize : int, optional + Size of each block (in tqdm units) [default: 1]. + tsize : int, optional + Total size (in tqdm units). If [default: None] remains unchanged. + """ + if tsize is not None: + self.total = tsize + return self.update(b * bsize - self.n) # also sets self.n = b * bsize + + eg_link = "https://caspersci.uk.to/matryoshka.zip" + with TqdmUpTo(unit='B', unit_scale=True, unit_divisor=1024, miniters=1, + desc=eg_link.split('/')[-1]) as t: # all optional kwargs + urllib.urlretrieve(eg_link, filename=os.devnull, + reporthook=t.update_to, data=None) + t.total = t.n + +Inspired by `twine#242 `__. +Functional alternative in +`examples/tqdm_wget.py `__. + +It is recommended to use ``miniters=1`` whenever there are potentially +large differences in iteration speed (e.g. downloading a file over +a patchy connection). + +**Wrapping read/write methods** + +To measure throughput through a file-like object's ``read`` or ``write`` +methods, use ``CallbackIOWrapper``: + +.. code:: python + + from tqdm.auto import tqdm + from tqdm.utils import CallbackIOWrapper + + with tqdm(total=file_obj.size, + unit='B', unit_scale=True, unit_divisor=1024) as t: + fobj = CallbackIOWrapper(t.update, file_obj, "read") + while True: + chunk = fobj.read(chunk_size) + if not chunk: + break + t.reset() + # ... continue to use `t` for something else + +Alternatively, use the even simpler ``wrapattr`` convenience function, +which would condense both the ``urllib`` and ``CallbackIOWrapper`` examples +down to: + +.. code:: python + + import urllib, os + from tqdm import tqdm + + eg_link = "https://caspersci.uk.to/matryoshka.zip" + response = getattr(urllib, 'request', urllib).urlopen(eg_link) + with tqdm.wrapattr(open(os.devnull, "wb"), "write", + miniters=1, desc=eg_link.split('/')[-1], + total=getattr(response, 'length', None)) as fout: + for chunk in response: + fout.write(chunk) + +The ``requests`` equivalent is nearly identical: + +.. code:: python + + import requests, os + from tqdm import tqdm + + eg_link = "https://caspersci.uk.to/matryoshka.zip" + response = requests.get(eg_link, stream=True) + with tqdm.wrapattr(open(os.devnull, "wb"), "write", + miniters=1, desc=eg_link.split('/')[-1], + total=int(response.headers.get('content-length', 0))) as fout: + for chunk in response.iter_content(chunk_size=4096): + fout.write(chunk) + +**Custom callback** + +``tqdm`` is known for intelligently skipping unnecessary displays. To make a +custom callback take advantage of this, simply use the return value of +``update()``. This is set to ``True`` if a ``display()`` was triggered. + +.. code:: python + + from tqdm.auto import tqdm as std_tqdm + + def external_callback(*args, **kwargs): + ... + + class TqdmExt(std_tqdm): + def update(self, n=1): + displayed = super().update(n) + if displayed: + external_callback(**self.format_dict) + return displayed + +``asyncio`` +~~~~~~~~~~~ + +Note that ``break`` isn't currently caught by asynchronous iterators. +This means that ``tqdm`` cannot clean up after itself in this case: + +..
code:: python + + from tqdm.asyncio import tqdm + + async for i in tqdm(range(9)): + if i == 2: + break + +Instead, either call ``pbar.close()`` manually or use the context manager syntax: + +.. code:: python + + from tqdm.asyncio import tqdm + + with tqdm(range(9)) as pbar: + async for i in pbar: + if i == 2: + break + +Pandas Integration +~~~~~~~~~~~~~~~~~~ + +Due to popular demand we've added support for ``pandas`` -- here's an example +for ``DataFrame.progress_apply`` and ``DataFrameGroupBy.progress_apply``: + +.. code:: python + + import pandas as pd + import numpy as np + from tqdm import tqdm + + df = pd.DataFrame(np.random.randint(0, 100, (100000, 6))) + + # Register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm` + # (can use `tqdm.gui.tqdm`, `tqdm.notebook.tqdm`, optional kwargs, etc.) + tqdm.pandas(desc="my bar!") + + # Now you can use `progress_apply` instead of `apply` + # and `progress_map` instead of `map` + df.progress_apply(lambda x: x**2) + # can also groupby: + # df.groupby(0).progress_apply(lambda x: x**2) + +In case you're interested in how this works (and how to modify it for your +own callbacks), see the +`examples `__ +folder or import the module and run ``help()``. + +Keras Integration +~~~~~~~~~~~~~~~~~ + +A ``keras`` callback is also available: + +.. code:: python + + from tqdm.keras import TqdmCallback + + ... + + model.fit(..., verbose=0, callbacks=[TqdmCallback()]) + +Dask Integration +~~~~~~~~~~~~~~~~ + +A ``dask`` callback is also available: + +.. code:: python + + from tqdm.dask import TqdmCallback + + with TqdmCallback(desc="compute"): + ... + arr.compute() + + # or use callback globally + cb = TqdmCallback(desc="global") + cb.register() + arr.compute() + +IPython/Jupyter Integration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +IPython/Jupyter is supported via the ``tqdm.notebook`` submodule: + +.. code:: python + + from tqdm.notebook import trange, tqdm + from time import sleep + + for i in trange(3, desc='1st loop'): + for j in tqdm(range(100), desc='2nd loop'): + sleep(0.01) + +In addition to ``tqdm`` features, the submodule provides a native Jupyter +widget (compatible with IPython v1-v4 and Jupyter), fully working nested bars +and colour hints (blue: normal, green: completed, red: error/interrupt, +light blue: no ETA); as demonstrated below. + +|Screenshot-Jupyter1| +|Screenshot-Jupyter2| +|Screenshot-Jupyter3| + +The ``notebook`` version supports percentage or pixels for overall width +(e.g.: ``ncols='100%'`` or ``ncols='480px'``). + +It is also possible to let ``tqdm`` automatically choose between +console or notebook versions by using the ``autonotebook`` submodule: + +.. code:: python + + from tqdm.autonotebook import tqdm + tqdm.pandas() + +Note that this will issue a ``TqdmExperimentalWarning`` if run in a notebook +since it is not meant to be possible to distinguish between ``jupyter notebook`` +and ``jupyter console``. Use ``auto`` instead of ``autonotebook`` to suppress +this warning. + +Note that notebooks will display the bar in the cell where it was created. +This may be a different cell from the one where it is used. +If this is not desired, either + +- delay the creation of the bar to the cell where it must be displayed, or +- create the bar with ``display=False``, and in a later cell call + ``display(bar.container)``: + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm(..., display=False) + +.. 
code:: python + + # different cell + display(pbar.container) + +The ``keras`` callback has a ``display()`` method which can be used likewise: + +.. code:: python + + from tqdm.keras import TqdmCallback + cbk = TqdmCallback(display=False) + +.. code:: python + + # different cell + cbk.display() + model.fit(..., verbose=0, callbacks=[cbk]) + +Another possibility is to have a single bar (near the top of the notebook) +which is constantly re-used (using ``reset()`` rather than ``close()``). +For this reason, the notebook version (unlike the CLI version) does not +automatically call ``close()`` upon ``Exception``. + +.. code:: python + + from tqdm.notebook import tqdm + pbar = tqdm() + +.. code:: python + + # different cell + iterable = range(100) + pbar.reset(total=len(iterable)) # initialise with new `total` + for i in iterable: + pbar.update() + pbar.refresh() # force print final status but don't `close()` + +Custom Integration +~~~~~~~~~~~~~~~~~~ + +To change the default arguments (such as making ``dynamic_ncols=True``), +simply use built-in Python magic: + +.. code:: python + + from functools import partial + from tqdm import tqdm as std_tqdm + tqdm = partial(std_tqdm, dynamic_ncols=True) + +For further customisation, +``tqdm`` may be inherited from to create custom callbacks (as with the +``TqdmUpTo`` example `above <#hooks-and-callbacks>`__) or for custom frontends +(e.g. GUIs such as notebook or plotting packages). In the latter case: + +1. ``def __init__()`` to call ``super().__init__(..., gui=True)`` to disable + terminal ``status_printer`` creation. +2. Redefine: ``close()``, ``clear()``, ``display()``. + +Consider overloading ``display()`` to use e.g. +``self.frontend(**self.format_dict)`` instead of ``self.sp(repr(self))``. + +Some submodule examples of inheritance: + +- `tqdm/notebook.py `__ +- `tqdm/gui.py `__ +- `tqdm/tk.py `__ +- `tqdm/contrib/slack.py `__ +- `tqdm/contrib/discord.py `__ +- `tqdm/contrib/telegram.py `__ + +Dynamic Monitor/Meter +~~~~~~~~~~~~~~~~~~~~~ + +You can use a ``tqdm`` as a meter which is not monotonically increasing. +This could be because ``n`` decreases (e.g. a CPU usage monitor) or ``total`` +changes. + +One example would be recursively searching for files. The ``total`` is the +number of objects found so far, while ``n`` is the number of those objects which +are files (rather than folders): + +.. code:: python + + from tqdm import tqdm + import os.path + + def find_files_recursively(path, show_progress=True): + files = [] + # total=1 assumes `path` is a file + t = tqdm(total=1, unit="file", disable=not show_progress) + if not os.path.exists(path): + raise IOError("Cannot find:" + path) + + def append_found_file(f): + files.append(f) + t.update() + + def list_found_dir(path): + """returns os.listdir(path) assuming os.path.isdir(path)""" + listing = os.listdir(path) + # subtract 1 since a "file" we found was actually this directory + t.total += len(listing) - 1 + # fancy way to give info without forcing a refresh + t.set_postfix(dir=path[-10:], refresh=False) + t.update(0) # may trigger a refresh + return listing + + def recursively_search(path): + if os.path.isdir(path): + for f in list_found_dir(path): + recursively_search(os.path.join(path, f)) + else: + append_found_file(path) + + recursively_search(path) + t.set_postfix(dir=path) + t.close() + return files + +Using ``update(0)`` is a handy way to let ``tqdm`` decide when to trigger a +display refresh to avoid console spamming. 
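For the other case mentioned above, where ``n`` itself can decrease (a gauge rather than a counter), a minimal sketch is to assign ``n`` directly and force a redraw with ``refresh()``. The random readings below are a stand-in for a real sensor such as CPU usage:

.. code:: python

    from random import random
    from time import sleep
    from tqdm import tqdm

    with tqdm(total=100, unit="%", bar_format="{l_bar}{bar}| {n:3.0f}%") as meter:
        for _ in range(30):
            meter.n = 100 * random()  # gauge reading; may go up or down
            meter.refresh()           # redraw without incrementing
            sleep(0.1)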
+ +Writing messages +~~~~~~~~~~~~~~~~ + +This is a work in progress (see +`#737 `__). + +Since ``tqdm`` uses a simple printing mechanism to display progress bars, +you should not write any message in the terminal using ``print()`` while +a progressbar is open. + +To write messages in the terminal without any collision with ``tqdm`` bar +display, a ``.write()`` method is provided: + +.. code:: python + + from tqdm.auto import tqdm, trange + from time import sleep + + bar = trange(10) + for i in bar: + # Print using tqdm class method .write() + sleep(0.1) + if not (i % 3): + tqdm.write("Done task %i" % i) + # Can also use bar.write() + +By default, this will print to standard output ``sys.stdout``, but you can +specify any file-like object using the ``file`` argument. For example, this +can be used to redirect message writing to a log file or class. + +Redirecting writing +~~~~~~~~~~~~~~~~~~~ + +If using a library that can print messages to the console, editing the library +by replacing ``print()`` with ``tqdm.write()`` may not be desirable. +In that case, redirecting ``sys.stdout`` to ``tqdm.write()`` is an option. + +To redirect ``sys.stdout``, create a file-like class that will write +any input string to ``tqdm.write()``, and supply the arguments +``file=sys.stdout, dynamic_ncols=True``. + +A reusable canonical example is given below: + +.. code:: python + + from time import sleep + import contextlib + import sys + from tqdm import tqdm + from tqdm.contrib import DummyTqdmFile + + + @contextlib.contextmanager + def std_out_err_redirect_tqdm(): + orig_out_err = sys.stdout, sys.stderr + try: + sys.stdout, sys.stderr = map(DummyTqdmFile, orig_out_err) + yield orig_out_err[0] + # Relay exceptions + except Exception as exc: + raise exc + # Always restore sys.stdout/err if necessary + finally: + sys.stdout, sys.stderr = orig_out_err + + def some_fun(i): + print("Fee, fi, fo,".split()[i]) + + # Redirect stdout to tqdm.write() (don't forget the `as save_stdout`) + with std_out_err_redirect_tqdm() as orig_stdout: + # tqdm needs the original stdout + # and dynamic_ncols=True to autodetect console width + for i in tqdm(range(3), file=orig_stdout, dynamic_ncols=True): + sleep(.5) + some_fun(i) + + # After the `with`, printing is restored + print("Done!") + +Redirecting ``logging`` +~~~~~~~~~~~~~~~~~~~~~~~ + +Similar to ``sys.stdout``/``sys.stderr`` as detailed above, console ``logging`` +may also be redirected to ``tqdm.write()``. + +Warning: if also redirecting ``sys.stdout``/``sys.stderr``, make sure to +redirect ``logging`` first if needed. + +Helper methods are available in ``tqdm.contrib.logging``. For example: + +.. code:: python + + import logging + from tqdm import trange + from tqdm.contrib.logging import logging_redirect_tqdm + + LOG = logging.getLogger(__name__) + + if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + with logging_redirect_tqdm(): + for i in trange(9): + if i == 4: + LOG.info("console logging redirected to `tqdm.write()`") + # logging restored + +Monitoring thread, intervals and miniters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``tqdm`` implements a few tricks to increase efficiency and reduce overhead. + +- Avoid unnecessary frequent bar refreshing: ``mininterval`` defines how long + to wait between each refresh. ``tqdm`` always gets updated in the background, + but it will display only every ``mininterval``. +- Reduce the number of calls to check system clock/time. +- ``mininterval`` is more intuitive to configure than ``miniters``.
+ A clever adjustment system ``dynamic_miniters`` will automatically adjust + ``miniters`` to the number of iterations that fit into time ``mininterval``. + Essentially, ``tqdm`` will check if it's time to print without actually + checking time. This behaviour can still be bypassed by manually setting + ``miniters``. + +However, consider a case with a combination of fast and slow iterations. +After a few fast iterations, ``dynamic_miniters`` will set ``miniters`` to a +large number. When iteration rate subsequently slows, ``miniters`` will +remain large and thus reduce display update frequency. To address this: + +- ``maxinterval`` defines the maximum time between display refreshes. + A concurrent monitoring thread checks for overdue updates and forces one + where necessary. + +The monitoring thread should not have a noticeable overhead, and guarantees +updates at least every 10 seconds by default. +This value can be directly changed by setting the ``monitor_interval`` of +any ``tqdm`` instance (i.e. ``t = tqdm.tqdm(...); t.monitor_interval = 2``). +The monitor thread may be disabled application-wide by setting +``tqdm.tqdm.monitor_interval = 0`` before instantiation of any ``tqdm`` bar. + + +Merch +----- + +You can buy `tqdm branded merch `__ now! + +Contributions +------------- + +|GitHub-Commits| |GitHub-Issues| |GitHub-PRs| |OpenHub-Status| |GitHub-Contributions| |CII Best Practices| + +All source code is hosted on `GitHub `__. +Contributions are welcome. + +See the +`CONTRIBUTING `__ +file for more information. + +Developers who have made significant contributions, ranked by *SLoC* +(surviving lines of code, +`git fame `__ ``-wMC --excl '\.(png|gif|jpg)$'``), +are: + +==================== ======================================================== ==== ================================ +Name ID SLoC Notes +==================== ======================================================== ==== ================================ +Casper da Costa-Luis `casperdcl `__ ~80% primary maintainer |Gift-Casper| +Stephen Larroque `lrq3000 `__ ~9% team member +Martin Zugnoni `martinzugnoni `__ ~3% +Daniel Ecer `de-code `__ ~2% +Richard Sheridan `richardsheridan `__ ~1% +Guangshuo Chen `chengs `__ ~1% +Helio Machado `0x2b3bfa0 `__ ~1% +Kyle Altendorf `altendky `__ <1% +Noam Yorav-Raphael `noamraph `__ <1% original author +Matthew Stevens `mjstevens777 `__ <1% +Hadrien Mary `hadim `__ <1% team member +Mikhail Korobov `kmike `__ <1% team member +==================== ======================================================== ==== ================================ + +Ports to Other Languages +~~~~~~~~~~~~~~~~~~~~~~~~ + +A list is available on +`this wiki page `__. + + +LICENCE +------- + +Open Source (OSI approved): |LICENCE| + +Citation information: |DOI| + +|README-Hits| (Since 19 May 2016) + +.. |Logo| image:: https://tqdm.github.io/img/logo.gif +.. |Screenshot| image:: https://tqdm.github.io/img/tqdm.gif +.. |Video| image:: https://tqdm.github.io/img/video.jpg + :target: https://tqdm.github.io/video +.. |Slides| image:: https://tqdm.github.io/img/slides.jpg + :target: https://tqdm.github.io/PyData2019/slides.html +.. |Merch| image:: https://tqdm.github.io/img/merch.jpg + :target: https://tqdm.github.io/merch +.. |Build-Status| image:: https://img.shields.io/github/actions/workflow/status/tqdm/tqdm/test.yml?branch=master&label=tqdm&logo=GitHub + :target: https://github.com/tqdm/tqdm/actions/workflows/test.yml +..
|Coverage-Status| image:: https://img.shields.io/coveralls/github/tqdm/tqdm/master?logo=coveralls + :target: https://coveralls.io/github/tqdm/tqdm +.. |Branch-Coverage-Status| image:: https://codecov.io/gh/tqdm/tqdm/branch/master/graph/badge.svg + :target: https://codecov.io/gh/tqdm/tqdm +.. |Codacy-Grade| image:: https://app.codacy.com/project/badge/Grade/3f965571598f44549c7818f29cdcf177 + :target: https://www.codacy.com/gh/tqdm/tqdm/dashboard +.. |CII Best Practices| image:: https://bestpractices.coreinfrastructure.org/projects/3264/badge + :target: https://bestpractices.coreinfrastructure.org/projects/3264 +.. |GitHub-Status| image:: https://img.shields.io/github/tag/tqdm/tqdm.svg?maxAge=86400&logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/releases +.. |GitHub-Forks| image:: https://img.shields.io/github/forks/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/network +.. |GitHub-Stars| image:: https://img.shields.io/github/stars/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/stargazers +.. |GitHub-Commits| image:: https://img.shields.io/github/commit-activity/y/tqdm/tqdm.svg?logo=git&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/commit-activity +.. |GitHub-Issues| image:: https://img.shields.io/github/issues-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/issues?q= +.. |GitHub-PRs| image:: https://img.shields.io/github/issues-pr-closed/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/pulls +.. |GitHub-Contributions| image:: https://img.shields.io/github/contributors/tqdm/tqdm.svg?logo=github&logoColor=white + :target: https://github.com/tqdm/tqdm/graphs/contributors +.. |GitHub-Updated| image:: https://img.shields.io/github/last-commit/tqdm/tqdm/master.svg?logo=github&logoColor=white&label=pushed + :target: https://github.com/tqdm/tqdm/pulse +.. |Gift-Casper| image:: https://img.shields.io/badge/dynamic/json.svg?color=ff69b4&label=gifts%20received&prefix=%C2%A3&query=%24..sum&url=https%3A%2F%2Fcaspersci.uk.to%2Fgifts.json + :target: https://cdcl.ml/sponsor +.. |Versions| image:: https://img.shields.io/pypi/v/tqdm.svg + :target: https://tqdm.github.io/releases +.. |PyPI-Downloads| image:: https://img.shields.io/pypi/dm/tqdm.svg?label=pypi%20downloads&logo=PyPI&logoColor=white + :target: https://pepy.tech/project/tqdm +.. |Py-Versions| image:: https://img.shields.io/pypi/pyversions/tqdm.svg?logo=python&logoColor=white + :target: https://pypi.org/project/tqdm +.. |Conda-Forge-Status| image:: https://img.shields.io/conda/v/conda-forge/tqdm.svg?label=conda-forge&logo=conda-forge + :target: https://anaconda.org/conda-forge/tqdm +.. |Snapcraft| image:: https://img.shields.io/badge/snap-install-82BEA0.svg?logo=snapcraft + :target: https://snapcraft.io/tqdm +.. |Docker| image:: https://img.shields.io/badge/docker-pull-blue.svg?logo=docker&logoColor=white + :target: https://hub.docker.com/r/tqdm/tqdm +.. |Libraries-Rank| image:: https://img.shields.io/librariesio/sourcerank/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://libraries.io/pypi/tqdm +.. |Libraries-Dependents| image:: https://img.shields.io/librariesio/dependent-repos/pypi/tqdm.svg?logo=koding&logoColor=white + :target: https://github.com/tqdm/tqdm/network/dependents +.. |OpenHub-Status| image:: https://www.openhub.net/p/tqdm/widgets/project_thin_badge?format=gif + :target: https://www.openhub.net/p/tqdm?ref=Thin+badge +.. 
|awesome-python| image:: https://awesome.re/mentioned-badge.svg + :target: https://github.com/vinta/awesome-python +.. |LICENCE| image:: https://img.shields.io/pypi/l/tqdm.svg + :target: https://raw.githubusercontent.com/tqdm/tqdm/master/LICENCE +.. |DOI| image:: https://img.shields.io/badge/DOI-10.5281/zenodo.595120-blue.svg + :target: https://doi.org/10.5281/zenodo.595120 +.. |binder-demo| image:: https://mybinder.org/badge_logo.svg + :target: https://mybinder.org/v2/gh/tqdm/tqdm/master?filepath=DEMO.ipynb +.. |Screenshot-Jupyter1| image:: https://tqdm.github.io/img/jupyter-1.gif +.. |Screenshot-Jupyter2| image:: https://tqdm.github.io/img/jupyter-2.gif +.. |Screenshot-Jupyter3| image:: https://tqdm.github.io/img/jupyter-3.gif +.. |README-Hits| image:: https://cgi.cdcl.ml/hits?q=tqdm&style=social&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif + :target: https://cgi.cdcl.ml/hits?q=tqdm&a=plot&r=https://github.com/tqdm/tqdm&l=https://tqdm.github.io/img/favicon.png&f=https://tqdm.github.io/img/logo.gif&style=social diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..a39d762100497eefea0146621ae15458d4eccee0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/RECORD @@ -0,0 +1,74 @@ +../../../bin/tqdm,sha256=Vdk-2Pm7pGXY1X18CLssE4LP2VAiN9Bay_0IH9Syl-M,277 +tqdm-4.67.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +tqdm-4.67.1.dist-info/LICENCE,sha256=3DMlLoKQFeOxUAhvubOkD2rW-zLC9GEM6BL6Z301mGo,1985 +tqdm-4.67.1.dist-info/METADATA,sha256=aIoWMt9SWhmP7FLc_vsSRtMerO6cA1qsrC1-r42P9mk,57675 +tqdm-4.67.1.dist-info/RECORD,, +tqdm-4.67.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91 +tqdm-4.67.1.dist-info/entry_points.txt,sha256=ReJCH7Ui3Zyh6M16E4OhsZ1oU7WtMXCfbtoyBhGO29Y,39 +tqdm-4.67.1.dist-info/top_level.txt,sha256=NLiUJNfmc9At15s7JURiwvqMEjUi9G5PMGRrmMYzNSM,5 +tqdm/__init__.py,sha256=9mQNYSSqP99JasubEC1POJLMmhkkBH6cJZxPIR5G2pQ,1572 +tqdm/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +tqdm/__pycache__/__init__.cpython-310.pyc,, +tqdm/__pycache__/__main__.cpython-310.pyc,, +tqdm/__pycache__/_dist_ver.cpython-310.pyc,, +tqdm/__pycache__/_main.cpython-310.pyc,, +tqdm/__pycache__/_monitor.cpython-310.pyc,, +tqdm/__pycache__/_tqdm.cpython-310.pyc,, +tqdm/__pycache__/_tqdm_gui.cpython-310.pyc,, +tqdm/__pycache__/_tqdm_notebook.cpython-310.pyc,, +tqdm/__pycache__/_tqdm_pandas.cpython-310.pyc,, +tqdm/__pycache__/_utils.cpython-310.pyc,, +tqdm/__pycache__/asyncio.cpython-310.pyc,, +tqdm/__pycache__/auto.cpython-310.pyc,, +tqdm/__pycache__/autonotebook.cpython-310.pyc,, +tqdm/__pycache__/cli.cpython-310.pyc,, +tqdm/__pycache__/dask.cpython-310.pyc,, +tqdm/__pycache__/gui.cpython-310.pyc,, +tqdm/__pycache__/keras.cpython-310.pyc,, +tqdm/__pycache__/notebook.cpython-310.pyc,, +tqdm/__pycache__/rich.cpython-310.pyc,, +tqdm/__pycache__/std.cpython-310.pyc,, +tqdm/__pycache__/tk.cpython-310.pyc,, +tqdm/__pycache__/utils.cpython-310.pyc,, +tqdm/__pycache__/version.cpython-310.pyc,, +tqdm/_dist_ver.py,sha256=m5AdYI-jB-v6P0VJ_70isH_p24EzSOGSwVvuAZmkmKY,23 +tqdm/_main.py,sha256=9ySvgmi_2Sw4CAo5UDW0Q2dxfTryboEWGHohfCJz0sA,283 +tqdm/_monitor.py,sha256=Uku-DPWgzJ7dO5CK08xKJK-E_F6qQ-JB3ksuXczSYR0,3699 +tqdm/_tqdm.py,sha256=LfLCuJ6bpsVo9xilmtBXyEm1vGnUCFrliW85j3J-nD4,283 
+tqdm/_tqdm_gui.py,sha256=03Hc8KayxJveieI5-0-2NGiDpLvw9jZekofJUV7CCwk,287 +tqdm/_tqdm_notebook.py,sha256=BuHiLuxu6uEfZFaPJW3RPpPaxaVctEQA3kdSJSDL1hw,307 +tqdm/_tqdm_pandas.py,sha256=c9jptUgigN6axRDhRd4Rif98Tmxeopc1nFNFhIpbFUE,888 +tqdm/_utils.py,sha256=_4E73bfDj4f1s3sM42NLHNrZDOkijZoWq-n6xWLkdZ8,553 +tqdm/asyncio.py,sha256=Kp2rSkNRf9KRqa3d9YpgeZQ7L7EZf2Ki4bSc7UPIyoo,2757 +tqdm/auto.py,sha256=nDZflj6p2zKkjBCNBourrhS81zYfZy1_dQvbckrdW8o,871 +tqdm/autonotebook.py,sha256=Yb9F5uaiBPhfbDDFpbtoG8I2YUw3uQJ89rUDLbfR6ws,956 +tqdm/cli.py,sha256=SbKlN8QyZ2ogenqt-wT_p6_sx2OOdCjCyhoZBFnlmyI,11010 +tqdm/completion.sh,sha256=j79KbSmpIj_E11jfTfBXrGnUTzKXVpQ1vGVQvsyDRl4,946 +tqdm/contrib/__init__.py,sha256=OgSwVXm-vlDJ-2imtoQ9z8qdom4snMSRztH72KMA82A,2494 +tqdm/contrib/__pycache__/__init__.cpython-310.pyc,, +tqdm/contrib/__pycache__/bells.cpython-310.pyc,, +tqdm/contrib/__pycache__/concurrent.cpython-310.pyc,, +tqdm/contrib/__pycache__/discord.cpython-310.pyc,, +tqdm/contrib/__pycache__/itertools.cpython-310.pyc,, +tqdm/contrib/__pycache__/logging.cpython-310.pyc,, +tqdm/contrib/__pycache__/slack.cpython-310.pyc,, +tqdm/contrib/__pycache__/telegram.cpython-310.pyc,, +tqdm/contrib/__pycache__/utils_worker.cpython-310.pyc,, +tqdm/contrib/bells.py,sha256=Yx1HqGCmHrESCAO700j5wE__JCleNODJxedh1ijPLD0,837 +tqdm/contrib/concurrent.py,sha256=K1yjloKS5WRNFyjLRth0DmU5PAnDbF0A-GD27N-J4a8,3986 +tqdm/contrib/discord.py,sha256=MtVIL1s_dxH21G4sL8FBgQ4Wei23ho9Ek5T-AommvNc,5243 +tqdm/contrib/itertools.py,sha256=WdKKQU5eSzsqHu29SN_oH12huYZo0Jihqoi9-nVhwz4,774 +tqdm/contrib/logging.py,sha256=NsYtnKttj2mMrGm58mEdo5a9DP_2vv8pZyrimSuWulA,3760 +tqdm/contrib/slack.py,sha256=eP_Mr5sQonYniHxxQNGue3jk2JkIPmPWFZqIYxnOui0,4007 +tqdm/contrib/telegram.py,sha256=vn_9SATMbbwn2PAbzSDyOX6av3eBB01QBug11P4H-Og,5008 +tqdm/contrib/utils_worker.py,sha256=HJP5Mz1S1xyzEke2JaqJ2sYLHXADYoo2epT5AzQ38eA,1207 +tqdm/dask.py,sha256=9Ei58eVqTossRLhAfWyUFCduXYKjmLmwkaXIy-CHYfs,1319 +tqdm/gui.py,sha256=STIB3K8iDzDgkNUqWIpvcI_u0OGtbGNy5NwpALXhfWs,5479 +tqdm/keras.py,sha256=op9sBkb6q6c6dw2wJ0SD2ZwpPK7yM1Vbg4l1Qiy3MIo,4373 +tqdm/notebook.py,sha256=GtZ3IapLL1v8WNDaTSvPw0bJGTyfp71Vfz5HDnAzx1M,10895 +tqdm/rich.py,sha256=YyMPkEHVyYUVUR3adJKbVX26iTmNKpNMf3DEqmm-m60,5021 +tqdm/std.py,sha256=tWjz6-QCa92aqYjz7PIdkLUCAfiy-lJZheBtZyIIyO0,57461 +tqdm/tk.py,sha256=Gu0uwXwLCGPRGHORdi3WvBLGiseUp_xxX_h_gp9VpK0,6701 +tqdm/tqdm.1,sha256=aILyUPk2S4OPe_uWy2P4AMjUf0oQ6PUW0nLYXB-BWwI,7889 +tqdm/utils.py,sha256=6E0BQw3Sg7uGWKBM_cDn3P42tXswRhzkggbhBgLDjl8,11821 +tqdm/version.py,sha256=-1yWjfu3P0eghVsysHH07fbzdiADNRdzRtYPqOaqR2A,333 diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ae527e7d64811439e61b93aa375defb30e06edfe --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.6.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/entry_points.txt b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..540e60f4e073bc53a5f0a521a3639e0d80780af4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +tqdm = tqdm.cli:main diff --git a/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/top_level.txt 
b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..78620c472c9d799a14ccb02a0233f4669b3bcdcb --- /dev/null +++ b/venv/lib/python3.10/site-packages/tqdm-4.67.1.dist-info/top_level.txt @@ -0,0 +1 @@ +tqdm diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/METADATA b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..1e411d8f16ed2993132b521ce537997b204a6c6b --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/METADATA @@ -0,0 +1,2427 @@ +Metadata-Version: 2.4 +Name: yarl +Version: 1.20.1 +Summary: Yet another URL library +Home-page: https://github.com/aio-libs/yarl +Author: Andrew Svetlov +Author-email: andrew.svetlov@gmail.com +Maintainer: aiohttp team +Maintainer-email: team@aiohttp.org +License: Apache-2.0 +Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org +Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org +Project-URL: CI: GitHub Workflows, https://github.com/aio-libs/yarl/actions?query=branch:master +Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md +Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/yarl +Project-URL: Docs: Changelog, https://yarl.aio-libs.org/en/latest/changes/ +Project-URL: Docs: RTD, https://yarl.aio-libs.org +Project-URL: GitHub: issues, https://github.com/aio-libs/yarl/issues +Project-URL: GitHub: repo, https://github.com/aio-libs/yarl +Keywords: cython,cext,yarl +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: NOTICE +Requires-Dist: idna>=2.0 +Requires-Dist: multidict>=4.0 +Requires-Dist: propcache>=0.2.1 +Dynamic: license-file + +yarl +==== + +The module provides a handy URL class for URL parsing and changing. + +.. image:: https://github.com/aio-libs/yarl/workflows/CI/badge.svg + :target: https://github.com/aio-libs/yarl/actions?query=workflow%3ACI + :align: right + +.. image:: https://codecov.io/gh/aio-libs/yarl/graph/badge.svg?flag=pytest + :target: https://app.codecov.io/gh/aio-libs/yarl?flags[]=pytest + :alt: Codecov coverage for the pytest-driven measurements + +.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json + :target: https://codspeed.io/aio-libs/yarl + +..
image:: https://badge.fury.io/py/yarl.svg + :target: https://badge.fury.io/py/yarl + +.. image:: https://readthedocs.org/projects/yarl/badge/?version=latest + :target: https://yarl.aio-libs.org + +.. image:: https://img.shields.io/pypi/pyversions/yarl.svg + :target: https://pypi.python.org/pypi/yarl + +.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs:matrix.org + :alt: Matrix Room — #aio-libs:matrix.org + +.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat + :target: https://matrix.to/#/%23aio-libs-space:matrix.org + :alt: Matrix Space — #aio-libs-space:matrix.org + + +Introduction +------------ + +A URL is constructed from a ``str``: + +.. code-block:: pycon + + >>> from yarl import URL + >>> url = URL('https://www.python.org/~guido?arg=1#frag') + >>> url + URL('https://www.python.org/~guido?arg=1#frag') + +All URL parts: *scheme*, *user*, *password*, *host*, *port*, *path*, +*query* and *fragment* are accessible via properties: + +.. code-block:: pycon + + >>> url.scheme + 'https' + >>> url.host + 'www.python.org' + >>> url.path + '/~guido' + >>> url.query_string + 'arg=1' + >>> url.query + <MultiDictProxy('arg': '1')> + >>> url.fragment + 'frag' + +All URL manipulations produce a new URL object: + +.. code-block:: pycon + + >>> url = URL('https://www.python.org') + >>> url / 'foo' / 'bar' + URL('https://www.python.org/foo/bar') + >>> url / 'foo' % {'bar': 'baz'} + URL('https://www.python.org/foo?bar=baz') + +Strings passed to the constructor and to modification methods are +automatically encoded, giving the canonical representation as a result: + +.. code-block:: pycon + + >>> url = URL('https://www.python.org/шлях') + >>> url + URL('https://www.python.org/%D1%88%D0%BB%D1%8F%D1%85') + +Regular properties are *percent-decoded*; use the ``raw_``-prefixed versions to get the *encoded* strings: + +.. code-block:: pycon + + >>> url.path + '/шлях' + + >>> url.raw_path + '/%D1%88%D0%BB%D1%8F%D1%85' + +A human-readable representation of the URL is available via ``.human_repr()``: + +.. code-block:: pycon + + >>> url.human_repr() + 'https://www.python.org/шлях'
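+As a quick, hedged illustration of combining the manipulations above (the path, query and fragment values are made up for the example; each step returns a fresh ``URL``): + +.. code-block:: pycon + + >>> from yarl import URL + >>> base = URL('https://www.python.org') + >>> # '/' appends path segments, '%' merges query params, with_fragment() sets the fragment + >>> (base / 'search' % {'q': 'yarl'}).with_fragment('top') + URL('https://www.python.org/search?q=yarl#top')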
+For full documentation please read https://yarl.aio-libs.org. + + +Installation +------------ + +:: + + $ pip install yarl + +The library is Python 3 only! + +PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install +``yarl`` on another operating system where wheels are not provided, +the tarball will be used to compile the library from +the source code. It requires a C compiler and Python headers to be installed. + +To skip the compilation you must explicitly opt-in by using a PEP 517 +configuration setting ``pure-python``, or by setting the ``YARL_NO_EXTENSIONS`` +environment variable to a non-empty value, e.g.: + +.. code-block:: console + + $ pip install yarl --config-settings=pure-python=true + +Please note that the pure-Python (uncompiled) version is much slower. However, +PyPy always uses a pure-Python implementation, and, as such, it is unaffected +by this setting. + +Dependencies +------------ + +YARL requires the multidict_ and propcache_ libraries. + + +API documentation +------------------ + +The documentation is located at https://yarl.aio-libs.org. + + +Why isn't boolean supported by the URL query API? +------------------------------------------------- + +There is no standard for representing boolean values in URL query strings. + +Some systems prefer ``true``/``false``, others like ``yes``/``no``, ``on``/``off``, +``Y``/``N``, ``1``/``0``, etc. + +``yarl`` cannot make an unambiguous decision on how to serialize ``bool`` values because +it is specific to how the end-user's application is built and would be different for +different apps. The library doesn't accept booleans in the API; a user should convert +bools into strings using their own preferred translation protocol.
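+For example, a minimal sketch of one such translation (the endpoint and flag names are illustrative only): + +.. code-block:: pycon + + >>> from yarl import URL + >>> flags = {'verbose': True, 'dry_run': False} + >>> # convert the bools to strings explicitly before handing them to yarl + >>> URL('https://example.com/run').with_query({k: str(v).lower() for k, v in flags.items()}) + URL('https://example.com/run?verbose=true&dry_run=false')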
+ +Comparison with other URL libraries +------------------------------------ + +* furl (https://pypi.python.org/pypi/furl) + + The library has rich functionality but the ``furl`` object is mutable. + + I'm afraid to pass this object into foreign code: who knows whether that + code will modify my URL in a terrible way while I just want to send the URL + with handy helpers for accessing URL properties. + + ``furl`` has other non-obvious tricky things but the main objection + is mutability. + +* URLObject (https://pypi.python.org/pypi/URLObject) + + URLObject is immutable, which is pretty good. + + Every URL change generates a new URL object. + + But the library doesn't do any decode/encode transformations, leaving the + end user to cope with these gory details. + + +Source code +----------- + +The project is hosted on GitHub_. + +Please file an issue on the `bug tracker +`_ if you have found a bug +or have a suggestion for improving the library. + +Discussion list +--------------- + +*aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs + +Feel free to post your questions and ideas here. + + +Authors and License +------------------- + +The ``yarl`` package is written by Andrew Svetlov. + +It's *Apache 2* licensed and freely available. + + +.. _GitHub: https://github.com/aio-libs/yarl + +.. _multidict: https://github.com/aio-libs/multidict + +.. _propcache: https://github.com/aio-libs/propcache + +========= +Changelog +========= + +.. + You should *NOT* be adding new change log entries to this file, this + file is managed by towncrier. You *may* edit previous change logs to + fix problems like typo corrections or such. + To add a new change log entry, please see + https://pip.pypa.io/en/latest/development/#adding-a-news-entry + we named the news folder "changes". + + WARNING: Don't drop the next directive! + +.. towncrier release notes start + +1.20.1 +====== + +*(2025-06-09)* + + +Bug fixes +--------- + +- Started raising a ``ValueError`` exception for corrupted + IPv6 URL values. + + This fixes an issue where an ``IndexError`` exception was + leaking from the internal code because it was not handled and + transformed into a user-facing error. The problem was happening + under the following conditions: an empty IPv6 URL, or brackets in + reverse order. + + -- by `@MaelPic `__. + + *Related issues and pull requests on GitHub:* + `#1512 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Updated to use Cython 3.1 universally across the build path -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#1514 `__. + +- Made Cython line tracing opt-in via the ``with-cython-tracing`` build config setting -- by `@bdraco `__. + + Previously, line tracing was enabled by default in ``pyproject.toml``, which caused build issues for some users and made wheels nearly twice as slow. + + Now line tracing is only enabled when explicitly requested via ``pip install . --config-setting=with-cython-tracing=true`` or by setting the ``YARL_CYTHON_TRACING`` environment variable. + + *Related issues and pull requests on GitHub:* + `#1521 `__. + + +---- + + +1.20.0 +====== + +*(2025-04-16)* + + +Features +-------- + +- Implemented support for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#1456 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Started building wheels for the free-threaded build of CPython 3.13 -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#1456 `__. + + +---- + + +1.19.0 +====== + +*(2025-04-05)* + + +Bug fixes +--------- + +- Fixed the entire name being re-encoded when using ``yarl.URL.with_suffix()`` -- by `@NTFSvolume `__. + + *Related issues and pull requests on GitHub:* + `#1468 `__. + + +Features +-------- + +- Started building armv7l wheels for manylinux -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1495 `__. + + +Contributor-facing changes +-------------------------- + +- GitHub Actions CI/CD is now configured to manage caching pip-ecosystem + dependencies using `re-actors/cache-python-deps`_ -- an action by + `@webknjaz `__ that takes into account ABI stability and the exact + version of the Python runtime. + + .. _`re-actors/cache-python-deps`: + https://github.com/marketplace/actions/cache-python-deps + + *Related issues and pull requests on GitHub:* + `#1471 `__. + +- Increased the minimum `propcache`_ version to 0.2.1 to fix failing tests -- by `@bdraco `__. + + .. _`propcache`: + https://github.com/aio-libs/propcache + + *Related issues and pull requests on GitHub:* + `#1479 `__. + +- Added all hidden folders to pytest's ``norecursedirs`` to prevent it + from trying to collect tests there -- by `@lysnikolaou `__. + + *Related issues and pull requests on GitHub:* + `#1480 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved accuracy of type annotations -- by `@Dreamsorcerer `__. + + *Related issues and pull requests on GitHub:* + `#1484 `__. + +- Improved performance of parsing query strings -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1493 `__, `#1497 `__. + +- Improved performance of the C unquoter -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1496 `__, `#1498 `__. + + +---- + + +1.18.3 +====== + +*(2024-12-01)* + + +Bug fixes +--------- + +- Fixed uppercase ASCII hosts being rejected by ``URL.build()`` and ``yarl.URL.with_host()`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#954 `__, `#1442 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of multiple path properties on cache miss -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1443 `__. + + +---- + + +1.18.2 +====== + +*(2024-11-29)* + + +No significant changes. + + +---- + + +1.18.1 +====== + +*(2024-11-29)* + + +Miscellaneous internal changes +------------------------------ + +- Improved cache performance when ``~yarl.URL`` objects are constructed from ``yarl.URL.build()`` with ``encoded=True`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1432 `__. + +- Improved cache performance for operations that produce a new ``~yarl.URL`` object -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1434 `__, `#1436 `__. + + +---- + + +1.18.0 +====== + +*(2024-11-21)* + + +Features +-------- + +- Added ``keep_query`` and ``keep_fragment`` flags in the ``yarl.URL.with_path()``, ``yarl.URL.with_name()`` and ``yarl.URL.with_suffix()`` methods, allowing users to optionally retain the query string and fragment in the resulting URL when replacing the path (see the sketch after this section) -- by `@paul-nameless `__. + + *Related issues and pull requests on GitHub:* + `#111 `__, `#1421 `__. + + +Contributor-facing changes +-------------------------- + +- Started running downstream ``aiohttp`` tests in CI -- by `@Cycloctane `__. + + *Related issues and pull requests on GitHub:* + `#1415 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of converting ``~yarl.URL`` to a string -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1422 `__.
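+A hedged sketch of the 1.18.0 ``keep_query``/``keep_fragment`` flags (the reprs are assumed from the semantics described above; by default ``with_path()`` drops both parts): + +.. code-block:: pycon + + >>> from yarl import URL + >>> url = URL('https://example.com/old?a=1#frag') + >>> url.with_path('/new') + URL('https://example.com/new') + >>> url.with_path('/new', keep_query=True, keep_fragment=True) + URL('https://example.com/new?a=1#frag')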
+ + +---- + + +1.17.2 +====== + +*(2024-11-17)* + + +Bug fixes +--------- + +- Stopped implicitly allowing the use of Cython pre-release versions when + building the distribution package -- by `@ajsanchezsanz `__ and + `@markgreene74 `__. + + *Related issues and pull requests on GitHub:* + `#1411 `__, `#1412 `__. + +- Fixed a bug causing ``~yarl.URL.port`` to return the default port when the given port was zero + -- by `@gmacon `__. + + *Related issues and pull requests on GitHub:* + `#1413 `__. + + +Features +-------- + +- Made error messages include details of the incorrect type when ``port`` is not an ``int`` in ``yarl.URL.build()`` + -- by `@Cycloctane `__. + + *Related issues and pull requests on GitHub:* + `#1414 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Stopped implicitly allowing the use of Cython pre-release versions when + building the distribution package -- by `@ajsanchezsanz `__ and + `@markgreene74 `__. + + *Related issues and pull requests on GitHub:* + `#1411 `__, `#1412 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the ``yarl.URL.joinpath()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1418 `__. + + +---- + + +1.17.1 +====== + +*(2024-10-30)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of many ``~yarl.URL`` methods -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1396 `__, `#1397 `__, `#1398 `__. + +- Improved performance of passing a ``dict`` or ``str`` to ``yarl.URL.extend_query()`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1401 `__. + + +---- + + +1.17.0 +====== + +*(2024-10-28)* + + +Features +-------- + +- Added ``~yarl.URL.host_port_subcomponent`` which returns the RFC 3986 section 3.2.2 host and RFC 3986 section 3.2.3 port subcomponent -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1375 `__. + + +---- + + +1.16.0 +====== + +*(2024-10-21)* + + +Bug fixes +--------- + +- Fixed blocking I/O to load Python code when creating a new ``~yarl.URL`` with non-ascii characters in the network location part -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1342 `__. + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Migrated to using a single cache for encoding hosts -- by `@bdraco `__.
+ + Passing ``ip_address_size`` and ``host_validate_size`` to ``yarl.cache_configure()`` is deprecated in favor of the new ``encode_host_size`` parameter and will be removed in a future release. For backwards compatibility, the old parameters affect the ``encode_host`` cache size. + + *Related issues and pull requests on GitHub:* + `#1348 `__, `#1357 `__, `#1363 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of constructing ``~yarl.URL`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1336 `__. + +- Improved performance of calling ``yarl.URL.build()`` and constructing unencoded ``~yarl.URL`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1345 `__. + +- Reworked the internal encoding cache to improve performance on cache hit -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1369 `__. + + +---- + + +1.15.5 +====== + +*(2024-10-18)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the ``yarl.URL.joinpath()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1304 `__. + +- Improved performance of the ``yarl.URL.extend_query()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1305 `__. + +- Improved performance of the ``yarl.URL.origin()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1306 `__. + +- Improved performance of the ``yarl.URL.with_path()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1307 `__. + +- Improved performance of the ``yarl.URL.with_query()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1308 `__, `#1328 `__. + +- Improved performance of the ``yarl.URL.update_query()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1309 `__, `#1327 `__. + +- Improved performance of the ``yarl.URL.join()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1313 `__. + +- Improved performance of ``~yarl.URL`` equality checks -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1315 `__. + +- Improved performance of ``~yarl.URL`` methods that modify the network location -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1316 `__. + +- Improved performance of the ``yarl.URL.with_fragment()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1317 `__. + +- Improved performance of calculating the hash of ``~yarl.URL`` objects -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1318 `__. + +- Improved performance of the ``yarl.URL.relative()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1319 `__. + +- Improved performance of the ``yarl.URL.with_name()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1320 `__. + +- Improved performance of ``~yarl.URL.parent`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1321 `__. + +- Improved performance of the ``yarl.URL.with_scheme()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1322 `__. + + +---- + + +1.15.4 +====== + +*(2024-10-16)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the quoter when all characters are safe -- by `@bdraco `__. 
+ + *Related issues and pull requests on GitHub:* + `#1288 `__. + +- Improved performance of unquoting strings -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1292 `__, `#1293 `__. + +- Improved performance of calling ``yarl.URL.build()`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1297 `__. + + +---- + + +1.15.3 +====== + +*(2024-10-15)* + + +Bug fixes +--------- + +- Fixed ``yarl.URL.build()`` failing to validate paths must start with a ``/`` when passing ``authority`` -- by `@bdraco `__. + + The validation only worked correctly when passing ``host``. + + *Related issues and pull requests on GitHub:* + `#1265 `__. + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Removed support for Python 3.8 as it has reached end of life -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1203 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of constructing ``~yarl.URL`` when the net location is only the host -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1271 `__. + + +---- + + +1.15.2 +====== + +*(2024-10-13)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of converting ``~yarl.URL`` to a string -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1234 `__. + +- Improved performance of ``yarl.URL.joinpath()`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1248 `__, `#1250 `__. + +- Improved performance of constructing query strings from ``~multidict.MultiDict`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1256 `__. + +- Improved performance of constructing query strings with ``int`` values -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1259 `__. + + +---- + + +1.15.1 +====== + +*(2024-10-12)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of calling ``yarl.URL.build()`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1222 `__. + +- Improved performance of all ``~yarl.URL`` methods that create new ``~yarl.URL`` objects -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1226 `__. + +- Improved performance of ``~yarl.URL`` methods that modify the network location -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1229 `__. + + +---- + + +1.15.0 +====== + +*(2024-10-11)* + + +Bug fixes +--------- + +- Fixed validation with ``yarl.URL.with_scheme()`` when passed scheme is not lowercase -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1189 `__. + + +Features +-------- + +- Started building ``armv7l`` wheels -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1204 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of constructing unencoded ``~yarl.URL`` objects -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1188 `__. + +- Added a cache for parsing hosts to reduce overhead of encoding ``~yarl.URL`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1190 `__. + +- Improved performance of constructing query strings from ``~collections.abc.Mapping`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1193 `__. 
+ +- Improved performance of converting ``~yarl.URL`` objects to strings -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1198 `__. + + +---- + + +1.14.0 +====== + +*(2024-10-08)* + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Switched to using the ``propcache`` package for property caching + -- by `@bdraco `__. + + The ``propcache`` package is derived from the property caching + code in ``yarl`` and has been broken out to avoid maintaining it for multiple + projects. + + *Related issues and pull requests on GitHub:* + `#1169 `__. + + +Contributor-facing changes +-------------------------- + +- Started testing with Hypothesis -- by `@webknjaz `__ and `@bdraco `__. + + Special thanks to `@Zac-HD `__ for helping us get started with this framework. + + *Related issues and pull requests on GitHub:* + `#860 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of ``yarl.URL.is_default_port()`` when no explicit port is set -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1168 `__. + +- Improved performance of converting ``~yarl.URL`` to a string when no explicit port is set -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1170 `__. + +- Improved performance of the ``yarl.URL.origin()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1175 `__. + +- Improved performance of encoding hosts -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1176 `__. + + +---- + + +1.13.1 +====== + +*(2024-09-27)* + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of calling ``yarl.URL.build()`` with ``authority`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1163 `__. + + +---- + + +1.13.0 +====== + +*(2024-09-26)* + + +Bug fixes +--------- + +- Started rejecting ASCII hostnames with invalid characters. For host strings that + look like authority strings, the exception message includes advice on what to do + instead -- by `@mjpieters `__. + + *Related issues and pull requests on GitHub:* + `#880 `__, `#954 `__. + +- Fixed IPv6 addresses missing brackets when the ``~yarl.URL`` was converted to a string -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1157 `__, `#1158 `__. + + +Features +-------- + +- Added ``~yarl.URL.host_subcomponent`` which returns the ``3986#section-3.2.2`` host subcomponent -- by `@bdraco `__. + + The only current practical difference between ``~yarl.URL.raw_host`` and ``~yarl.URL.host_subcomponent`` is that IPv6 addresses are returned bracketed. + + *Related issues and pull requests on GitHub:* + `#1159 `__. + + +---- + + +1.12.1 +====== + +*(2024-09-23)* + + +No significant changes. + + +---- + + +1.12.0 +====== + +*(2024-09-23)* + + +Features +-------- + +- Added ``~yarl.URL.path_safe`` to be able to fetch the path without ``%2F`` and ``%25`` decoded -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1150 `__. + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Restore decoding ``%2F`` (``/``) in ``URL.path`` -- by `@bdraco `__. + + This change restored the behavior before `#1057 `__. + + *Related issues and pull requests on GitHub:* + `#1151 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of processing paths -- by `@bdraco `__. 
+ + *Related issues and pull requests on GitHub:* + `#1143 `__. + + +---- + + +1.11.1 +====== + +*(2024-09-09)* + + +Bug fixes +--------- + +- Allowed scheme replacement for relative URLs if the scheme does not require a host -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#280 `__, `#1138 `__. + +- Allowed empty host for URL schemes other than the special schemes listed in the WHATWG URL spec -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1136 `__. + + +Features +-------- + +- Loosened restriction on integers as query string values to allow classes that implement ``__int__`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1139 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of normalizing paths -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1137 `__. + + +---- + + +1.11.0 +====== + +*(2024-09-08)* + + +Features +-------- + +- Added the ``URL.extend_query()`` method, which can be used to extend parameters without replacing same-named keys (see the sketch after this section) -- by `@bdraco `__. + + This method was primarily added to replace the inefficient hand-rolled method currently used in ``aiohttp``. + + *Related issues and pull requests on GitHub:* + `#1128 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of the Cython ``cached_property`` implementation -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1122 `__. + +- Simplified computing ports by removing unnecessary code -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1123 `__. + +- Improved performance of encoding non-IPv6 hosts -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1125 `__. + +- Improved performance of ``URL.build()`` when the path, query string, or fragment is an empty string -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1126 `__. + +- Improved performance of the ``URL.update_query()`` method -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1130 `__. + +- Improved performance of processing query string changes when arguments are ``str`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1131 `__.
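+A hedged sketch of the difference between the two methods (the URL is illustrative; ``update_query()`` replaces a same-named key, while ``extend_query()`` appends to it): + +.. code-block:: pycon + + >>> from yarl import URL + >>> url = URL('https://example.com/?a=1') + >>> url.update_query({'a': '2'}) + URL('https://example.com/?a=2') + >>> url.extend_query({'a': '2'}) + URL('https://example.com/?a=1&a=2')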
+ + +---- + + +1.10.0 +====== + +*(2024-09-06)* + + +Bug fixes +--------- + +- Fixed joining a path when the existing path was empty -- by `@bdraco `__. + + A regression in ``URL.join()`` was introduced in `#1082 `__. + + *Related issues and pull requests on GitHub:* + `#1118 `__. + + +Features +-------- + +- Added the ``URL.without_query_params()`` method, to drop some parameters from the query string (see the sketch after this section) -- by `@hongquan `__. + + *Related issues and pull requests on GitHub:* + `#774 `__, `#898 `__, `#1010 `__. + +- The previously protected types ``_SimpleQuery``, ``_QueryVariable``, and ``_Query`` are now available for use externally as ``SimpleQuery``, ``QueryVariable``, and ``Query`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1050 `__, `#1113 `__. + + +Contributor-facing changes +-------------------------- + +- Replaced all ``~typing.Optional`` with ``~typing.Union`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1095 `__. + + +Miscellaneous internal changes +------------------------------ + +- Significantly improved performance of parsing the network location -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1112 `__. + +- Added internal types to the cache to prevent future refactoring errors -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1117 `__.
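+A hedged sketch of ``without_query_params()`` (assuming the varargs-of-keys signature implied by the entry above; the URL is illustrative): + +.. code-block:: pycon + + >>> from yarl import URL + >>> url = URL('https://example.com/?a=1&b=2&c=3') + >>> url.without_query_params('a', 'c') + URL('https://example.com/?b=2')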
+ + +---- + + +1.9.11 +====== + +*(2024-09-04)* + + +Bug fixes +--------- + +- Fixed a ``TypeError`` with ``MultiDictProxy`` and Python 3.8 -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1084 `__, `#1105 `__, `#1107 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of encoding hosts -- by `@bdraco `__. + + Previously, the library would unconditionally try to parse a host as an IP address. The library now avoids trying to parse a host as an IP address if the string is not in one of the formats described in RFC 3986 section 3.2.2. + + *Related issues and pull requests on GitHub:* + `#1104 `__. + + +---- + + +1.9.10 +====== + +*(2024-09-04)* + + +Bug fixes +--------- + +- ``URL.join()`` has been changed to match + RFC 3986 and align with + the ``/`` operation and ``URL.joinpath()`` + when joining URLs with empty segments. + Previously ``urllib.parse.urljoin`` was used, + which has known issues with empty segments + (`python/cpython#84774 `_). + + Due to the semantics of ``URL.join()``, joining a + URL with a scheme requires making it relative, prefixing it with ``./``. + + .. code-block:: pycon + + >>> URL("https://web.archive.org/web/").join(URL("./https://github.com/aio-libs/yarl")) + URL('https://web.archive.org/web/https://github.com/aio-libs/yarl') + + Empty segments are honored in the base as well as the joined part. + + .. code-block:: pycon + + >>> URL("https://web.archive.org/web/https://").join(URL("github.com/aio-libs/yarl")) + URL('https://web.archive.org/web/https://github.com/aio-libs/yarl') + + -- by `@commonism `__ + + This change initially appeared in 1.9.5 but was reverted in 1.9.6 to resolve a problem with query string handling. + + *Related issues and pull requests on GitHub:* + `#1039 `__, `#1082 `__. + + +Features +-------- + +- Added ``~yarl.URL.absolute`` which is now preferred over ``URL.is_absolute()`` (see the sketch after this section) -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1100 `__.
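+A hedged sketch of the ``absolute`` property (assumed semantics: a URL counts as absolute when it carries a network location): + +.. code-block:: pycon + + >>> from yarl import URL + >>> URL('https://example.com/path').absolute + True + >>> URL('/path').absolute + False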
+ + +---- + + +1.9.9 +===== + +*(2024-09-04)* + + +Bug fixes +--------- + +- Added missing type on ``~yarl.URL.port`` -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1097 `__. + + +---- + + +1.9.8 +===== + +*(2024-09-03)* + + +Features +-------- + +- Covered the ``~yarl.URL`` object with types -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1084 `__. + +- Cache parsing of IP addresses when encoding hosts -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1086 `__. + + +Contributor-facing changes +-------------------------- + +- Covered the ``~yarl.URL`` object with types -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1084 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of handling ports -- by `@bdraco `__. + + *Related issues and pull requests on GitHub:* + `#1081 `__. + + +---- + + +1.9.7 +===== + +*(2024-09-01)* + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Removed support for RFC 3986 section 3.2.3 port normalization when the scheme is not one of ``http``, ``https``, ``wss``, or ``ws`` -- by `@bdraco `__. + + Support for port normalization was recently added in `#1033 `__ and contained code that would do blocking I/O if the scheme was not one of the four listed above. The code has been removed because this library is intended to be safe for usage with ``asyncio``. + + *Related issues and pull requests on GitHub:* + `#1076 `__. + + +Miscellaneous internal changes +------------------------------ + +- Improved performance of property caching -- by `@bdraco `__. + + The ``reify`` implementation from ``aiohttp`` was adapted to replace the internal ``cached_property`` implementation. + + *Related issues and pull requests on GitHub:* + `#1070 `__. + + +---- + + +1.9.6 +===== + +*(2024-08-30)* + + +Bug fixes +--------- + +- Reverted the RFC 3986 compatible ``URL.join()`` honoring empty segments which was introduced in `#1039 `__. + + This change introduced a regression handling query string parameters with joined URLs. The change was reverted to maintain compatibility with the previous behavior. + + *Related issues and pull requests on GitHub:* + `#1067 `__. + + +---- + + +1.9.5 +===== + +*(2024-08-30)* + + +Bug fixes +--------- + +- Joining URLs with empty segments has been changed + to match RFC 3986. + + Previously empty segments would be removed from the path, + breaking use-cases such as + + .. code-block:: python + + URL("https://web.archive.org/web/") / "https://github.com/" + + Now the ``/`` operation and ``URL.joinpath()`` + keep empty segments, but do not introduce new empty segments. + e.g. + + .. code-block:: python + + URL("https://example.org/") / "" + + does not introduce an empty segment. + + -- by `@commonism `__ and `@youtux `__ + + *Related issues and pull requests on GitHub:* + `#1026 `__. + +- The default protocol ports of well-known URI schemes are now taken into account + during the normalization of the URL string representation in accordance with + RFC 3986 section 3.2.3. + + Specified ports are removed from the ``str`` representation of a ``~yarl.URL`` + if the port matches the scheme's default port -- by `@commonism `__. + + *Related issues and pull requests on GitHub:* + `#1033 `__. + +- ``URL.join()`` has been changed to match + RFC 3986 and align with + the ``/`` operation and ``URL.joinpath()`` + when joining URLs with empty segments. + Previously ``urllib.parse.urljoin`` was used, + which has known issues with empty segments + (`python/cpython#84774 `_). + + Due to the semantics of ``URL.join()``, joining a + URL with a scheme requires making it relative, prefixing it with ``./``. + + .. code-block:: pycon + + >>> URL("https://web.archive.org/web/").join(URL("./https://github.com/aio-libs/yarl")) + URL('https://web.archive.org/web/https://github.com/aio-libs/yarl') + + Empty segments are honored in the base as well as the joined part. + + .. code-block:: pycon + + >>> URL("https://web.archive.org/web/https://").join(URL("github.com/aio-libs/yarl")) + URL('https://web.archive.org/web/https://github.com/aio-libs/yarl') + + -- by `@commonism `__ + + *Related issues and pull requests on GitHub:* + `#1039 `__. + + +Removals and backward incompatible breaking changes +--------------------------------------------------- + +- Stopped decoding ``%2F`` (``/``) in ``URL.path``, as this could lead to code incorrectly treating it as a path separator + -- by `@Dreamsorcerer `__. + + *Related issues and pull requests on GitHub:* + `#1057 `__. + +- Dropped support for Python 3.7 -- by `@Dreamsorcerer `__. + + *Related issues and pull requests on GitHub:* + `#1016 `__. + + +Improved documentation +---------------------- + +- On the ``Contributing docs`` page, + a link to the ``Towncrier philosophy`` has been fixed. + + *Related issues and pull requests on GitHub:* + `#981 `__. + +- The pre-existing ``/`` magic method + has been documented in the API reference -- by `@commonism `__. + + *Related issues and pull requests on GitHub:* + `#1026 `__. + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Fixed a flaw in the logic for copying the project directory into a + temporary folder that led to infinite recursion when ``TMPDIR`` + was set to a project subdirectory path. This was happening in Fedora + and its downstream due to the use of `pyproject-rpm-macros + `__. It was + only reproducible with ``pip wheel`` and was not affecting the + ``pyproject-build`` users. + + -- by `@hroncok `__ and `@webknjaz `__ + + *Related issues and pull requests on GitHub:* + `#992 `__, `#1014 `__. + +- Support Python 3.13 and publish non-free-threaded wheels + + *Related issues and pull requests on GitHub:* + `#1054 `__. + + +Contributor-facing changes +-------------------------- + +- The CI/CD setup has been updated to test ``arm64`` wheels + under macOS 14, except for Python 3.7 that is unsupported + in that environment -- by `@webknjaz `__. + + *Related issues and pull requests on GitHub:* + `#1015 `__. + +- Removed unused type ignores and casts -- by `@hauntsaninja `__. + + *Related issues and pull requests on GitHub:* + `#1031 `__. + + +Miscellaneous internal changes +------------------------------ + +- ``port``, ``scheme``, and ``raw_host`` are now ``cached_property`` -- by `@bdraco `__. + + ``aiohttp`` accesses these properties quite often, which caused ``urllib`` to build the ``_hostinfo`` property every time. ``port``, ``scheme``, and ``raw_host`` are now cached properties, which will improve performance. + + *Related issues and pull requests on GitHub:* + `#1044 `__, `#1058 `__. + + +---- + + +1.9.4 (2023-12-06) +================== + +Bug fixes +--------- + +- Started raising ``TypeError`` when a string value is passed into + ``yarl.URL.build()`` as the ``port`` argument -- by `@commonism `__. + + Previously the empty string as port would create malformed URLs when rendered as string representations. (`#883 `__) + + +Packaging updates and notes for downstreams +------------------------------------------- + +- The leading ``--`` has been dropped from the `PEP 517 `__ in-tree build + backend config setting names. ``--pure-python`` is now just ``pure-python`` + -- by `@webknjaz `__. + + The usage now looks as follows: + + .. code-block:: console + + $ python -m build \ + --config-setting=pure-python=true \ + --config-setting=with-cython-tracing=true + + (`#963 `__) + + +Contributor-facing changes +-------------------------- + +- A step-by-step ``Release Guide`` has + been added, describing how to release *yarl* -- by `@webknjaz `__. + + This is primarily targeting maintainers. (`#960 `__) +- Coverage collection has been implemented for the Cython modules + -- by `@webknjaz `__. + + It will also be reported to Codecov from any non-release CI jobs. + + To measure coverage in a development environment, *yarl* can be + installed in editable mode: + + .. code-block:: console + + $ python -Im pip install -e . + + Editable install produces C-files required for the Cython coverage + plugin to map the measurements back to the PYX-files.
+ + `#961 `__ + +- It is now possible to request line tracing in Cython builds using the + ``with-cython-tracing`` `PEP 517 `__ config setting + -- `@webknjaz `__. + + This can be used in CI and development environment to measure coverage + on Cython modules, but is not normally useful to the end-users or + downstream packagers. + + Here's a usage example: + + .. code-block:: console + + $ python -Im pip install . --config-settings=with-cython-tracing=true + + For editable installs, this setting is on by default. Otherwise, it's + off unless requested explicitly. + + The following produces C-files required for the Cython coverage + plugin to map the measurements back to the PYX-files: + + .. code-block:: console + + $ python -Im pip install -e . + + Alternatively, the ``YARL_CYTHON_TRACING=1`` environment variable + can be set to do the same as the `PEP 517 `__ config setting. + + `#962 `__ + + +1.9.3 (2023-11-20) +================== + +Bug fixes +--------- + +- Stopped dropping trailing slashes in ``yarl.URL.joinpath()`` -- by `@gmacon `__. (`#862 `__, `#866 `__) +- Started accepting string subclasses in ``yarl.URL.__truediv__()`` operations (``URL / segment``) -- by `@mjpieters `__. (`#871 `__, `#884 `__) +- Fixed the human representation of URLs with square brackets in usernames and passwords -- by `@mjpieters `__. (`#876 `__, `#882 `__) +- Updated type hints to include ``URL.missing_port()``, ``URL.__bytes__()`` + and the ``encoding`` argument to ``yarl.URL.joinpath()`` + -- by `@mjpieters `__. (`#891 `__) + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Integrated Cython 3 to enable building *yarl* under Python 3.12 -- by `@mjpieters `__. (`#829 `__, `#881 `__) +- Declared modern ``setuptools.build_meta`` as the `PEP 517 `__ build + backend in ``pyproject.toml`` explicitly -- by `@webknjaz `__. (`#886 `__) +- Converted most of the packaging setup into a declarative ``setup.cfg`` + config -- by `@webknjaz `__. (`#890 `__) +- The packaging is replaced from an old-fashioned ``setup.py`` to an + in-tree `PEP 517 `__ build backend -- by `@webknjaz `__. + + Whenever the end-users or downstream packagers need to build ``yarl`` from + source (a Git checkout or an sdist), they may pass a ``config_settings`` + flag ``--pure-python``. If this flag is not set, a C-extension will be built + and included into the distribution. + + Here is how this can be done with ``pip``: + + .. code-block:: console + + $ python -m pip install . --config-settings=--pure-python=false + + This will also work with ``-e | --editable``. + + The same can be achieved via ``pypa/build``: + + .. code-block:: console + + $ python -m build --config-setting=--pure-python=false + + Adding ``-w | --wheel`` can force ``pypa/build`` produce a wheel from source + directly, as opposed to building an ``sdist`` and then building from it. (`#893 `__) + + .. attention:: + + v1.9.3 was the only version using the ``--pure-python`` setting name. + Later versions dropped the ``--`` prefix, making it just ``pure-python``. + +- Declared Python 3.12 supported officially in the distribution package metadata + -- by `@edgarrmondragon `__. (`#942 `__) + + +Contributor-facing changes +-------------------------- + +- A regression test for no-host URLs was added per `#821 `__ + and ``3986`` -- by `@kenballus `__. (`#821 `__, `#822 `__) +- Started testing *yarl* against Python 3.12 in CI -- by `@mjpieters `__. 
(`#881 `__) +- All Python 3.12 jobs are now marked as required to pass in CI + -- by `@edgarrmondragon `__. (`#942 `__) +- MyST is now integrated in Sphinx -- by `@webknjaz `__. + + This allows the contributors to author new documents in Markdown + when they have difficulties with writing straight RST. (`#953 `__) + + +1.9.2 (2023-04-25) +================== + +Bugfixes +-------- + +- Fix regression with ``yarl.URL.__truediv__()`` and absolute URLs with empty paths causing the raw path to lack the leading ``/``. + (`#854 `_) + + +1.9.1 (2023-04-21) +================== + +Bugfixes +-------- + +- Marked tests that fail on older Python patch releases (< 3.7.10, < 3.8.8 and < 3.9.2) as expected to fail due to missing a security fix for CVE-2021-23336. (`#850 `_) + + +1.9.0 (2023-04-19) +================== + +This release was never published to PyPI, due to issues with the build process. + +Features +-------- + +- Added ``URL.joinpath(*elements)``, to create a new URL appending multiple path elements. (`#704 `_) +- Made ``URL.__truediv__()`` return ``NotImplemented`` if called with an + unsupported type -- by `@michaeljpeters `__. + (`#832 `_) + + +Bugfixes +-------- + +- Path normalization for absolute URLs no longer raises a ValueError exception + when ``..`` segments would otherwise go beyond the URL path root. + (`#536 `_) +- Fixed an issue with ``update_query()`` not getting rid of the query when the argument is ``None``. (`#792 `_) +- Added input restrictions to the ``with_port()`` function to reject boolean inputs and out-of-range port values; handled the incorrect representation of port 0. (`#793 `_) +- Made ``yarl.URL.build()`` raise a ``TypeError`` if the ``host`` argument is ``None`` -- by `@paulpapacz `__. (`#808 `_) +- Fixed an issue with ``update_query()`` getting rid of the query when the argument + is empty but not ``None``. (`#845 `_) + + +Misc +---- + +- `#220 `_ + + +1.8.2 (2022-12-03) +================== + +This is the first release that started shipping wheels for Python 3.11. + + +1.8.1 (2022-08-01) +================== + +Misc +---- + +- `#694 `_, `#699 `_, `#700 `_, `#701 `_, `#702 `_, `#703 `_, `#739 `_ + + +1.8.0 (2022-08-01) +================== + +Features +-------- + +- Added ``URL.raw_suffix``, ``URL.suffix``, ``URL.raw_suffixes``, ``URL.suffixes``, ``URL.with_suffix``. (`#613 `_) + + +Improved Documentation +---------------------- + +- Fixed broken internal references to ``yarl.URL.human_repr()``. + (`#665 `_) +- Fixed broken external references to ``multidict:index`` docs. (`#665 `_) + + +Deprecations and Removals +------------------------- + +- Dropped Python 3.6 support. (`#672 `_) + + +Misc +---- + +- `#646 `_, `#699 `_, `#701 `_ + + +1.7.2 (2021-11-01) +================== + +Bugfixes +-------- + +- Changed call in ``with_port()`` to stop reencoding parts of the URL that were already encoded. (`#623 `_) + + +1.7.1 (2021-10-07) +================== + +Bugfixes +-------- + +- Fix 1.7.0 build error + +1.7.0 (2021-10-06) +================== + +Features +-------- + +- Add ``__bytes__()`` magic method so that ``bytes(url)`` will work and use optimal ASCII encoding. + (`#582 `_) +- Started shipping platform-specific arm64 wheels for Apple Silicon. (`#622 `_) +- Started shipping platform-specific wheels with the ``musl`` tag targeting typical Alpine Linux runtimes. (`#622 `_) +- Added support for Python 3.10. (`#622 `_) + + +1.6.3 (2020-11-14) +================== + +Bugfixes +-------- + +- No longer lose characters when decoding incorrect percent-sequences (like ``%e2%82%f8``).
All non-decodable percent-sequences are now preserved. + `#517 `_ +- Provide x86 Windows wheels. + `#535 `_ + + +---- + + +1.6.2 (2020-10-12) +================== + + +Bugfixes +-------- + +- Provide generated ``.c`` files in TarBall distribution. + `#530 `_ + +1.6.1 (2020-10-12) +================== + +Features +-------- + +- Provide wheels for ``aarch64``, ``i686``, ``ppc64le``, ``s390x`` architectures on + Linux as well as ``x86_64``. + `#507 `_ +- Provide wheels for Python 3.9. + `#526 `_ + +Bugfixes +-------- + +- ``human_repr()`` now always produces valid representation equivalent to the original URL (if the original URL is valid). + `#511 `_ +- Fixed requoting a single percent followed by a percent-encoded character in the Cython implementation. + `#514 `_ +- Fix ValueError when decoding ``%`` which is not followed by two hexadecimal digits. + `#516 `_ +- Fix decoding ``%`` followed by a space and hexadecimal digit. + `#520 `_ +- Fix annotation of ``with_query()``/``update_query()`` methods for ``key=[val1, val2]`` case. + `#528 `_ + +Removal +------- + +- Drop Python 3.5 support; Python 3.6 is the minimal supported Python version. + + +---- + + +1.6.0 (2020-09-23) +================== + +Features +-------- + +- Allow for int and float subclasses in query, while still denying bool. + `#492 `_ + + +Bugfixes +-------- + +- Do not requote arguments in ``URL.build()``, ``with_xxx()`` and in ``/`` operator. + `#502 `_ +- Keep IPv6 brackets in ``origin()``. + `#504 `_ + + +---- + + +1.5.1 (2020-08-01) +================== + +Bugfixes +-------- + +- Fix including relocated internal ``yarl._quoting_c`` C-extension into published PyPI dists. + `#485 `_ + + +Misc +---- + +- `#484 `_ + + +---- + + +1.5.0 (2020-07-26) +================== + +Features +-------- + +- Convert host to lowercase on URL building. + `#386 `_ +- Allow using ``mod`` operator (``%``) for updating query string (an alias for ``update_query()`` method). + `#435 `_ +- Allow use of sequences such as ``list`` and ``tuple`` in the values + of a mapping such as ``dict`` to represent that a key has many values:: + + url = URL("http://example.com") + assert url.with_query({"a": [1, 2]}) == URL("http://example.com/?a=1&a=2") + + `#443 `_ +- Support ``URL.build()`` with scheme and path (creates a relative URL). + `#464 `_ +- Cache slow IDNA encode/decode calls. + `#476 `_ +- Add ``@final`` / ``Final`` type hints + `#477 `_ +- Support URL authority/raw_authority properties and authority argument of ``URL.build()`` method. + `#478 `_ +- Hide the library implementation details, make the exposed public list very clean. + `#483 `_ + + +Bugfixes +-------- + +- Fix tests with newer Python (3.7.6, 3.8.1 and 3.9.0+). + `#409 `_ +- Fix a bug where query component, passed in a form of mapping or sequence, is unquoted in unexpected way. + `#426 `_ +- Hide ``Query`` and ``QueryVariable`` type aliases in ``__init__.pyi``, now they are prefixed with underscore. + `#431 `_ +- Keep IPv6 brackets after updating port/user/password. + `#451 `_ + + +---- + + +1.4.2 (2019-12-05) +================== + +Features +-------- + +- Workaround for missing ``str.isascii()`` in Python 3.6 + `#389 `_ + + +---- + + +1.4.1 (2019-11-29) +================== + +* Fix regression, make the library work on Python 3.5 and 3.6 again. 
+ +1.4.0 (2019-11-29) +================== + +* Distinguish an empty password in URL from a password not provided at all (#262) + +* Fixed annotations for optional parameters of ``URL.build`` (#309) + +* Use None as default value of ``user`` parameter of ``URL.build`` (#309) + +* Enforce building C Accelerated modules when installing from source tarball, use + ``YARL_NO_EXTENSIONS`` environment variable for falling back to (slower) Pure Python + implementation (#329) + +* Drop Python 3.5 support + +* Fix quoting of plus in path by pure python version (#339) + +* Don't create a new URL if fragment is unchanged (#292) + +* Included in error message the path that produces starting slash forbidden error (#376) + +* Skip slow IDNA encoding for ASCII-only strings (#387) + + +1.3.0 (2018-12-11) +================== + +* Fix annotations for ``query`` parameter (#207) + +* An incoming query sequence can have int variables (the same as for + Mapping type) (#208) + +* Add ``URL.explicit_port`` property (#218) + +* Give a friendlier error when port can't be converted to int (#168) + +* ``bool(URL())`` now returns ``False`` (#272) + +1.2.6 (2018-06-14) +================== + +* Drop Python 3.4 trove classifier (#205) + +1.2.5 (2018-05-23) +================== + +* Fix annotations for ``build`` (#199) + +1.2.4 (2018-05-08) +================== + +* Fix annotations for ``cached_property`` (#195) + +1.2.3 (2018-05-03) +================== + +* Accept ``str`` subclasses in ``URL`` constructor (#190) + +1.2.2 (2018-05-01) +================== + +* Fix build + +1.2.1 (2018-04-30) +================== + +* Pin minimal required Python to 3.5.3 (#189) + +1.2.0 (2018-04-30) +================== + +* Forbid inheritance, replace ``__init__`` with ``__new__`` (#171) + +* Support PEP-561 (provide type hinting marker) (#182) + +1.1.1 (2018-02-17) +================== + +* Fix performance regression: don't encode empty ``netloc`` (#170) + +1.1.0 (2018-01-21) +================== + +* Make pure Python quoter consistent with Cython version (#162) + +1.0.0 (2018-01-15) +================== + +* Use fast path if quoted string does not need requoting (#154) + +* Speed up quoting/unquoting by ``_Quoter`` and ``_Unquoter`` classes (#155) + +* Drop ``yarl.quote`` and ``yarl.unquote`` public functions (#155) + +* Add custom string writer, reuse static buffer if available (#157) + Code is 50-80 times faster than Pure Python version (was 4-5 times faster) + +* Don't recode IP zone (#144) + +* Support ``encoded=True`` in ``yarl.URL.build()`` (#158) + +* Fix updating query with multiple keys (#160) + +0.18.0 (2018-01-10) +=================== + +* Fallback to IDNA 2003 if domain name is not IDNA 2008 compatible (#152) + +0.17.0 (2017-12-30) +=================== + +* Use IDNA 2008 for domain name processing (#149) + +0.16.0 (2017-12-07) +=================== + +* Fix raising ``TypeError`` by ``url.query_string()`` after + ``url.with_query({})`` (empty mapping) (#141) + +0.15.0 (2017-11-23) +=================== + +* Add ``raw_path_qs`` attribute (#137) + +0.14.2 (2017-11-14) +=================== + +* Restore ``strict`` parameter as no-op in ``quote`` / ``unquote`` + +0.14.1 (2017-11-13) +=================== + +* Restore ``strict`` parameter as no-op for sake of compatibility with + aiohttp 2.2 + +0.14.0 (2017-11-11) +=================== + +* Drop strict mode (#123) + +* Fix ``"ValueError: Unallowed PCT %"`` when there's a ``"%"`` in the URL (#124) + +0.13.0 (2017-10-01) +=================== + +* Document ``encoded`` parameter (#102) + +* Support 
relative URLs like ``'?key=value'`` (#100) + +* Unsafe encoding for QS fixed. Encode ``;`` character in value parameter (#104) + +* Process passwords without user names (#95) + +0.12.0 (2017-06-26) +=================== + +* Properly support paths without leading slash in ``URL.with_path()`` (#90) + +* Enable type annotation checks + +0.11.0 (2017-06-26) +=================== + +* Normalize path (#86) + +* Clear query and fragment parts in ``.with_path()`` (#85) + +0.10.3 (2017-06-13) +=================== + +* Prevent double URL arguments unquoting (#83) + +0.10.2 (2017-05-05) +=================== + +* Unexpected hash behavior (#75) + + +0.10.1 (2017-05-03) +=================== + +* Unexpected compare behavior (#73) + +* Do not quote or unquote + if not a query string. (#74) + + +0.10.0 (2017-03-14) +=================== + +* Added ``URL.build`` class method (#58) + +* Added ``path_qs`` attribute (#42) + + +0.9.8 (2017-02-16) +================== + +* Do not quote ``:`` in path + + +0.9.7 (2017-02-16) +================== + +* Load from pickle without _cache (#56) + +* Percent-encoded pluses in path variables become spaces (#59) + + +0.9.6 (2017-02-15) +================== + +* Revert backward incompatible change (BaseURL) + + +0.9.5 (2017-02-14) +================== + +* Fix BaseURL rich comparison support + + +0.9.4 (2017-02-14) +================== + +* Use BaseURL + + +0.9.3 (2017-02-14) +================== + +* Added BaseURL + + +0.9.2 (2017-02-08) +================== + +* Remove debug print + + +0.9.1 (2017-02-07) +================== + +* Do not lose tail chars (#45) + + +0.9.0 (2017-02-07) +================== + +* Allow to quote ``%`` in non strict mode (#21) + +* Incorrect parsing of query parameters with %3B (;) inside (#34) + +* Fix core dumps (#41) + +* ``tmpbuf`` - compiling error (#43) + +* Added ``URL.update_path()`` method + +* Added ``URL.update_query()`` method (#47) + + +0.8.1 (2016-12-03) +================== + +* Fix broken aiohttp: revert back ``quote`` / ``unquote``. 
+ + +0.8.0 (2016-12-03) +================== + +* Support more verbose error messages in ``.with_query()`` (#24) + +* Don't percent-encode ``@`` and ``:`` in path (#32) + +* Don't expose ``yarl.quote`` and ``yarl.unquote``, these functions are + part of private API + +0.7.1 (2016-11-18) +================== + +* Accept not only ``str`` but all classes inherited from ``str`` also (#25) + +0.7.0 (2016-11-07) +================== + +* Accept ``int`` as value for ``.with_query()`` + +0.6.0 (2016-11-07) +================== + +* Explicitly use UTF8 encoding in ``setup.py`` (#20) +* Properly unquote non-UTF8 strings (#19) + +0.5.3 (2016-11-02) +================== + +* Don't use ``typing.NamedTuple`` fields but indexes on URL construction + +0.5.2 (2016-11-02) +================== + +* Inline ``_encode`` class method + +0.5.1 (2016-11-02) +================== + +* Make URL construction faster by removing extra classmethod calls + +0.5.0 (2016-11-02) +================== + +* Add Cython optimization for quoting/unquoting +* Provide binary wheels + +0.4.3 (2016-09-29) +================== + +* Fix typing stubs + +0.4.2 (2016-09-29) +================== + +* Expose ``quote()`` and ``unquote()`` as public API + +0.4.1 (2016-09-28) +================== + +* Support empty values in query (``'/path?arg'``) + +0.4.0 (2016-09-27) +================== + +* Introduce ``relative()`` (#16) + +0.3.2 (2016-09-27) +================== + +* Typo fixes #15 + +0.3.1 (2016-09-26) +================== + +* Support sequence of pairs as ``with_query()`` parameter + +0.3.0 (2016-09-26) +================== + +* Introduce ``is_default_port()`` + +0.2.1 (2016-09-26) +================== + +* Raise ValueError for URLs like 'http://:8080/' + +0.2.0 (2016-09-18) +================== + +* Avoid doubling slashes when joining paths (#13) + +* Appending path starting from slash is forbidden (#12) + +0.1.4 (2016-09-09) +================== + +* Add ``kwargs`` support for ``with_query()`` (#10) + +0.1.3 (2016-09-07) +================== + +* Document ``with_query()``, ``with_fragment()`` and ``origin()`` + +* Allow ``None`` for ``with_query()`` and ``with_fragment()`` + +0.1.2 (2016-09-07) +================== + +* Fix links, tune docs theme. + +0.1.1 (2016-09-06) +================== + +* Update README, old version used obsolete API + +0.1.0 (2016-09-06) +================== + +* The library was deeply refactored, bytes are gone away but all + accepted strings are encoded if needed. + +0.0.1 (2016-08-30) +================== + +* The first release. 
diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/RECORD b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..4b30d91974a6ca2bada5f0ddf33759d00ec62088 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/RECORD @@ -0,0 +1,26 @@ +yarl-1.20.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +yarl-1.20.1.dist-info/METADATA,sha256=K0Ly6Viwwz6MHtOCAIKJqkwQuDjjt9IydAitWzRQYvU,73903 +yarl-1.20.1.dist-info/RECORD,, +yarl-1.20.1.dist-info/WHEEL,sha256=DTnKjM5OInJxWADod3iQyWxWcdG-eRwxzGww236swpY,151 +yarl-1.20.1.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +yarl-1.20.1.dist-info/licenses/NOTICE,sha256=VtasbIEFwKUTBMIdsGDjYa-ajqCvmnXCOcKLXRNpODg,609 +yarl-1.20.1.dist-info/top_level.txt,sha256=vf3SJuQh-k7YtvsUrV_OPOrT9Kqn0COlk7IPYyhtGkQ,5 +yarl/__init__.py,sha256=FmDW8W3VgBfoaLs4K0k3YLdvtu6eTRG39PjdZ20COf0,281 +yarl/__pycache__/__init__.cpython-310.pyc,, +yarl/__pycache__/_parse.cpython-310.pyc,, +yarl/__pycache__/_path.cpython-310.pyc,, +yarl/__pycache__/_query.cpython-310.pyc,, +yarl/__pycache__/_quoters.cpython-310.pyc,, +yarl/__pycache__/_quoting.cpython-310.pyc,, +yarl/__pycache__/_quoting_py.cpython-310.pyc,, +yarl/__pycache__/_url.cpython-310.pyc,, +yarl/_parse.py,sha256=gNt8zxVFGr95ufUQpSMiiZ9vDrvg4zq6MEtT3f6_8J0,7185 +yarl/_path.py,sha256=A0FJUylZyzmlT0a3UDOBbK-EzZXCAYuQQBvG9eAC9hs,1291 +yarl/_query.py,sha256=2l76j4_2qQ6vnwKRyGwhI5AXUpdlKGmmC4yp3ZjjevI,3883 +yarl/_quoters.py,sha256=z-BzsXfLnJK-bd-HrGaoKGri9L3GpDv6vxFEtmu-uCM,1154 +yarl/_quoting.py,sha256=yKIqFTzFzWLVb08xy1DSxKNjFwo4f-oLlzxTuKwC57M,506 +yarl/_quoting_c.cpython-310-x86_64-linux-gnu.so,sha256=P7kZ-h4CzHtlZ_FHU3u9PS09JQbBfYE8lleR9WBYW48,973272 +yarl/_quoting_c.pyx,sha256=Rk-98-kf1OwXTeU50UV8QjYks0wAQHpyPZk6McruIqk,14356 +yarl/_quoting_py.py,sha256=oVxVuDWMCjuvTViBiDzhYBFMI-YfDCNGGUbfnQpkOgQ,6830 +yarl/_url.py,sha256=7_9EhA9LbXjmK3zsAS4-WuMZgle7RovVK1pQGYVCL8k,55323 +yarl/py.typed,sha256=ay5OMO475PlcZ_Fbun9maHW7Y6MBTk0UXL4ztHx3Iug,14 diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/WHEEL b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d170d6d9582d145f12244c4135d9446d597e1029 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.9.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/LICENSE b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/NOTICE b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..fa53b2b138df881c4c95239d0e4bede831b36ab5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/licenses/NOTICE @@ -0,0 +1,13 @@ + Copyright 2016-2021, Andrew Svetlov and aio-libs team + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..e93e8bddefb14a8a753f7ecab6b934fd899cd9e5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/yarl-1.20.1.dist-info/top_level.txt @@ -0,0 +1 @@ +yarl diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/INSTALLER b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/LICENSE b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..dcec4760996be2143d28a12718adc8f9e050bf29 --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2016, Gregory Szorc +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/METADATA b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..e7304ab714948bf5812b88fd32a00d2046ac761e --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/METADATA @@ -0,0 +1,62 @@ +Metadata-Version: 2.1 +Name: zstandard +Version: 0.23.0 +Summary: Zstandard bindings for Python +Home-page: https://github.com/indygreg/python-zstandard +Author: Gregory Szorc +Author-email: gregory.szorc@gmail.com +License: BSD +Keywords: zstandard,zstd,compression +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: C +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: cffi >=1.11 ; platform_python_implementation == "PyPy" +Provides-Extra: cffi +Requires-Dist: cffi >=1.11 ; extra == 'cffi' + +================ +python-zstandard +================ + +| |ci-test| |ci-wheel| |ci-typing| |ci-sdist| |ci-anaconda| |ci-sphinx| + +This project provides Python bindings for interfacing with the +`Zstandard <http://www.zstandard.org>`_ compression library. A C extension +and CFFI interface are provided. + +The primary goal of the project is to provide a rich interface to the +underlying C API through a Pythonic interface while not sacrificing +performance. This means exposing most of the features and flexibility +of the C API while not sacrificing the usability or safety that Python provides. + +The canonical home for this project is +https://github.com/indygreg/python-zstandard. + +For usage documentation, see https://python-zstandard.readthedocs.org/. + +.. |ci-test| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/test.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/test.yml + +.. |ci-wheel| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/wheel.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/wheel.yml + +.. |ci-typing| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/typing.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/typing.yml + +.. |ci-sdist| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/sdist.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/sdist.yml + +..
|ci-anaconda| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/anaconda.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/anaconda.yml + +.. |ci-sphinx| image:: https://github.com/indygreg/python-zstandard/workflows/.github/workflows/sphinx.yml/badge.svg + :target: https://github.com/indygreg/python-zstandard/blob/main/.github/workflows/sphinx.yml diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/RECORD b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..8ec09caf435d86a0d1786e406671bb7f20344ce2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/RECORD @@ -0,0 +1,14 @@ +zstandard-0.23.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zstandard-0.23.0.dist-info/LICENSE,sha256=eI_oXlNySkCymQUcSryc901Csn55aDJbgIkcJfea20s,1484 +zstandard-0.23.0.dist-info/METADATA,sha256=xzBbxliabSZGQNE1tdyXBCR8-AY6QdZT81KFtWhCjgE,2960 +zstandard-0.23.0.dist-info/RECORD,, +zstandard-0.23.0.dist-info/WHEEL,sha256=CzQQWV-lNyM92gr3iaBk8dvO35YDHRxgzkZ-dxumUIM,152 +zstandard-0.23.0.dist-info/top_level.txt,sha256=J-wj94pPadY4ipFaanrYBlrMblOSegEYS8o_LdogrpU,10 +zstandard/__init__.py,sha256=x96865ybxv72Je4ZWFAehBzkbbXcaWhYsq63ZANrgWQ,7102 +zstandard/__init__.pyi,sha256=kvP6Us9IPX4Ssfg5BwaE3_E7uuE1YzVUo9hTzYUcNHA,13938 +zstandard/__pycache__/__init__.cpython-310.pyc,, +zstandard/__pycache__/backend_cffi.cpython-310.pyc,, +zstandard/_cffi.cpython-310-x86_64-linux-gnu.so,sha256=oc4kAXM4zSeQHEo3F05q_IzuynQ45cMYyFUcdt7WAj0,11895240 +zstandard/backend_c.cpython-310-x86_64-linux-gnu.so,sha256=LsPgN7ijSemkcB44EbiWrOSGlSQtu7UYBLn4uYCa2Sg,11325432 +zstandard/backend_cffi.py,sha256=22dR_NT8xb0oOiuU5BS7YlDBEmlmnUfEK5Rxl0TNTik,152504 +zstandard/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/WHEEL b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..9bb86cf30c63df9170e9af3dd246ce6f41270402 --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 + diff --git a/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/top_level.txt b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..864700d2b3e63509d1e25eff308c0a99386bb4ac --- /dev/null +++ b/venv/lib/python3.10/site-packages/zstandard-0.23.0.dist-info/top_level.txt @@ -0,0 +1 @@ +zstandard
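As a usage companion to the README excerpt in the metadata above, here is a minimal round-trip through the one-shot ``zstandard`` API; the payload and compression level are illustrative, and the C and CFFI backends expose the same interface::

    import zstandard as zstd

    data = b"sample payload " * 1024

    # one-shot compression at an explicit level
    cctx = zstd.ZstdCompressor(level=3)
    compressed = cctx.compress(data)

    # frames written by compress() embed the content size, so the
    # one-shot decompress() needs no output-size hint here
    dctx = zstd.ZstdDecompressor()
    assert dctx.decompress(compressed) == data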