+
+[Gitter](https://gitter.im/matplotlib/matplotlib) is for coordinating
+development and asking questions directly related to contributing to
+matplotlib.
+
+## Citing Matplotlib
+
+If Matplotlib contributes to a project that leads to publication, please
+acknowledge this by citing Matplotlib.
+
+[A ready-made citation
+entry](https://matplotlib.org/stable/users/project/citing.html) is
+available.
diff --git a/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95 b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95
new file mode 100644
index 0000000000000000000000000000000000000000..32209ccc02e4b5566b7cede8f9ad60a4d94aaf6c
Binary files /dev/null and b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95 differ
diff --git a/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body
new file mode 100644
index 0000000000000000000000000000000000000000..3a23e1f569284a59cfd86527da892497a666437b
Binary files /dev/null and b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body differ
diff --git a/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25 b/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25
new file mode 100644
index 0000000000000000000000000000000000000000..694c56984ae963ecc1ad6515294ea2d9f8cded44
Binary files /dev/null and b/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25 differ
diff --git a/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75 b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75
new file mode 100644
index 0000000000000000000000000000000000000000..d3fe4cf49d6400c053b203185eefced1bc32dbe5
Binary files /dev/null and b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75 differ
diff --git a/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body
new file mode 100644
index 0000000000000000000000000000000000000000..834b0ff2b68818889ac9d085eccfcc61a7da2eca
--- /dev/null
+++ b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body
@@ -0,0 +1,322 @@
+Metadata-Version: 2.4
+Name: huggingface_hub
+Version: 1.7.2
+Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub
+Home-page: https://github.com/huggingface/huggingface_hub
+Author: Hugging Face, Inc.
+Author-email: julien@huggingface.co
+License: Apache-2.0
+Keywords: model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+Requires-Python: >=3.9.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: filelock>=3.10.0
+Requires-Dist: fsspec>=2023.5.0
+Requires-Dist: hf-xet<2.0.0,>=1.4.2; platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "arm64" or platform_machine == "aarch64"
+Requires-Dist: httpx<1,>=0.23.0
+Requires-Dist: packaging>=20.9
+Requires-Dist: pyyaml>=5.1
+Requires-Dist: tqdm>=4.42.1
+Requires-Dist: typer
+Requires-Dist: typing-extensions>=4.1.0
+Provides-Extra: oauth
+Requires-Dist: authlib>=1.3.2; extra == "oauth"
+Requires-Dist: fastapi; extra == "oauth"
+Requires-Dist: httpx; extra == "oauth"
+Requires-Dist: itsdangerous; extra == "oauth"
+Provides-Extra: torch
+Requires-Dist: torch; extra == "torch"
+Requires-Dist: safetensors[torch]; extra == "torch"
+Provides-Extra: fastai
+Requires-Dist: toml; extra == "fastai"
+Requires-Dist: fastai>=2.4; extra == "fastai"
+Requires-Dist: fastcore>=1.3.27; extra == "fastai"
+Provides-Extra: hf-xet
+Requires-Dist: hf-xet<2.0.0,>=1.4.2; extra == "hf-xet"
+Provides-Extra: mcp
+Requires-Dist: mcp>=1.8.0; extra == "mcp"
+Provides-Extra: testing
+Requires-Dist: authlib>=1.3.2; extra == "testing"
+Requires-Dist: fastapi; extra == "testing"
+Requires-Dist: httpx; extra == "testing"
+Requires-Dist: itsdangerous; extra == "testing"
+Requires-Dist: jedi; extra == "testing"
+Requires-Dist: Jinja2; extra == "testing"
+Requires-Dist: pytest>=8.4.2; extra == "testing"
+Requires-Dist: pytest-cov; extra == "testing"
+Requires-Dist: pytest-env; extra == "testing"
+Requires-Dist: pytest-xdist; extra == "testing"
+Requires-Dist: pytest-vcr; extra == "testing"
+Requires-Dist: pytest-asyncio; extra == "testing"
+Requires-Dist: pytest-rerunfailures<16.0; extra == "testing"
+Requires-Dist: pytest-mock; extra == "testing"
+Requires-Dist: urllib3<2.0; extra == "testing"
+Requires-Dist: soundfile; extra == "testing"
+Requires-Dist: Pillow; extra == "testing"
+Requires-Dist: numpy; extra == "testing"
+Requires-Dist: duckdb; extra == "testing"
+Requires-Dist: fastapi; extra == "testing"
+Provides-Extra: typing
+Requires-Dist: typing-extensions>=4.8.0; extra == "typing"
+Requires-Dist: types-PyYAML; extra == "typing"
+Requires-Dist: types-simplejson; extra == "typing"
+Requires-Dist: types-toml; extra == "typing"
+Requires-Dist: types-tqdm; extra == "typing"
+Requires-Dist: types-urllib3; extra == "typing"
+Provides-Extra: quality
+Requires-Dist: ruff>=0.9.0; extra == "quality"
+Requires-Dist: mypy==1.15.0; extra == "quality"
+Requires-Dist: libcst>=1.4.0; extra == "quality"
+Requires-Dist: ty; extra == "quality"
+Provides-Extra: all
+Requires-Dist: authlib>=1.3.2; extra == "all"
+Requires-Dist: fastapi; extra == "all"
+Requires-Dist: httpx; extra == "all"
+Requires-Dist: itsdangerous; extra == "all"
+Requires-Dist: jedi; extra == "all"
+Requires-Dist: Jinja2; extra == "all"
+Requires-Dist: pytest>=8.4.2; extra == "all"
+Requires-Dist: pytest-cov; extra == "all"
+Requires-Dist: pytest-env; extra == "all"
+Requires-Dist: pytest-xdist; extra == "all"
+Requires-Dist: pytest-vcr; extra == "all"
+Requires-Dist: pytest-asyncio; extra == "all"
+Requires-Dist: pytest-rerunfailures<16.0; extra == "all"
+Requires-Dist: pytest-mock; extra == "all"
+Requires-Dist: urllib3<2.0; extra == "all"
+Requires-Dist: soundfile; extra == "all"
+Requires-Dist: Pillow; extra == "all"
+Requires-Dist: numpy; extra == "all"
+Requires-Dist: duckdb; extra == "all"
+Requires-Dist: fastapi; extra == "all"
+Requires-Dist: ruff>=0.9.0; extra == "all"
+Requires-Dist: mypy==1.15.0; extra == "all"
+Requires-Dist: libcst>=1.4.0; extra == "all"
+Requires-Dist: ty; extra == "all"
+Requires-Dist: typing-extensions>=4.8.0; extra == "all"
+Requires-Dist: types-PyYAML; extra == "all"
+Requires-Dist: types-simplejson; extra == "all"
+Requires-Dist: types-toml; extra == "all"
+Requires-Dist: types-tqdm; extra == "all"
+Requires-Dist: types-urllib3; extra == "all"
+Provides-Extra: dev
+Requires-Dist: authlib>=1.3.2; extra == "dev"
+Requires-Dist: fastapi; extra == "dev"
+Requires-Dist: httpx; extra == "dev"
+Requires-Dist: itsdangerous; extra == "dev"
+Requires-Dist: jedi; extra == "dev"
+Requires-Dist: Jinja2; extra == "dev"
+Requires-Dist: pytest>=8.4.2; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: pytest-env; extra == "dev"
+Requires-Dist: pytest-xdist; extra == "dev"
+Requires-Dist: pytest-vcr; extra == "dev"
+Requires-Dist: pytest-asyncio; extra == "dev"
+Requires-Dist: pytest-rerunfailures<16.0; extra == "dev"
+Requires-Dist: pytest-mock; extra == "dev"
+Requires-Dist: urllib3<2.0; extra == "dev"
+Requires-Dist: soundfile; extra == "dev"
+Requires-Dist: Pillow; extra == "dev"
+Requires-Dist: numpy; extra == "dev"
+Requires-Dist: duckdb; extra == "dev"
+Requires-Dist: fastapi; extra == "dev"
+Requires-Dist: ruff>=0.9.0; extra == "dev"
+Requires-Dist: mypy==1.15.0; extra == "dev"
+Requires-Dist: libcst>=1.4.0; extra == "dev"
+Requires-Dist: ty; extra == "dev"
+Requires-Dist: typing-extensions>=4.8.0; extra == "dev"
+Requires-Dist: types-PyYAML; extra == "dev"
+Requires-Dist: types-simplejson; extra == "dev"
+Requires-Dist: types-toml; extra == "dev"
+Requires-Dist: types-tqdm; extra == "dev"
+Requires-Dist: types-urllib3; extra == "dev"
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: keywords
+Dynamic: license
+Dynamic: license-file
+Dynamic: provides-extra
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+
+
+
+
+
+
+
+
+
+
+
+ The official Python client for the Huggingface Hub.
+
+
+
+
+
+
+
+
+
+
+
+
+ English |
+ Deutsch |
+ Français |
+ हिंदी |
+ 한국어 |
+ 中文 (简体)
+
+
+
+---
+
+**Documentation**: https://hf.co/docs/huggingface_hub
+
+**Source Code**: https://github.com/huggingface/huggingface_hub
+
+---
+
+## Welcome to the huggingface_hub library
+
+The `huggingface_hub` library allows you to interact with the [Hugging Face Hub](https://huggingface.co/), a platform democratizing open-source Machine Learning for creators and collaborators. Discover pre-trained models and datasets for your projects or play with the thousands of machine learning apps hosted on the Hub. You can also create and share your own models, datasets and demos with the community. The `huggingface_hub` library provides a simple way to do all these things with Python.
+
+## Key features
+
+- [Download files](https://huggingface.co/docs/huggingface_hub/en/guides/download) from the Hub.
+- [Upload files](https://huggingface.co/docs/huggingface_hub/en/guides/upload) to the Hub.
+- [Manage your repositories](https://huggingface.co/docs/huggingface_hub/en/guides/repository).
+- [Run Inference](https://huggingface.co/docs/huggingface_hub/en/guides/inference) on deployed models.
+- [Search](https://huggingface.co/docs/huggingface_hub/en/guides/search) for models, datasets and Spaces.
+- [Share Model Cards](https://huggingface.co/docs/huggingface_hub/en/guides/model-cards) to document your models.
+- [Engage with the community](https://huggingface.co/docs/huggingface_hub/en/guides/community) through PRs and comments.
+
+## Installation
+
+Install the `huggingface_hub` package with [pip](https://pypi.org/project/huggingface-hub/):
+
+```bash
+pip install huggingface_hub
+```
+
+If you prefer, you can also install it with [conda](https://huggingface.co/docs/huggingface_hub/en/installation#install-with-conda).
+
+In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. For example, if you want to use the MCP module, run:
+
+```bash
+pip install "huggingface_hub[mcp]"
+```
+
+To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation).
+
+## Quick start
+
+### Download files
+
+Download a single file
+
+```py
+from huggingface_hub import hf_hub_download
+
+hf_hub_download(repo_id="tiiuae/falcon-7b-instruct", filename="config.json")
+```
+
+Or an entire repository
+
+```py
+from huggingface_hub import snapshot_download
+
+snapshot_download("stabilityai/stable-diffusion-2-1")
+```
+
+Files will be downloaded in a local cache folder. More details in [this guide](https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache).
+
+### Login
+
+The Hugging Face Hub uses tokens to authenticate applications (see [docs](https://huggingface.co/docs/hub/security-tokens)). To log in your machine, run the following CLI:
+
+```bash
+hf auth login
+# or using an environment variable
+hf auth login --token $HUGGINGFACE_TOKEN
+```
+
+### Create a repository
+
+```py
+from huggingface_hub import create_repo
+
+create_repo(repo_id="super-cool-model")
+```
+
+### Upload files
+
+Upload a single file
+
+```py
+from huggingface_hub import upload_file
+
+upload_file(
+ path_or_fileobj="/home/lysandre/dummy-test/README.md",
+ path_in_repo="README.md",
+ repo_id="lysandre/test-model",
+)
+```
+
+Or an entire folder
+
+```py
+from huggingface_hub import upload_folder
+
+upload_folder(
+ folder_path="/path/to/local/space",
+ repo_id="username/my-cool-space",
+ repo_type="space",
+)
+```
+
+For details in the [upload guide](https://huggingface.co/docs/huggingface_hub/en/guides/upload).
+
+## Integrating to the Hub.
+
+We're partnering with cool open source ML libraries to provide free model hosting and versioning. You can find the existing integrations [here](https://huggingface.co/docs/hub/libraries).
+
+The advantages are:
+
+- Free model or dataset hosting for libraries and their users.
+- Built-in file versioning, even with very large files, thanks to a git-based approach.
+- In-browser widgets to play with the uploaded models.
+- Anyone can upload a new model for your library, they just need to add the corresponding tag for the model to be discoverable.
+- Fast downloads! We use Cloudfront (a CDN) to geo-replicate downloads so they're blazing fast from anywhere on the globe.
+- Usage stats and more features to come.
+
+If you would like to integrate your library, feel free to open an issue to begin the discussion. We wrote a [step-by-step guide](https://huggingface.co/docs/hub/adding-a-library) with ❤️ showing how to do this integration.
+
+## Contributions (feature requests, bugs, etc.) are super welcome 💙💚💛💜🧡❤️
+
+Everyone is welcome to contribute, and we value everybody's contribution. Code is not the only way to help the community.
+Answering questions, helping others, reaching out and improving the documentations are immensely valuable to the community.
+We wrote a [contribution guide](https://github.com/huggingface/huggingface_hub/blob/main/CONTRIBUTING.md) to summarize
+how to get started to contribute to this repository.
diff --git a/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0 b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0
new file mode 100644
index 0000000000000000000000000000000000000000..784ed0976f533b6cecb142363265e469228fb463
Binary files /dev/null and b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0 differ
diff --git a/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body
new file mode 100644
index 0000000000000000000000000000000000000000..0f2b466a638a34e304c69fb7976ab05a736d9ab8
--- /dev/null
+++ b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body
@@ -0,0 +1,219 @@
+Metadata-Version: 2.4
+Name: markdown-it-py
+Version: 4.0.0
+Summary: Python port of markdown-it. Markdown parsing, done right!
+Keywords: markdown,lexer,parser,commonmark,markdown-it
+Author-email: Chris Sewell
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup
+License-File: LICENSE
+License-File: LICENSE.markdown-it
+Requires-Dist: mdurl~=0.1
+Requires-Dist: psutil ; extra == "benchmarking"
+Requires-Dist: pytest ; extra == "benchmarking"
+Requires-Dist: pytest-benchmark ; extra == "benchmarking"
+Requires-Dist: commonmark~=0.9 ; extra == "compare"
+Requires-Dist: markdown~=3.4 ; extra == "compare"
+Requires-Dist: mistletoe~=1.0 ; extra == "compare"
+Requires-Dist: mistune~=3.0 ; extra == "compare"
+Requires-Dist: panflute~=2.3 ; extra == "compare"
+Requires-Dist: markdown-it-pyrs ; extra == "compare"
+Requires-Dist: linkify-it-py>=1,<3 ; extra == "linkify"
+Requires-Dist: mdit-py-plugins>=0.5.0 ; extra == "plugins"
+Requires-Dist: gprof2dot ; extra == "profiling"
+Requires-Dist: mdit-py-plugins>=0.5.0 ; extra == "rtd"
+Requires-Dist: myst-parser ; extra == "rtd"
+Requires-Dist: pyyaml ; extra == "rtd"
+Requires-Dist: sphinx ; extra == "rtd"
+Requires-Dist: sphinx-copybutton ; extra == "rtd"
+Requires-Dist: sphinx-design ; extra == "rtd"
+Requires-Dist: sphinx-book-theme~=1.0 ; extra == "rtd"
+Requires-Dist: jupyter_sphinx ; extra == "rtd"
+Requires-Dist: ipykernel ; extra == "rtd"
+Requires-Dist: coverage ; extra == "testing"
+Requires-Dist: pytest ; extra == "testing"
+Requires-Dist: pytest-cov ; extra == "testing"
+Requires-Dist: pytest-regressions ; extra == "testing"
+Requires-Dist: requests ; extra == "testing"
+Project-URL: Documentation, https://markdown-it-py.readthedocs.io
+Project-URL: Homepage, https://github.com/executablebooks/markdown-it-py
+Provides-Extra: benchmarking
+Provides-Extra: compare
+Provides-Extra: linkify
+Provides-Extra: plugins
+Provides-Extra: profiling
+Provides-Extra: rtd
+Provides-Extra: testing
+
+# markdown-it-py
+
+[![Github-CI][github-ci]][github-link]
+[![Coverage Status][codecov-badge]][codecov-link]
+[![PyPI][pypi-badge]][pypi-link]
+[![Conda][conda-badge]][conda-link]
+[![PyPI - Downloads][install-badge]][install-link]
+
+
+
+
+
+> Markdown parser done right.
+
+- Follows the __[CommonMark spec](http://spec.commonmark.org/)__ for baseline parsing
+- Configurable syntax: you can add new rules and even replace existing ones.
+- Pluggable: Adds syntax extensions to extend the parser (see the [plugin list][md-plugins]).
+- High speed (see our [benchmarking tests][md-performance])
+- Easy to configure for [security][md-security]
+- Member of [Google's Assured Open Source Software](https://cloud.google.com/assured-open-source-software/docs/supported-packages)
+
+This is a Python port of [markdown-it], and some of its associated plugins.
+For more details see: .
+
+For details on [markdown-it] itself, see:
+
+- The __[Live demo](https://markdown-it.github.io)__
+- [The markdown-it README][markdown-it-readme]
+
+**See also:** [markdown-it-pyrs](https://github.com/chrisjsewell/markdown-it-pyrs) for an experimental Rust binding,
+for even more speed!
+
+## Installation
+
+### PIP
+
+```bash
+pip install markdown-it-py[plugins]
+```
+
+or with extras
+
+```bash
+pip install markdown-it-py[linkify,plugins]
+```
+
+### Conda
+
+```bash
+conda install -c conda-forge markdown-it-py
+```
+
+or with extras
+
+```bash
+conda install -c conda-forge markdown-it-py linkify-it-py mdit-py-plugins
+```
+
+## Usage
+
+### Python API Usage
+
+Render markdown to HTML with markdown-it-py and a custom configuration
+with and without plugins and features:
+
+```python
+from markdown_it import MarkdownIt
+from mdit_py_plugins.front_matter import front_matter_plugin
+from mdit_py_plugins.footnote import footnote_plugin
+
+md = (
+ MarkdownIt('commonmark', {'breaks':True,'html':True})
+ .use(front_matter_plugin)
+ .use(footnote_plugin)
+ .enable('table')
+)
+text = ("""
+---
+a: 1
+---
+
+a | b
+- | -
+1 | 2
+
+A footnote [^1]
+
+[^1]: some details
+""")
+tokens = md.parse(text)
+html_text = md.render(text)
+
+## To export the html to a file, uncomment the lines below:
+# from pathlib import Path
+# Path("output.html").write_text(html_text)
+```
+
+### Command-line Usage
+
+Render markdown to HTML with markdown-it-py from the
+command-line:
+
+```console
+usage: markdown-it [-h] [-v] [filenames [filenames ...]]
+
+Parse one or more markdown files, convert each to HTML, and print to stdout
+
+positional arguments:
+ filenames specify an optional list of files to convert
+
+optional arguments:
+ -h, --help show this help message and exit
+ -v, --version show program's version number and exit
+
+Interactive:
+
+ $ markdown-it
+ markdown-it-py [version 0.0.0] (interactive)
+ Type Ctrl-D to complete input, or Ctrl-C to exit.
+ >>> # Example
+ ... > markdown *input*
+ ...
+ Example
+
+ markdown input
+
+
+Batch:
+
+ $ markdown-it README.md README.footer.md > index.html
+
+```
+
+## References / Thanks
+
+Big thanks to the authors of [markdown-it]:
+
+- Alex Kocharin [github/rlidwka](https://github.com/rlidwka)
+- Vitaly Puzrin [github/puzrin](https://github.com/puzrin)
+
+Also [John MacFarlane](https://github.com/jgm) for his work on the CommonMark spec and reference implementations.
+
+[github-ci]: https://github.com/executablebooks/markdown-it-py/actions/workflows/tests.yml/badge.svg?branch=master
+[github-link]: https://github.com/executablebooks/markdown-it-py
+[pypi-badge]: https://img.shields.io/pypi/v/markdown-it-py.svg
+[pypi-link]: https://pypi.org/project/markdown-it-py
+[conda-badge]: https://anaconda.org/conda-forge/markdown-it-py/badges/version.svg
+[conda-link]: https://anaconda.org/conda-forge/markdown-it-py
+[codecov-badge]: https://codecov.io/gh/executablebooks/markdown-it-py/branch/master/graph/badge.svg
+[codecov-link]: https://codecov.io/gh/executablebooks/markdown-it-py
+[install-badge]: https://img.shields.io/pypi/dw/markdown-it-py?label=pypi%20installs
+[install-link]: https://pypistats.org/packages/markdown-it-py
+
+[CommonMark spec]: http://spec.commonmark.org/
+[markdown-it]: https://github.com/markdown-it/markdown-it
+[markdown-it-readme]: https://github.com/markdown-it/markdown-it/blob/master/README.md
+[md-security]: https://markdown-it-py.readthedocs.io/en/latest/security.html
+[md-performance]: https://markdown-it-py.readthedocs.io/en/latest/performance.html
+[md-plugins]: https://markdown-it-py.readthedocs.io/en/latest/plugins.html
+
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..8a2f639061cc4a203f7109d8335d28076442c61d
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA
@@ -0,0 +1,202 @@
+Metadata-Version: 2.4
+Name: h11
+Version: 0.16.0
+Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1
+Home-page: https://github.com/python-hyper/h11
+Author: Nathaniel J. Smith
+Author-email: njs@pobox.com
+License: MIT
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: System :: Networking
+Requires-Python: >=3.8
+License-File: LICENSE.txt
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-python
+Dynamic: summary
+
+h11
+===
+
+.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master
+ :target: https://travis-ci.org/python-hyper/h11
+ :alt: Automated test status
+
+.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/python-hyper/h11
+ :alt: Test coverage
+
+.. image:: https://readthedocs.org/projects/h11/badge/?version=latest
+ :target: http://h11.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+This is a little HTTP/1.1 library written from scratch in Python,
+heavily inspired by `hyper-h2 `_.
+
+It's a "bring-your-own-I/O" library; h11 contains no IO code
+whatsoever. This means you can hook h11 up to your favorite network
+API, and that could be anything you want: synchronous, threaded,
+asynchronous, or your own implementation of `RFC 6214
+`_ -- h11 won't judge you.
+(Compare this to the current state of the art, where every time a `new
+network API `_ comes along then someone
+gets to start over reimplementing the entire HTTP protocol from
+scratch.) Cory Benfield made an `excellent blog post describing the
+benefits of this approach
+ `_, or if you like video
+then here's his `PyCon 2016 talk on the same theme
+`_.
+
+This also means that h11 is not immediately useful out of the box:
+it's a toolkit for building programs that speak HTTP, not something
+that could directly replace ``requests`` or ``twisted.web`` or
+whatever. But h11 makes it much easier to implement something like
+``requests`` or ``twisted.web``.
+
+At a high level, working with h11 goes like this:
+
+1) First, create an ``h11.Connection`` object to track the state of a
+ single HTTP/1.1 connection.
+
+2) When you read data off the network, pass it to
+ ``conn.receive_data(...)``; you'll get back a list of objects
+ representing high-level HTTP "events".
+
+3) When you want to send a high-level HTTP event, create the
+ corresponding "event" object and pass it to ``conn.send(...)``;
+ this will give you back some bytes that you can then push out
+ through the network.
+
+For example, a client might instantiate and then send a
+``h11.Request`` object, then zero or more ``h11.Data`` objects for the
+request body (e.g., if this is a POST), and then a
+``h11.EndOfMessage`` to indicate the end of the message. Then the
+server would then send back a ``h11.Response``, some ``h11.Data``, and
+its own ``h11.EndOfMessage``. If either side violates the protocol,
+you'll get a ``h11.ProtocolError`` exception.
+
+h11 is suitable for implementing both servers and clients, and has a
+pleasantly symmetric API: the events you send as a client are exactly
+the ones that you receive as a server and vice-versa.
+
+`Here's an example of a tiny HTTP client
+`_
+
+It also has `a fine manual `_.
+
+FAQ
+---
+
+*Whyyyyy?*
+
+I wanted to play with HTTP in `Curio
+`__ and `Trio
+`__, which at the time didn't have any
+HTTP libraries. So I thought, no big deal, Python has, like, a dozen
+different implementations of HTTP, surely I can find one that's
+reusable. I didn't find one, but I did find Cory's call-to-arms
+blog-post. So I figured, well, fine, if I have to implement HTTP from
+scratch, at least I can make sure no-one *else* has to ever again.
+
+*Should I use it?*
+
+Maybe. You should be aware that it's a very young project. But, it's
+feature complete and has an exhaustive test-suite and complete docs,
+so the next step is for people to try using it and see how it goes
+:-). If you do then please let us know -- if nothing else we'll want
+to talk to you before making any incompatible changes!
+
+*What are the features/limitations?*
+
+Roughly speaking, it's trying to be a robust, complete, and non-hacky
+implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230:
+HTTP/1.1 Message Syntax and Routing
+`_. That is, it mostly focuses on
+implementing HTTP at the level of taking bytes on and off the wire,
+and the headers related to that, and tries to be anal about spec
+conformance. It doesn't know about higher-level concerns like URL
+routing, conditional GETs, cross-origin cookie policies, or content
+negotiation. But it does know how to take care of framing,
+cross-version differences in keep-alive handling, and the "obsolete
+line folding" rule, so you can focus your energies on the hard /
+interesting parts for your application, and it tries to support the
+full specification in the sense that any useful HTTP/1.1 conformant
+application should be able to use h11.
+
+It's pure Python, and has no dependencies outside of the standard
+library.
+
+It has a test suite with 100.0% coverage for both statements and
+branches.
+
+Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3.
+The last Python 2-compatible version was h11 0.11.x.
+(Originally it had a Cython wrapper for `http-parser
+`_ and a beautiful nested state
+machine implemented with ``yield from`` to postprocess the output. But
+I had to take these out -- the new *parser* needs fewer lines-of-code
+than the old *parser wrapper*, is written in pure Python, uses no
+exotic language syntax, and has more features. It's sad, really; that
+old state machine was really slick. I just need a few sentences here
+to mourn that.)
+
+I don't know how fast it is. I haven't benchmarked or profiled it yet,
+so it's probably got a few pointless hot spots, and I've been trying
+to err on the side of simplicity and robustness instead of
+micro-optimization. But at the architectural level I tried hard to
+avoid fundamentally bad decisions, e.g., I believe that all the
+parsing algorithms remain linear-time even in the face of pathological
+input like slowloris, and there are no byte-by-byte loops. (I also
+believe that it maintains bounded memory usage in the face of
+arbitrary/pathological input.)
+
+The whole library is ~800 lines-of-code. You can read and understand
+the whole thing in less than an hour. Most of the energy invested in
+this so far has been spent on trying to keep things simple by
+minimizing special-cases and ad hoc state manipulation; even though it
+is now quite small and simple, I'm still annoyed that I haven't
+figured out how to make it even smaller and simpler. (Unfortunately,
+HTTP does not lend itself to simplicity.)
+
+The API is ~feature complete and I don't expect the general outlines
+to change much, but you can't judge an API's ergonomics until you
+actually document and use it, so I'd expect some changes in the
+details.
+
+*How do I try it?*
+
+.. code-block:: sh
+
+ $ pip install h11
+ $ git clone git@github.com:python-hyper/h11
+ $ cd h11/examples
+ $ python basic-client.py
+
+and go from there.
+
+*License?*
+
+MIT
+
+*Code of conduct?*
+
+Contributors are requested to follow our `code of conduct
+`_ in
+all project spaces.
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..0ec9e3ef9debc2fafb8dee3b566802de58a6df83
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD
@@ -0,0 +1,17 @@
+h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507
+h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815
+h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863
+h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792
+h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412
+h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590
+h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252
+h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231
+h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888
+h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686
+h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081
+h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7
+h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124
+h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348
+h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4
+h11-0.16.0.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..1eb3c49d99559863120cfb8433fc8738fba43ba9
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (78.1.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8f080eae848f759c9173bfc0c79506357ebe5090
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Nathaniel J. Smith and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0d24def711344ec6f4da2108f7d5c9261eb35f8b
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+h11
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..989e92c3458681a6f0be72ae4105ea742750d328
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py
@@ -0,0 +1,62 @@
+# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230),
+# containing no networking code at all, loosely modelled on hyper-h2's generic
+# implementation of HTTP/2 (and in particular the h2.connection.H2Connection
+# class). There's still a bunch of subtle details you need to get right if you
+# want to make this actually useful, because it doesn't implement all the
+# semantics to check that what you're asking to write to the wire is sensible,
+# but at least it gets you out of dealing with the wire itself.
+
+from h11._connection import Connection, NEED_DATA, PAUSED
+from h11._events import (
+ ConnectionClosed,
+ Data,
+ EndOfMessage,
+ Event,
+ InformationalResponse,
+ Request,
+ Response,
+)
+from h11._state import (
+ CLIENT,
+ CLOSED,
+ DONE,
+ ERROR,
+ IDLE,
+ MIGHT_SWITCH_PROTOCOL,
+ MUST_CLOSE,
+ SEND_BODY,
+ SEND_RESPONSE,
+ SERVER,
+ SWITCHED_PROTOCOL,
+)
+from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError
+from h11._version import __version__
+
+PRODUCT_ID = "python-h11/" + __version__
+
+
+__all__ = (
+ "Connection",
+ "NEED_DATA",
+ "PAUSED",
+ "ConnectionClosed",
+ "Data",
+ "EndOfMessage",
+ "Event",
+ "InformationalResponse",
+ "Request",
+ "Response",
+ "CLIENT",
+ "CLOSED",
+ "DONE",
+ "ERROR",
+ "IDLE",
+ "MUST_CLOSE",
+ "SEND_BODY",
+ "SEND_RESPONSE",
+ "SERVER",
+ "SWITCHED_PROTOCOL",
+ "ProtocolError",
+ "LocalProtocolError",
+ "RemoteProtocolError",
+)
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py
new file mode 100644
index 0000000000000000000000000000000000000000..933587fba22290d7eb7df4c88e12f1e61702b8ce
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py
@@ -0,0 +1,132 @@
+# We use native strings for all the re patterns, to take advantage of string
+# formatting, and then convert to bytestrings when compiling the final re
+# objects.
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
+# OWS = *( SP / HTAB )
+# ; optional whitespace
+OWS = r"[ \t]*"
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
+# token = 1*tchar
+#
+# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
+# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
+# / DIGIT / ALPHA
+# ; any VCHAR, except delimiters
+token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
+# field-name = token
+field_name = token
+
+# The standard says:
+#
+# field-value = *( field-content / obs-fold )
+# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
+# field-vchar = VCHAR / obs-text
+# obs-fold = CRLF 1*( SP / HTAB )
+# ; obsolete line folding
+# ; see Section 3.2.4
+#
+# https://tools.ietf.org/html/rfc5234#appendix-B.1
+#
+# VCHAR = %x21-7E
+# ; visible (printing) characters
+#
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
+# obs-text = %x80-FF
+#
+# However, the standard definition of field-content is WRONG! It disallows
+# fields containing a single visible character surrounded by whitespace,
+# e.g. "foo a bar".
+#
+# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
+#
+# So our definition of field_content attempts to fix it up...
+#
+# Also, we allow lots of control characters, because apparently people assume
+# that they're legal in practice (e.g., google analytics makes cookies with
+# \x01 in them!):
+# https://github.com/python-hyper/h11/issues/57
+# We still don't allow NUL or whitespace, because those are often treated as
+# meta-characters and letting them through can lead to nasty issues like SSRF.
+vchar = r"[\x21-\x7e]"
+vchar_or_obs_text = r"[^\x00\s]"
+field_vchar = vchar_or_obs_text
+field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())
+
+# We handle obs-fold at a different level, and our fixed-up field_content
+# already grows to swallow the whole value, so ? instead of *
+field_value = r"({field_content})?".format(**globals())
+
+# header-field = field-name ":" OWS field-value OWS
+header_field = (
+ r"(?P{field_name})"
+ r":"
+ r"{OWS}"
+ r"(?P{field_value})"
+ r"{OWS}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
+#
+# request-line = method SP request-target SP HTTP-version CRLF
+# method = token
+# HTTP-version = HTTP-name "/" DIGIT "." DIGIT
+# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive
+#
+# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
+# URL, host+port (for connect), or even "*", but in any case we are guaranteed
+# that it contists of the visible printing characters.
+method = token
+request_target = r"{vchar}+".format(**globals())
+http_version = r"HTTP/(?P[0-9]\.[0-9])"
+request_line = (
+ r"(?P{method})"
+ r" "
+ r"(?P{request_target})"
+ r" "
+ r"{http_version}".format(**globals())
+)
+
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
+#
+# status-line = HTTP-version SP status-code SP reason-phrase CRLF
+# status-code = 3DIGIT
+# reason-phrase = *( HTAB / SP / VCHAR / obs-text )
+status_code = r"[0-9]{3}"
+reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
+status_line = (
+ r"{http_version}"
+ r" "
+ r"(?P{status_code})"
+ # However, there are apparently a few too many servers out there that just
+ # leave out the reason phrase:
+ # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
+ # https://github.com/seanmonstar/httparse/issues/29
+ # so make it optional. ?: is a non-capturing group.
+ r"(?: (?P{reason_phrase}))?".format(**globals())
+)
+
+HEXDIG = r"[0-9A-Fa-f]"
+# Actually
+#
+# chunk-size = 1*HEXDIG
+#
+# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
+chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
+# Actually
+#
+# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
+#
+# but we aren't parsing the things so we don't really care.
+chunk_ext = r";.*"
+chunk_header = (
+ r"(?P{chunk_size})"
+ r"(?P{chunk_ext})?"
+ r"{OWS}\r\n".format(
+ **globals()
+ ) # Even though the specification does not allow for extra whitespaces,
+ # we are lenient with trailing whitespaces because some servers on the wild use it.
+)
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..e37d82a82a882c072cb938a90eb4486b51cdad99
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py
@@ -0,0 +1,659 @@
+# This contains the main Connection class. Everything in h11 revolves around
+# this.
+from typing import (
+ Any,
+ Callable,
+ cast,
+ Dict,
+ List,
+ Optional,
+ overload,
+ Tuple,
+ Type,
+ Union,
+)
+
+from ._events import (
+ ConnectionClosed,
+ Data,
+ EndOfMessage,
+ Event,
+ InformationalResponse,
+ Request,
+ Response,
+)
+from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
+from ._readers import READERS, ReadersType
+from ._receivebuffer import ReceiveBuffer
+from ._state import (
+ _SWITCH_CONNECT,
+ _SWITCH_UPGRADE,
+ CLIENT,
+ ConnectionState,
+ DONE,
+ ERROR,
+ MIGHT_SWITCH_PROTOCOL,
+ SEND_BODY,
+ SERVER,
+ SWITCHED_PROTOCOL,
+)
+from ._util import ( # Import the internal things we need
+ LocalProtocolError,
+ RemoteProtocolError,
+ Sentinel,
+)
+from ._writers import WRITERS, WritersType
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = ["Connection", "NEED_DATA", "PAUSED"]
+
+
+class NEED_DATA(Sentinel, metaclass=Sentinel):
+ pass
+
+
+class PAUSED(Sentinel, metaclass=Sentinel):
+ pass
+
+
+# If we ever have this much buffered without it making a complete parseable
+# event, we error out. The only time we really buffer is when reading the
+# request/response line + headers together, so this is effectively the limit on
+# the size of that.
+#
+# Some precedents for defaults:
+# - node.js: 80 * 1024
+# - tomcat: 8 * 1024
+# - IIS: 16 * 1024
+# - Apache: <8 KiB per line>
+DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
+
+
+# RFC 7230's rules for connection lifecycles:
+# - If either side says they want to close the connection, then the connection
+# must close.
+# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
+# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
+# (and even this is a mess -- e.g. if you're implementing a proxy then
+# sending Connection: keep-alive is forbidden).
+#
+# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
+# our rule is:
+# - If someone says Connection: close, we will close
+# - If someone uses HTTP/1.0, we will close.
+def _keep_alive(event: Union[Request, Response]) -> bool:
+ connection = get_comma_header(event.headers, b"connection")
+ if b"close" in connection:
+ return False
+ if getattr(event, "http_version", b"1.1") < b"1.1":
+ return False
+ return True
+
+
+def _body_framing(
+ request_method: bytes, event: Union[Request, Response]
+) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
+ # Called when we enter SEND_BODY to figure out framing information for
+ # this body.
+ #
+ # These are the only two events that can trigger a SEND_BODY state:
+ assert type(event) in (Request, Response)
+ # Returns one of:
+ #
+ # ("content-length", count)
+ # ("chunked", ())
+ # ("http/1.0", ())
+ #
+ # which are (lookup key, *args) for constructing body reader/writer
+ # objects.
+ #
+ # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3
+ #
+ # Step 1: some responses always have an empty body, regardless of what the
+ # headers say.
+ if type(event) is Response:
+ if (
+ event.status_code in (204, 304)
+ or request_method == b"HEAD"
+ or (request_method == b"CONNECT" and 200 <= event.status_code < 300)
+ ):
+ return ("content-length", (0,))
+ # Section 3.3.3 also lists another case -- responses with status_code
+ # < 200. For us these are InformationalResponses, not Responses, so
+ # they can't get into this function in the first place.
+ assert event.status_code >= 200
+
+ # Step 2: check for Transfer-Encoding (T-E beats C-L):
+ transfer_encodings = get_comma_header(event.headers, b"transfer-encoding")
+ if transfer_encodings:
+ assert transfer_encodings == [b"chunked"]
+ return ("chunked", ())
+
+ # Step 3: check for Content-Length
+ content_lengths = get_comma_header(event.headers, b"content-length")
+ if content_lengths:
+ return ("content-length", (int(content_lengths[0]),))
+
+ # Step 4: no applicable headers; fallback/default depends on type
+ if type(event) is Request:
+ return ("content-length", (0,))
+ else:
+ return ("http/1.0", ())
+
+
+################################################################
+#
+# The main Connection class
+#
+################################################################
+
+
+class Connection:
+ """An object encapsulating the state of an HTTP connection.
+
+ Args:
+ our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If
+ you're implementing a server, pass :data:`h11.SERVER`.
+
+ max_incomplete_event_size (int):
+ The maximum number of bytes we're willing to buffer of an
+ incomplete event. In practice this mostly sets a limit on the
+ maximum size of the request/response line + headers. If this is
+ exceeded, then :meth:`next_event` will raise
+ :exc:`RemoteProtocolError`.
+
+ """
+
+ def __init__(
+ self,
+ our_role: Type[Sentinel],
+ max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
+ ) -> None:
+ self._max_incomplete_event_size = max_incomplete_event_size
+ # State and role tracking
+ if our_role not in (CLIENT, SERVER):
+ raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}")
+ self.our_role = our_role
+ self.their_role: Type[Sentinel]
+ if our_role is CLIENT:
+ self.their_role = SERVER
+ else:
+ self.their_role = CLIENT
+ self._cstate = ConnectionState()
+
+ # Callables for converting data->events or vice-versa given the
+ # current state
+ self._writer = self._get_io_object(self.our_role, None, WRITERS)
+ self._reader = self._get_io_object(self.their_role, None, READERS)
+
+ # Holds any unprocessed received data
+ self._receive_buffer = ReceiveBuffer()
+ # If this is true, then it indicates that the incoming connection was
+ # closed *after* the end of whatever's in self._receive_buffer:
+ self._receive_buffer_closed = False
+
+ # Extra bits of state that don't fit into the state machine.
+ #
+ # These two are only used to interpret framing headers for figuring
+ # out how to read/write response bodies. their_http_version is also
+ # made available as a convenient public API.
+ self.their_http_version: Optional[bytes] = None
+ self._request_method: Optional[bytes] = None
+ # This is pure flow-control and doesn't at all affect the set of legal
+ # transitions, so no need to bother ConnectionState with it:
+ self.client_is_waiting_for_100_continue = False
+
+ @property
+ def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
+ """A dictionary like::
+
+ {CLIENT: , SERVER: }
+
+ See :ref:`state-machine` for details.
+
+ """
+ return dict(self._cstate.states)
+
+ @property
+ def our_state(self) -> Type[Sentinel]:
+ """The current state of whichever role we are playing. See
+ :ref:`state-machine` for details.
+ """
+ return self._cstate.states[self.our_role]
+
+ @property
+ def their_state(self) -> Type[Sentinel]:
+ """The current state of whichever role we are NOT playing. See
+ :ref:`state-machine` for details.
+ """
+ return self._cstate.states[self.their_role]
+
+ @property
+ def they_are_waiting_for_100_continue(self) -> bool:
+ return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
+
+ def start_next_cycle(self) -> None:
+ """Attempt to reset our connection state for a new request/response
+ cycle.
+
+ If both client and server are in :data:`DONE` state, then resets them
+ both to :data:`IDLE` state in preparation for a new request/response
+ cycle on this same connection. Otherwise, raises a
+ :exc:`LocalProtocolError`.
+
+ See :ref:`keepalive-and-pipelining`.
+
+ """
+ old_states = dict(self._cstate.states)
+ self._cstate.start_next_cycle()
+ self._request_method = None
+ # self.their_http_version gets left alone, since it presumably lasts
+ # beyond a single request/response cycle
+ assert not self.client_is_waiting_for_100_continue
+ self._respond_to_state_changes(old_states)
+
+ def _process_error(self, role: Type[Sentinel]) -> None:
+ old_states = dict(self._cstate.states)
+ self._cstate.process_error(role)
+ self._respond_to_state_changes(old_states)
+
+ def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
+ if type(event) is InformationalResponse and event.status_code == 101:
+ return _SWITCH_UPGRADE
+ if type(event) is Response:
+ if (
+ _SWITCH_CONNECT in self._cstate.pending_switch_proposals
+ and 200 <= event.status_code < 300
+ ):
+ return _SWITCH_CONNECT
+ return None
+
+ # All events go through here
+ def _process_event(self, role: Type[Sentinel], event: Event) -> None:
+ # First, pass the event through the state machine to make sure it
+ # succeeds.
+ old_states = dict(self._cstate.states)
+ if role is CLIENT and type(event) is Request:
+ if event.method == b"CONNECT":
+ self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
+ if get_comma_header(event.headers, b"upgrade"):
+ self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
+ server_switch_event = None
+ if role is SERVER:
+ server_switch_event = self._server_switch_event(event)
+ self._cstate.process_event(role, type(event), server_switch_event)
+
+ # Then perform the updates triggered by it.
+
+ if type(event) is Request:
+ self._request_method = event.method
+
+ if role is self.their_role and type(event) in (
+ Request,
+ Response,
+ InformationalResponse,
+ ):
+ event = cast(Union[Request, Response, InformationalResponse], event)
+ self.their_http_version = event.http_version
+
+ # Keep alive handling
+ #
+ # RFC 7230 doesn't really say what one should do if Connection: close
+ # shows up on a 1xx InformationalResponse. I think the idea is that
+ # this is not supposed to happen. In any case, if it does happen, we
+ # ignore it.
+ if type(event) in (Request, Response) and not _keep_alive(
+ cast(Union[Request, Response], event)
+ ):
+ self._cstate.process_keep_alive_disabled()
+
+ # 100-continue
+ if type(event) is Request and has_expect_100_continue(event):
+ self.client_is_waiting_for_100_continue = True
+ if type(event) in (InformationalResponse, Response):
+ self.client_is_waiting_for_100_continue = False
+ if role is CLIENT and type(event) in (Data, EndOfMessage):
+ self.client_is_waiting_for_100_continue = False
+
+ self._respond_to_state_changes(old_states, event)
+
+ def _get_io_object(
+ self,
+ role: Type[Sentinel],
+ event: Optional[Event],
+ io_dict: Union[ReadersType, WritersType],
+ ) -> Optional[Callable[..., Any]]:
+ # event may be None; it's only used when entering SEND_BODY
+ state = self._cstate.states[role]
+ if state is SEND_BODY:
+ # Special case: the io_dict has a dict of reader/writer factories
+ # that depend on the request/response framing.
+ framing_type, args = _body_framing(
+ cast(bytes, self._request_method), cast(Union[Request, Response], event)
+ )
+ return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index]
+ else:
+ # General case: the io_dict just has the appropriate reader/writer
+ # for this state
+ return io_dict.get((role, state)) # type: ignore[return-value]
+
+ # This must be called after any action that might have caused
+ # self._cstate.states to change.
+ def _respond_to_state_changes(
+ self,
+ old_states: Dict[Type[Sentinel], Type[Sentinel]],
+ event: Optional[Event] = None,
+ ) -> None:
+ # Update reader/writer
+ if self.our_state != old_states[self.our_role]:
+ self._writer = self._get_io_object(self.our_role, event, WRITERS)
+ if self.their_state != old_states[self.their_role]:
+ self._reader = self._get_io_object(self.their_role, event, READERS)
+
+ @property
+ def trailing_data(self) -> Tuple[bytes, bool]:
+ """Data that has been received, but not yet processed, represented as
+ a tuple with two elements, where the first is a byte-string containing
+ the unprocessed data itself, and the second is a bool that is True if
+ the receive connection was closed.
+
+ See :ref:`switching-protocols` for discussion of why you'd want this.
+ """
+ return (bytes(self._receive_buffer), self._receive_buffer_closed)
+
+ def receive_data(self, data: bytes) -> None:
+ """Add data to our internal receive buffer.
+
+ This does not actually do any processing on the data, just stores
+ it. To trigger processing, you have to call :meth:`next_event`.
+
+ Args:
+ data (:term:`bytes-like object`):
+ The new data that was just received.
+
+ Special case: If *data* is an empty byte-string like ``b""``,
+ then this indicates that the remote side has closed the
+ connection (end of file). Normally this is convenient, because
+ standard Python APIs like :meth:`file.read` or
+ :meth:`socket.recv` use ``b""`` to indicate end-of-file, while
+ other failures to read are indicated using other mechanisms
+ like raising :exc:`TimeoutError`. When using such an API you
+ can just blindly pass through whatever you get from ``read``
+ to :meth:`receive_data`, and everything will work.
+
+ But, if you have an API where reading an empty string is a
+ valid non-EOF condition, then you need to be aware of this and
+ make sure to check for such strings and avoid passing them to
+ :meth:`receive_data`.
+
+ Returns:
+ Nothing, but after calling this you should call :meth:`next_event`
+ to parse the newly received data.
+
+ Raises:
+ RuntimeError:
+ Raised if you pass an empty *data*, indicating EOF, and then
+ pass a non-empty *data*, indicating more data that somehow
+ arrived after the EOF.
+
+ (Calling ``receive_data(b"")`` multiple times is fine,
+ and equivalent to calling it once.)
+
+ """
+ if data:
+ if self._receive_buffer_closed:
+ raise RuntimeError("received close, then received more data?")
+ self._receive_buffer += data
+ else:
+ self._receive_buffer_closed = True
+
+ def _extract_next_receive_event(
+ self,
+ ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
+ state = self.their_state
+ # We don't pause immediately when they enter DONE, because even in
+ # DONE state we can still process a ConnectionClosed() event. But
+ # if we have data in our buffer, then we definitely aren't getting
+ # a ConnectionClosed() immediately and we need to pause.
+ if state is DONE and self._receive_buffer:
+ return PAUSED
+ if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL:
+ return PAUSED
+ assert self._reader is not None
+ event = self._reader(self._receive_buffer)
+ if event is None:
+ if not self._receive_buffer and self._receive_buffer_closed:
+ # In some unusual cases (basically just HTTP/1.0 bodies), EOF
+ # triggers an actual protocol event; in that case, we want to
+ # return that event, and then the state will change and we'll
+ # get called again to generate the actual ConnectionClosed().
+ if hasattr(self._reader, "read_eof"):
+ event = self._reader.read_eof()
+ else:
+ event = ConnectionClosed()
+ if event is None:
+ event = NEED_DATA
+ return event # type: ignore[no-any-return]
+
+ def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
+ """Parse the next event out of our receive buffer, update our internal
+ state, and return it.
+
+ This is a mutating operation -- think of it like calling :func:`next`
+ on an iterator.
+
+ Returns:
+ : One of three things:
+
+ 1) An event object -- see :ref:`events`.
+
+ 2) The special constant :data:`NEED_DATA`, which indicates that
+ you need to read more data from your socket and pass it to
+ :meth:`receive_data` before this method will be able to return
+ any more events.
+
+ 3) The special constant :data:`PAUSED`, which indicates that we
+ are not in a state where we can process incoming data (usually
+ because the peer has finished their part of the current
+ request/response cycle, and you have not yet called
+ :meth:`start_next_cycle`). See :ref:`flow-control` for details.
+
+ Raises:
+ RemoteProtocolError:
+ The peer has misbehaved. You should close the connection
+ (possibly after sending some kind of 4xx response).
+
+ Once this method returns :class:`ConnectionClosed` once, then all
+ subsequent calls will also return :class:`ConnectionClosed`.
+
+ If this method raises any exception besides :exc:`RemoteProtocolError`
+ then that's a bug -- if it happens please file a bug report!
+
+ If this method raises any exception then it also sets
+ :attr:`Connection.their_state` to :data:`ERROR` -- see
+ :ref:`error-handling` for discussion.
+
+ """
+
+ if self.their_state is ERROR:
+ raise RemoteProtocolError("Can't receive data when peer state is ERROR")
+ try:
+ event = self._extract_next_receive_event()
+ if event not in [NEED_DATA, PAUSED]:
+ self._process_event(self.their_role, cast(Event, event))
+ if event is NEED_DATA:
+ if len(self._receive_buffer) > self._max_incomplete_event_size:
+ # 431 is "Request header fields too large" which is pretty
+ # much the only situation where we can get here
+ raise RemoteProtocolError(
+ "Receive buffer too long", error_status_hint=431
+ )
+ if self._receive_buffer_closed:
+ # We're still trying to complete some event, but that's
+ # never going to happen because no more data is coming
+ raise RemoteProtocolError("peer unexpectedly closed connection")
+ return event
+ except BaseException as exc:
+ self._process_error(self.their_role)
+ if isinstance(exc, LocalProtocolError):
+ exc._reraise_as_remote_protocol_error()
+ else:
+ raise
+
+ @overload
+ def send(self, event: ConnectionClosed) -> None:
+ ...
+
+ @overload
+ def send(
+ self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage]
+ ) -> bytes:
+ ...
+
+ @overload
+ def send(self, event: Event) -> Optional[bytes]:
+ ...
+
+ def send(self, event: Event) -> Optional[bytes]:
+ """Convert a high-level event into bytes that can be sent to the peer,
+ while updating our internal state machine.
+
+ Args:
+ event: The :ref:`event ` to send.
+
+ Returns:
+ If ``type(event) is ConnectionClosed``, then returns
+ ``None``. Otherwise, returns a :term:`bytes-like object`.
+
+ Raises:
+ LocalProtocolError:
+ Sending this event at this time would violate our
+ understanding of the HTTP/1.1 protocol.
+
+ If this method raises any exception then it also sets
+ :attr:`Connection.our_state` to :data:`ERROR` -- see
+ :ref:`error-handling` for discussion.
+
+ """
+ data_list = self.send_with_data_passthrough(event)
+ if data_list is None:
+ return None
+ else:
+ return b"".join(data_list)
+
+ def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
+ """Identical to :meth:`send`, except that in situations where
+ :meth:`send` returns a single :term:`bytes-like object`, this instead
+ returns a list of them -- and when sending a :class:`Data` event, this
+ list is guaranteed to contain the exact object you passed in as
+ :attr:`Data.data`. See :ref:`sendfile` for discussion.
+
+ """
+ if self.our_state is ERROR:
+ raise LocalProtocolError("Can't send data when our state is ERROR")
+ try:
+ if type(event) is Response:
+ event = self._clean_up_response_headers_for_sending(event)
+ # We want to call _process_event before calling the writer,
+ # because if someone tries to do something invalid then this will
+ # give a sensible error message, while our writers all just assume
+ # they will only receive valid events. But, _process_event might
+ # change self._writer. So we have to do a little dance:
+ writer = self._writer
+ self._process_event(self.our_role, event)
+ if type(event) is ConnectionClosed:
+ return None
+ else:
+ # In any situation where writer is None, process_event should
+ # have raised ProtocolError
+ assert writer is not None
+ data_list: List[bytes] = []
+ writer(event, data_list.append)
+ return data_list
+ except:
+ self._process_error(self.our_role)
+ raise
+
+ def send_failed(self) -> None:
+ """Notify the state machine that we failed to send the data it gave
+ us.
+
+ This causes :attr:`Connection.our_state` to immediately become
+ :data:`ERROR` -- see :ref:`error-handling` for discussion.
+
+ """
+ self._process_error(self.our_role)
+
+ # When sending a Response, we take responsibility for a few things:
+ #
+ # - Sometimes you MUST set Connection: close. We take care of those
+ # times. (You can also set it yourself if you want, and if you do then
+ # we'll respect that and close the connection at the right time. But you
+ # don't have to worry about that unless you want to.)
+ #
+ # - The user has to set Content-Length if they want it. Otherwise, for
+ # responses that have bodies (e.g. not HEAD), then we will automatically
+ # select the right mechanism for streaming a body of unknown length,
+ # which depends on depending on the peer's HTTP version.
+ #
+ # This function's *only* responsibility is making sure headers are set up
+ # right -- everything downstream just looks at the headers. There are no
+ # side channels.
+ def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
+ assert type(response) is Response
+
+ headers = response.headers
+ need_close = False
+
+ # HEAD requests need some special handling: they always act like they
+ # have Content-Length: 0, and that's how _body_framing treats
+ # them. But their headers are supposed to match what we would send if
+ # the request was a GET. (Technically there is one deviation allowed:
+ # we're allowed to leave out the framing headers -- see
+ # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
+ # easy to get them right.)
+ method_for_choosing_headers = cast(bytes, self._request_method)
+ if method_for_choosing_headers == b"HEAD":
+ method_for_choosing_headers = b"GET"
+ framing_type, _ = _body_framing(method_for_choosing_headers, response)
+ if framing_type in ("chunked", "http/1.0"):
+ # This response has a body of unknown length.
+ # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
+ # If our peer is HTTP/1.0, we use no framing headers, and close the
+ # connection afterwards.
+ #
+ # Make sure to clear Content-Length (in principle user could have
+ # set both and then we ignored Content-Length b/c
+ # Transfer-Encoding overwrote it -- this would be naughty of them,
+ # but the HTTP spec says that if our peer does this then we have
+ # to fix it instead of erroring out, so we'll accord the user the
+ # same respect).
+ headers = set_comma_header(headers, b"content-length", [])
+ if self.their_http_version is None or self.their_http_version < b"1.1":
+ # Either we never got a valid request and are sending back an
+ # error (their_http_version is None), so we assume the worst;
+ # or else we did get a valid HTTP/1.0 request, so we know that
+ # they don't understand chunked encoding.
+ headers = set_comma_header(headers, b"transfer-encoding", [])
+ # This is actually redundant ATM, since currently we
+ # unconditionally disable keep-alive when talking to HTTP/1.0
+ # peers. But let's be defensive just in case we add
+ # Connection: keep-alive support later:
+ if self._request_method != b"HEAD":
+ need_close = True
+ else:
+ headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])
+
+ if not self._cstate.keep_alive or need_close:
+ # Make sure Connection: close is set
+ connection = set(get_comma_header(headers, b"connection"))
+ connection.discard(b"keep-alive")
+ connection.add(b"close")
+ headers = set_comma_header(headers, b"connection", sorted(connection))
+
+ return Response(
+ headers=headers,
+ status_code=response.status_code,
+ http_version=response.http_version,
+ reason=response.reason,
+ )
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca1c3adbde2c4e7710482a18e3471f91f1da610e
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py
@@ -0,0 +1,369 @@
+# High level events that make up HTTP/1.1 conversations. Loosely inspired by
+# the corresponding events in hyper-h2:
+#
+# http://python-hyper.org/h2/en/stable/api.html#events
+#
+# Don't subclass these. Stuff will break.
+
+import re
+from abc import ABC
+from dataclasses import dataclass
+from typing import List, Tuple, Union
+
+from ._abnf import method, request_target
+from ._headers import Headers, normalize_and_validate
+from ._util import bytesify, LocalProtocolError, validate
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = [
+ "Event",
+ "Request",
+ "InformationalResponse",
+ "Response",
+ "Data",
+ "EndOfMessage",
+ "ConnectionClosed",
+]
+
+method_re = re.compile(method.encode("ascii"))
+request_target_re = re.compile(request_target.encode("ascii"))
+
+
+class Event(ABC):
+ """
+ Base class for h11 events.
+ """
+
+ __slots__ = ()
+
+
+@dataclass(init=False, frozen=True)
+class Request(Event):
+ """The beginning of an HTTP request.
+
+ Fields:
+
+ .. attribute:: method
+
+ An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
+ string. :term:`Bytes-like objects ` and native
+ strings containing only ascii characters will be automatically
+ converted to byte strings.
+
+ .. attribute:: target
+
+ The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
+ more exotic formats described in `RFC 7320, section 5.3
+ `_. Always a byte
+ string. :term:`Bytes-like objects ` and native
+ strings containing only ascii characters will be automatically
+ converted to byte strings.
+
+ .. attribute:: headers
+
+ Request headers, represented as a list of (name, value) pairs. See
+ :ref:`the header normalization rules ` for details.
+
+ .. attribute:: http_version
+
+ The HTTP protocol version, represented as a byte string like
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
+ ` for details.
+
+ """
+
+ __slots__ = ("method", "headers", "target", "http_version")
+
+ method: bytes
+ headers: Headers
+ target: bytes
+ http_version: bytes
+
+ def __init__(
+ self,
+ *,
+ method: Union[bytes, str],
+ headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
+ target: Union[bytes, str],
+ http_version: Union[bytes, str] = b"1.1",
+ _parsed: bool = False,
+ ) -> None:
+ super().__init__()
+ if isinstance(headers, Headers):
+ object.__setattr__(self, "headers", headers)
+ else:
+ object.__setattr__(
+ self, "headers", normalize_and_validate(headers, _parsed=_parsed)
+ )
+ if not _parsed:
+ object.__setattr__(self, "method", bytesify(method))
+ object.__setattr__(self, "target", bytesify(target))
+ object.__setattr__(self, "http_version", bytesify(http_version))
+ else:
+ object.__setattr__(self, "method", method)
+ object.__setattr__(self, "target", target)
+ object.__setattr__(self, "http_version", http_version)
+
+ # "A server MUST respond with a 400 (Bad Request) status code to any
+ # HTTP/1.1 request message that lacks a Host header field and to any
+ # request message that contains more than one Host header field or a
+ # Host header field with an invalid field-value."
+ # -- https://tools.ietf.org/html/rfc7230#section-5.4
+ host_count = 0
+ for name, value in self.headers:
+ if name == b"host":
+ host_count += 1
+ if self.http_version == b"1.1" and host_count == 0:
+ raise LocalProtocolError("Missing mandatory Host: header")
+ if host_count > 1:
+ raise LocalProtocolError("Found multiple Host: headers")
+
+ validate(method_re, self.method, "Illegal method characters")
+ validate(request_target_re, self.target, "Illegal target characters")
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class _ResponseBase(Event):
+ __slots__ = ("headers", "http_version", "reason", "status_code")
+
+ headers: Headers
+ http_version: bytes
+ reason: bytes
+ status_code: int
+
+ def __init__(
+ self,
+ *,
+ headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
+ status_code: int,
+ http_version: Union[bytes, str] = b"1.1",
+ reason: Union[bytes, str] = b"",
+ _parsed: bool = False,
+ ) -> None:
+ super().__init__()
+ if isinstance(headers, Headers):
+ object.__setattr__(self, "headers", headers)
+ else:
+ object.__setattr__(
+ self, "headers", normalize_and_validate(headers, _parsed=_parsed)
+ )
+ if not _parsed:
+ object.__setattr__(self, "reason", bytesify(reason))
+ object.__setattr__(self, "http_version", bytesify(http_version))
+ if not isinstance(status_code, int):
+ raise LocalProtocolError("status code must be integer")
+ # Because IntEnum objects are instances of int, but aren't
+ # duck-compatible (sigh), see gh-72.
+ object.__setattr__(self, "status_code", int(status_code))
+ else:
+ object.__setattr__(self, "reason", reason)
+ object.__setattr__(self, "http_version", http_version)
+ object.__setattr__(self, "status_code", status_code)
+
+ self.__post_init__()
+
+ def __post_init__(self) -> None:
+ pass
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class InformationalResponse(_ResponseBase):
+ """An HTTP informational response.
+
+ Fields:
+
+ .. attribute:: status_code
+
+ The status code of this response, as an integer. For an
+ :class:`InformationalResponse`, this is always in the range [100,
+ 200).
+
+ .. attribute:: headers
+
+ Request headers, represented as a list of (name, value) pairs. See
+ :ref:`the header normalization rules ` for
+ details.
+
+ .. attribute:: http_version
+
+ The HTTP protocol version, represented as a byte string like
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
+ ` for details.
+
+ .. attribute:: reason
+
+ The reason phrase of this response, as a byte string. For example:
+ ``b"OK"``, or ``b"Not Found"``.
+
+ """
+
+ def __post_init__(self) -> None:
+ if not (100 <= self.status_code < 200):
+ raise LocalProtocolError(
+ "InformationalResponse status_code should be in range "
+ "[100, 200), not {}".format(self.status_code)
+ )
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Response(_ResponseBase):
+ """The beginning of an HTTP response.
+
+ Fields:
+
+ .. attribute:: status_code
+
+ The status code of this response, as an integer. For an
+ :class:`Response`, this is always in the range [200,
+ 1000).
+
+ .. attribute:: headers
+
+ Request headers, represented as a list of (name, value) pairs. See
+ :ref:`the header normalization rules ` for details.
+
+ .. attribute:: http_version
+
+ The HTTP protocol version, represented as a byte string like
+ ``b"1.1"``. See :ref:`the HTTP version normalization rules
+ ` for details.
+
+ .. attribute:: reason
+
+ The reason phrase of this response, as a byte string. For example:
+ ``b"OK"``, or ``b"Not Found"``.
+
+ """
+
+ def __post_init__(self) -> None:
+ if not (200 <= self.status_code < 1000):
+ raise LocalProtocolError(
+ "Response status_code should be in range [200, 1000), not {}".format(
+ self.status_code
+ )
+ )
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+@dataclass(init=False, frozen=True)
+class Data(Event):
+ """Part of an HTTP message body.
+
+ Fields:
+
+ .. attribute:: data
+
+ A :term:`bytes-like object` containing part of a message body. Or, if
+ using the ``combine=False`` argument to :meth:`Connection.send`, then
+ any object that your socket writing code knows what to do with, and for
+ which calling :func:`len` returns the number of bytes that will be
+ written -- see :ref:`sendfile` for details.
+
+ .. attribute:: chunk_start
+
+ A marker that indicates whether this data object is from the start of a
+ chunked transfer encoding chunk. This field is ignored when when a Data
+ event is provided to :meth:`Connection.send`: it is only valid on
+ events emitted from :meth:`Connection.next_event`. You probably
+ shouldn't use this attribute at all; see
+ :ref:`chunk-delimiters-are-bad` for details.
+
+ .. attribute:: chunk_end
+
+ A marker that indicates whether this data object is the last for a
+ given chunked transfer encoding chunk. This field is ignored when when
+ a Data event is provided to :meth:`Connection.send`: it is only valid
+ on events emitted from :meth:`Connection.next_event`. You probably
+ shouldn't use this attribute at all; see
+ :ref:`chunk-delimiters-are-bad` for details.
+
+ """
+
+ __slots__ = ("data", "chunk_start", "chunk_end")
+
+ data: bytes
+ chunk_start: bool
+ chunk_end: bool
+
+ def __init__(
+ self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
+ ) -> None:
+ object.__setattr__(self, "data", data)
+ object.__setattr__(self, "chunk_start", chunk_start)
+ object.__setattr__(self, "chunk_end", chunk_end)
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
+# are forbidden to be sent in a trailer, since processing them as if they were
+# present in the header section might bypass external security filters."
+# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
+# Unfortunately, the list of forbidden fields is long and vague :-/
+@dataclass(init=False, frozen=True)
+class EndOfMessage(Event):
+ """The end of an HTTP message.
+
+ Fields:
+
+ .. attribute:: headers
+
+ Default value: ``[]``
+
+ Any trailing headers attached to this message, represented as a list of
+ (name, value) pairs. See :ref:`the header normalization rules
+ ` for details.
+
+ Must be empty unless ``Transfer-Encoding: chunked`` is in use.
+
+ """
+
+ __slots__ = ("headers",)
+
+ headers: Headers
+
+ def __init__(
+ self,
+ *,
+ headers: Union[
+ Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
+ ] = None,
+ _parsed: bool = False,
+ ) -> None:
+ super().__init__()
+ if headers is None:
+ headers = Headers([])
+ elif not isinstance(headers, Headers):
+ headers = normalize_and_validate(headers, _parsed=_parsed)
+
+ object.__setattr__(self, "headers", headers)
+
+ # This is an unhashable type.
+ __hash__ = None # type: ignore
+
+
+@dataclass(frozen=True)
+class ConnectionClosed(Event):
+ """This event indicates that the sender has closed their outgoing
+ connection.
+
+ Note that this does not necessarily mean that they can't *receive* further
+ data, because TCP connections are composed to two one-way channels which
+ can be closed independently. See :ref:`closing` for details.
+
+ No fields.
+ """
+
+ pass
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py
new file mode 100644
index 0000000000000000000000000000000000000000..31da3e2b23b55a624b36f105e62a6902e63286aa
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py
@@ -0,0 +1,282 @@
+import re
+from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
+
+from ._abnf import field_name, field_value
+from ._util import bytesify, LocalProtocolError, validate
+
+if TYPE_CHECKING:
+ from ._events import Request
+
+try:
+ from typing import Literal
+except ImportError:
+ from typing_extensions import Literal # type: ignore
+
+CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1
+
+
+# Facts
+# -----
+#
+# Headers are:
+# keys: case-insensitive ascii
+# values: mixture of ascii and raw bytes
+#
+# "Historically, HTTP has allowed field content with text in the ISO-8859-1
+# charset [ISO-8859-1], supporting other charsets only through use of
+# [RFC2047] encoding. In practice, most HTTP header field values use only a
+# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
+# limit their field values to US-ASCII octets. A recipient SHOULD treat other
+# octets in field content (obs-text) as opaque data."
+# And it deprecates all non-ascii values
+#
+# Leading/trailing whitespace in header names is forbidden
+#
+# Values get leading/trailing whitespace stripped
+#
+# Content-Disposition actually needs to contain unicode semantically; to
+# accomplish this it has a terrifically weird way of encoding the filename
+# itself as ascii (and even this still has lots of cross-browser
+# incompatibilities)
+#
+# Order is important:
+# "a proxy MUST NOT change the order of these field values when forwarding a
+# message"
+# (and there are several headers where the order indicates a preference)
+#
+# Multiple occurences of the same header:
+# "A sender MUST NOT generate multiple header fields with the same field name
+# in a message unless either the entire field value for that header field is
+# defined as a comma-separated list [or the header is Set-Cookie which gets a
+# special exception]" - RFC 7230. (cookies are in RFC 6265)
+#
+# So every header aside from Set-Cookie can be merged by b", ".join if it
+# occurs repeatedly. But, of course, they can't necessarily be split by
+# .split(b","), because quoting.
+#
+# Given all this mess (case insensitive, duplicates allowed, order is
+# important, ...), there doesn't appear to be any standard way to handle
+# headers in Python -- they're almost like dicts, but... actually just
+# aren't. For now we punt and just use a super simple representation: headers
+# are a list of pairs
+#
+# [(name1, value1), (name2, value2), ...]
+#
+# where all entries are bytestrings, names are lowercase and have no
+# leading/trailing whitespace, and values are bytestrings with no
+# leading/trailing whitespace. Searching and updating are done via naive O(n)
+# methods.
+#
+# Maybe a dict-of-lists would be better?
+
+_content_length_re = re.compile(rb"[0-9]+")
+_field_name_re = re.compile(field_name.encode("ascii"))
+_field_value_re = re.compile(field_value.encode("ascii"))
+
+
+class Headers(Sequence[Tuple[bytes, bytes]]):
+ """
+ A list-like interface that allows iterating over headers as byte-pairs
+ of (lowercased-name, value).
+
+ Internally we actually store the representation as three-tuples,
+ including both the raw original casing, in order to preserve casing
+ over-the-wire, and the lowercased name, for case-insensitive comparisions.
+
+ r = Request(
+ method="GET",
+ target="/",
+ headers=[("Host", "example.org"), ("Connection", "keep-alive")],
+ http_version="1.1",
+ )
+ assert r.headers == [
+ (b"host", b"example.org"),
+ (b"connection", b"keep-alive")
+ ]
+ assert r.headers.raw_items() == [
+ (b"Host", b"example.org"),
+ (b"Connection", b"keep-alive")
+ ]
+ """
+
+ __slots__ = "_full_items"
+
+ def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
+ self._full_items = full_items
+
+ def __bool__(self) -> bool:
+ return bool(self._full_items)
+
+ def __eq__(self, other: object) -> bool:
+ return list(self) == list(other) # type: ignore
+
+ def __len__(self) -> int:
+ return len(self._full_items)
+
+ def __repr__(self) -> str:
+ return "" % repr(list(self))
+
+ def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override]
+ _, name, value = self._full_items[idx]
+ return (name, value)
+
+ def raw_items(self) -> List[Tuple[bytes, bytes]]:
+ return [(raw_name, value) for raw_name, _, value in self._full_items]
+
+
+HeaderTypes = Union[
+ List[Tuple[bytes, bytes]],
+ List[Tuple[bytes, str]],
+ List[Tuple[str, bytes]],
+ List[Tuple[str, str]],
+]
+
+
+@overload
+def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
+ ...
+
+
+@overload
+def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
+ ...
+
+
+@overload
+def normalize_and_validate(
+ headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+ ...
+
+
+def normalize_and_validate(
+ headers: Union[Headers, HeaderTypes], _parsed: bool = False
+) -> Headers:
+ new_headers = []
+ seen_content_length = None
+ saw_transfer_encoding = False
+ for name, value in headers:
+ # For headers coming out of the parser, we can safely skip some steps,
+ # because it always returns bytes and has already run these regexes
+ # over the data:
+ if not _parsed:
+ name = bytesify(name)
+ value = bytesify(value)
+ validate(_field_name_re, name, "Illegal header name {!r}", name)
+ validate(_field_value_re, value, "Illegal header value {!r}", value)
+ assert isinstance(name, bytes)
+ assert isinstance(value, bytes)
+
+ raw_name = name
+ name = name.lower()
+ if name == b"content-length":
+ lengths = {length.strip() for length in value.split(b",")}
+ if len(lengths) != 1:
+ raise LocalProtocolError("conflicting Content-Length headers")
+ value = lengths.pop()
+ validate(_content_length_re, value, "bad Content-Length")
+ if len(value) > CONTENT_LENGTH_MAX_DIGITS:
+ raise LocalProtocolError("bad Content-Length")
+ if seen_content_length is None:
+ seen_content_length = value
+ new_headers.append((raw_name, name, value))
+ elif seen_content_length != value:
+ raise LocalProtocolError("conflicting Content-Length headers")
+ elif name == b"transfer-encoding":
+ # "A server that receives a request message with a transfer coding
+ # it does not understand SHOULD respond with 501 (Not
+ # Implemented)."
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1
+ if saw_transfer_encoding:
+ raise LocalProtocolError(
+ "multiple Transfer-Encoding headers", error_status_hint=501
+ )
+ # "All transfer-coding names are case-insensitive"
+ # -- https://tools.ietf.org/html/rfc7230#section-4
+ value = value.lower()
+ if value != b"chunked":
+ raise LocalProtocolError(
+ "Only Transfer-Encoding: chunked is supported",
+ error_status_hint=501,
+ )
+ saw_transfer_encoding = True
+ new_headers.append((raw_name, name, value))
+ else:
+ new_headers.append((raw_name, name, value))
+ return Headers(new_headers)
+
+
+def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
+ # Should only be used for headers whose value is a list of
+ # comma-separated, case-insensitive values.
+ #
+ # The header name `name` is expected to be lower-case bytes.
+ #
+ # Connection: meets these criteria (including cast insensitivity).
+ #
+ # Content-Length: technically is just a single value (1*DIGIT), but the
+ # standard makes reference to implementations that do multiple values, and
+ # using this doesn't hurt. Ditto, case insensitivity doesn't things either
+ # way.
+ #
+ # Transfer-Encoding: is more complex (allows for quoted strings), so
+ # splitting on , is actually wrong. For example, this is legal:
+ #
+ # Transfer-Encoding: foo; options="1,2", chunked
+ #
+ # and should be parsed as
+ #
+ # foo; options="1,2"
+ # chunked
+ #
+ # but this naive function will parse it as
+ #
+ # foo; options="1
+ # 2"
+ # chunked
+ #
+ # However, this is okay because the only thing we are going to do with
+ # any Transfer-Encoding is reject ones that aren't just "chunked", so
+ # both of these will be treated the same anyway.
+ #
+ # Expect: the only legal value is the literal string
+ # "100-continue". Splitting on commas is harmless. Case insensitive.
+ #
+ out: List[bytes] = []
+ for _, found_name, found_raw_value in headers._full_items:
+ if found_name == name:
+ found_raw_value = found_raw_value.lower()
+ for found_split_value in found_raw_value.split(b","):
+ found_split_value = found_split_value.strip()
+ if found_split_value:
+ out.append(found_split_value)
+ return out
+
+
+def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
+ # The header name `name` is expected to be lower-case bytes.
+ #
+ # Note that when we store the header we use title casing for the header
+ # names, in order to match the conventional HTTP header style.
+ #
+ # Simply calling `.title()` is a blunt approach, but it's correct
+ # here given the cases where we're using `set_comma_header`...
+ #
+ # Connection, Content-Length, Transfer-Encoding.
+ new_headers: List[Tuple[bytes, bytes]] = []
+ for found_raw_name, found_name, found_raw_value in headers._full_items:
+ if found_name != name:
+ new_headers.append((found_raw_name, found_raw_value))
+ for new_value in new_values:
+ new_headers.append((name.title(), new_value))
+ return normalize_and_validate(new_headers)
+
+
+def has_expect_100_continue(request: "Request") -> bool:
+ # https://tools.ietf.org/html/rfc7231#section-5.1.1
+ # "A server that receives a 100-continue expectation in an HTTP/1.0 request
+ # MUST ignore that expectation."
+ if request.http_version < b"1.1":
+ return False
+ expect = get_comma_header(request.headers, b"expect")
+ return b"100-continue" in expect
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py
new file mode 100644
index 0000000000000000000000000000000000000000..576804cc282032526e0a932c9853d586a094bad0
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py
@@ -0,0 +1,250 @@
+# Code to read HTTP data
+#
+# Strategy: each reader is a callable which takes a ReceiveBuffer object, and
+# either:
+# 1) consumes some of it and returns an Event
+# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate()
+# and it might raise a LocalProtocolError, so simpler just to always use
+# this)
+# 3) returns None, meaning "I need more data"
+#
+# If they have a .read_eof attribute, then this will be called if an EOF is
+# received -- but this is optional. Either way, the actual ConnectionClosed
+# event will be generated afterwards.
+#
+# READERS is a dict describing how to pick a reader. It maps states to either:
+# - a reader
+# - or, for body readers, a dict of per-framing reader factories
+
+import re
+from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union
+
+from ._abnf import chunk_header, header_field, request_line, status_line
+from ._events import Data, EndOfMessage, InformationalResponse, Request, Response
+from ._receivebuffer import ReceiveBuffer
+from ._state import (
+ CLIENT,
+ CLOSED,
+ DONE,
+ IDLE,
+ MUST_CLOSE,
+ SEND_BODY,
+ SEND_RESPONSE,
+ SERVER,
+)
+from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate
+
+__all__ = ["READERS"]
+
+header_field_re = re.compile(header_field.encode("ascii"))
+obs_fold_re = re.compile(rb"[ \t]+")
+
+
+def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]:
+ it = iter(lines)
+ last: Optional[bytes] = None
+ for line in it:
+ match = obs_fold_re.match(line)
+ if match:
+ if last is None:
+ raise LocalProtocolError("continuation line at start of headers")
+ if not isinstance(last, bytearray):
+ # Cast to a mutable type, avoiding copy on append to ensure O(n) time
+ last = bytearray(last)
+ last += b" "
+ last += line[match.end() :]
+ else:
+ if last is not None:
+ yield last
+ last = line
+ if last is not None:
+ yield last
+
+
+def _decode_header_lines(
+ lines: Iterable[bytes],
+) -> Iterable[Tuple[bytes, bytes]]:
+ for line in _obsolete_line_fold(lines):
+ matches = validate(header_field_re, line, "illegal header line: {!r}", line)
+ yield (matches["field_name"], matches["field_value"])
+
+
+request_line_re = re.compile(request_line.encode("ascii"))
+
+
+def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]:
+ lines = buf.maybe_extract_lines()
+ if lines is None:
+ if buf.is_next_line_obviously_invalid_request_line():
+ raise LocalProtocolError("illegal request line")
+ return None
+ if not lines:
+ raise LocalProtocolError("no request line received")
+ matches = validate(
+ request_line_re, lines[0], "illegal request line: {!r}", lines[0]
+ )
+ return Request(
+ headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches
+ )
+
+
+status_line_re = re.compile(status_line.encode("ascii"))
+
+
+def maybe_read_from_SEND_RESPONSE_server(
+ buf: ReceiveBuffer,
+) -> Union[InformationalResponse, Response, None]:
+ lines = buf.maybe_extract_lines()
+ if lines is None:
+ if buf.is_next_line_obviously_invalid_request_line():
+ raise LocalProtocolError("illegal request line")
+ return None
+ if not lines:
+ raise LocalProtocolError("no response line received")
+ matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0])
+ http_version = (
+ b"1.1" if matches["http_version"] is None else matches["http_version"]
+ )
+ reason = b"" if matches["reason"] is None else matches["reason"]
+ status_code = int(matches["status_code"])
+ class_: Union[Type[InformationalResponse], Type[Response]] = (
+ InformationalResponse if status_code < 200 else Response
+ )
+ return class_(
+ headers=list(_decode_header_lines(lines[1:])),
+ _parsed=True,
+ status_code=status_code,
+ reason=reason,
+ http_version=http_version,
+ )
+
+
+class ContentLengthReader:
+ def __init__(self, length: int) -> None:
+ self._length = length
+ self._remaining = length
+
+ def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
+ if self._remaining == 0:
+ return EndOfMessage()
+ data = buf.maybe_extract_at_most(self._remaining)
+ if data is None:
+ return None
+ self._remaining -= len(data)
+ return Data(data=data)
+
+ def read_eof(self) -> NoReturn:
+ raise RemoteProtocolError(
+ "peer closed connection without sending complete message body "
+ "(received {} bytes, expected {})".format(
+ self._length - self._remaining, self._length
+ )
+ )
+
+
+chunk_header_re = re.compile(chunk_header.encode("ascii"))
+
+
+class ChunkedReader:
+ def __init__(self) -> None:
+ self._bytes_in_chunk = 0
+ # After reading a chunk, we have to throw away the trailing \r\n.
+ # This tracks the bytes that we need to match and throw away.
+ self._bytes_to_discard = b""
+ self._reading_trailer = False
+
+ def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
+ if self._reading_trailer:
+ lines = buf.maybe_extract_lines()
+ if lines is None:
+ return None
+ return EndOfMessage(headers=list(_decode_header_lines(lines)))
+ if self._bytes_to_discard:
+ data = buf.maybe_extract_at_most(len(self._bytes_to_discard))
+ if data is None:
+ return None
+ if data != self._bytes_to_discard[: len(data)]:
+ raise LocalProtocolError(
+ f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})"
+ )
+ self._bytes_to_discard = self._bytes_to_discard[len(data) :]
+ if self._bytes_to_discard:
+ return None
+ # else, fall through and read some more
+ assert self._bytes_to_discard == b""
+ if self._bytes_in_chunk == 0:
+ # We need to refill our chunk count
+ chunk_header = buf.maybe_extract_next_line()
+ if chunk_header is None:
+ return None
+ matches = validate(
+ chunk_header_re,
+ chunk_header,
+ "illegal chunk header: {!r}",
+ chunk_header,
+ )
+ # XX FIXME: we discard chunk extensions. Does anyone care?
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16)
+ if self._bytes_in_chunk == 0:
+ self._reading_trailer = True
+ return self(buf)
+ chunk_start = True
+ else:
+ chunk_start = False
+ assert self._bytes_in_chunk > 0
+ data = buf.maybe_extract_at_most(self._bytes_in_chunk)
+ if data is None:
+ return None
+ self._bytes_in_chunk -= len(data)
+ if self._bytes_in_chunk == 0:
+ self._bytes_to_discard = b"\r\n"
+ chunk_end = True
+ else:
+ chunk_end = False
+ return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)
+
+ def read_eof(self) -> NoReturn:
+ raise RemoteProtocolError(
+ "peer closed connection without sending complete message body "
+ "(incomplete chunked read)"
+ )
+
+
+class Http10Reader:
+ def __call__(self, buf: ReceiveBuffer) -> Optional[Data]:
+ data = buf.maybe_extract_at_most(999999999)
+ if data is None:
+ return None
+ return Data(data=data)
+
+ def read_eof(self) -> EndOfMessage:
+ return EndOfMessage()
+
+
+def expect_nothing(buf: ReceiveBuffer) -> None:
+ if buf:
+ raise LocalProtocolError("Got data when expecting EOF")
+ return None
+
+
+ReadersType = Dict[
+ Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]],
+ Union[Callable[..., Any], Dict[str, Callable[..., Any]]],
+]
+
+READERS: ReadersType = {
+ (CLIENT, IDLE): maybe_read_from_IDLE_client,
+ (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
+ (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,
+ (CLIENT, DONE): expect_nothing,
+ (CLIENT, MUST_CLOSE): expect_nothing,
+ (CLIENT, CLOSED): expect_nothing,
+ (SERVER, DONE): expect_nothing,
+ (SERVER, MUST_CLOSE): expect_nothing,
+ (SERVER, CLOSED): expect_nothing,
+ SEND_BODY: {
+ "chunked": ChunkedReader,
+ "content-length": ContentLengthReader,
+ "http/1.0": Http10Reader,
+ },
+}
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5c4e08a56f5081e87103f38b4add6ce1b730204
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py
@@ -0,0 +1,153 @@
+import re
+import sys
+from typing import List, Optional, Union
+
+__all__ = ["ReceiveBuffer"]
+
+
+# Operations we want to support:
+# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable),
+# or wait until there is one
+# - read at-most-N bytes
+# Goals:
+# - on average, do this fast
+# - worst case, do this in O(n) where n is the number of bytes processed
+# Plan:
+# - store bytearray, offset, how far we've searched for a separator token
+# - use the how-far-we've-searched data to avoid rescanning
+# - while doing a stream of uninterrupted processing, advance offset instead
+# of constantly copying
+# WARNING:
+# - I haven't benchmarked or profiled any of this yet.
+#
+# Note that starting in Python 3.4, deleting the initial n bytes from a
+# bytearray is amortized O(n), thanks to some excellent work by Antoine
+# Martin:
+#
+# https://bugs.python.org/issue19087
+#
+# This means that if we only supported 3.4+, we could get rid of the code here
+# involving self._start and self.compress, because it's doing exactly the same
+# thing that bytearray now does internally.
+#
+# BUT unfortunately, we still support 2.7, and reading short segments out of a
+# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
+# delete this code. Yet:
+#
+# https://pythonclock.org/
+#
+# (Two things to double-check first though: make sure PyPy also has the
+# optimization, and benchmark to make sure it's a win, since we do have a
+# slightly clever thing where we delay calling compress() until we've
+# processed a whole event, which could in theory be slightly more efficient
+# than the internal bytearray support.)
+blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)
+
+
+class ReceiveBuffer:
+ def __init__(self) -> None:
+ self._data = bytearray()
+ self._next_line_search = 0
+ self._multiple_lines_search = 0
+
+ def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
+ self._data += byteslike
+ return self
+
+ def __bool__(self) -> bool:
+ return bool(len(self))
+
+ def __len__(self) -> int:
+ return len(self._data)
+
+ # for @property unprocessed_data
+ def __bytes__(self) -> bytes:
+ return bytes(self._data)
+
+ def _extract(self, count: int) -> bytearray:
+ # extracting an initial slice of the data buffer and return it
+ out = self._data[:count]
+ del self._data[:count]
+
+ self._next_line_search = 0
+ self._multiple_lines_search = 0
+
+ return out
+
+ def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
+ """
+ Extract a fixed number of bytes from the buffer.
+ """
+ out = self._data[:count]
+ if not out:
+ return None
+
+ return self._extract(count)
+
+ def maybe_extract_next_line(self) -> Optional[bytearray]:
+ """
+ Extract the first line, if it is completed in the buffer.
+ """
+ # Only search in buffer space that we've not already looked at.
+ search_start_index = max(0, self._next_line_search - 1)
+ partial_idx = self._data.find(b"\r\n", search_start_index)
+
+ if partial_idx == -1:
+ self._next_line_search = len(self._data)
+ return None
+
+ # + 2 is to compensate len(b"\r\n")
+ idx = partial_idx + 2
+
+ return self._extract(idx)
+
+ def maybe_extract_lines(self) -> Optional[List[bytearray]]:
+ """
+ Extract everything up to the first blank line, and return a list of lines.
+ """
+ # Handle the case where we have an immediate empty line.
+ if self._data[:1] == b"\n":
+ self._extract(1)
+ return []
+
+ if self._data[:2] == b"\r\n":
+ self._extract(2)
+ return []
+
+ # Only search in buffer space that we've not already looked at.
+ match = blank_line_regex.search(self._data, self._multiple_lines_search)
+ if match is None:
+ self._multiple_lines_search = max(0, len(self._data) - 2)
+ return None
+
+ # Truncate the buffer and return it.
+ idx = match.span(0)[-1]
+ out = self._extract(idx)
+ lines = out.split(b"\n")
+
+ for line in lines:
+ if line.endswith(b"\r"):
+ del line[-1]
+
+ assert lines[-2] == lines[-1] == b""
+
+ del lines[-2:]
+
+ return lines
+
+ # In theory we should wait until `\r\n` before starting to validate
+ # incoming data. However it's interesting to detect (very) invalid data
+ # early given they might not even contain `\r\n` at all (hence only
+ # timeout will get rid of them).
+ # This is not a 100% effective detection but more of a cheap sanity check
+ # allowing for early abort in some useful cases.
+ # This is especially interesting when peer is messing up with HTTPS and
+ # sent us a TLS stream where we were expecting plain HTTP given all
+ # versions of TLS so far start handshake with a 0x16 message type code.
+ def is_next_line_obviously_invalid_request_line(self) -> bool:
+ try:
+ # HTTP header line must not contain non-printable characters
+ # and should not start with a space
+ return self._data[0] < 0x21
+ except IndexError:
+ return False
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ad444b043e3f3d6c05c2d9d84d5119312bfaa34
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py
@@ -0,0 +1,365 @@
+################################################################
+# The core state machine
+################################################################
+#
+# Rule 1: everything that affects the state machine and state transitions must
+# live here in this file. As much as possible goes into the table-based
+# representation, but for the bits that don't quite fit, the actual code and
+# state must nonetheless live here.
+#
+# Rule 2: this file does not know about what role we're playing; it only knows
+# about HTTP request/response cycles in the abstract. This ensures that we
+# don't cheat and apply different rules to local and remote parties.
+#
+#
+# Theory of operation
+# ===================
+#
+# Possibly the simplest way to think about this is that we actually have 5
+# different state machines here. Yes, 5. These are:
+#
+# 1) The client state, with its complicated automaton (see the docs)
+# 2) The server state, with its complicated automaton (see the docs)
+# 3) The keep-alive state, with possible states {True, False}
+# 4) The SWITCH_CONNECT state, with possible states {False, True}
+# 5) The SWITCH_UPGRADE state, with possible states {False, True}
+#
+# For (3)-(5), the first state listed is the initial state.
+#
+# (1)-(3) are stored explicitly in member variables. The last
+# two are stored implicitly in the pending_switch_proposals set as:
+# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
+# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
+#
+# And each of these machines has two different kinds of transitions:
+#
+# a) Event-triggered
+# b) State-triggered
+#
+# Event triggered is the obvious thing that you'd think it is: some event
+# happens, and if it's the right event at the right time then a transition
+# happens. But there are somewhat complicated rules for which machines can
+# "see" which events. (As a rule of thumb, if a machine "sees" an event, this
+# means two things: the event can affect the machine, and if the machine is
+# not in a state where it expects that event then it's an error.) These rules
+# are:
+#
+# 1) The client machine sees all h11.events objects emitted by the client.
+#
+# 2) The server machine sees all h11.events objects emitted by the server.
+#
+# It also sees the client's Request event.
+#
+# And sometimes, server events are annotated with a _SWITCH_* event. For
+# example, we can have a (Response, _SWITCH_CONNECT) event, which is
+# different from a regular Response event.
+#
+# 3) The keep-alive machine sees the process_keep_alive_disabled() event
+# (which is derived from Request/Response events), and this event
+# transitions it from True -> False, or from False -> False. There's no way
+# to transition back.
+#
+# 4&5) The _SWITCH_* machines transition from False->True when we get a
+# Request that proposes the relevant type of switch (via
+# process_client_switch_proposals), and they go from True->False when we
+# get a Response that has no _SWITCH_* annotation.
+#
+# So that's event-triggered transitions.
+#
+# State-triggered transitions are less standard. What they do here is couple
+# the machines together. The way this works is, when certain *joint*
+# configurations of states are achieved, then we automatically transition to a
+# new *joint* state. So, for example, if we're ever in a joint state with
+#
+# client: DONE
+# keep-alive: False
+#
+# then the client state immediately transitions to:
+#
+# client: MUST_CLOSE
+#
+# This is fundamentally different from an event-based transition, because it
+# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state
+# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive
+# transitioned True -> False. Either way, once this precondition is satisfied,
+# this transition is immediately triggered.
+#
+# What if two conflicting state-based transitions get enabled at the same
+# time? In practice there's only one case where this arises (client DONE ->
+# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by
+# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition.
+#
+# Implementation
+# --------------
+#
+# The event-triggered transitions for the server and client machines are all
+# stored explicitly in a table. Ditto for the state-triggered transitions that
+# involve just the server and client state.
+#
+# The transitions for the other machines, and the state-triggered transitions
+# that involve the other machines, are written out as explicit Python code.
+#
+# It'd be nice if there were some cleaner way to do all this. This isn't
+# *too* terrible, but I feel like it could probably be better.
+#
+# WARNING
+# -------
+#
+# The script that generates the state machine diagrams for the docs knows how
+# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS
+# tables. But it can't automatically read the transitions that are written
+# directly in Python code. So if you touch those, you need to also update the
+# script to keep it in sync!
+from typing import cast, Dict, Optional, Set, Tuple, Type, Union
+
+from ._events import *
+from ._util import LocalProtocolError, Sentinel
+
+# Everything in __all__ gets re-exported as part of the h11 public API.
+__all__ = [
+ "CLIENT",
+ "SERVER",
+ "IDLE",
+ "SEND_RESPONSE",
+ "SEND_BODY",
+ "DONE",
+ "MUST_CLOSE",
+ "CLOSED",
+ "MIGHT_SWITCH_PROTOCOL",
+ "SWITCHED_PROTOCOL",
+ "ERROR",
+]
+
+
class CLIENT(Sentinel, metaclass=Sentinel):
    """Role sentinel: the party that sends requests."""

    pass


class SERVER(Sentinel, metaclass=Sentinel):
    """Role sentinel: the party that sends responses."""

    pass


# States
class IDLE(Sentinel, metaclass=Sentinel):
    """Initial state for both roles: no message in flight."""

    pass


class SEND_RESPONSE(Sentinel, metaclass=Sentinel):
    """Server state: a request was received, a response is owed."""

    pass


class SEND_BODY(Sentinel, metaclass=Sentinel):
    """Either role: currently sending message body data."""

    pass


class DONE(Sentinel, metaclass=Sentinel):
    """Either role: finished with the current request/response cycle."""

    pass


class MUST_CLOSE(Sentinel, metaclass=Sentinel):
    """Either role: the connection may not be reused and must be closed."""

    pass


class CLOSED(Sentinel, metaclass=Sentinel):
    """Either role: the connection is closed (terminal)."""

    pass


class ERROR(Sentinel, metaclass=Sentinel):
    """Either role: a protocol error occurred (terminal)."""

    pass


# Switch types
class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel):
    """Client state: request proposed a protocol switch; awaiting the verdict."""

    pass


class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel):
    """Either role: a protocol switch happened; this connection left HTTP/1.1."""

    pass


class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel):
    """Internal annotation for an HTTP Upgrade protocol switch."""

    pass


class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel):
    """Internal annotation for a CONNECT protocol switch."""

    pass
+
+
# role -> {current state -> {event type (or (event type, annotation)) -> next state}}
EventTransitionType = Dict[
    Type[Sentinel],
    Dict[
        Type[Sentinel],
        Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]],
    ],
]

# Event-triggered transitions for the client and server machines. An event
# missing from the current state's entry is a protocol error (see
# _fire_event_triggered_transitions, which raises LocalProtocolError).
EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = {
    CLIENT: {
        IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED},
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        MIGHT_SWITCH_PROTOCOL: {},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
    SERVER: {
        IDLE: {
            ConnectionClosed: CLOSED,
            Response: SEND_BODY,
            # Special case: server sees client Request events, in this form
            (Request, CLIENT): SEND_RESPONSE,
        },
        SEND_RESPONSE: {
            InformationalResponse: SEND_RESPONSE,
            Response: SEND_BODY,
            (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL,
            (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL,
        },
        SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
        DONE: {ConnectionClosed: CLOSED},
        MUST_CLOSE: {ConnectionClosed: CLOSED},
        CLOSED: {ConnectionClosed: CLOSED},
        SWITCHED_PROTOCOL: {},
        ERROR: {},
    },
}
+
# (client state, server state) -> {role -> new state for that role}
StateTransitionType = Dict[
    Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]]
]

# NB: there are also some special-case state-triggered transitions hard-coded
# into _fire_state_triggered_transitions below.
STATE_TRIGGERED_TRANSITIONS: StateTransitionType = {
    # (Client state, Server state) -> new states
    # Protocol negotiation
    (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL},
    # Socket shutdown
    (CLOSED, DONE): {SERVER: MUST_CLOSE},
    (CLOSED, IDLE): {SERVER: MUST_CLOSE},
    (ERROR, DONE): {SERVER: MUST_CLOSE},
    (DONE, CLOSED): {CLIENT: MUST_CLOSE},
    (IDLE, CLOSED): {CLIENT: MUST_CLOSE},
    (DONE, ERROR): {CLIENT: MUST_CLOSE},
}
+
+
class ConnectionState:
    """Joint state for the client/server/keep-alive/switch machines.

    Holds the per-role states (``self.states``), the keep-alive flag, and the
    set of pending protocol-switch proposals, and applies both event-triggered
    and state-triggered transitions (see the module comment for the theory of
    operation).
    """

    def __init__(self) -> None:
        # Extra bits of state that don't quite fit into the state model.

        # If this is False then it enables the automatic DONE -> MUST_CLOSE
        # transition. Don't set this directly; call .keep_alive_disabled()
        self.keep_alive = True

        # This is a subset of {UPGRADE, CONNECT}, containing the proposals
        # made by the client for switching protocols.
        self.pending_switch_proposals: Set[Type[Sentinel]] = set()

        self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE}

    def process_error(self, role: Type[Sentinel]) -> None:
        """Move ``role`` to the terminal ERROR state, then re-run state rules."""
        self.states[role] = ERROR
        self._fire_state_triggered_transitions()

    def process_keep_alive_disabled(self) -> None:
        """Record that keep-alive is off (True -> False; no way back)."""
        self.keep_alive = False
        self._fire_state_triggered_transitions()

    def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None:
        """Record a client proposal (_SWITCH_UPGRADE / _SWITCH_CONNECT)."""
        self.pending_switch_proposals.add(switch_event)
        self._fire_state_triggered_transitions()

    def process_event(
        self,
        role: Type[Sentinel],
        event_type: Type[Event],
        server_switch_event: Optional[Type[Sentinel]] = None,
    ) -> None:
        """Apply an event to ``role``'s machine, then fire state-triggered rules.

        ``server_switch_event`` annotates a server response that accepts a
        pending protocol-switch proposal; it must match a proposal previously
        recorded via process_client_switch_proposal, else we raise
        LocalProtocolError.
        """
        _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type
        if server_switch_event is not None:
            assert role is SERVER
            if server_switch_event not in self.pending_switch_proposals:
                # Bug fix: name the actual switch type in the message -- the
                # old text hard-coded _SWITCH_UPGRADE even for CONNECT.
                raise LocalProtocolError(
                    "Received server {} event without a pending proposal".format(
                        server_switch_event
                    )
                )
            _event_type = (event_type, server_switch_event)
        if server_switch_event is None and _event_type is Response:
            # A plain Response implicitly rejects any pending switch proposals.
            self.pending_switch_proposals = set()
        self._fire_event_triggered_transitions(role, _event_type)
        # Special case: the server state does get to see Request
        # events.
        if _event_type is Request:
            assert role is CLIENT
            self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
        self._fire_state_triggered_transitions()

    def _fire_event_triggered_transitions(
        self,
        role: Type[Sentinel],
        event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]],
    ) -> None:
        # Look up the transition in the table; absence means this event is
        # illegal in the current state.
        state = self.states[role]
        try:
            new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type]
        except KeyError:
            event_type = cast(Type[Event], event_type)
            raise LocalProtocolError(
                "can't handle event type {} when role={} and state={}".format(
                    event_type.__name__, role, self.states[role]
                )
            ) from None
        self.states[role] = new_state

    def _fire_state_triggered_transitions(self) -> None:
        # We apply these rules repeatedly until converging on a fixed point
        while True:
            start_states = dict(self.states)

            # It could happen that both these special-case transitions are
            # enabled at the same time:
            #
            #    DONE -> MIGHT_SWITCH_PROTOCOL
            #    DONE -> MUST_CLOSE
            #
            # For example, this will always be true of a HTTP/1.0 client
            # requesting CONNECT.  If this happens, the protocol switch takes
            # priority. From there the client will either go to
            # SWITCHED_PROTOCOL, in which case it's none of our business when
            # they close the connection, or else the server will deny the
            # request, in which case the client will go back to DONE and then
            # from there to MUST_CLOSE.
            if self.pending_switch_proposals:
                if self.states[CLIENT] is DONE:
                    self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL

            if not self.pending_switch_proposals:
                if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL:
                    self.states[CLIENT] = DONE

            if not self.keep_alive:
                for role in (CLIENT, SERVER):
                    if self.states[role] is DONE:
                        self.states[role] = MUST_CLOSE

            # Tabular state-triggered transitions
            joint_state = (self.states[CLIENT], self.states[SERVER])
            changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {})
            self.states.update(changes)

            if self.states == start_states:
                # Fixed point reached
                return

    def start_next_cycle(self) -> None:
        """Reset DONE/DONE back to IDLE/IDLE for connection reuse."""
        if self.states != {CLIENT: DONE, SERVER: DONE}:
            raise LocalProtocolError(
                f"not in a reusable state. self.states={self.states}"
            )
        # Can't reach DONE/DONE with any of these active, but still, let's be
        # sure.
        assert self.keep_alive
        assert not self.pending_switch_proposals
        self.states = {CLIENT: IDLE, SERVER: IDLE}
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..6718445290770e028ea2f1f662026c9a0b0991db
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py
@@ -0,0 +1,135 @@
+from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
+
+__all__ = [
+ "ProtocolError",
+ "LocalProtocolError",
+ "RemoteProtocolError",
+ "validate",
+ "bytesify",
+]
+
+
class ProtocolError(Exception):
    """Exception indicating a violation of the HTTP/1.1 protocol.

    This is an abstract base class with two concrete subclasses:
    :exc:`LocalProtocolError`, raised when *you* tried to do something that
    HTTP/1.1 forbids, and :exc:`RemoteProtocolError`, raised when the remote
    peer did. See :ref:`error-handling` for details.

    Beyond the usual :exc:`Exception` behavior it carries one attribute:

    .. attribute:: error_status_hint

       A suggested HTTP status code to associate with this error, had it
       occurred while serving a request.

       For a :exc:`RemoteProtocolError` this suggests how a server might
       respond to a misbehaving peer; for a :exc:`LocalProtocolError` it
       suggests how your peer might have responded to *you* had h11 let you
       continue.

       Defaults to 400 Bad Request, the generic catch-all for protocol
       violations.
    """

    def __init__(self, msg: str, error_status_hint: int = 400) -> None:
        # Enforce abstractness: only subclasses may be instantiated.
        if type(self) is ProtocolError:
            raise TypeError("tried to directly instantiate ProtocolError")
        Exception.__init__(self, msg)
        self.error_status_hint = error_status_hint
+
+
+# Strategy: there are a number of public APIs where a LocalProtocolError can
+# be raised (send(), all the different event constructors, ...), and only one
+# public API where RemoteProtocolError can be raised
+# (receive_data()). Therefore we always raise LocalProtocolError internally,
+# and then receive_data will translate this into a RemoteProtocolError.
+#
+# Internally:
+# LocalProtocolError is the generic "ProtocolError".
+# Externally:
+# LocalProtocolError is for local errors and RemoteProtocolError is for
+# remote errors.
class LocalProtocolError(ProtocolError):
    def _reraise_as_remote_protocol_error(self) -> NoReturn:
        # Convert this in-flight LocalProtocolError into a RemoteProtocolError
        # and re-raise it. Must be called from inside an except: block.
        #
        # Mutating 'self' in place is the cheap way to get an equivalent
        # RemoteProtocolError:
        self.__class__ = RemoteProtocolError  # type: ignore
        # Re-raising takes care: Python tracks the exception type
        # (exc_info[0]) separately from the exception object (exc_info[1]),
        # and a bare 'raise' would re-raise under the old type, so we must
        # raise the object explicitly. On py3 the traceback lives on the
        # exception object, so the in-place edit above preserved it and this
        # is sufficient:
        raise self
+
+
class RemoteProtocolError(ProtocolError):
    """ProtocolError raised when the *remote* peer violated HTTP/1.1.

    Internally h11 always raises LocalProtocolError; receive_data() translates
    it into this class (see the strategy comment above).
    """

    pass
+
+
def validate(
    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
    """Fullmatch ``data`` against ``regex`` and return the named groups.

    On mismatch, raises LocalProtocolError with ``msg``; ``format_args``, if
    provided, are interpolated into ``msg`` via str.format.
    """
    m = regex.fullmatch(data)
    if m is None:
        raise LocalProtocolError(msg.format(*format_args) if format_args else msg)
    return m.groupdict()
+
+
+# Sentinel values
+#
+# - Inherit identity-based comparison and hashing from object
+# - Have a nice repr
+# - Have a *bonus property*: type(sentinel) is sentinel
+#
+# The bonus property is useful if you want to take the return value from
+# next_event() and do some sort of dispatch based on type(event).
+
_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")


class Sentinel(type):
    """Metaclass for sentinel values.

    Sentinels inherit identity-based comparison and hashing from object, get
    a nice repr, and have the bonus property that ``type(sentinel) is
    sentinel`` -- handy for dispatching on ``type(event)``.
    """

    def __new__(
        cls: Type[_T_Sentinel],
        name: str,
        bases: Tuple[type, ...],
        namespace: Dict[str, Any],
        **kwds: Any
    ) -> _T_Sentinel:
        # Sentinels must be declared as direct subclasses of Sentinel.
        assert bases == (Sentinel,)
        sentinel_cls = super().__new__(cls, name, bases, namespace, **kwds)
        # Bonus property: make the class its own type.
        sentinel_cls.__class__ = sentinel_cls  # type: ignore
        return sentinel_cls

    def __repr__(self) -> str:
        return self.__name__
+
+
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
    """Coerce an ascii-str or bytes-like object to plain ``bytes``.

    Used for methods, request targets, HTTP versions, header names, and
    header values. Ints are rejected explicitly: otherwise ``bytes(n)``
    would silently produce an n-byte zero-filled buffer.
    """
    if type(s) is bytes:
        # Fast-path: already exactly bytes.
        return s
    if isinstance(s, str):
        return s.encode("ascii")
    if isinstance(s, int):
        raise TypeError("expected bytes-like object, not int")
    return bytes(s)
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_version.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_version.py
new file mode 100644
index 0000000000000000000000000000000000000000..76e7327b8617c9d12236f511414d5eb58e98a44b
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_version.py
@@ -0,0 +1,16 @@
+# This file must be kept very simple, because it is consumed from several
+# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
+
+# We use a simple scheme:
+# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
+# where the +dev versions are never released into the wild, they're just what
+# we stick into the VCS in between releases.
+#
+# This is compatible with PEP 440:
+# http://legacy.python.org/dev/peps/pep-0440/
+# via the use of the "local suffix" "+dev", which is disallowed on index
+# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
+# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
+# 1.0.0.)
+
+__version__ = "0.16.0"
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_writers.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_writers.py
new file mode 100644
index 0000000000000000000000000000000000000000..939cdb912a9debaea07fbf3a9ac04549c44d077c
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_writers.py
@@ -0,0 +1,145 @@
# Code to write HTTP data
#
# Strategy: each writer takes an event + a write-some-bytes function, which it
# calls.
#
# WRITERS is a dict describing how to pick a writer. It maps states to either:
# - a writer
# - or, for body writers, a dict of framing-dependent writer factories
+
+from typing import Any, Callable, Dict, List, Tuple, Type, Union
+
+from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
+from ._headers import Headers
+from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
+from ._util import LocalProtocolError, Sentinel
+
+__all__ = ["WRITERS"]
+
+Writer = Callable[[bytes], Any]
+
+
def write_headers(headers: Headers, write: Writer) -> None:
    """Serialize a header block, Host first, ending with the blank line.

    "Since the Host field-value is critical information for handling a
    request, a user agent SHOULD generate Host as the first header field
    following the request-line." - RFC 7230
    """
    raw_items = headers._full_items
    # Two passes: first emit Host headers, then everything else.
    for emit_host in (True, False):
        for raw_name, name, value in raw_items:
            if (name == b"host") is emit_host:
                write(b"%s: %s\r\n" % (raw_name, value))
    write(b"\r\n")
+
+
def write_request(request: Request, write: Writer) -> None:
    """Serialize a Request event: request line (HTTP/1.1 only) plus headers."""
    if request.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    write(request.method + b" " + request.target + b" HTTP/1.1\r\n")
    write_headers(request.headers, write)
+
+
+# Shared between InformationalResponse and Response
# Shared between InformationalResponse and Response
def write_any_response(
    response: Union[InformationalResponse, Response], write: Writer
) -> None:
    """Serialize a (possibly informational) response: status line + headers.

    We don't bother sending ascii status messages like "OK"; they're optional
    and ignored by the protocol. (But the space after the numeric status code
    is mandatory.)
    """
    if response.http_version != b"1.1":
        raise LocalProtocolError("I only send HTTP/1.1")
    # XX FIXME: could at least make an effort to pull out the status message
    # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
    # (either by import or copy/paste). We already accept them as status codes
    # since they're of type IntEnum < int.
    status_bytes = str(response.status_code).encode("ascii")
    write(b"HTTP/1.1 " + status_bytes + b" " + response.reason + b"\r\n")
    write_headers(response.headers, write)
+
+
class BodyWriter:
    """Base class for message-body writers; dispatches body events to hooks."""

    def __call__(self, event: Event, write: Writer) -> None:
        # Route the event to the matching hook; anything else reaching a body
        # writer is a framing bug upstream of us.
        if type(event) is Data:
            self.send_data(event.data, write)
        elif type(event) is EndOfMessage:
            self.send_eom(event.headers, write)
        else:  # pragma: no cover
            assert False

    def send_data(self, data: bytes, write: Writer) -> None:
        # Hook: emit one chunk of body data (default: nothing).
        pass

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Hook: finish the body, optionally emitting trailing headers.
        pass
+
+
+#
+# These are all careful not to do anything to 'data' except call len(data) and
+# write(data). This allows us to transparently pass-through funny objects,
+# like placeholder objects referring to files on disk that will be sent via
+# sendfile(2).
+#
class ContentLengthWriter(BodyWriter):
    """Body writer for messages framed by a declared Content-Length."""

    def __init__(self, length: int) -> None:
        # Number of body bytes the caller still owes us.
        self._length = length

    def send_data(self, data: bytes, write: Writer) -> None:
        # Note: only len(data) and write(data) are applied, so placeholder
        # objects (e.g. sendfile stand-ins) pass through untouched.
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            raise LocalProtocolError("Content-Length and trailers don't mix")
+
+
class ChunkedWriter(BodyWriter):
    """Body writer using chunked transfer-encoding framing."""

    def send_data(self, data: bytes, write: Writer) -> None:
        # A zero-length chunk would read as the end-of-message marker, so
        # naively encoding empty data is not an option -- skip it instead.
        if len(data) == 0:
            return
        write(b"%x\r\n" % len(data))
        write(data)
        write(b"\r\n")

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # Terminal zero-length chunk, then trailers (write_headers also emits
        # the final blank line).
        write(b"0\r\n")
        write_headers(headers, write)
+
+
class Http10Writer(BodyWriter):
    """Body writer for HTTP/1.0-style framing: body runs until close."""

    def send_data(self, data: bytes, write: Writer) -> None:
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # No need to close the socket ourselves; the Connection: close
        # machinery takes care of that.
+
+
# (role, state) -> event writer, plus SEND_BODY -> framing name -> BodyWriter
# subclass.
WritersType = Dict[
    Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]],
    Union[
        Dict[str, Type[BodyWriter]],
        Callable[[Union[InformationalResponse, Response], Writer], None],
        Callable[[Request, Writer], None],
    ],
]

# Framing names are "chunked" / "content-length" / "http/1.0"; the chooser of
# these keys lives outside this module.
WRITERS: WritersType = {
    (CLIENT, IDLE): write_request,
    (SERVER, IDLE): write_any_response,
    (SERVER, SEND_RESPONSE): write_any_response,
    SEND_BODY: {
        "chunked": ChunkedWriter,
        "content-length": ContentLengthWriter,
        "http/1.0": Http10Writer,
    },
}
diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/py.typed b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..f5642f79f21d872f010979dcf6f0c4a415acc19d
--- /dev/null
+++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/py.typed
@@ -0,0 +1 @@
+Marker
diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna-3.11.dist-info/licenses/LICENSE.md b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna-3.11.dist-info/licenses/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..256ba90cd91190a6c980bd44663dc51c201c14d3
--- /dev/null
+++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna-3.11.dist-info/licenses/LICENSE.md
@@ -0,0 +1,31 @@
+BSD 3-Clause License
+
+Copyright (c) 2013-2025, Kim Davies and contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfdc030a751b089fc7e38fc88093b791605d501d
--- /dev/null
+++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py
@@ -0,0 +1,45 @@
+from .core import (
+ IDNABidiError,
+ IDNAError,
+ InvalidCodepoint,
+ InvalidCodepointContext,
+ alabel,
+ check_bidi,
+ check_hyphen_ok,
+ check_initial_combiner,
+ check_label,
+ check_nfc,
+ decode,
+ encode,
+ ulabel,
+ uts46_remap,
+ valid_contextj,
+ valid_contexto,
+ valid_label_length,
+ valid_string_length,
+)
+from .intranges import intranges_contain
+from .package_data import __version__
+
+__all__ = [
+ "__version__",
+ "IDNABidiError",
+ "IDNAError",
+ "InvalidCodepoint",
+ "InvalidCodepointContext",
+ "alabel",
+ "check_bidi",
+ "check_hyphen_ok",
+ "check_initial_combiner",
+ "check_label",
+ "check_nfc",
+ "decode",
+ "encode",
+ "intranges_contain",
+ "ulabel",
+ "uts46_remap",
+ "valid_contextj",
+ "valid_contexto",
+ "valid_label_length",
+ "valid_string_length",
+]
diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py
new file mode 100644
index 0000000000000000000000000000000000000000..cbc2e4ff4ec3e2318d47615bab44ea0ca3dba978
--- /dev/null
+++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py
@@ -0,0 +1,122 @@
+import codecs
+import re
+from typing import Any, Optional, Tuple
+
+from .core import IDNAError, alabel, decode, encode, ulabel
+
+_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
+
+
class Codec(codecs.Codec):
    """Stateless codec converting between Unicode domain names and IDNA bytes."""

    def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]:
        # Only strict error handling is supported by IDNA encoding.
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))
        if data:
            return encode(data), len(data)
        return b"", 0

    def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]:
        # Only strict error handling is supported by IDNA decoding.
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))
        if data:
            return decode(data), len(data)
        return "", 0
+
+
class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
    """Incremental IDNA encoder: buffers a possibly-unfinished final label."""

    def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
        # Returns (encoded bytes, number of input characters consumed).
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if not data:
            return b"", 0

        # Split on any of the four IDNA label separators.
        labels = _unicode_dots_re.split(data)
        trailing_dot = b""
        if labels:
            if not labels[-1]:
                trailing_dot = b"."
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = b"."

        result = []
        size = 0
        for label in labels:
            result.append(alabel(label))
            if size:
                size += 1  # count the separator dot consumed from the input
            size += len(label)

        # Join with U+002E
        result_bytes = b".".join(result) + trailing_dot
        size += len(trailing_dot)
        return result_bytes, size
+
+
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Incremental IDNA decoder: buffers a possibly-unfinished final label."""

    def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]:
        # Returns (decoded text, number of input units consumed).
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if not data:
            return ("", 0)

        # Accept bytes-like input; IDNA-encoded data is always ASCII.
        if not isinstance(data, str):
            data = str(data, "ascii")

        # Split on any of the four IDNA label separators.
        labels = _unicode_dots_re.split(data)
        trailing_dot = ""
        if labels:
            if not labels[-1]:
                trailing_dot = "."
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = "."

        result = []
        size = 0
        for label in labels:
            result.append(ulabel(label))
            if size:
                size += 1  # count the separator dot consumed from the input
            size += len(label)

        result_str = ".".join(result) + trailing_dot
        size += len(trailing_dot)
        return (result_str, size)
+
+
class StreamWriter(Codec, codecs.StreamWriter):
    # Stream wrapper combining the IDNA Codec with codecs.StreamWriter.
    pass


class StreamReader(Codec, codecs.StreamReader):
    # Stream wrapper combining the IDNA Codec with codecs.StreamReader.
    pass
+
+
def search_function(name: str) -> Optional[codecs.CodecInfo]:
    """codecs search hook: resolve the name "idna2008" to this codec."""
    if name == "idna2008":
        return codecs.CodecInfo(
            name=name,
            encode=Codec().encode,
            decode=Codec().decode,  # type: ignore
            incrementalencoder=IncrementalEncoder,
            incrementaldecoder=IncrementalDecoder,
            streamwriter=StreamWriter,
            streamreader=StreamReader,
        )
    return None


codecs.register(search_function)
diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/core.py b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/core.py
new file mode 100644
index 0000000000000000000000000000000000000000..8177bf7a324f9f54a29e41e867f5d56f2dd0a924
--- /dev/null
+++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/core.py
@@ -0,0 +1,437 @@
+import bisect
+import re
+import unicodedata
+from typing import Optional, Union
+
+from . import idnadata
+from .intranges import intranges_contain
+
+_virama_combining_class = 9
+_alabel_prefix = b"xn--"
+_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
+
+
+class IDNAError(UnicodeError):
+ """Base exception for all IDNA-encoding related problems"""
+
+ pass
+
+
+class IDNABidiError(IDNAError):
+ """Exception when bidirectional requirements are not satisfied"""
+
+ pass
+
+
+class InvalidCodepoint(IDNAError):
+ """Exception when a disallowed or unallocated codepoint is used"""
+
+ pass
+
+
+class InvalidCodepointContext(IDNAError):
+ """Exception when the codepoint is not valid in the context it is used"""
+
+ pass
+
+
+def _combining_class(cp: int) -> int:
+ v = unicodedata.combining(chr(cp))
+ if v == 0:
+ if not unicodedata.name(chr(cp)):
+ raise ValueError("Unknown character in unicodedata")
+ return v
+
+
+def _is_script(cp: str, script: str) -> bool:
+ return intranges_contain(ord(cp), idnadata.scripts[script])
+
+
+def _punycode(s: str) -> bytes:
+ return s.encode("punycode")
+
+
+def _unot(s: int) -> str:
+ return "U+{:04X}".format(s)
+
+
+def valid_label_length(label: Union[bytes, str]) -> bool:
+ if len(label) > 63:
+ return False
+ return True
+
+
+def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool:
+ if len(label) > (254 if trailing_dot else 253):
+ return False
+ return True
+
+
+def check_bidi(label: str, check_ltr: bool = False) -> bool:
+ # Bidi rules should only be applied if string contains RTL characters
+ bidi_label = False
+ for idx, cp in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+ if direction == "":
+ # String likely comes from a newer version of Unicode
+ raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx))
+ if direction in ["R", "AL", "AN"]:
+ bidi_label = True
+ if not bidi_label and not check_ltr:
+ return True
+
+ # Bidi rule 1
+ direction = unicodedata.bidirectional(label[0])
+ if direction in ["R", "AL"]:
+ rtl = True
+ elif direction == "L":
+ rtl = False
+ else:
+ raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label)))
+
+ valid_ending = False
+ number_type: Optional[str] = None
+ for idx, cp in enumerate(label, 1):
+ direction = unicodedata.bidirectional(cp)
+
+ if rtl:
+ # Bidi rule 2
+ if direction not in [
+ "R",
+ "AL",
+ "AN",
+ "EN",
+ "ES",
+ "CS",
+ "ET",
+ "ON",
+ "BN",
+ "NSM",
+ ]:
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx))
+ # Bidi rule 3
+ if direction in ["R", "AL", "EN", "AN"]:
+ valid_ending = True
+ elif direction != "NSM":
+ valid_ending = False
+ # Bidi rule 4
+ if direction in ["AN", "EN"]:
+ if not number_type:
+ number_type = direction
+ else:
+ if number_type != direction:
+ raise IDNABidiError("Can not mix numeral types in a right-to-left label")
+ else:
+ # Bidi rule 5
+ if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]:
+ raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx))
+ # Bidi rule 6
+ if direction in ["L", "EN"]:
+ valid_ending = True
+ elif direction != "NSM":
+ valid_ending = False
+
+ if not valid_ending:
+ raise IDNABidiError("Label ends with illegal codepoint directionality")
+
+ return True
+
+
+def check_initial_combiner(label: str) -> bool:
+ if unicodedata.category(label[0])[0] == "M":
+ raise IDNAError("Label begins with an illegal combining character")
+ return True
+
+
+def check_hyphen_ok(label: str) -> bool:
+ if label[2:4] == "--":
+ raise IDNAError("Label has disallowed hyphens in 3rd and 4th position")
+ if label[0] == "-" or label[-1] == "-":
+ raise IDNAError("Label must not start or end with a hyphen")
+ return True
+
+
+def check_nfc(label: str) -> None:
+ if unicodedata.normalize("NFC", label) != label:
+ raise IDNAError("Label must be in Normalization Form C")
+
+
+def valid_contextj(label: str, pos: int) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x200C:
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+
+ ok = False
+ for i in range(pos - 1, -1, -1):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord("T"):
+ continue
+ elif joining_type in [ord("L"), ord("D")]:
+ ok = True
+ break
+ else:
+ break
+
+ if not ok:
+ return False
+
+ ok = False
+ for i in range(pos + 1, len(label)):
+ joining_type = idnadata.joining_types.get(ord(label[i]))
+ if joining_type == ord("T"):
+ continue
+ elif joining_type in [ord("R"), ord("D")]:
+ ok = True
+ break
+ else:
+ break
+ return ok
+
+ if cp_value == 0x200D:
+ if pos > 0:
+ if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
+ return True
+ return False
+
+ else:
+ return False
+
+
+def valid_contexto(label: str, pos: int, exception: bool = False) -> bool:
+ cp_value = ord(label[pos])
+
+ if cp_value == 0x00B7:
+ if 0 < pos < len(label) - 1:
+ if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C:
+ return True
+ return False
+
+ elif cp_value == 0x0375:
+ if pos < len(label) - 1 and len(label) > 1:
+ return _is_script(label[pos + 1], "Greek")
+ return False
+
+ elif cp_value == 0x05F3 or cp_value == 0x05F4:
+ if pos > 0:
+ return _is_script(label[pos - 1], "Hebrew")
+ return False
+
+ elif cp_value == 0x30FB:
+ for cp in label:
+ if cp == "\u30fb":
+ continue
+ if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"):
+ return True
+ return False
+
+ elif 0x660 <= cp_value <= 0x669:
+ for cp in label:
+ if 0x6F0 <= ord(cp) <= 0x06F9:
+ return False
+ return True
+
+ elif 0x6F0 <= cp_value <= 0x6F9:
+ for cp in label:
+ if 0x660 <= ord(cp) <= 0x0669:
+ return False
+ return True
+
+ return False
+
+
+def check_label(label: Union[str, bytes, bytearray]) -> None:
+ if isinstance(label, (bytes, bytearray)):
+ label = label.decode("utf-8")
+ if len(label) == 0:
+ raise IDNAError("Empty Label")
+
+ check_nfc(label)
+ check_hyphen_ok(label)
+ check_initial_combiner(label)
+
+ for pos, cp in enumerate(label):
+ cp_value = ord(cp)
+ if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]):
+ continue
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]):
+ try:
+ if not valid_contextj(label, pos):
+ raise InvalidCodepointContext(
+ "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
+ )
+ except ValueError:
+ raise IDNAError(
+ "Unknown codepoint adjacent to joiner {} at position {} in {}".format(
+ _unot(cp_value), pos + 1, repr(label)
+ )
+ )
+ elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]):
+ if not valid_contexto(label, pos):
+ raise InvalidCodepointContext(
+ "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label))
+ )
+ else:
+ raise InvalidCodepoint(
+ "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label))
+ )
+
+ check_bidi(label)
+
+
+def alabel(label: str) -> bytes:
+ try:
+ label_bytes = label.encode("ascii")
+ ulabel(label_bytes)
+ if not valid_label_length(label_bytes):
+ raise IDNAError("Label too long")
+ return label_bytes
+ except UnicodeEncodeError:
+ pass
+
+ check_label(label)
+ label_bytes = _alabel_prefix + _punycode(label)
+
+ if not valid_label_length(label_bytes):
+ raise IDNAError("Label too long")
+
+ return label_bytes
+
+
+def ulabel(label: Union[str, bytes, bytearray]) -> str:
+ if not isinstance(label, (bytes, bytearray)):
+ try:
+ label_bytes = label.encode("ascii")
+ except UnicodeEncodeError:
+ check_label(label)
+ return label
+ else:
+ label_bytes = bytes(label)
+
+ label_bytes = label_bytes.lower()
+ if label_bytes.startswith(_alabel_prefix):
+ label_bytes = label_bytes[len(_alabel_prefix) :]
+ if not label_bytes:
+ raise IDNAError("Malformed A-label, no Punycode eligible content found")
+ if label_bytes.decode("ascii")[-1] == "-":
+ raise IDNAError("A-label must not end with a hyphen")
+ else:
+ check_label(label_bytes)
+ return label_bytes.decode("ascii")
+
+ try:
+ label = label_bytes.decode("punycode")
+ except UnicodeError:
+ raise IDNAError("Invalid A-label")
+ check_label(label)
+ return label
+
+
+def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str:
+ """Re-map the characters in the string according to UTS46 processing."""
+ from .uts46data import uts46data
+
+ output = ""
+
+ for pos, char in enumerate(domain):
+ code_point = ord(char)
+ try:
+ uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
+ status = uts46row[1]
+ replacement: Optional[str] = None
+ if len(uts46row) == 3:
+ replacement = uts46row[2]
+ if (
+ status == "V"
+ or (status == "D" and not transitional)
+ or (status == "3" and not std3_rules and replacement is None)
+ ):
+ output += char
+ elif replacement is not None and (
+ status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional)
+ ):
+ output += replacement
+ elif status != "I":
+ raise IndexError()
+ except IndexError:
+ raise InvalidCodepoint(
+ "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain))
+ )
+
+ return unicodedata.normalize("NFC", output)
+
+
+def encode(
+ s: Union[str, bytes, bytearray],
+ strict: bool = False,
+ uts46: bool = False,
+ std3_rules: bool = False,
+ transitional: bool = False,
+) -> bytes:
+ if not isinstance(s, str):
+ try:
+ s = str(s, "ascii")
+ except UnicodeDecodeError:
+ raise IDNAError("should pass a unicode string to the function rather than a byte string.")
+ if uts46:
+ s = uts46_remap(s, std3_rules, transitional)
+ trailing_dot = False
+ result = []
+ if strict:
+ labels = s.split(".")
+ else:
+ labels = _unicode_dots_re.split(s)
+ if not labels or labels == [""]:
+ raise IDNAError("Empty domain")
+ if labels[-1] == "":
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = alabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError("Empty label")
+ if trailing_dot:
+ result.append(b"")
+ s = b".".join(result)
+ if not valid_string_length(s, trailing_dot):
+ raise IDNAError("Domain too long")
+ return s
+
+
+def decode(
+ s: Union[str, bytes, bytearray],
+ strict: bool = False,
+ uts46: bool = False,
+ std3_rules: bool = False,
+) -> str:
+ try:
+ if not isinstance(s, str):
+ s = str(s, "ascii")
+ except UnicodeDecodeError:
+ raise IDNAError("Invalid ASCII in A-label")
+ if uts46:
+ s = uts46_remap(s, std3_rules, False)
+ trailing_dot = False
+ result = []
+ if not strict:
+ labels = _unicode_dots_re.split(s)
+ else:
+ labels = s.split(".")
+ if not labels or labels == [""]:
+ raise IDNAError("Empty domain")
+ if not labels[-1]:
+ del labels[-1]
+ trailing_dot = True
+ for label in labels:
+ s = ulabel(label)
+ if s:
+ result.append(s)
+ else:
+ raise IDNAError("Empty label")
+ if trailing_dot:
+ result.append("")
+ return ".".join(result)
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..b9077766e9b9bdcae49ea5c8fced750ed13ec8f7
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2018, Tzu-ping Chung
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..52118f1e5c83bd7ef39196a749651fc87d176812
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA
@@ -0,0 +1,106 @@
+Metadata-Version: 2.1
+Name: shellingham
+Version: 1.5.4
+Summary: Tool to Detect Surrounding Shell
+Home-page: https://github.com/sarugaku/shellingham
+Author: Tzu-ping Chung
+Author-email: uranusjr@gmail.com
+License: ISC License
+Keywords: shell
+Classifier: Development Status :: 3 - Alpha
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: ISC License (ISCL)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+=============================================
+Shellingham: Tool to Detect Surrounding Shell
+=============================================
+
+.. image:: https://img.shields.io/pypi/v/shellingham.svg
+ :target: https://pypi.org/project/shellingham/
+
+Shellingham detects what shell the current Python executable is running in.
+
+
+Usage
+=====
+
+.. code-block:: python
+
+ >>> import shellingham
+ >>> shellingham.detect_shell()
+ ('bash', '/bin/bash')
+
+``detect_shell`` pokes around the process's running environment to determine
+what shell it is run in. It returns a 2-tuple:
+
+* The shell name, always lowercased.
+* The command used to run the shell.
+
+``ShellDetectionFailure`` is raised if ``detect_shell`` fails to detect the
+surrounding shell.
+
+
+Notes
+=====
+
+* The shell name is always lowercased.
+* On Windows, the shell name is the name of the executable, minus the file
+ extension.
+
+
+Notes for Application Developers
+================================
+
+Remember, your application's user is not necessarily using a shell.
+Shellingham raises ``ShellDetectionFailure`` if there is no shell to detect,
+but *your application should almost never do this to your user*.
+
+A practical approach to this is to wrap ``detect_shell`` in a try block, and
+provide a sane default on failure
+
+.. code-block:: python
+
+ try:
+ shell = shellingham.detect_shell()
+ except shellingham.ShellDetectionFailure:
+ shell = provide_default()
+
+
+There are a few choices for you to choose from.
+
+* The POSIX standard mandates the environment variable ``SHELL`` to refer to
+ "the user's preferred command language interpreter". This is always available
+ (even if the user is not in an interactive session), and likely the correct
+ choice to launch an interactive sub-shell with.
+* A command ``sh`` is almost guaranteed to exist, likely at ``/bin/sh``, since
+ several POSIX tools rely on it. This should be suitable if you want to run a
+ (possibly non-interactive) script.
+* All versions of DOS and Windows have an environment variable ``COMSPEC``.
+ This can always be used to launch a usable command prompt (e.g. `cmd.exe` on
+ Windows).
+
+Here's a simple implementation to provide a default shell
+
+.. code-block:: python
+
+ import os
+
+ def provide_default():
+ if os.name == 'posix':
+ return os.environ['SHELL']
+ elif os.name == 'nt':
+ return os.environ['COMSPEC']
+ raise NotImplementedError(f'OS {os.name!r} support not available')
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..b7d5ac479bf032cd37b21d0856f881d97296c2fa
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD
@@ -0,0 +1,13 @@
+shellingham/__init__.py,sha256=pAKXUPKUdwyErC0ZjS-5w-fRdSbmdcfvnpt_x1yWqtA,635
+shellingham/_core.py,sha256=v-CTr_7F7cJAtNnzpa1N_Hl8afkY5yiDA4joGmsUBu0,300
+shellingham/nt.py,sha256=m6J6SuwyqVVlxXT9Bc-9F_1x-T5u0gCFFrRAF2LIkeg,4516
+shellingham/posix/__init__.py,sha256=pB69qtvZJ_yIf48nl4-ZfS3wLwwuXuknXOZhBnC2T1o,3129
+shellingham/posix/_core.py,sha256=_v18UaXbzr4muNhr3-mH1FdSdjZ_dOXQrtUyomIbKYQ,81
+shellingham/posix/proc.py,sha256=nSUxIuQSotvaDW76i0oTQAM9aZ9PXBLFAEktWljSKCo,2659
+shellingham/posix/ps.py,sha256=NGmDKCukhNp0lahwYCaMXphBYaVbhbiR9BtE0OkT8qU,1770
+shellingham-1.5.4.dist-info/LICENSE,sha256=84j9OMrRMRLB3A9mm76A5_hFQe26-3LzAw0sp2QsPJ0,751
+shellingham-1.5.4.dist-info/METADATA,sha256=GD2AIgo3STJieVc53TV8xbs_Sb05DMkZjVGA5UUaB_o,3461
+shellingham-1.5.4.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110
+shellingham-1.5.4.dist-info/top_level.txt,sha256=uKMQL5AKxPi4O9_Rbd838QeEs4ImpGQKNbEDZYqgBgk,12
+shellingham-1.5.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+shellingham-1.5.4.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..c34f1162ef9a50c355df1261ef6194ffc1b39975
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d4e44ce0299bb38463f8491ec8850910235c2709
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+shellingham
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..15f7a90cbd02e5c2cc933cf6aa0374cca68035f1
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py
@@ -0,0 +1,23 @@
+import importlib
+import os
+
+from ._core import ShellDetectionFailure
+
+__version__ = "1.5.4"
+
+
+def detect_shell(pid=None, max_depth=10):
+ name = os.name
+ try:
+ impl = importlib.import_module(".{}".format(name), __name__)
+ except ImportError:
+ message = "Shell detection not implemented for {0!r}".format(name)
+ raise RuntimeError(message)
+ try:
+ get_shell = impl.get_shell
+ except AttributeError:
+ raise RuntimeError("get_shell not implemented for {0!r}".format(name))
+ shell = get_shell(pid, max_depth=max_depth)
+ if shell:
+ return shell
+ raise ShellDetectionFailure()
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..13b65417c733b54e48b120e37f573c2baa6ef72b
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py
@@ -0,0 +1,11 @@
+SHELL_NAMES = (
+ {"sh", "bash", "dash", "ash"} # Bourne.
+ | {"csh", "tcsh"} # C.
+ | {"ksh", "zsh", "fish"} # Common alternatives.
+ | {"cmd", "powershell", "pwsh"} # Microsoft.
+ | {"elvish", "xonsh", "nu"} # More exotic.
+)
+
+
+class ShellDetectionFailure(EnvironmentError):
+ pass
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py
new file mode 100644
index 0000000000000000000000000000000000000000..389551b223a761fa2f97e929b60bf3ca5baed94c
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py
@@ -0,0 +1,163 @@
+import contextlib
+import ctypes
+import os
+
+from ctypes.wintypes import (
+ BOOL,
+ CHAR,
+ DWORD,
+ HANDLE,
+ LONG,
+ LPWSTR,
+ MAX_PATH,
+ PDWORD,
+ ULONG,
+)
+
+from shellingham._core import SHELL_NAMES
+
+
+INVALID_HANDLE_VALUE = HANDLE(-1).value
+ERROR_NO_MORE_FILES = 18
+ERROR_INSUFFICIENT_BUFFER = 122
+TH32CS_SNAPPROCESS = 2
+PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
+
+
+kernel32 = ctypes.windll.kernel32
+
+
+def _check_handle(error_val=0):
+ def check(ret, func, args):
+ if ret == error_val:
+ raise ctypes.WinError()
+ return ret
+
+ return check
+
+
+def _check_expected(expected):
+ def check(ret, func, args):
+ if ret:
+ return True
+ code = ctypes.GetLastError()
+ if code == expected:
+ return False
+ raise ctypes.WinError(code)
+
+ return check
+
+
+class ProcessEntry32(ctypes.Structure):
+ _fields_ = (
+ ("dwSize", DWORD),
+ ("cntUsage", DWORD),
+ ("th32ProcessID", DWORD),
+ ("th32DefaultHeapID", ctypes.POINTER(ULONG)),
+ ("th32ModuleID", DWORD),
+ ("cntThreads", DWORD),
+ ("th32ParentProcessID", DWORD),
+ ("pcPriClassBase", LONG),
+ ("dwFlags", DWORD),
+ ("szExeFile", CHAR * MAX_PATH),
+ )
+
+
+kernel32.CloseHandle.argtypes = [HANDLE]
+kernel32.CloseHandle.restype = BOOL
+
+kernel32.CreateToolhelp32Snapshot.argtypes = [DWORD, DWORD]
+kernel32.CreateToolhelp32Snapshot.restype = HANDLE
+kernel32.CreateToolhelp32Snapshot.errcheck = _check_handle( # type: ignore
+ INVALID_HANDLE_VALUE,
+)
+
+kernel32.Process32First.argtypes = [HANDLE, ctypes.POINTER(ProcessEntry32)]
+kernel32.Process32First.restype = BOOL
+kernel32.Process32First.errcheck = _check_expected( # type: ignore
+ ERROR_NO_MORE_FILES,
+)
+
+kernel32.Process32Next.argtypes = [HANDLE, ctypes.POINTER(ProcessEntry32)]
+kernel32.Process32Next.restype = BOOL
+kernel32.Process32Next.errcheck = _check_expected( # type: ignore
+ ERROR_NO_MORE_FILES,
+)
+
+kernel32.GetCurrentProcessId.argtypes = []
+kernel32.GetCurrentProcessId.restype = DWORD
+
+kernel32.OpenProcess.argtypes = [DWORD, BOOL, DWORD]
+kernel32.OpenProcess.restype = HANDLE
+kernel32.OpenProcess.errcheck = _check_handle( # type: ignore
+ INVALID_HANDLE_VALUE,
+)
+
+kernel32.QueryFullProcessImageNameW.argtypes = [HANDLE, DWORD, LPWSTR, PDWORD]
+kernel32.QueryFullProcessImageNameW.restype = BOOL
+kernel32.QueryFullProcessImageNameW.errcheck = _check_expected( # type: ignore
+ ERROR_INSUFFICIENT_BUFFER,
+)
+
+
+@contextlib.contextmanager
+def _handle(f, *args, **kwargs):
+ handle = f(*args, **kwargs)
+ try:
+ yield handle
+ finally:
+ kernel32.CloseHandle(handle)
+
+
+def _iter_processes():
+ f = kernel32.CreateToolhelp32Snapshot
+ with _handle(f, TH32CS_SNAPPROCESS, 0) as snap:
+ entry = ProcessEntry32()
+ entry.dwSize = ctypes.sizeof(entry)
+ ret = kernel32.Process32First(snap, entry)
+ while ret:
+ yield entry
+ ret = kernel32.Process32Next(snap, entry)
+
+
+def _get_full_path(proch):
+ size = DWORD(MAX_PATH)
+ while True:
+ path_buff = ctypes.create_unicode_buffer("", size.value)
+ if kernel32.QueryFullProcessImageNameW(proch, 0, path_buff, size):
+ return path_buff.value
+ size.value *= 2
+
+
+def get_shell(pid=None, max_depth=10):
+ proc_map = {
+ proc.th32ProcessID: (proc.th32ParentProcessID, proc.szExeFile)
+ for proc in _iter_processes()
+ }
+ pid = pid or os.getpid()
+
+ for _ in range(0, max_depth + 1):
+ try:
+ ppid, executable = proc_map[pid]
+ except KeyError: # No such process? Give up.
+ break
+
+ # The executable name would be encoded with the current code page if
+ # we're in ANSI mode (usually). Try to decode it into str/unicode,
+ # replacing invalid characters to be safe (not thoeratically necessary,
+ # I think). Note that we need to use 'mbcs' instead of encoding
+ # settings from sys because this is from the Windows API, not Python
+ # internals (which those settings reflect). (pypa/pipenv#3382)
+ if isinstance(executable, bytes):
+ executable = executable.decode("mbcs", "replace")
+
+ name = executable.rpartition(".")[0].lower()
+ if name not in SHELL_NAMES:
+ pid = ppid
+ continue
+
+ key = PROCESS_QUERY_LIMITED_INFORMATION
+ with _handle(kernel32.OpenProcess, key, 0, pid) as proch:
+ return (name, _get_full_path(proch))
+
+ return None
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bd2070db27189e62a1867e4de49f16f8c8841ff
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py
@@ -0,0 +1,112 @@
+import os
+import re
+
+from .._core import SHELL_NAMES, ShellDetectionFailure
+from . import proc, ps
+
+# Based on QEMU docs: https://www.qemu.org/docs/master/user/main.html
+QEMU_BIN_REGEX = re.compile(
+ r"""qemu-
+ (alpha
+ |armeb
+ |arm
+ |m68k
+ |cris
+ |i386
+ |x86_64
+ |microblaze
+ |mips
+ |mipsel
+ |mips64
+ |mips64el
+ |mipsn32
+ |mipsn32el
+ |nios2
+ |ppc64
+ |ppc
+ |sh4eb
+ |sh4
+ |sparc
+ |sparc32plus
+ |sparc64
+ )""",
+ re.VERBOSE,
+)
+
+
+def _iter_process_parents(pid, max_depth=10):
+ """Select a way to obtain process information from the system.
+
+ * `/proc` is used if supported.
+ * The system `ps` utility is used as a fallback option.
+ """
+ for impl in (proc, ps):
+ try:
+ iterator = impl.iter_process_parents(pid, max_depth)
+ except EnvironmentError:
+ continue
+ return iterator
+ raise ShellDetectionFailure("compatible proc fs or ps utility is required")
+
+
+def _get_login_shell(proc_cmd):
+ """Form shell information from SHELL environ if possible."""
+ login_shell = os.environ.get("SHELL", "")
+ if login_shell:
+ proc_cmd = login_shell
+ else:
+ proc_cmd = proc_cmd[1:]
+ return (os.path.basename(proc_cmd).lower(), proc_cmd)
+
+
+_INTERPRETER_SHELL_NAMES = [
+ (re.compile(r"^python(\d+(\.\d+)?)?$"), {"xonsh"}),
+]
+
+
+def _get_interpreter_shell(proc_name, proc_args):
+ """Get shell invoked via an interpreter.
+
+ Some shells are implemented on, and invoked with an interpreter, e.g. xonsh
+ is commonly executed with an executable Python script. This detects what
+ script the interpreter is actually running, and check whether that looks
+ like a shell.
+
+ See sarugaku/shellingham#26 for rational.
+ """
+ for pattern, shell_names in _INTERPRETER_SHELL_NAMES:
+ if not pattern.match(proc_name):
+ continue
+ for arg in proc_args:
+ name = os.path.basename(arg).lower()
+ if os.path.isfile(arg) and name in shell_names:
+ return (name, arg)
+ return None
+
+
+def _get_shell(cmd, *args):
+ if cmd.startswith("-"): # Login shell! Let's use this.
+ return _get_login_shell(cmd)
+ name = os.path.basename(cmd).lower()
+ if name == "rosetta" or QEMU_BIN_REGEX.fullmatch(name):
+ # If the current process is Rosetta or QEMU, this likely is a
+ # containerized process. Parse out the actual command instead.
+ cmd = args[0]
+ args = args[1:]
+ name = os.path.basename(cmd).lower()
+ if name in SHELL_NAMES: # Command looks like a shell.
+ return (name, cmd)
+ shell = _get_interpreter_shell(name, args)
+ if shell:
+ return shell
+ return None
+
+
+def get_shell(pid=None, max_depth=10):
+ """Get the shell that the supplied pid or os.getpid() is running in."""
+ pid = str(pid or os.getpid())
+ for proc_args, _, _ in _iter_process_parents(pid, max_depth):
+ shell = _get_shell(*proc_args)
+ if shell:
+ return shell
+ return None
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..adc49e6e7a9d3edf062c55e0078136899f78d30d
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py
@@ -0,0 +1,3 @@
+import collections
+
+Process = collections.namedtuple("Process", "args pid ppid")
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py
new file mode 100644
index 0000000000000000000000000000000000000000..950f63228e5b328f82b70da8851ec60c6a2ff029
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py
@@ -0,0 +1,83 @@
+import io
+import os
+import re
+import sys
+
+from ._core import Process
+
+# FreeBSD: https://www.freebsd.org/cgi/man.cgi?query=procfs
+# NetBSD: https://man.netbsd.org/NetBSD-9.3-STABLE/mount_procfs.8
+# DragonFlyBSD: https://www.dragonflybsd.org/cgi/web-man?command=procfs
+BSD_STAT_PPID = 2
+
+# See https://docs.kernel.org/filesystems/proc.html
+LINUX_STAT_PPID = 3
+
+STAT_PATTERN = re.compile(r"\(.+\)|\S+")
+
+
+def detect_proc():
+ """Detect /proc filesystem style.
+
+ This checks the /proc/{pid} directory for possible formats. Returns one of
+ the following as str:
+
+ * `stat`: Linux-style, i.e. ``/proc/{pid}/stat``.
+ * `status`: BSD-style, i.e. ``/proc/{pid}/status``.
+ """
+ pid = os.getpid()
+ for name in ("stat", "status"):
+ if os.path.exists(os.path.join("/proc", str(pid), name)):
+ return name
+ raise ProcFormatError("unsupported proc format")
+
+
+def _use_bsd_stat_format():
+ try:
+ return os.uname().sysname.lower() in ("freebsd", "netbsd", "dragonfly")
+ except Exception:
+ return False
+
+
+def _get_ppid(pid, name):
+ path = os.path.join("/proc", str(pid), name)
+ with io.open(path, encoding="ascii", errors="replace") as f:
+ parts = STAT_PATTERN.findall(f.read())
+ # We only care about TTY and PPID -- both are numbers.
+ if _use_bsd_stat_format():
+ return parts[BSD_STAT_PPID]
+ return parts[LINUX_STAT_PPID]
+
+
+def _get_cmdline(pid):
+ path = os.path.join("/proc", str(pid), "cmdline")
+ encoding = sys.getfilesystemencoding() or "utf-8"
+ with io.open(path, encoding=encoding, errors="replace") as f:
+ # XXX: Command line arguments can be arbitrary byte sequences, not
+ # necessarily decodable. For Shellingham's purpose, however, we don't
+ # care. (pypa/pipenv#2820)
+ # cmdline appends an extra NULL at the end, hence the [:-1].
+ return tuple(f.read().split("\0")[:-1])
+
+
+class ProcFormatError(EnvironmentError):
+ pass
+
+
+def iter_process_parents(pid, max_depth=10):
+ """Try to look up the process tree via the /proc interface."""
+ stat_name = detect_proc()
+
+ # Inner generator function so we correctly throw an error eagerly if proc
+ # is not supported, rather than on the first call to the iterator. This
+ # allows the call site detects the correct implementation.
+ def _iter_process_parents(pid, max_depth):
+ for _ in range(max_depth):
+ ppid = _get_ppid(pid, stat_name)
+ args = _get_cmdline(pid)
+ yield Process(args=args, pid=pid, ppid=ppid)
+ if ppid == "0":
+ break
+ pid = ppid
+
+ return _iter_process_parents(pid, max_depth)
diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bc39a74a56390c263e63bfead028f6bce4df3cb
--- /dev/null
+++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py
@@ -0,0 +1,51 @@
+import errno
+import subprocess
+import sys
+
+from ._core import Process
+
+
+class PsNotAvailable(EnvironmentError):
+ pass
+
+
+def iter_process_parents(pid, max_depth=10):
+ """Try to look up the process tree via the output of `ps`."""
+ try:
+ cmd = ["ps", "-ww", "-o", "pid=", "-o", "ppid=", "-o", "args="]
+ output = subprocess.check_output(cmd)
+ except OSError as e: # Python 2-compatible FileNotFoundError.
+ if e.errno != errno.ENOENT:
+ raise
+ raise PsNotAvailable("ps not found")
+ except subprocess.CalledProcessError as e:
+ # `ps` can return 1 if the process list is completely empty.
+ # (sarugaku/shellingham#15)
+ if not e.output.strip():
+ return
+ raise
+ if not isinstance(output, str):
+ encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
+ output = output.decode(encoding)
+
+ processes_mapping = {}
+ for line in output.split("\n"):
+ try:
+ _pid, ppid, args = line.strip().split(None, 2)
+ # XXX: This is not right, but we are really out of options.
+ # ps does not offer a sane way to decode the argument display,
+ # and this is "Good Enough" for obtaining shell names. Hopefully
+ # people don't name their shell with a space, or have something
+ # like "/usr/bin/xonsh is uber". (sarugaku/shellingham#14)
+ args = tuple(a.strip() for a in args.split(" "))
+ except ValueError:
+ continue
+ processes_mapping[_pid] = Process(args=args, pid=_pid, ppid=ppid)
+
+ for _ in range(max_depth):
+ try:
+ process = processes_mapping[pid]
+ except KeyError:
+ return
+ yield process
+ pid = process.ppid
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..3200e601f970271fdde3fcc74f9af4423655a79d
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA
@@ -0,0 +1,107 @@
+Metadata-Version: 2.4
+Name: packaging
+Version: 26.0
+Summary: Core utilities for Python packages
+Author-email: Donald Stufft
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0 OR BSD-2-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+License-File: LICENSE
+License-File: LICENSE.APACHE
+License-File: LICENSE.BSD
+Project-URL: Documentation, https://packaging.pypa.io/
+Project-URL: Source, https://github.com/pypa/packaging
+
+packaging
+=========
+
+.. start-intro
+
+Reusable core utilities for various Python Packaging
+`interoperability specifications `_.
+
+This library provides utilities that implement the interoperability
+specifications which have clearly one correct behaviour (eg: :pep:`440`)
+or benefit greatly from having a single shared implementation (eg: :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, metadata, lockfiles, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Metadata
+- Lockfiles
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+ pip install packaging
+
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the packaging project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+Contributing
+------------
+
+The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
+well as how to report a potential security issue. The documentation for this
+project also covers information about `project development`_ and `security`_.
+
+.. _`project development`: https://packaging.pypa.io/en/latest/development/
+.. _`security`: https://packaging.pypa.io/en/latest/security/
+
+Project History
+---------------
+
+Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
+recent changes and project history.
+
+.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
+
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..7ac212dd2e350a114166bbe59643b3a37d7e1069
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD
@@ -0,0 +1,24 @@
+packaging/__init__.py,sha256=y4lVbpeBzCGk-IPDw5BGBZ_b0P3ukEEJZAbGYc6Ey8c,494
+packaging/_elffile.py,sha256=-sKkptYqzYw2-x3QByJa5mB4rfPWu1pxkZHRx1WAFCY,3211
+packaging/_manylinux.py,sha256=Hf6nB0cOrayEs96-p3oIXAgGnFquv20DO5l-o2_Xnv0,9559
+packaging/_musllinux.py,sha256=Z6swjH3MA7XS3qXnmMN7QPhqP3fnoYI0eQ18e9-HgAE,2707
+packaging/_parser.py,sha256=U_DajsEx2VoC_F46fSVV3hDKNCWoQYkPkasO3dld0ig,10518
+packaging/_structures.py,sha256=Hn49Ta8zV9Wo8GiCL8Nl2ARZY983Un3pruZGVNldPwE,1514
+packaging/_tokenizer.py,sha256=M8EwNIdXeL9NMFuFrQtiOKwjka_xFx8KjRQnfE8O_z8,5421
+packaging/markers.py,sha256=ZX-cLvW1S3cZcEc0fHI4z7zSx5U2T19yMpDP_mE-CYw,12771
+packaging/metadata.py,sha256=CWVZpN_HfoYMSSDuCP7igOvGgqA9AOmpW8f3qTisfnc,39360
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/pylock.py,sha256=-R1uNfJ4PaLto7Mg62YsGOHgvskuiIEqPwxOywl42Jk,22537
+packaging/requirements.py,sha256=PMCAWD8aNMnVD-6uZMedhBuAVX2573eZ4yPBLXmz04I,2870
+packaging/specifiers.py,sha256=EPNPimY_zFivthv1vdjZYz5IqkKGsnKR2yKh-EVyvZw,40797
+packaging/tags.py,sha256=cXLV1pJD3UtJlDg7Wz3zrfdQhRZqr8jumSAKKAAd2xE,22856
+packaging/utils.py,sha256=N4c6oZzFJy6klTZ3AnkNz7sSkJesuFWPp68LA3B5dAo,5040
+packaging/version.py,sha256=7XWlL2IDYLwDYC0ht6cFEhapLwLWbmyo4rb7sEFj0x8,23272
+packaging/licenses/__init__.py,sha256=TwXLHZCXwSgdFwRLPxW602T6mSieunSFHM6fp8pgW78,5819
+packaging/licenses/_spdx.py,sha256=WW7DXiyg68up_YND_wpRYlr1SHhiV4FfJLQffghhMxQ,51122
+packaging-26.0.dist-info/licenses/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-26.0.dist-info/licenses/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-26.0.dist-info/licenses/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-26.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+packaging-26.0.dist-info/METADATA,sha256=M2K7fWom2iliuo2qpHhc0LrKwhq6kIoRlcyPWVgKJlo,3309
+packaging-26.0.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
+under the terms of *both* these licenses.
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE
new file mode 100644
index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD
new file mode 100644
index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD
@@ -0,0 +1,23 @@
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..21695a74b5107c96ba4bb2cbca6b7f259dacd330
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "26.0"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = f"2014 {__author__}"
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py
new file mode 100644
index 0000000000000000000000000000000000000000..497b0645217512ae2ba8ff61341fd2bbfa3648cd
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py
@@ -0,0 +1,108 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+from __future__ import annotations
+
+import enum
+import os
+import struct
+from typing import IO
+
+
+class ELFInvalid(ValueError):
+ pass
+
+
+class EIClass(enum.IntEnum):
+ C32 = 1
+ C64 = 2
+
+
+class EIData(enum.IntEnum):
+ Lsb = 1
+ Msb = 2
+
+
+class EMachine(enum.IntEnum):
+ I386 = 3
+ S390 = 22
+ Arm = 40
+ X8664 = 62
+ AArc64 = 183
+
+
+class ELFFile:
+ """
+ Representation of an ELF executable.
+ """
+
+ def __init__(self, f: IO[bytes]) -> None:
+ self._f = f
+
+ try:
+ ident = self._read("16B")
+ except struct.error as e:
+ raise ELFInvalid("unable to parse identification") from e
+ magic = bytes(ident[:4])
+ if magic != b"\x7fELF":
+ raise ELFInvalid(f"invalid magic: {magic!r}")
+
+ self.capacity = ident[4] # Format for program header (bitness).
+ self.encoding = ident[5] # Data structure encoding (endianness).
+
+ try:
+ # e_fmt: Format for program header.
+ # p_fmt: Format for section header.
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+ e_fmt, self._p_fmt, self._p_idx = {
+ (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB.
+ (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB.
+ }[(self.capacity, self.encoding)]
+ except KeyError as e:
+ raise ELFInvalid(
+ f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
+ ) from e
+
+ try:
+ (
+ _,
+ self.machine, # Architecture type.
+ _,
+ _,
+ self._e_phoff, # Offset of program header.
+ _,
+ self.flags, # Processor-specific flags.
+ _,
+ self._e_phentsize, # Size of section.
+ self._e_phnum, # Number of sections.
+ ) = self._read(e_fmt)
+ except struct.error as e:
+ raise ELFInvalid("unable to parse machine and section information") from e
+
+ def _read(self, fmt: str) -> tuple[int, ...]:
+ return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+ @property
+ def interpreter(self) -> str | None:
+ """
+ The path recorded in the ``PT_INTERP`` section header.
+ """
+ for index in range(self._e_phnum):
+ self._f.seek(self._e_phoff + self._e_phentsize * index)
+ try:
+ data = self._read(self._p_fmt)
+ except struct.error:
+ continue
+ if data[self._p_idx[0]] != 3: # Not PT_INTERP.
+ continue
+ self._f.seek(data[self._p_idx[1]])
+ return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+ return None
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e79e8a882be74fe76c80ccf49a9cd68fb636fd4
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py
@@ -0,0 +1,262 @@
+from __future__ import annotations
+
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Generator, Iterator, NamedTuple, Sequence
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+_ALLOWED_ARCHS = {
+ "x86_64",
+ "aarch64",
+ "ppc64",
+ "ppc64le",
+ "s390x",
+ "loongarch64",
+ "riscv64",
+}
+
+
+# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
+# as the type for `path` until then.
+@contextlib.contextmanager
+def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
+ try:
+ with open(path, "rb") as f:
+ yield ELFFile(f)
+ except (OSError, TypeError, ValueError):
+ yield None
+
+
+def _is_linux_armhf(executable: str) -> bool:
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ with _parse_elf(executable) as f:
+ return (
+ f is not None
+ and f.capacity == EIClass.C32
+ and f.encoding == EIData.Lsb
+ and f.machine == EMachine.Arm
+ and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
+ and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
+ )
+
+
+def _is_linux_i686(executable: str) -> bool:
+ with _parse_elf(executable) as f:
+ return (
+ f is not None
+ and f.capacity == EIClass.C32
+ and f.encoding == EIData.Lsb
+ and f.machine == EMachine.I386
+ )
+
+
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+ if "armv7l" in archs:
+ return _is_linux_armhf(executable)
+ if "i686" in archs:
+ return _is_linux_i686(executable)
+ return any(arch in _ALLOWED_ARCHS for arch in archs)
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _glibc_version_string_confstr() -> str | None:
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+ try:
+ # Should be a string like "glibc 2.17".
+ version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
+ assert version_string is not None
+ _, version = version_string.rsplit()
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes() -> str | None:
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes # noqa: PLC0415
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+def _glibc_version_string() -> str | None:
+ """Returns glibc version string, or None if not using glibc."""
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> _GLibCVersion:
+ """Parse glibc version.
+
+ We use a regexp instead of str.split because we want to discard any
+ random junk that might come after the minor version -- this might happen
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ uses version strings like "2.20-2014.11"). See gh-3588.
+ """
+ m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ f"Expected glibc version with 2 components major.minor, got: {version_str}",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+ return _GLibCVersion(-1, -1)
+ return _GLibCVersion(int(m.group("major")), int(m.group("minor")))
+
+
+@functools.lru_cache
+def _get_glibc_version() -> _GLibCVersion:
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return _GLibCVersion(-1, -1)
+ return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+ sys_glibc = _get_glibc_version()
+ if sys_glibc < version:
+ return False
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux # noqa: PLC0415
+ except ImportError:
+ return True
+ if hasattr(_manylinux, "manylinux_compatible"):
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+ if result is not None:
+ return bool(result)
+ return True
+ if version == _GLibCVersion(2, 5) and hasattr(_manylinux, "manylinux1_compatible"):
+ return bool(_manylinux.manylinux1_compatible)
+ if version == _GLibCVersion(2, 12) and hasattr(
+ _manylinux, "manylinux2010_compatible"
+ ):
+ return bool(_manylinux.manylinux2010_compatible)
+ if version == _GLibCVersion(2, 17) and hasattr(
+ _manylinux, "manylinux2014_compatible"
+ ):
+ return bool(_manylinux.manylinux2014_compatible)
+ return True
+
+
+_LEGACY_MANYLINUX_MAP: dict[_GLibCVersion, str] = {
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
+ _GLibCVersion(2, 17): "manylinux2014",
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
+ _GLibCVersion(2, 12): "manylinux2010",
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
+ _GLibCVersion(2, 5): "manylinux1",
+}
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+ """Generate manylinux tags compatible to the current platform.
+
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be manylinux-compatible.
+
+ :returns: An iterator of compatible manylinux tags.
+ """
+ if not _have_compatible_abi(sys.executable, archs):
+ return
+ # Oldest glibc to be supported regardless of architecture is (2, 17).
+ too_old_glibc2 = _GLibCVersion(2, 16)
+ if set(archs) & {"x86_64", "i686"}:
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4)
+ current_glibc = _GLibCVersion(*_get_glibc_version())
+ glibc_max_list = [current_glibc]
+ # We can assume compatibility across glibc major versions.
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+ #
+ # Build a list of maximum glibc versions so that we can
+ # output the canonical list of all glibc from current_glibc
+ # down to too_old_glibc2, including all intermediary versions.
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+ for arch in archs:
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ if _is_compatible(arch, glibc_version):
+ yield "manylinux_{}_{}_{}".format(*glibc_version, arch)
+
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version):
+ yield f"{legacy_tag}_{arch}"
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e8116a79ca80d60657542a23b4bbcbc3c518eaf
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py
@@ -0,0 +1,85 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+from __future__ import annotations
+
+import functools
+import re
+import subprocess
+import sys
+from typing import Iterator, NamedTuple, Sequence
+
+from ._elffile import ELFFile
+
+
+class _MuslVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _parse_musl_version(output: str) -> _MuslVersion | None:
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
+ if len(lines) < 2 or lines[0][:4] != "musl":
+ return None
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
+ if not m:
+ return None
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
+
+
+@functools.lru_cache
+def _get_musl_version(executable: str) -> _MuslVersion | None:
+ """Detect currently-running musl runtime version.
+
+ This is done by checking the specified executable's dynamic linking
+ information, and invoking the loader to parse its output for a version
+ string. If the loader is musl, the output would be something like::
+
+ musl libc (x86_64)
+ Version 1.2.2
+ Dynamic Program Loader
+ """
+ try:
+ with open(executable, "rb") as f:
+ ld = ELFFile(f).interpreter
+ except (OSError, TypeError, ValueError):
+ return None
+ if ld is None or "musl" not in ld:
+ return None
+ proc = subprocess.run([ld], check=False, stderr=subprocess.PIPE, text=True)
+ return _parse_musl_version(proc.stderr)
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+ """Generate musllinux tags compatible to the current platform.
+
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be musllinux-compatible.
+
+ :returns: An iterator of compatible musllinux tags.
+ """
+ sys_musl = _get_musl_version(sys.executable)
+ if sys_musl is None: # Python not dynamically linked against musl.
+ return
+ for arch in archs:
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+
+
+if __name__ == "__main__": # pragma: no cover
+ import sysconfig
+
+ plat = sysconfig.get_platform()
+ assert plat.startswith("linux-"), "not linux"
+
+ print("plat:", plat)
+ print("musl:", _get_musl_version(sys.executable))
+ print("tags:", end=" ")
+ for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
+ print(t, end="\n ")
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6c1f5cd226b926f96a3bb1e9fb0f18d1bd021c9
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py
@@ -0,0 +1,365 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each __parse_* function contains EBNF-inspired grammar representing
+the implementation.
+"""
+
+from __future__ import annotations
+
+import ast
+from typing import List, Literal, NamedTuple, Sequence, Tuple, Union
+
+from ._tokenizer import DEFAULT_RULES, Tokenizer
+
+
+class Node:
+ __slots__ = ("value",)
+
+ def __init__(self, value: str) -> None:
+ self.value = value
+
+ def __str__(self) -> str:
+ return self.value
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}({self.value!r})>"
+
+ def serialize(self) -> str:
+ raise NotImplementedError
+
+
+class Variable(Node):
+ __slots__ = ()
+
+ def serialize(self) -> str:
+ return str(self)
+
+
+class Value(Node):
+ __slots__ = ()
+
+ def serialize(self) -> str:
+ return f'"{self}"'
+
+
+class Op(Node):
+ __slots__ = ()
+
+ def serialize(self) -> str:
+ return str(self)
+
+
+MarkerLogical = Literal["and", "or"]
+MarkerVar = Union[Variable, Value]
+MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
+MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
+MarkerList = List[Union["MarkerList", MarkerAtom, MarkerLogical]]
+
+
+class ParsedRequirement(NamedTuple):
+ name: str
+ url: str
+ extras: list[str]
+ specifier: str
+ marker: MarkerList | None
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for dependency specifier
+# --------------------------------------------------------------------------------------
+def parse_requirement(source: str) -> ParsedRequirement:
+ return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
+ """
+ requirement = WS? IDENTIFIER WS? extras WS? requirement_details
+ """
+ tokenizer.consume("WS")
+
+ name_token = tokenizer.expect(
+ "IDENTIFIER", expected="package name at the start of dependency specifier"
+ )
+ name = name_token.text
+ tokenizer.consume("WS")
+
+ extras = _parse_extras(tokenizer)
+ tokenizer.consume("WS")
+
+ url, specifier, marker = _parse_requirement_details(tokenizer)
+ tokenizer.expect("END", expected="end of dependency specifier")
+
+ return ParsedRequirement(name, url, extras, specifier, marker)
+
+
+def _parse_requirement_details(
+ tokenizer: Tokenizer,
+) -> tuple[str, str, MarkerList | None]:
+ """
+ requirement_details = AT URL (WS requirement_marker?)?
+ | specifier WS? (requirement_marker)?
+ """
+
+ specifier = ""
+ url = ""
+ marker = None
+
+ if tokenizer.check("AT"):
+ tokenizer.read()
+ tokenizer.consume("WS")
+
+ url_start = tokenizer.position
+ url = tokenizer.expect("URL", expected="URL after @").text
+ if tokenizer.check("END", peek=True):
+ return (url, specifier, marker)
+
+ tokenizer.expect("WS", expected="whitespace after URL")
+
+ # The input might end after whitespace.
+ if tokenizer.check("END", peek=True):
+ return (url, specifier, marker)
+
+ marker = _parse_requirement_marker(
+ tokenizer,
+ span_start=url_start,
+ expected="semicolon (after URL and whitespace)",
+ )
+ else:
+ specifier_start = tokenizer.position
+ specifier = _parse_specifier(tokenizer)
+ tokenizer.consume("WS")
+
+ if tokenizer.check("END", peek=True):
+ return (url, specifier, marker)
+
+ marker = _parse_requirement_marker(
+ tokenizer,
+ span_start=specifier_start,
+ expected=(
+ "comma (within version specifier), semicolon (after version specifier)"
+ if specifier
+ else "semicolon (after name with no version specifier)"
+ ),
+ )
+
+ return (url, specifier, marker)
+
+
+def _parse_requirement_marker(
+ tokenizer: Tokenizer, *, span_start: int, expected: str
+) -> MarkerList:
+ """
+ requirement_marker = SEMICOLON marker WS?
+ """
+
+ if not tokenizer.check("SEMICOLON"):
+ tokenizer.raise_syntax_error(
+ f"Expected {expected} or end",
+ span_start=span_start,
+ span_end=None,
+ )
+ tokenizer.read()
+
+ marker = _parse_marker(tokenizer)
+ tokenizer.consume("WS")
+
+ return marker
+
+
+def _parse_extras(tokenizer: Tokenizer) -> list[str]:
+ """
+ extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
+ """
+ if not tokenizer.check("LEFT_BRACKET", peek=True):
+ return []
+
+ with tokenizer.enclosing_tokens(
+ "LEFT_BRACKET",
+ "RIGHT_BRACKET",
+ around="extras",
+ ):
+ tokenizer.consume("WS")
+ extras = _parse_extras_list(tokenizer)
+ tokenizer.consume("WS")
+
+ return extras
+
+
+def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
+ """
+ extras_list = identifier (wsp* ',' wsp* identifier)*
+ """
+ extras: list[str] = []
+
+ if not tokenizer.check("IDENTIFIER"):
+ return extras
+
+ extras.append(tokenizer.read().text)
+
+ while True:
+ tokenizer.consume("WS")
+ if tokenizer.check("IDENTIFIER", peek=True):
+ tokenizer.raise_syntax_error("Expected comma between extra names")
+ elif not tokenizer.check("COMMA"):
+ break
+
+ tokenizer.read()
+ tokenizer.consume("WS")
+
+ extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
+ extras.append(extra_token.text)
+
+ return extras
+
+
+def _parse_specifier(tokenizer: Tokenizer) -> str:
+ """
+ specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
+ | WS? version_many WS?
+ """
+ with tokenizer.enclosing_tokens(
+ "LEFT_PARENTHESIS",
+ "RIGHT_PARENTHESIS",
+ around="version specifier",
+ ):
+ tokenizer.consume("WS")
+ parsed_specifiers = _parse_version_many(tokenizer)
+ tokenizer.consume("WS")
+
+ return parsed_specifiers
+
+
+def _parse_version_many(tokenizer: Tokenizer) -> str:
+ """
+ version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
+ """
+ parsed_specifiers = ""
+ while tokenizer.check("SPECIFIER"):
+ span_start = tokenizer.position
+ parsed_specifiers += tokenizer.read().text
+ if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
+ tokenizer.raise_syntax_error(
+ ".* suffix can only be used with `==` or `!=` operators",
+ span_start=span_start,
+ span_end=tokenizer.position + 1,
+ )
+ if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
+ tokenizer.raise_syntax_error(
+ "Local version label can only be used with `==` or `!=` operators",
+ span_start=span_start,
+ span_end=tokenizer.position,
+ )
+ tokenizer.consume("WS")
+ if not tokenizer.check("COMMA"):
+ break
+ parsed_specifiers += tokenizer.read().text
+ tokenizer.consume("WS")
+
+ return parsed_specifiers
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for marker expression
+# --------------------------------------------------------------------------------------
+def parse_marker(source: str) -> MarkerList:
+ return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+ retval = _parse_marker(tokenizer)
+ tokenizer.expect("END", expected="end of marker expression")
+ return retval
+
+
+def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
+ """
+ marker = marker_atom (BOOLOP marker_atom)+
+ """
+ expression = [_parse_marker_atom(tokenizer)]
+ while tokenizer.check("BOOLOP"):
+ token = tokenizer.read()
+ expr_right = _parse_marker_atom(tokenizer)
+ expression.extend((token.text, expr_right))
+ return expression
+
+
+def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
+ """
+ marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
+ | WS? marker_item WS?
+ """
+
+ tokenizer.consume("WS")
+ if tokenizer.check("LEFT_PARENTHESIS", peek=True):
+ with tokenizer.enclosing_tokens(
+ "LEFT_PARENTHESIS",
+ "RIGHT_PARENTHESIS",
+ around="marker expression",
+ ):
+ tokenizer.consume("WS")
+ marker: MarkerAtom = _parse_marker(tokenizer)
+ tokenizer.consume("WS")
+ else:
+ marker = _parse_marker_item(tokenizer)
+ tokenizer.consume("WS")
+ return marker
+
+
+def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
+ """
+ marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
+ """
+ tokenizer.consume("WS")
+ marker_var_left = _parse_marker_var(tokenizer)
+ tokenizer.consume("WS")
+ marker_op = _parse_marker_op(tokenizer)
+ tokenizer.consume("WS")
+ marker_var_right = _parse_marker_var(tokenizer)
+ tokenizer.consume("WS")
+ return (marker_var_left, marker_op, marker_var_right)
+
+
+def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: # noqa: RET503
+ """
+ marker_var = VARIABLE | QUOTED_STRING
+ """
+ if tokenizer.check("VARIABLE"):
+ return process_env_var(tokenizer.read().text.replace(".", "_"))
+ elif tokenizer.check("QUOTED_STRING"):
+ return process_python_str(tokenizer.read().text)
+ else:
+ tokenizer.raise_syntax_error(
+ message="Expected a marker variable or quoted string"
+ )
+
+
+def process_env_var(env_var: str) -> Variable:
+ if env_var in ("platform_python_implementation", "python_implementation"):
+ return Variable("platform_python_implementation")
+ else:
+ return Variable(env_var)
+
+
+def process_python_str(python_str: str) -> Value:
+ value = ast.literal_eval(python_str)
+ return Value(str(value))
+
+
+def _parse_marker_op(tokenizer: Tokenizer) -> Op:
+ """
+ marker_op = IN | NOT IN | OP
+ """
+ if tokenizer.check("IN"):
+ tokenizer.read()
+ return Op("in")
+ elif tokenizer.check("NOT"):
+ tokenizer.read()
+ tokenizer.expect("WS", expected="whitespace after 'not'")
+ tokenizer.expect("IN", expected="'in' after 'not'")
+ return Op("not in")
+ elif tokenizer.check("OP"):
+ return Op(tokenizer.read().text)
+ else:
+ return tokenizer.raise_syntax_error(
+ "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in"
+ )
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py
new file mode 100644
index 0000000000000000000000000000000000000000..225e2eee01238571c50595eb104e0b70d5f503c4
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py
@@ -0,0 +1,69 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import typing
+
+
+@typing.final
+class InfinityType:
+ __slots__ = ()
+
+ def __repr__(self) -> str:
+ return "Infinity"
+
+ def __hash__(self) -> int:
+ return hash(repr(self))
+
+ def __lt__(self, other: object) -> bool:
+ return False
+
+ def __le__(self, other: object) -> bool:
+ return False
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, self.__class__)
+
+ def __gt__(self, other: object) -> bool:
+ return True
+
+ def __ge__(self, other: object) -> bool:
+ return True
+
+ def __neg__(self: object) -> "NegativeInfinityType":
+ return NegativeInfinity
+
+
+Infinity = InfinityType()
+
+
+@typing.final
+class NegativeInfinityType:
+ __slots__ = ()
+
+ def __repr__(self) -> str:
+ return "-Infinity"
+
+ def __hash__(self) -> int:
+ return hash(repr(self))
+
+ def __lt__(self, other: object) -> bool:
+ return True
+
+ def __le__(self, other: object) -> bool:
+ return True
+
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, self.__class__)
+
+ def __gt__(self, other: object) -> bool:
+ return False
+
+ def __ge__(self, other: object) -> bool:
+ return False
+
+ def __neg__(self: object) -> InfinityType:
+ return Infinity
+
+
+NegativeInfinity = NegativeInfinityType()
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6d20dd3f56f880a92db7409a3e1335cb282a8f2
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py
@@ -0,0 +1,193 @@
+from __future__ import annotations
+
+import contextlib
+import re
+from dataclasses import dataclass
+from typing import Generator, Mapping, NoReturn
+
+from .specifiers import Specifier
+
+
+@dataclass
+class Token:
+ name: str
+ text: str
+ position: int
+
+
+class ParserSyntaxError(Exception):
+ """The provided source text could not be parsed correctly."""
+
+ def __init__(
+ self,
+ message: str,
+ *,
+ source: str,
+ span: tuple[int, int],
+ ) -> None:
+ self.span = span
+ self.message = message
+ self.source = source
+
+ super().__init__()
+
+ def __str__(self) -> str:
+ marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
+ return f"{self.message}\n {self.source}\n {marker}"
+
+
+DEFAULT_RULES: dict[str, re.Pattern[str]] = {
+ "LEFT_PARENTHESIS": re.compile(r"\("),
+ "RIGHT_PARENTHESIS": re.compile(r"\)"),
+ "LEFT_BRACKET": re.compile(r"\["),
+ "RIGHT_BRACKET": re.compile(r"\]"),
+ "SEMICOLON": re.compile(r";"),
+ "COMMA": re.compile(r","),
+ "QUOTED_STRING": re.compile(
+ r"""
+ (
+ ('[^']*')
+ |
+ ("[^"]*")
+ )
+ """,
+ re.VERBOSE,
+ ),
+ "OP": re.compile(r"(===|==|~=|!=|<=|>=|<|>)"),
+ "BOOLOP": re.compile(r"\b(or|and)\b"),
+ "IN": re.compile(r"\bin\b"),
+ "NOT": re.compile(r"\bnot\b"),
+ "VARIABLE": re.compile(
+ r"""
+ \b(
+ python_version
+ |python_full_version
+ |os[._]name
+ |sys[._]platform
+ |platform_(release|system)
+ |platform[._](version|machine|python_implementation)
+ |python_implementation
+ |implementation_(name|version)
+ |extras?
+ |dependency_groups
+ )\b
+ """,
+ re.VERBOSE,
+ ),
+ "SPECIFIER": re.compile(
+ Specifier._operator_regex_str + Specifier._version_regex_str,
+ re.VERBOSE | re.IGNORECASE,
+ ),
+ "AT": re.compile(r"\@"),
+ "URL": re.compile(r"[^ \t]+"),
+ "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"),
+ "VERSION_PREFIX_TRAIL": re.compile(r"\.\*"),
+ "VERSION_LOCAL_LABEL_TRAIL": re.compile(r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*"),
+ "WS": re.compile(r"[ \t]+"),
+ "END": re.compile(r"$"),
+}
+
+
+class Tokenizer:
+ """Context-sensitive token parsing.
+
+ Provides methods to examine the input stream to check whether the next token
+ matches.
+ """
+
+ def __init__(
+ self,
+ source: str,
+ *,
+ rules: Mapping[str, re.Pattern[str]],
+ ) -> None:
+ self.source = source
+ self.rules = rules
+ self.next_token: Token | None = None
+ self.position = 0
+
+ def consume(self, name: str) -> None:
+ """Move beyond provided token name, if at current position."""
+ if self.check(name):
+ self.read()
+
+ def check(self, name: str, *, peek: bool = False) -> bool:
+ """Check whether the next token has the provided name.
+
+ By default, if the check succeeds, the token *must* be read before
+ another check. If `peek` is set to `True`, the token is not loaded and
+ would need to be checked again.
+ """
+ assert self.next_token is None, (
+ f"Cannot check for {name!r}, already have {self.next_token!r}"
+ )
+ assert name in self.rules, f"Unknown token name: {name!r}"
+
+ expression = self.rules[name]
+
+ match = expression.match(self.source, self.position)
+ if match is None:
+ return False
+ if not peek:
+ self.next_token = Token(name, match[0], self.position)
+ return True
+
+ def expect(self, name: str, *, expected: str) -> Token:
+ """Expect a certain token name next, failing with a syntax error otherwise.
+
+ The token is *not* read.
+ """
+ if not self.check(name):
+ raise self.raise_syntax_error(f"Expected {expected}")
+ return self.read()
+
+ def read(self) -> Token:
+ """Consume the next token and return it."""
+ token = self.next_token
+ assert token is not None
+
+ self.position += len(token.text)
+ self.next_token = None
+
+ return token
+
+ def raise_syntax_error(
+ self,
+ message: str,
+ *,
+ span_start: int | None = None,
+ span_end: int | None = None,
+ ) -> NoReturn:
+ """Raise ParserSyntaxError at the given position."""
+ span = (
+ self.position if span_start is None else span_start,
+ self.position if span_end is None else span_end,
+ )
+ raise ParserSyntaxError(
+ message,
+ source=self.source,
+ span=span,
+ )
+
+ @contextlib.contextmanager
+ def enclosing_tokens(
+ self, open_token: str, close_token: str, *, around: str
+ ) -> Generator[None, None, None]:
+ if self.check(open_token):
+ open_position = self.position
+ self.read()
+ else:
+ open_position = None
+
+ yield
+
+ if open_position is None:
+ return
+
+ if not self.check(close_token):
+ self.raise_syntax_error(
+ f"Expected matching {close_token} for {open_token}, after {around}",
+ span_start=open_position,
+ )
+
+ self.read()
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..335b275fa7575b0a7c525a713fbe0252ad2d956f
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py
@@ -0,0 +1,147 @@
+#######################################################################################
+#
+# Adapted from:
+# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py
+#
+# MIT License
+#
+# Copyright (c) 2017-present Ofek Lev
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy of this
+# software and associated documentation files (the "Software"), to deal in the Software
+# without restriction, including without limitation the rights to use, copy, modify,
+# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be included in all copies
+# or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+#
+# With additional allowance of arbitrary `LicenseRef-` identifiers, not just
+# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`.
+#
+#######################################################################################
+from __future__ import annotations
+
+import re
+from typing import NewType, cast
+
+from ._spdx import EXCEPTIONS, LICENSES
+
+__all__ = [
+ "InvalidLicenseExpression",
+ "NormalizedLicenseExpression",
+ "canonicalize_license_expression",
+]
+
+license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$")
+
+NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str)
+
+
+class InvalidLicenseExpression(ValueError):
+ """Raised when a license-expression string is invalid
+
+ >>> canonicalize_license_expression("invalid")
+ Traceback (most recent call last):
+ ...
+ packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid'
+ """
+
+
+def canonicalize_license_expression(
+ raw_license_expression: str,
+) -> NormalizedLicenseExpression:
+ if not raw_license_expression:
+ message = f"Invalid license expression: {raw_license_expression!r}"
+ raise InvalidLicenseExpression(message)
+
+ # Pad any parentheses so tokenization can be achieved by merely splitting on
+ # whitespace.
+ license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ")
+ licenseref_prefix = "LicenseRef-"
+ license_refs = {
+ ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :]
+ for ref in license_expression.split()
+ if ref.lower().startswith(licenseref_prefix.lower())
+ }
+
+ # Normalize to lower case so we can look up licenses/exceptions
+ # and so boolean operators are Python-compatible.
+ license_expression = license_expression.lower()
+
+ tokens = license_expression.split()
+
+ # Rather than implementing a parenthesis/boolean logic parser, create an
+ # expression that Python can parse. Everything that is not involved with the
+ # grammar itself is replaced with the placeholder `False` and the resultant
+ # expression should become a valid Python expression.
+ python_tokens = []
+ for token in tokens:
+ if token not in {"or", "and", "with", "(", ")"}:
+ python_tokens.append("False")
+ elif token == "with":
+ python_tokens.append("or")
+ elif (
+ token == "("
+ and python_tokens
+ and python_tokens[-1] not in {"or", "and", "("}
+ ) or (token == ")" and python_tokens and python_tokens[-1] == "("):
+ message = f"Invalid license expression: {raw_license_expression!r}"
+ raise InvalidLicenseExpression(message)
+ else:
+ python_tokens.append(token)
+
+ python_expression = " ".join(python_tokens)
+ try:
+ compile(python_expression, "", "eval")
+ except SyntaxError:
+ message = f"Invalid license expression: {raw_license_expression!r}"
+ raise InvalidLicenseExpression(message) from None
+
+ # Take a final pass to check for unknown licenses/exceptions.
+ normalized_tokens = []
+ for token in tokens:
+ if token in {"or", "and", "with", "(", ")"}:
+ normalized_tokens.append(token.upper())
+ continue
+
+ if normalized_tokens and normalized_tokens[-1] == "WITH":
+ if token not in EXCEPTIONS:
+ message = f"Unknown license exception: {token!r}"
+ raise InvalidLicenseExpression(message)
+
+ normalized_tokens.append(EXCEPTIONS[token]["id"])
+ else:
+ if token.endswith("+"):
+ final_token = token[:-1]
+ suffix = "+"
+ else:
+ final_token = token
+ suffix = ""
+
+ if final_token.startswith("licenseref-"):
+ if not license_ref_allowed.match(final_token):
+ message = f"Invalid licenseref: {final_token!r}"
+ raise InvalidLicenseExpression(message)
+ normalized_tokens.append(license_refs[final_token] + suffix)
+ else:
+ if final_token not in LICENSES:
+ message = f"Unknown license: {final_token!r}"
+ raise InvalidLicenseExpression(message)
+ normalized_tokens.append(LICENSES[final_token]["id"] + suffix)
+
+ normalized_expression = " ".join(normalized_tokens)
+
+ return cast(
+ "NormalizedLicenseExpression",
+ normalized_expression.replace("( ", "(").replace(" )", ")"),
+ )
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py
new file mode 100644
index 0000000000000000000000000000000000000000..a277af28220b6dbe4599471104d1c7a2bd1e1288
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py
@@ -0,0 +1,799 @@
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+class SPDXLicense(TypedDict):
+ id: str
+ deprecated: bool
+
+class SPDXException(TypedDict):
+ id: str
+ deprecated: bool
+
+
+VERSION = '3.27.0'
+
+LICENSES: dict[str, SPDXLicense] = {
+ '0bsd': {'id': '0BSD', 'deprecated': False},
+ '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False},
+ 'aal': {'id': 'AAL', 'deprecated': False},
+ 'abstyles': {'id': 'Abstyles', 'deprecated': False},
+ 'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False},
+ 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False},
+ 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False},
+ 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False},
+ 'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False},
+ 'adsl': {'id': 'ADSL', 'deprecated': False},
+ 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False},
+ 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False},
+ 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False},
+ 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False},
+ 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False},
+ 'afmparse': {'id': 'Afmparse', 'deprecated': False},
+ 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True},
+ 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False},
+ 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False},
+ 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True},
+ 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False},
+ 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False},
+ 'aladdin': {'id': 'Aladdin', 'deprecated': False},
+ 'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False},
+ 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False},
+ 'aml': {'id': 'AML', 'deprecated': False},
+ 'aml-glslang': {'id': 'AML-glslang', 'deprecated': False},
+ 'ampas': {'id': 'AMPAS', 'deprecated': False},
+ 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False},
+ 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False},
+ 'any-osi': {'id': 'any-OSI', 'deprecated': False},
+ 'any-osi-perl-modules': {'id': 'any-OSI-perl-modules', 'deprecated': False},
+ 'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False},
+ 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False},
+ 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False},
+ 'apafml': {'id': 'APAFML', 'deprecated': False},
+ 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False},
+ 'app-s2p': {'id': 'App-s2p', 'deprecated': False},
+ 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False},
+ 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False},
+ 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False},
+ 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False},
+ 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False},
+ 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False},
+ 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False},
+ 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False},
+ 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False},
+ 'artistic-dist': {'id': 'Artistic-dist', 'deprecated': False},
+ 'aspell-ru': {'id': 'Aspell-RU', 'deprecated': False},
+ 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False},
+ 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False},
+ 'baekmuk': {'id': 'Baekmuk', 'deprecated': False},
+ 'bahyph': {'id': 'Bahyph', 'deprecated': False},
+ 'barr': {'id': 'Barr', 'deprecated': False},
+ 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False},
+ 'beerware': {'id': 'Beerware', 'deprecated': False},
+ 'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False},
+ 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False},
+ 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False},
+ 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False},
+ 'blessing': {'id': 'blessing', 'deprecated': False},
+ 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False},
+ 'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False},
+ 'boehm-gc-without-fee': {'id': 'Boehm-GC-without-fee', 'deprecated': False},
+ 'borceux': {'id': 'Borceux', 'deprecated': False},
+ 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False},
+ 'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False},
+ 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False},
+ 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False},
+ 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False},
+ 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False},
+ 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True},
+ 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True},
+ 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False},
+ 'bsd-2-clause-pkgconf-disclaimer': {'id': 'BSD-2-Clause-pkgconf-disclaimer', 'deprecated': False},
+ 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False},
+ 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False},
+ 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False},
+ 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False},
+ 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False},
+ 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False},
+ 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False},
+ 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False},
+ 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False},
+ 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False},
+ 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False},
+ 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False},
+ 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False},
+ 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False},
+ 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False},
+ 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False},
+ 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False},
+ 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False},
+ 'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False},
+ 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False},
+ 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False},
+ 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False},
+ 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False},
+ 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False},
+ 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False},
+ 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False},
+ 'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False},
+ 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False},
+ 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False},
+ 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False},
+ 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True},
+ 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False},
+ 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False},
+ 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False},
+ 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False},
+ 'caldera': {'id': 'Caldera', 'deprecated': False},
+ 'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False},
+ 'catharon': {'id': 'Catharon', 'deprecated': False},
+ 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False},
+ 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False},
+ 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False},
+ 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False},
+ 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False},
+ 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False},
+ 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False},
+ 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False},
+ 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False},
+ 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False},
+ 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False},
+ 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False},
+ 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False},
+ 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False},
+ 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False},
+ 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False},
+ 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False},
+ 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False},
+ 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False},
+ 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False},
+ 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False},
+ 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False},
+ 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False},
+ 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False},
+ 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False},
+ 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False},
+ 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False},
+ 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False},
+ 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False},
+ 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False},
+ 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False},
+ 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False},
+ 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False},
+ 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False},
+ 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False},
+ 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False},
+ 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False},
+ 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False},
+ 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False},
+ 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False},
+ 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False},
+ 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False},
+ 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False},
+ 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False},
+ 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False},
+ 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False},
+ 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False},
+ 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False},
+ 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False},
+ 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False},
+ 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False},
+ 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False},
+ 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False},
+ 'cc-pdm-1.0': {'id': 'CC-PDM-1.0', 'deprecated': False},
+ 'cc-sa-1.0': {'id': 'CC-SA-1.0', 'deprecated': False},
+ 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False},
+ 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False},
+ 'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False},
+ 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False},
+ 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False},
+ 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False},
+ 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False},
+ 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False},
+ 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False},
+ 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False},
+ 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False},
+ 'cecill-b': {'id': 'CECILL-B', 'deprecated': False},
+ 'cecill-c': {'id': 'CECILL-C', 'deprecated': False},
+ 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False},
+ 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False},
+ 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False},
+ 'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False},
+ 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False},
+ 'cfitsio': {'id': 'CFITSIO', 'deprecated': False},
+ 'check-cvs': {'id': 'check-cvs', 'deprecated': False},
+ 'checkmk': {'id': 'checkmk', 'deprecated': False},
+ 'clartistic': {'id': 'ClArtistic', 'deprecated': False},
+ 'clips': {'id': 'Clips', 'deprecated': False},
+ 'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False},
+ 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False},
+ 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False},
+ 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False},
+ 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False},
+ 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False},
+ 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False},
+ 'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False},
+ 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False},
+ 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False},
+ 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False},
+ 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False},
+ 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False},
+ 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False},
+ 'cronyx': {'id': 'Cronyx', 'deprecated': False},
+ 'crossword': {'id': 'Crossword', 'deprecated': False},
+ 'cryptoswift': {'id': 'CryptoSwift', 'deprecated': False},
+ 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False},
+ 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False},
+ 'cube': {'id': 'Cube', 'deprecated': False},
+ 'curl': {'id': 'curl', 'deprecated': False},
+ 'cve-tou': {'id': 'cve-tou', 'deprecated': False},
+ 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False},
+ 'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False},
+ 'diffmark': {'id': 'diffmark', 'deprecated': False},
+ 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False},
+ 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False},
+ 'doc': {'id': 'DOC', 'deprecated': False},
+ 'docbook-dtd': {'id': 'DocBook-DTD', 'deprecated': False},
+ 'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False},
+ 'docbook-stylesheet': {'id': 'DocBook-Stylesheet', 'deprecated': False},
+ 'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False},
+ 'dotseqn': {'id': 'Dotseqn', 'deprecated': False},
+ 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False},
+ 'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False},
+ 'dsdp': {'id': 'DSDP', 'deprecated': False},
+ 'dtoa': {'id': 'dtoa', 'deprecated': False},
+ 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False},
+ 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False},
+ 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False},
+ 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True},
+ 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False},
+ 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False},
+ 'egenix': {'id': 'eGenix', 'deprecated': False},
+ 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False},
+ 'entessa': {'id': 'Entessa', 'deprecated': False},
+ 'epics': {'id': 'EPICS', 'deprecated': False},
+ 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False},
+ 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False},
+ 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False},
+ 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False},
+ 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False},
+ 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False},
+ 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False},
+ 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False},
+ 'eurosym': {'id': 'Eurosym', 'deprecated': False},
+ 'fair': {'id': 'Fair', 'deprecated': False},
+ 'fbm': {'id': 'FBM', 'deprecated': False},
+ 'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False},
+ 'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False},
+ 'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False},
+ 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False},
+ 'freeimage': {'id': 'FreeImage', 'deprecated': False},
+ 'fsfap': {'id': 'FSFAP', 'deprecated': False},
+ 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False},
+ 'fsful': {'id': 'FSFUL', 'deprecated': False},
+ 'fsfullr': {'id': 'FSFULLR', 'deprecated': False},
+ 'fsfullrsd': {'id': 'FSFULLRSD', 'deprecated': False},
+ 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False},
+ 'fsl-1.1-alv2': {'id': 'FSL-1.1-ALv2', 'deprecated': False},
+ 'fsl-1.1-mit': {'id': 'FSL-1.1-MIT', 'deprecated': False},
+ 'ftl': {'id': 'FTL', 'deprecated': False},
+ 'furuseth': {'id': 'Furuseth', 'deprecated': False},
+ 'fwlw': {'id': 'fwlw', 'deprecated': False},
+ 'game-programming-gems': {'id': 'Game-Programming-Gems', 'deprecated': False},
+ 'gcr-docs': {'id': 'GCR-docs', 'deprecated': False},
+ 'gd': {'id': 'GD', 'deprecated': False},
+ 'generic-xts': {'id': 'generic-xts', 'deprecated': False},
+ 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True},
+ 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False},
+ 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False},
+ 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False},
+ 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False},
+ 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True},
+ 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False},
+ 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False},
+ 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False},
+ 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False},
+ 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True},
+ 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False},
+ 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False},
+ 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False},
+ 'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False},
+ 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False},
+ 'giftware': {'id': 'Giftware', 'deprecated': False},
+ 'gl2ps': {'id': 'GL2PS', 'deprecated': False},
+ 'glide': {'id': 'Glide', 'deprecated': False},
+ 'glulxe': {'id': 'Glulxe', 'deprecated': False},
+ 'glwtpl': {'id': 'GLWTPL', 'deprecated': False},
+ 'gnuplot': {'id': 'gnuplot', 'deprecated': False},
+ 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True},
+ 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True},
+ 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False},
+ 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False},
+ 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True},
+ 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True},
+ 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False},
+ 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False},
+ 'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True},
+ 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True},
+ 'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True},
+ 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True},
+ 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True},
+ 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True},
+ 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True},
+ 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False},
+ 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False},
+ 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True},
+ 'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True},
+ 'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False},
+ 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False},
+ 'gtkbook': {'id': 'gtkbook', 'deprecated': False},
+ 'gutmann': {'id': 'Gutmann', 'deprecated': False},
+ 'haskellreport': {'id': 'HaskellReport', 'deprecated': False},
+ 'hdf5': {'id': 'HDF5', 'deprecated': False},
+ 'hdparm': {'id': 'hdparm', 'deprecated': False},
+ 'hidapi': {'id': 'HIDAPI', 'deprecated': False},
+ 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False},
+ 'hp-1986': {'id': 'HP-1986', 'deprecated': False},
+ 'hp-1989': {'id': 'HP-1989', 'deprecated': False},
+ 'hpnd': {'id': 'HPND', 'deprecated': False},
+ 'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False},
+ 'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False},
+ 'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False},
+ 'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False},
+ 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False},
+ 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False},
+ 'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False},
+ 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False},
+ 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False},
+ 'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False},
+ 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False},
+ 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False},
+ 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False},
+ 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False},
+ 'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False},
+ 'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False},
+ 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False},
+ 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False},
+ 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False},
+ 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False},
+ 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False},
+ 'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False},
+ 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False},
+ 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False},
+ 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False},
+ 'icu': {'id': 'ICU', 'deprecated': False},
+ 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False},
+ 'ijg': {'id': 'IJG', 'deprecated': False},
+ 'ijg-short': {'id': 'IJG-short', 'deprecated': False},
+ 'imagemagick': {'id': 'ImageMagick', 'deprecated': False},
+ 'imatix': {'id': 'iMatix', 'deprecated': False},
+ 'imlib2': {'id': 'Imlib2', 'deprecated': False},
+ 'info-zip': {'id': 'Info-ZIP', 'deprecated': False},
+ 'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False},
+ 'innosetup': {'id': 'InnoSetup', 'deprecated': False},
+ 'intel': {'id': 'Intel', 'deprecated': False},
+ 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False},
+ 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False},
+ 'ipa': {'id': 'IPA', 'deprecated': False},
+ 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False},
+ 'isc': {'id': 'ISC', 'deprecated': False},
+ 'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False},
+ 'jam': {'id': 'Jam', 'deprecated': False},
+ 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False},
+ 'jove': {'id': 'jove', 'deprecated': False},
+ 'jpl-image': {'id': 'JPL-image', 'deprecated': False},
+ 'jpnic': {'id': 'JPNIC', 'deprecated': False},
+ 'json': {'id': 'JSON', 'deprecated': False},
+ 'kastrup': {'id': 'Kastrup', 'deprecated': False},
+ 'kazlib': {'id': 'Kazlib', 'deprecated': False},
+ 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False},
+ 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False},
+ 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False},
+ 'latex2e': {'id': 'Latex2e', 'deprecated': False},
+ 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False},
+ 'leptonica': {'id': 'Leptonica', 'deprecated': False},
+ 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True},
+ 'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True},
+ 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False},
+ 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False},
+ 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True},
+ 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True},
+ 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False},
+ 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False},
+ 'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True},
+ 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True},
+ 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False},
+ 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False},
+ 'lgpllr': {'id': 'LGPLLR', 'deprecated': False},
+ 'libpng': {'id': 'Libpng', 'deprecated': False},
+ 'libpng-1.6.35': {'id': 'libpng-1.6.35', 'deprecated': False},
+ 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False},
+ 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False},
+ 'libtiff': {'id': 'libtiff', 'deprecated': False},
+ 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False},
+ 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False},
+ 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False},
+ 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False},
+ 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False},
+ 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False},
+ 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False},
+ 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False},
+ 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False},
+ 'loop': {'id': 'LOOP', 'deprecated': False},
+ 'lpd-document': {'id': 'LPD-document', 'deprecated': False},
+ 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False},
+ 'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False},
+ 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False},
+ 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False},
+ 'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False},
+ 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False},
+ 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False},
+ 'lsof': {'id': 'lsof', 'deprecated': False},
+ 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False},
+ 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False},
+ 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False},
+ 'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False},
+ 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False},
+ 'magaz': {'id': 'magaz', 'deprecated': False},
+ 'mailprio': {'id': 'mailprio', 'deprecated': False},
+ 'makeindex': {'id': 'MakeIndex', 'deprecated': False},
+ 'man2html': {'id': 'man2html', 'deprecated': False},
+ 'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False},
+ 'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False},
+ 'metamail': {'id': 'metamail', 'deprecated': False},
+ 'minpack': {'id': 'Minpack', 'deprecated': False},
+ 'mips': {'id': 'MIPS', 'deprecated': False},
+ 'miros': {'id': 'MirOS', 'deprecated': False},
+ 'mit': {'id': 'MIT', 'deprecated': False},
+ 'mit-0': {'id': 'MIT-0', 'deprecated': False},
+ 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False},
+ 'mit-click': {'id': 'MIT-Click', 'deprecated': False},
+ 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False},
+ 'mit-enna': {'id': 'MIT-enna', 'deprecated': False},
+ 'mit-feh': {'id': 'MIT-feh', 'deprecated': False},
+ 'mit-festival': {'id': 'MIT-Festival', 'deprecated': False},
+ 'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False},
+ 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False},
+ 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False},
+ 'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False},
+ 'mit-wu': {'id': 'MIT-Wu', 'deprecated': False},
+ 'mitnfa': {'id': 'MITNFA', 'deprecated': False},
+ 'mmixware': {'id': 'MMIXware', 'deprecated': False},
+ 'motosoto': {'id': 'Motosoto', 'deprecated': False},
+ 'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False},
+ 'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False},
+ 'mpich2': {'id': 'mpich2', 'deprecated': False},
+ 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False},
+ 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False},
+ 'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False},
+ 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False},
+ 'mplus': {'id': 'mplus', 'deprecated': False},
+ 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False},
+ 'ms-pl': {'id': 'MS-PL', 'deprecated': False},
+ 'ms-rl': {'id': 'MS-RL', 'deprecated': False},
+ 'mtll': {'id': 'MTLL', 'deprecated': False},
+ 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False},
+ 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False},
+ 'multics': {'id': 'Multics', 'deprecated': False},
+ 'mup': {'id': 'Mup', 'deprecated': False},
+ 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False},
+ 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False},
+ 'naumen': {'id': 'Naumen', 'deprecated': False},
+ 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False},
+ 'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False},
+ 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False},
+ 'ncl': {'id': 'NCL', 'deprecated': False},
+ 'ncsa': {'id': 'NCSA', 'deprecated': False},
+ 'net-snmp': {'id': 'Net-SNMP', 'deprecated': True},
+ 'netcdf': {'id': 'NetCDF', 'deprecated': False},
+ 'newsletr': {'id': 'Newsletr', 'deprecated': False},
+ 'ngpl': {'id': 'NGPL', 'deprecated': False},
+ 'ngrep': {'id': 'ngrep', 'deprecated': False},
+ 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False},
+ 'nist-pd': {'id': 'NIST-PD', 'deprecated': False},
+ 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False},
+ 'nist-software': {'id': 'NIST-Software', 'deprecated': False},
+ 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False},
+ 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False},
+ 'nlpl': {'id': 'NLPL', 'deprecated': False},
+ 'nokia': {'id': 'Nokia', 'deprecated': False},
+ 'nosl': {'id': 'NOSL', 'deprecated': False},
+ 'noweb': {'id': 'Noweb', 'deprecated': False},
+ 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False},
+ 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False},
+ 'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False},
+ 'nrl': {'id': 'NRL', 'deprecated': False},
+ 'ntia-pd': {'id': 'NTIA-PD', 'deprecated': False},
+ 'ntp': {'id': 'NTP', 'deprecated': False},
+ 'ntp-0': {'id': 'NTP-0', 'deprecated': False},
+ 'nunit': {'id': 'Nunit', 'deprecated': True},
+ 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False},
+ 'oar': {'id': 'OAR', 'deprecated': False},
+ 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False},
+ 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False},
+ 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False},
+ 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False},
+ 'offis': {'id': 'OFFIS', 'deprecated': False},
+ 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False},
+ 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False},
+ 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False},
+ 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False},
+ 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False},
+ 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False},
+ 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False},
+ 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False},
+ 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False},
+ 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False},
+ 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False},
+ 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False},
+ 'ogtsl': {'id': 'OGTSL', 'deprecated': False},
+ 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False},
+ 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False},
+ 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False},
+ 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False},
+ 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False},
+ 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False},
+ 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False},
+ 'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False},
+ 'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False},
+ 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False},
+ 'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False},
+ 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False},
+ 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False},
+ 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False},
+ 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False},
+ 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False},
+ 'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False},
+ 'oml': {'id': 'OML', 'deprecated': False},
+ 'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False},
+ 'openssl': {'id': 'OpenSSL', 'deprecated': False},
+ 'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False},
+ 'openvision': {'id': 'OpenVision', 'deprecated': False},
+ 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False},
+ 'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False},
+ 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False},
+ 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False},
+ 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False},
+ 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False},
+ 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False},
+ 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False},
+ 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False},
+ 'padl': {'id': 'PADL', 'deprecated': False},
+ 'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False},
+ 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False},
+ 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False},
+ 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False},
+ 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False},
+ 'pixar': {'id': 'Pixar', 'deprecated': False},
+ 'pkgconf': {'id': 'pkgconf', 'deprecated': False},
+ 'plexus': {'id': 'Plexus', 'deprecated': False},
+ 'pnmstitch': {'id': 'pnmstitch', 'deprecated': False},
+ 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False},
+ 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False},
+ 'postgresql': {'id': 'PostgreSQL', 'deprecated': False},
+ 'ppl': {'id': 'PPL', 'deprecated': False},
+ 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False},
+ 'psfrag': {'id': 'psfrag', 'deprecated': False},
+ 'psutils': {'id': 'psutils', 'deprecated': False},
+ 'python-2.0': {'id': 'Python-2.0', 'deprecated': False},
+ 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False},
+ 'python-ldap': {'id': 'python-ldap', 'deprecated': False},
+ 'qhull': {'id': 'Qhull', 'deprecated': False},
+ 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False},
+ 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False},
+ 'radvd': {'id': 'radvd', 'deprecated': False},
+ 'rdisc': {'id': 'Rdisc', 'deprecated': False},
+ 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False},
+ 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False},
+ 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False},
+ 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False},
+ 'rsa-md': {'id': 'RSA-MD', 'deprecated': False},
+ 'rscpl': {'id': 'RSCPL', 'deprecated': False},
+ 'ruby': {'id': 'Ruby', 'deprecated': False},
+ 'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False},
+ 'sax-pd': {'id': 'SAX-PD', 'deprecated': False},
+ 'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False},
+ 'saxpath': {'id': 'Saxpath', 'deprecated': False},
+ 'scea': {'id': 'SCEA', 'deprecated': False},
+ 'schemereport': {'id': 'SchemeReport', 'deprecated': False},
+ 'sendmail': {'id': 'Sendmail', 'deprecated': False},
+ 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False},
+ 'sendmail-open-source-1.1': {'id': 'Sendmail-Open-Source-1.1', 'deprecated': False},
+ 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False},
+ 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False},
+ 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False},
+ 'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False},
+ 'sgp4': {'id': 'SGP4', 'deprecated': False},
+ 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False},
+ 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False},
+ 'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False},
+ 'sissl': {'id': 'SISSL', 'deprecated': False},
+ 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False},
+ 'sl': {'id': 'SL', 'deprecated': False},
+ 'sleepycat': {'id': 'Sleepycat', 'deprecated': False},
+ 'smail-gpl': {'id': 'SMAIL-GPL', 'deprecated': False},
+ 'smlnj': {'id': 'SMLNJ', 'deprecated': False},
+ 'smppl': {'id': 'SMPPL', 'deprecated': False},
+ 'snia': {'id': 'SNIA', 'deprecated': False},
+ 'snprintf': {'id': 'snprintf', 'deprecated': False},
+ 'sofa': {'id': 'SOFA', 'deprecated': False},
+ 'softsurfer': {'id': 'softSurfer', 'deprecated': False},
+ 'soundex': {'id': 'Soundex', 'deprecated': False},
+ 'spencer-86': {'id': 'Spencer-86', 'deprecated': False},
+ 'spencer-94': {'id': 'Spencer-94', 'deprecated': False},
+ 'spencer-99': {'id': 'Spencer-99', 'deprecated': False},
+ 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False},
+ 'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False},
+ 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False},
+ 'ssh-short': {'id': 'SSH-short', 'deprecated': False},
+ 'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False},
+ 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False},
+ 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True},
+ 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False},
+ 'sul-1.0': {'id': 'SUL-1.0', 'deprecated': False},
+ 'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False},
+ 'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False},
+ 'sunpro': {'id': 'SunPro', 'deprecated': False},
+ 'swl': {'id': 'SWL', 'deprecated': False},
+ 'swrule': {'id': 'swrule', 'deprecated': False},
+ 'symlinks': {'id': 'Symlinks', 'deprecated': False},
+ 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False},
+ 'tcl': {'id': 'TCL', 'deprecated': False},
+ 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False},
+ 'termreadkey': {'id': 'TermReadKey', 'deprecated': False},
+ 'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False},
+ 'thirdeye': {'id': 'ThirdEye', 'deprecated': False},
+ 'threeparttable': {'id': 'threeparttable', 'deprecated': False},
+ 'tmate': {'id': 'TMate', 'deprecated': False},
+ 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False},
+ 'tosl': {'id': 'TOSL', 'deprecated': False},
+ 'tpdl': {'id': 'TPDL', 'deprecated': False},
+ 'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False},
+ 'trustedqsl': {'id': 'TrustedQSL', 'deprecated': False},
+ 'ttwl': {'id': 'TTWL', 'deprecated': False},
+ 'ttyp0': {'id': 'TTYP0', 'deprecated': False},
+ 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False},
+ 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False},
+ 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False},
+ 'ucar': {'id': 'UCAR', 'deprecated': False},
+ 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False},
+ 'ulem': {'id': 'ulem', 'deprecated': False},
+ 'umich-merit': {'id': 'UMich-Merit', 'deprecated': False},
+ 'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False},
+ 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False},
+ 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False},
+ 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False},
+ 'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False},
+ 'unlicense': {'id': 'Unlicense', 'deprecated': False},
+ 'unlicense-libtelnet': {'id': 'Unlicense-libtelnet', 'deprecated': False},
+ 'unlicense-libwhirlpool': {'id': 'Unlicense-libwhirlpool', 'deprecated': False},
+ 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False},
+ 'urt-rle': {'id': 'URT-RLE', 'deprecated': False},
+ 'vim': {'id': 'Vim', 'deprecated': False},
+ 'vostrom': {'id': 'VOSTROM', 'deprecated': False},
+ 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False},
+ 'w3c': {'id': 'W3C', 'deprecated': False},
+ 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False},
+ 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False},
+ 'w3m': {'id': 'w3m', 'deprecated': False},
+ 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False},
+ 'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False},
+ 'wsuipa': {'id': 'Wsuipa', 'deprecated': False},
+ 'wtfpl': {'id': 'WTFPL', 'deprecated': False},
+ 'wwl': {'id': 'wwl', 'deprecated': False},
+ 'wxwindows': {'id': 'wxWindows', 'deprecated': True},
+ 'x11': {'id': 'X11', 'deprecated': False},
+ 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False},
+ 'x11-swapped': {'id': 'X11-swapped', 'deprecated': False},
+ 'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False},
+ 'xerox': {'id': 'Xerox', 'deprecated': False},
+ 'xfig': {'id': 'Xfig', 'deprecated': False},
+ 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False},
+ 'xinetd': {'id': 'xinetd', 'deprecated': False},
+ 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False},
+ 'xlock': {'id': 'xlock', 'deprecated': False},
+ 'xnet': {'id': 'Xnet', 'deprecated': False},
+ 'xpp': {'id': 'xpp', 'deprecated': False},
+ 'xskat': {'id': 'XSkat', 'deprecated': False},
+ 'xzoom': {'id': 'xzoom', 'deprecated': False},
+ 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False},
+ 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False},
+ 'zed': {'id': 'Zed', 'deprecated': False},
+ 'zeeff': {'id': 'Zeeff', 'deprecated': False},
+ 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False},
+ 'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False},
+ 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False},
+ 'zlib': {'id': 'Zlib', 'deprecated': False},
+ 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False},
+ 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False},
+ 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False},
+ 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False},
+}
+
# SPDX license-exception registry: maps the lowercased exception identifier to
# its canonical SPDX id plus a flag marking ids the SPDX list has deprecated.
# Keys are exactly the lowercased form of the corresponding 'id' value.
EXCEPTIONS: dict[str, SPDXException] = {
    '389-exception': {'id': '389-exception', 'deprecated': False},
    'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False},
    'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False},
    'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False},
    'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False},
    'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False},
    'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False},
    'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False},
    'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False},
    'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False},
    'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False},
    'cgal-linking-exception': {'id': 'CGAL-linking-exception', 'deprecated': False},
    'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False},
    'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False},
    'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False},
    'digia-qt-lgpl-exception-1.1': {'id': 'Digia-Qt-LGPL-exception-1.1', 'deprecated': False},
    'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False},
    'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False},
    'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False},
    'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False},
    'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False},
    'fmt-exception': {'id': 'fmt-exception', 'deprecated': False},
    'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False},
    'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False},
    'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False},
    'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False},
    'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False},
    'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False},
    'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False},
    'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False},
    'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False},
    'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False},
    'gpl-3.0-389-ds-base-exception': {'id': 'GPL-3.0-389-ds-base-exception', 'deprecated': False},
    'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False},
    'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False},
    'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False},
    'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False},
    'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False},
    'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False},
    'harbour-exception': {'id': 'harbour-exception', 'deprecated': False},
    'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False},
    'independent-modules-exception': {'id': 'Independent-modules-exception', 'deprecated': False},
    'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False},
    'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False},
    'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False},
    'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False},
    'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False},
    'llgpl': {'id': 'LLGPL', 'deprecated': False},
    'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False},
    'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False},
    'mif-exception': {'id': 'mif-exception', 'deprecated': False},
    'mxml-exception': {'id': 'mxml-exception', 'deprecated': False},
    'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True},
    'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False},
    'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False},
    'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False},
    'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False},
    'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False},
    'polyparse-exception': {'id': 'polyparse-exception', 'deprecated': False},
    'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False},
    'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False},
    'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False},
    'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False},
    'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False},
    'romic-exception': {'id': 'romic-exception', 'deprecated': False},
    'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False},
    'sane-exception': {'id': 'SANE-exception', 'deprecated': False},
    'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False},
    'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False},
    'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False},
    'swi-exception': {'id': 'SWI-exception', 'deprecated': False},
    'swift-exception': {'id': 'Swift-exception', 'deprecated': False},
    'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False},
    'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False},
    'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False},
    'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False},
    'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False},
    'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False},
    'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False},
}
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca3706fe492f4cf0762f7734d84c2d269f88bbc5
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py
@@ -0,0 +1,388 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import operator
+import os
+import platform
+import sys
+from typing import AbstractSet, Callable, Literal, Mapping, TypedDict, Union, cast
+
+from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
+from ._parser import parse_marker as _parse_marker
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
__all__ = [
    "Environment",
    "EvaluateContext",
    "InvalidMarker",
    "Marker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "default_environment",
]

# Signature shared by every comparison callable stored in ``_operators``.
Operator = Callable[[str, Union[str, AbstractSet[str]]], bool]
# The situation a marker is evaluated for (see ``Marker.evaluate``): core
# metadata, a lock file, or any other requirement context.
EvaluateContext = Literal["metadata", "lock_file", "requirement"]
# Marker variables whose environment value is a set of names, not one string.
MARKERS_ALLOWING_SET = {"extras", "dependency_groups"}
# Marker variables compared with PEP 440 version semantics when the
# right-hand side forms a valid specifier (see ``_eval_op``).
MARKERS_REQUIRING_VERSION = {
    "implementation_version",
    "platform_release",
    "python_full_version",
    "python_version",
}
+
+
class InvalidMarker(ValueError):
    """Raised for a marker string that does not parse; see PEP 508 for syntax."""
+
+
class UndefinedComparison(ValueError):
    """Raised when a comparison operator is applied to values that do not support it."""
+
+
class UndefinedEnvironmentName(ValueError):
    """Raised when a marker references a name the evaluation environment lacks."""
+
+
class Environment(TypedDict):
    """The marker environment: one key per PEP 508 marker variable.

    ``default_environment()`` builds this from the running interpreter via
    ``sys``, ``os`` and ``platform``.
    """

    implementation_name: str
    """The implementation's identifier, e.g. ``'cpython'``."""

    implementation_version: str
    """
    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
    ``'7.3.13'`` for PyPy3.10 v7.3.13.
    """

    os_name: str
    """
    The value of :py:data:`os.name`. The name of the operating system dependent module
    imported, e.g. ``'posix'``.
    """

    platform_machine: str
    """
    Returns the machine type, e.g. ``'i386'``.

    An empty string if the value cannot be determined.
    """

    platform_release: str
    """
    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.

    An empty string if the value cannot be determined.
    """

    platform_system: str
    """
    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.

    An empty string if the value cannot be determined.
    """

    platform_version: str
    """
    The system's release version, e.g. ``'#3 on degas'``.

    An empty string if the value cannot be determined.
    """

    python_full_version: str
    """
    The Python version as string ``'major.minor.patchlevel'``.

    Note that unlike the Python :py:data:`sys.version`, this value will always include
    the patchlevel (it defaults to 0).
    """

    platform_python_implementation: str
    """
    A string identifying the Python implementation, e.g. ``'CPython'``.
    """

    python_version: str
    """The Python version as string ``'major.minor'``."""

    sys_platform: str
    """
    This string contains a platform identifier that can be used to append
    platform-specific components to :py:data:`sys.path`, for instance.

    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
    returned by ``uname -s`` with the first part of the version as returned by
    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
    was built.
    """
+
+
+def _normalize_extras(
+ result: MarkerList | MarkerAtom | str,
+) -> MarkerList | MarkerAtom | str:
+ if not isinstance(result, tuple):
+ return result
+
+ lhs, op, rhs = result
+ if isinstance(lhs, Variable) and lhs.value == "extra":
+ normalized_extra = canonicalize_name(rhs.value)
+ rhs = Value(normalized_extra)
+ elif isinstance(rhs, Variable) and rhs.value == "extra":
+ normalized_extra = canonicalize_name(lhs.value)
+ lhs = Value(normalized_extra)
+ return lhs, op, rhs
+
+
def _normalize_extra_values(results: MarkerList) -> MarkerList:
    """Return *results* with every top-level ``extra`` comparison canonicalized."""
    normalized: MarkerList = []
    for result in results:
        normalized.append(_normalize_extras(result))
    return normalized
+
+
+def _format_marker(
+ marker: list[str] | MarkerAtom | str, first: bool | None = True
+) -> str:
+ assert isinstance(marker, (list, tuple, str))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself it's own list. In that case we want skip
+ # the rest of this function so that we don't get extraneous () on the
+ # outside.
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators: dict[str, Operator] = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": lambda _lhs, _rhs: False,
+ "<=": operator.eq,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.eq,
+ ">": lambda _lhs, _rhs: False,
+}
+
+
def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str], *, key: str) -> bool:
    """Apply one marker comparison, with version or plain-string semantics.

    Variables listed in MARKERS_REQUIRING_VERSION use PEP 440 specifier
    matching when ``op`` + ``rhs`` forms a valid specifier; everything else
    (and invalid specifiers) falls back to the ``_operators`` table.

    Raises UndefinedComparison for an operator with no fallback meaning.
    """
    op_str = op.serialize()

    if key in MARKERS_REQUIRING_VERSION:
        try:
            spec = Specifier(f"{op_str}{rhs}")
        except InvalidSpecifier:
            # Not a valid specifier; fall through to string comparison.
            pass
        else:
            return spec.contains(lhs, prereleases=True)

    comparator: Operator | None = _operators.get(op_str)
    if comparator is not None:
        return comparator(lhs, rhs)
    raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+
+def _normalize(
+ lhs: str, rhs: str | AbstractSet[str], key: str
+) -> tuple[str, str | AbstractSet[str]]:
+ # PEP 685 - Comparison of extra names for optional distribution dependencies
+ # https://peps.python.org/pep-0685/
+ # > When comparing extra names, tools MUST normalize the names being
+ # > compared using the semantics outlined in PEP 503 for names
+ if key == "extra":
+ assert isinstance(rhs, str), "extra value must be a string"
+ # Both sides are normalized at this point already
+ return (lhs, rhs)
+ if key in MARKERS_ALLOWING_SET:
+ if isinstance(rhs, str): # pragma: no cover
+ return (canonicalize_name(lhs), canonicalize_name(rhs))
+ else:
+ return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs})
+
+ # other environment markers don't have such standards
+ return lhs, rhs
+
+
+def _evaluate_markers(
+ markers: MarkerList, environment: dict[str, str | AbstractSet[str]]
+) -> bool:
+ groups: list[list[bool]] = [[]]
+
+ for marker in markers:
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ environment_key = lhs.value
+ lhs_value = environment[environment_key]
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ environment_key = rhs.value
+ rhs_value = environment[environment_key]
+
+ assert isinstance(lhs_value, str), "lhs must be a string"
+ lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value, key=environment_key))
+ elif marker == "or":
+ groups.append([])
+ elif marker == "and":
+ pass
+ else: # pragma: nocover
+ raise TypeError(f"Unexpected marker {marker!r}")
+
+ return any(all(item) for item in groups)
+
+
def format_full_version(info: sys._version_info) -> str:
    """Render a ``version_info``-like object as ``'X.Y.Z'``.

    Non-final builds append the first letter of the release level and the
    serial, e.g. ``'3.13.0a2'`` for alpha 2.
    """
    version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        version = f"{version}{info.releaselevel[0]}{info.serial}"
    return version
+
+
def default_environment() -> Environment:
    """Build the PEP 508 marker environment for the running interpreter.

    All values are read from ``sys``, ``os`` and ``platform``; see the
    ``Environment`` TypedDict for the meaning of each key.
    """
    implementation = sys.implementation
    return {
        "implementation_name": implementation.name,
        "implementation_version": format_full_version(implementation.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
+
+
class Marker:
    """A parsed PEP 508 environment marker, e.g. ``python_version >= "3.9"``.

    Parsing happens eagerly in ``__init__``; evaluation against an
    environment happens in :meth:`evaluate`.
    """

    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.

        # If this fails and throws an error, the repr still expects _markers to
        # be defined.
        self._markers: MarkerList = []

        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'and',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'or',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        # Serialize the parsed tree back to PEP 508 marker text.
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}('{self}')>"

    def __hash__(self) -> int:
        # Markers hash and compare (see __eq__) by their serialized form.
        return hash(str(self))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(
        self,
        environment: Mapping[str, str | AbstractSet[str]] | None = None,
        context: EvaluateContext = "metadata",
    ) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment. The *context* parameter specifies what
        context the markers are being evaluated for, which influences what markers
        are considered valid. Acceptable values are "metadata" (for core metadata;
        default), "lock_file", and "requirement" (i.e. all other situations).

        The environment is determined from the current Python process.
        """
        current_environment = cast(
            "dict[str, str | AbstractSet[str]]", default_environment()
        )
        # Seed context-specific variables BEFORE applying caller overrides, so
        # an explicit `environment` argument can replace these defaults.
        if context == "lock_file":
            # Lock files may reference `extras`/`dependency_groups` as sets;
            # default both to empty.
            current_environment.update(
                extras=frozenset(), dependency_groups=frozenset()
            )
        elif context == "metadata":
            current_environment["extra"] = ""

        if environment is not None:
            current_environment.update(environment)
            if "extra" in current_environment:
                # The API used to allow setting extra to None. We need to handle
                # this case for backwards compatibility. Also skip running
                # normalize name if extra is empty.
                extra = cast("str | None", current_environment["extra"])
                current_environment["extra"] = canonicalize_name(extra) if extra else ""

        return _evaluate_markers(
            self._markers, _repair_python_full_version(current_environment)
        )
+
+
+def _repair_python_full_version(
+ env: dict[str, str | AbstractSet[str]],
+) -> dict[str, str | AbstractSet[str]]:
+ """
+ Work around platform.python_version() returning something that is not PEP 440
+ compliant for non-tagged Python builds.
+ """
+ python_full_version = cast("str", env["python_full_version"])
+ if python_full_version.endswith("+"):
+ env["python_full_version"] = f"{python_full_version}local"
+ return env
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..253f6b1b7ebd711fdc6bbbab3b56897061bab515
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py
@@ -0,0 +1,978 @@
+from __future__ import annotations
+
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import keyword
+import pathlib
+import sys
+import typing
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Literal,
+ TypedDict,
+ cast,
+)
+
+from . import licenses, requirements, specifiers, utils
+from . import version as version_module
+
+if typing.TYPE_CHECKING:
+ from .licenses import NormalizedLicenseExpression
+
+T = typing.TypeVar("T")
+
+
+if sys.version_info >= (3, 11): # pragma: no cover
+ ExceptionGroup = ExceptionGroup # noqa: F821
+else: # pragma: no cover
+
+ class ExceptionGroup(Exception):
+ """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+ If :external:exc:`ExceptionGroup` is already defined by Python itself,
+ that version is used instead.
+ """
+
+ message: str
+ exceptions: list[Exception]
+
+ def __init__(self, message: str, exceptions: list[Exception]) -> None:
+ self.message = message
+ self.exceptions = exceptions
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+
+class InvalidMetadata(ValueError):
+ """A metadata field contains invalid data."""
+
+ field: str
+ """The name of the field that contains invalid data."""
+
+ def __init__(self, field: str, message: str) -> None:
+ self.field = field
+ super().__init__(message)
+
+
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# formats offer some very basic primitives in *some* way then we can support
+# serializing to and from that format.
+class RawMetadata(TypedDict, total=False):
+ """A dictionary of raw core metadata.
+
+ Each field in core metadata maps to a key of this dictionary (when data is
+ provided). The key is lower-case and underscores are used instead of dashes
+ compared to the equivalent core metadata field. Any core metadata field that
+ can be specified multiple times or can hold multiple values in a single
+ field have a key with a plural name. See :class:`Metadata` whose attributes
+ match the keys of this dictionary.
+
+ Core metadata fields that can be specified multiple times are stored as a
+ list or dict depending on which is appropriate for the field. Any fields
+ which hold multiple values in a single field are stored as a list.
+
+ """
+
+ # Metadata 1.0 - PEP 241
+ metadata_version: str
+ name: str
+ version: str
+ platforms: list[str]
+ summary: str
+ description: str
+ keywords: list[str]
+ home_page: str
+ author: str
+ author_email: str
+ license: str
+
+ # Metadata 1.1 - PEP 314
+ supported_platforms: list[str]
+ download_url: str
+ classifiers: list[str]
+ requires: list[str]
+ provides: list[str]
+ obsoletes: list[str]
+
+ # Metadata 1.2 - PEP 345
+ maintainer: str
+ maintainer_email: str
+ requires_dist: list[str]
+ provides_dist: list[str]
+ obsoletes_dist: list[str]
+ requires_python: str
+ requires_external: list[str]
+ project_urls: dict[str, str]
+
+ # Metadata 2.0
+ # PEP 426 attempted to completely revamp the metadata format
+ # but got stuck without ever being able to build consensus on
+ # it and ultimately ended up withdrawn.
+ #
+ # However, a number of tools had started emitting METADATA with
+ # `2.0` Metadata-Version, so for historical reasons, this version
+ # was skipped.
+
+ # Metadata 2.1 - PEP 566
+ description_content_type: str
+ provides_extra: list[str]
+
+ # Metadata 2.2 - PEP 643
+ dynamic: list[str]
+
+ # Metadata 2.3 - PEP 685
+ # No new fields were added in PEP 685, just some edge case were
+ # tightened up to provide better interoperability.
+
+ # Metadata 2.4 - PEP 639
+ license_expression: str
+ license_files: list[str]
+
+ # Metadata 2.5 - PEP 794
+ import_names: list[str]
+ import_namespaces: list[str]
+
+
+# 'keywords' is special as it's a string in the core metadata spec, but we
+# represent it as a list.
+_STRING_FIELDS = {
+ "author",
+ "author_email",
+ "description",
+ "description_content_type",
+ "download_url",
+ "home_page",
+ "license",
+ "license_expression",
+ "maintainer",
+ "maintainer_email",
+ "metadata_version",
+ "name",
+ "requires_python",
+ "summary",
+ "version",
+}
+
+_LIST_FIELDS = {
+ "classifiers",
+ "dynamic",
+ "license_files",
+ "obsoletes",
+ "obsoletes_dist",
+ "platforms",
+ "provides",
+ "provides_dist",
+ "provides_extra",
+ "requires",
+ "requires_dist",
+ "requires_external",
+ "supported_platforms",
+ "import_names",
+ "import_namespaces",
+}
+
+_DICT_FIELDS = {
+ "project_urls",
+}
+
+
+def _parse_keywords(data: str) -> list[str]:
+ """Split a string of comma-separated keywords into a list of keywords."""
+ return [k.strip() for k in data.split(",")]
+
+
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
+ """Parse a list of label/URL string pairings separated by a comma."""
+ urls = {}
+ for pair in data:
+ # Our logic is slightly tricky here as we want to try and do
+ # *something* reasonable with malformed data.
+ #
+ # The main thing that we have to worry about, is data that does
+ # not have a ',' at all to split the label from the Value. There
+ # isn't a singular right answer here, and we will fail validation
+ # later on (if the caller is validating) so it doesn't *really*
+ # matter, but since the missing value has to be an empty str
+ # and our return value is dict[str, str], if we let the key
+ # be the missing value, then they'd have multiple '' values that
+ # overwrite each other in a accumulating dict.
+ #
+ # The other potential issue is that it's possible to have the
+ # same label multiple times in the metadata, with no solid "right"
+ # answer with what to do in that case. As such, we'll do the only
+ # thing we can, which is treat the field as unparsable and add it
+ # to our list of unparsed fields.
+ #
+ # TODO: The spec doesn't say anything about if the keys should be
+ # considered case sensitive or not... logically they should
+ # be case-preserving and case-insensitive, but doing that
+ # would open up more cases where we might have duplicate
+ # entries.
+ label, _, url = (s.strip() for s in pair.partition(","))
+
+ if label in urls:
+ # The label already exists in our set of urls, so this field
+ # is unparsable, and we can just add the whole thing to our
+ # unparsable data and stop processing it.
+ raise KeyError("duplicate labels in project urls")
+ urls[label] = url
+
+ return urls
+
+
+def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
+ """Get the body of the message."""
+ # If our source is a str, then our caller has managed encodings for us,
+ # and we don't need to deal with it.
+ if isinstance(source, str):
+ payload = msg.get_payload()
+ assert isinstance(payload, str)
+ return payload
+ # If our source is a bytes, then we're managing the encoding and we need
+ # to deal with it.
+ else:
+ bpayload = msg.get_payload(decode=True)
+ assert isinstance(bpayload, bytes)
+ try:
+ return bpayload.decode("utf8", "strict")
+ except UnicodeDecodeError as exc:
+ raise ValueError("payload in an invalid encoding") from exc
+
+
+# The various parse_FORMAT functions here are intended to be as lenient as
+# possible in their parsing, while still returning a correctly typed
+# RawMetadata.
+#
+# To aid in this, we also generally want to do as little touching of the
+# data as possible, except where there are possibly some historic holdovers
+# that make valid data awkward to work with.
+#
+# While this is a lower level, intermediate format than our ``Metadata``
+# class, some light touch ups can make a massive difference in usability.
+
+# Map METADATA fields to RawMetadata.
+_EMAIL_TO_RAW_MAPPING = {
+ "author": "author",
+ "author-email": "author_email",
+ "classifier": "classifiers",
+ "description": "description",
+ "description-content-type": "description_content_type",
+ "download-url": "download_url",
+ "dynamic": "dynamic",
+ "home-page": "home_page",
+ "import-name": "import_names",
+ "import-namespace": "import_namespaces",
+ "keywords": "keywords",
+ "license": "license",
+ "license-expression": "license_expression",
+ "license-file": "license_files",
+ "maintainer": "maintainer",
+ "maintainer-email": "maintainer_email",
+ "metadata-version": "metadata_version",
+ "name": "name",
+ "obsoletes": "obsoletes",
+ "obsoletes-dist": "obsoletes_dist",
+ "platform": "platforms",
+ "project-url": "project_urls",
+ "provides": "provides",
+ "provides-dist": "provides_dist",
+ "provides-extra": "provides_extra",
+ "requires": "requires",
+ "requires-dist": "requires_dist",
+ "requires-external": "requires_external",
+ "requires-python": "requires_python",
+ "summary": "summary",
+ "supported-platform": "supported_platforms",
+ "version": "version",
+}
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
+
+
+# This class is for writing RFC822 messages
+class RFC822Policy(email.policy.EmailPolicy):
+ """
+ This is :class:`email.policy.EmailPolicy`, but with a simple ``header_store_parse``
+ implementation that handles multi-line values, and some nice defaults.
+ """
+
+ utf8 = True
+ mangle_from_ = False
+ max_line_length = 0
+
+ def header_store_parse(self, name: str, value: str) -> tuple[str, str]:
+ size = len(name) + 2
+ value = value.replace("\n", "\n" + " " * size)
+ return (name, value)
+
+
+# This class is for writing RFC822 messages
+class RFC822Message(email.message.EmailMessage):
+ """
+ This is :class:`email.message.EmailMessage` with two small changes: it defaults to
+ our `RFC822Policy`, and it correctly writes unicode when being called
+ with `bytes()`.
+ """
+
+ def __init__(self) -> None:
+ super().__init__(policy=RFC822Policy())
+
+ def as_bytes(
+ self, unixfrom: bool = False, policy: email.policy.Policy | None = None
+ ) -> bytes:
+ """
+ Return the bytes representation of the message.
+
+ This handles unicode encoding.
+ """
+ return self.as_string(unixfrom, policy=policy).encode("utf-8")
+
+
+def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
+ """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
+
+ This function returns a two-item tuple of dicts. The first dict is of
+ recognized fields from the core metadata specification. Fields that can be
+ parsed and translated into Python's built-in types are converted
+ appropriately. All other fields are left as-is. Fields that are allowed to
+ appear multiple times are stored as lists.
+
+ The second dict contains all other fields from the metadata. This includes
+ any unrecognized fields. It also includes any fields which are expected to
+ be parsed into a built-in type but were not formatted appropriately. Finally,
+ any fields that are expected to appear only once but are repeated are
+ included in this dict.
+
+ """
+ raw: dict[str, str | list[str] | dict[str, str]] = {}
+ unparsed: dict[str, list[str]] = {}
+
+ if isinstance(data, str):
+ parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
+ else:
+ parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
+
+ # We have to wrap parsed.keys() in a set, because in the case of multiple
+ # values for a key (a list), the key will appear multiple times in the
+ # list of keys, but we're avoiding that by using get_all().
+ for name_with_case in frozenset(parsed.keys()):
+ # Header names in RFC are case insensitive, so we'll normalize to all
+ # lower case to make comparisons easier.
+ name = name_with_case.lower()
+
+ # We use get_all() here, even for fields that aren't multiple use,
+ # because otherwise someone could have e.g. two Name fields, and we
+ # would just silently ignore it rather than doing something about it.
+ headers = parsed.get_all(name) or []
+
+ # The way the email module works when parsing bytes is that it
+ # unconditionally decodes the bytes as ascii using the surrogateescape
+ # handler. When you pull that data back out (such as with get_all() ),
+ # it looks to see if the str has any surrogate escapes, and if it does
+ # it wraps it in a Header object instead of returning the string.
+ #
+ # As such, we'll look for those Header objects, and fix up the encoding.
+ value = []
+ # Flag if we have run into any issues processing the headers, thus
+ # signalling that the data belongs in 'unparsed'.
+ valid_encoding = True
+ for h in headers:
+ # It's unclear if this can return more types than just a Header or
+ # a str, so we'll just assert here to make sure.
+ assert isinstance(h, (email.header.Header, str))
+
+ # If it's a header object, we need to do our little dance to get
+ # the real data out of it. In cases where there is invalid data
+ # we're going to end up with mojibake, but there's no obvious, good
+ # way around that without reimplementing parts of the Header object
+ # ourselves.
+ #
+ # That should be fine since, if mojibacked happens, this key is
+ # going into the unparsed dict anyways.
+ if isinstance(h, email.header.Header):
+ # The Header object stores it's data as chunks, and each chunk
+ # can be independently encoded, so we'll need to check each
+ # of them.
+ chunks: list[tuple[bytes, str | None]] = []
+ for binary, _encoding in email.header.decode_header(h):
+ try:
+ binary.decode("utf8", "strict")
+ except UnicodeDecodeError:
+ # Enable mojibake.
+ encoding = "latin1"
+ valid_encoding = False
+ else:
+ encoding = "utf8"
+ chunks.append((binary, encoding))
+
+ # Turn our chunks back into a Header object, then let that
+ # Header object do the right thing to turn them into a
+ # string for us.
+ value.append(str(email.header.make_header(chunks)))
+ # This is already a string, so just add it.
+ else:
+ value.append(h)
+
+ # We've processed all of our values to get them into a list of str,
+ # but we may have mojibake data, in which case this is an unparsed
+ # field.
+ if not valid_encoding:
+ unparsed[name] = value
+ continue
+
+ raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
+ if raw_name is None:
+ # This is a bit of a weird situation, we've encountered a key that
+ # we don't know what it means, so we don't know whether it's meant
+ # to be a list or not.
+ #
+ # Since we can't really tell one way or another, we'll just leave it
+ # as a list, even though it may be a single item list, because that's
+ # what makes the most sense for email headers.
+ unparsed[name] = value
+ continue
+
+ # If this is one of our string fields, then we'll check to see if our
+ # value is a list of a single item. If it is then we'll assume that
+ # it was emitted as a single string, and unwrap the str from inside
+ # the list.
+ #
+ # If it's any other kind of data, then we haven't the faintest clue
+ # what we should parse it as, and we have to just add it to our list
+ # of unparsed stuff.
+ if raw_name in _STRING_FIELDS and len(value) == 1:
+ raw[raw_name] = value[0]
+ # If this is import_names, we need to special case the empty field
+ # case, which converts to an empty list instead of None. We can't let
+ # the empty case slip through, as it will fail validation.
+ elif raw_name == "import_names" and value == [""]:
+ raw[raw_name] = []
+ # If this is one of our list of string fields, then we can just assign
+ # the value, since email *only* has strings, and our get_all() call
+ # above ensures that this is a list.
+ elif raw_name in _LIST_FIELDS:
+ raw[raw_name] = value
+ # Special Case: Keywords
+ # The keywords field is implemented in the metadata spec as a str,
+ # but it conceptually is a list of strings, and is serialized using
+ # ", ".join(keywords), so we'll do some light data massaging to turn
+ # this into what it logically is.
+ elif raw_name == "keywords" and len(value) == 1:
+ raw[raw_name] = _parse_keywords(value[0])
+ # Special Case: Project-URL
+ # The project urls is implemented in the metadata spec as a list of
+ # specially-formatted strings that represent a key and a value, which
+ # is fundamentally a mapping, however the email format doesn't support
+ # mappings in a sane way, so it was crammed into a list of strings
+ # instead.
+ #
+ # We will do a little light data massaging to turn this into a map as
+ # it logically should be.
+ elif raw_name == "project_urls":
+ try:
+ raw[raw_name] = _parse_project_urls(value)
+ except KeyError:
+ unparsed[name] = value
+ # Nothing that we've done has managed to parse this, so it'll just
+ # throw it in our unparsable data and move on.
+ else:
+ unparsed[name] = value
+
+ # We need to support getting the Description from the message payload in
+ # addition to getting it from the the headers. This does mean, though, there
+ # is the possibility of it being set both ways, in which case we put both
+ # in 'unparsed' since we don't know which is right.
+ try:
+ payload = _get_payload(parsed, data)
+ except ValueError:
+ unparsed.setdefault("description", []).append(
+ parsed.get_payload(decode=isinstance(data, bytes)) # type: ignore[call-overload]
+ )
+ else:
+ if payload:
+ # Check to see if we've already got a description, if so then both
+ # it, and this body move to unparsable.
+ if "description" in raw:
+ description_header = cast("str", raw.pop("description"))
+ unparsed.setdefault("description", []).extend(
+ [description_header, payload]
+ )
+ elif "description" in unparsed:
+ unparsed["description"].append(payload)
+ else:
+ raw["description"] = payload
+
+ # We need to cast our `raw` to a metadata, because a TypedDict only support
+ # literal key names, but we're computing our key names on purpose, but the
+ # way this function is implemented, our `TypedDict` can only have valid key
+ # names.
+ return cast("RawMetadata", raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+ """Validate a metadata field.
+
+ All _process_*() methods correspond to a core metadata field. The method is
+ called with the field's raw value. If the raw value is valid it is returned
+ in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+ If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+ as appropriate).
+ """
+
+ name: str
+ raw_name: str
+ added: _MetadataVersion
+
+ def __init__(
+ self,
+ *,
+ added: _MetadataVersion = "1.0",
+ ) -> None:
+ self.added = added
+
+ def __set_name__(self, _owner: Metadata, name: str) -> None:
+ self.name = name
+ self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+ def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
+ # With Python 3.8, the caching can be replaced with functools.cached_property().
+ # No need to check the cache as attribute lookup will resolve into the
+ # instance's __dict__ before __get__ is called.
+ cache = instance.__dict__
+ value = instance._raw.get(self.name)
+
+ # To make the _process_* methods easier, we'll check if the value is None
+ # and if this field is NOT a required attribute, and if both of those
+ # things are true, we'll skip the the converter. This will mean that the
+ # converters never have to deal with the None union.
+ if self.name in _REQUIRED_ATTRS or value is not None:
+ try:
+ converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+ except AttributeError:
+ pass
+ else:
+ value = converter(value)
+
+ cache[self.name] = value
+ try:
+ del instance._raw[self.name] # type: ignore[misc]
+ except KeyError:
+ pass
+
+ return cast("T", value)
+
+ def _invalid_metadata(
+ self, msg: str, cause: Exception | None = None
+ ) -> InvalidMetadata:
+ exc = InvalidMetadata(
+ self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+ )
+ exc.__cause__ = cause
+ return exc
+
+ def _process_metadata_version(self, value: str) -> _MetadataVersion:
+ # Implicitly makes Metadata-Version required.
+ if value not in _VALID_METADATA_VERSIONS:
+ raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+ return cast("_MetadataVersion", value)
+
+ def _process_name(self, value: str) -> str:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ # Validate the name as a side-effect.
+ try:
+ utils.canonicalize_name(value, validate=True)
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ ) from exc
+ else:
+ return value
+
+ def _process_version(self, value: str) -> version_module.Version:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ try:
+ return version_module.parse(value)
+ except version_module.InvalidVersion as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ ) from exc
+
+ def _process_summary(self, value: str) -> str:
+ """Check the field contains no newlines."""
+ if "\n" in value:
+ raise self._invalid_metadata("{field} must be a single line")
+ return value
+
+ def _process_description_content_type(self, value: str) -> str:
+ content_types = {"text/plain", "text/x-rst", "text/markdown"}
+ message = email.message.EmailMessage()
+ message["content-type"] = value
+
+ content_type, parameters = (
+ # Defaults to `text/plain` if parsing failed.
+ message.get_content_type().lower(),
+ message["content-type"].params,
+ )
+ # Check if content-type is valid or defaulted to `text/plain` and thus was
+ # not parseable.
+ if content_type not in content_types or content_type not in value.lower():
+ raise self._invalid_metadata(
+ f"{{field}} must be one of {list(content_types)}, not {value!r}"
+ )
+
+ charset = parameters.get("charset", "UTF-8")
+ if charset != "UTF-8":
+ raise self._invalid_metadata(
+ f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+ )
+
+ markdown_variants = {"GFM", "CommonMark"}
+ variant = parameters.get("variant", "GFM") # Use an acceptable default.
+ if content_type == "text/markdown" and variant not in markdown_variants:
+ raise self._invalid_metadata(
+ f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+ f"not {variant!r}",
+ )
+ return value
+
+ def _process_dynamic(self, value: list[str]) -> list[str]:
+ for dynamic_field in map(str.lower, value):
+ if dynamic_field in {"name", "version", "metadata-version"}:
+ raise self._invalid_metadata(
+ f"{dynamic_field!r} is not allowed as a dynamic field"
+ )
+ elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+ raise self._invalid_metadata(
+ f"{dynamic_field!r} is not a valid dynamic field"
+ )
+ return list(map(str.lower, value))
+
+ def _process_provides_extra(
+ self,
+ value: list[str],
+ ) -> list[utils.NormalizedName]:
+ normalized_names = []
+ try:
+ for name in value:
+ normalized_names.append(utils.canonicalize_name(name, validate=True))
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{name!r} is invalid for {{field}}", cause=exc
+ ) from exc
+ else:
+ return normalized_names
+
+ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+ try:
+ return specifiers.SpecifierSet(value)
+ except specifiers.InvalidSpecifier as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ ) from exc
+
+ def _process_requires_dist(
+ self,
+ value: list[str],
+ ) -> list[requirements.Requirement]:
+ reqs = []
+ try:
+ for req in value:
+ reqs.append(requirements.Requirement(req))
+ except requirements.InvalidRequirement as exc:
+ raise self._invalid_metadata(
+ f"{req!r} is invalid for {{field}}", cause=exc
+ ) from exc
+ else:
+ return reqs
+
+ def _process_license_expression(self, value: str) -> NormalizedLicenseExpression:
+ try:
+ return licenses.canonicalize_license_expression(value)
+ except ValueError as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ ) from exc
+
+ def _process_license_files(self, value: list[str]) -> list[str]:
+ paths = []
+ for path in value:
+ if ".." in path:
+ raise self._invalid_metadata(
+ f"{path!r} is invalid for {{field}}, "
+ "parent directory indicators are not allowed"
+ )
+ if "*" in path:
+ raise self._invalid_metadata(
+ f"{path!r} is invalid for {{field}}, paths must be resolved"
+ )
+ if (
+ pathlib.PurePosixPath(path).is_absolute()
+ or pathlib.PureWindowsPath(path).is_absolute()
+ ):
+ raise self._invalid_metadata(
+ f"{path!r} is invalid for {{field}}, paths must be relative"
+ )
+ if pathlib.PureWindowsPath(path).as_posix() != path:
+ raise self._invalid_metadata(
+ f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"
+ )
+ paths.append(path)
+ return paths
+
+ def _process_import_names(self, value: list[str]) -> list[str]:
+ for import_name in value:
+ name, semicolon, private = import_name.partition(";")
+ name = name.rstrip()
+ for identifier in name.split("."):
+ if not identifier.isidentifier():
+ raise self._invalid_metadata(
+ f"{name!r} is invalid for {{field}}; "
+ f"{identifier!r} is not a valid identifier"
+ )
+ elif keyword.iskeyword(identifier):
+ raise self._invalid_metadata(
+ f"{name!r} is invalid for {{field}}; "
+ f"{identifier!r} is a keyword"
+ )
+ if semicolon and private.lstrip() != "private":
+ raise self._invalid_metadata(
+ f"{import_name!r} is invalid for {{field}}; "
+ "the only valid option is 'private'"
+ )
+ return value
+
+ _process_import_namespaces = _process_import_names
+
+
+class Metadata:
+ """Representation of distribution metadata.
+
+ Compared to :class:`RawMetadata`, this class provides objects representing
+ metadata fields instead of only using built-in types. Any invalid metadata
+ will cause :exc:`InvalidMetadata` to be raised (with a
+ :py:attr:`~BaseException.__cause__` attribute as appropriate).
+ """
+
+ _raw: RawMetadata
+
+ @classmethod
+ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
+ """Create an instance from :class:`RawMetadata`.
+
+ If *validate* is true, all metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ ins = cls()
+ ins._raw = data.copy() # Mutations occur due to caching enriched values.
+
+ if validate:
+ exceptions: list[Exception] = []
+ try:
+ metadata_version = ins.metadata_version
+ metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+ except InvalidMetadata as metadata_version_exc:
+ exceptions.append(metadata_version_exc)
+ metadata_version = None
+
+ # Make sure to check for the fields that are present, the required
+ # fields (so their absence can be reported).
+ fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+ # Remove fields that have already been checked.
+ fields_to_check -= {"metadata_version"}
+
+ for key in fields_to_check:
+ try:
+ if metadata_version:
+ # Can't use getattr() as that triggers descriptor protocol which
+ # will fail due to no value for the instance argument.
+ try:
+ field_metadata_version = cls.__dict__[key].added
+ except KeyError:
+ exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+ exceptions.append(exc)
+ continue
+ field_age = _VALID_METADATA_VERSIONS.index(
+ field_metadata_version
+ )
+ if field_age > metadata_age:
+ field = _RAW_TO_EMAIL_MAPPING[key]
+ exc = InvalidMetadata(
+ field,
+ f"{field} introduced in metadata version "
+ f"{field_metadata_version}, not {metadata_version}",
+ )
+ exceptions.append(exc)
+ continue
+ getattr(ins, key)
+ except InvalidMetadata as exc:
+ exceptions.append(exc)
+
+ if exceptions:
+ raise ExceptionGroup("invalid metadata", exceptions)
+
+ return ins
+
+ @classmethod
+ def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
+ """Parse metadata from email headers.
+
+ If *validate* is true, the metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ raw, unparsed = parse_email(data)
+
+ if validate:
+ exceptions: list[Exception] = []
+ for unparsed_key in unparsed:
+ if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+ message = f"{unparsed_key!r} has invalid data"
+ else:
+ message = f"unrecognized field: {unparsed_key!r}"
+ exceptions.append(InvalidMetadata(unparsed_key, message))
+
+ if exceptions:
+ raise ExceptionGroup("unparsed", exceptions)
+
+ try:
+ return cls.from_raw(raw, validate=validate)
+ except ExceptionGroup as exc_group:
+ raise ExceptionGroup(
+ "invalid or unparsed metadata", exc_group.exceptions
+ ) from None
+
+ metadata_version: _Validator[_MetadataVersion] = _Validator()
+ """:external:ref:`core-metadata-metadata-version`
+ (required; validated to be a valid metadata version)"""
+ # `name` is not normalized/typed to NormalizedName so as to provide access to
+ # the original/raw name.
+ name: _Validator[str] = _Validator()
+ """:external:ref:`core-metadata-name`
+ (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+ *validate* parameter)"""
+ version: _Validator[version_module.Version] = _Validator()
+ """:external:ref:`core-metadata-version` (required)"""
+ dynamic: _Validator[list[str] | None] = _Validator(
+ added="2.2",
+ )
+ """:external:ref:`core-metadata-dynamic`
+ (validated against core metadata field names and lowercased)"""
+ platforms: _Validator[list[str] | None] = _Validator()
+ """:external:ref:`core-metadata-platform`"""
+ supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-supported-platform`"""
+ summary: _Validator[str | None] = _Validator()
+ """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+ description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body
+ """:external:ref:`core-metadata-description`"""
+ description_content_type: _Validator[str | None] = _Validator(added="2.1")
+ """:external:ref:`core-metadata-description-content-type` (validated)"""
+ keywords: _Validator[list[str] | None] = _Validator()
+ """:external:ref:`core-metadata-keywords`"""
+ home_page: _Validator[str | None] = _Validator()
+ """:external:ref:`core-metadata-home-page`"""
+ download_url: _Validator[str | None] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-download-url`"""
+ author: _Validator[str | None] = _Validator()
+ """:external:ref:`core-metadata-author`"""
+ author_email: _Validator[str | None] = _Validator()
+ """:external:ref:`core-metadata-author-email`"""
+ maintainer: _Validator[str | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer`"""
+ maintainer_email: _Validator[str | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer-email`"""
+ license: _Validator[str | None] = _Validator()
+ """:external:ref:`core-metadata-license`"""
+ license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
+ added="2.4"
+ )
+ """:external:ref:`core-metadata-license-expression`"""
+ license_files: _Validator[list[str] | None] = _Validator(added="2.4")
+ """:external:ref:`core-metadata-license-file`"""
+ classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-classifier`"""
+ requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-dist`"""
+ requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-python`"""
+ # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+ # don't do any processing on the values.
+ requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-requires-external`"""
+ project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-project-url`"""
+ # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+ # regardless of metadata version.
+ provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
+ added="2.1",
+ )
+ """:external:ref:`core-metadata-provides-extra`"""
+ provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-provides-dist`"""
+ obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-obsoletes-dist`"""
+ import_names: _Validator[list[str] | None] = _Validator(added="2.5")
+ """:external:ref:`core-metadata-import-name`"""
+ import_namespaces: _Validator[list[str] | None] = _Validator(added="2.5")
+ """:external:ref:`core-metadata-import-namespace`"""
+ requires: _Validator[list[str] | None] = _Validator(added="1.1")
+ """``Requires`` (deprecated)"""
+ provides: _Validator[list[str] | None] = _Validator(added="1.1")
+ """``Provides`` (deprecated)"""
+ obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
+ """``Obsoletes`` (deprecated)"""
+
+ def as_rfc822(self) -> RFC822Message:
+ """
+ Return an RFC822 message with the metadata.
+ """
+ message = RFC822Message()
+ self._write_metadata(message)
+ return message
+
+ def _write_metadata(self, message: RFC822Message) -> None:
+ """
+ Return an RFC822 message with the metadata.
+ """
+ for name, validator in self.__class__.__dict__.items():
+ if isinstance(validator, _Validator) and name != "description":
+ value = getattr(self, name)
+ email_name = _RAW_TO_EMAIL_MAPPING[name]
+ if value is not None:
+ if email_name == "project-url":
+ for label, url in value.items():
+ message[email_name] = f"{label}, {url}"
+ elif email_name == "keywords":
+ message[email_name] = ",".join(value)
+ elif email_name == "import-name" and value == []:
+ message[email_name] = ""
+ elif isinstance(value, list):
+ for item in value:
+ message[email_name] = str(item)
+ else:
+ message[email_name] = str(value)
+
+ # The description is a special case because it is in the body of the message.
+ if self.description is not None:
+ message.set_payload(self.description)
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/py.typed b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py
new file mode 100644
index 0000000000000000000000000000000000000000..a564f15246ad65038029f8fefb48621fa64a3abd
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py
@@ -0,0 +1,635 @@
+from __future__ import annotations
+
+import dataclasses
+import logging
+import re
+from collections.abc import Mapping, Sequence
+from dataclasses import dataclass
+from datetime import datetime
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Protocol,
+ TypeVar,
+)
+
+from .markers import Marker
+from .specifiers import SpecifierSet
+from .utils import NormalizedName, is_normalized_name
+from .version import Version
+
+if TYPE_CHECKING: # pragma: no cover
+ from pathlib import Path
+
+ from typing_extensions import Self
+
+_logger = logging.getLogger(__name__)
+
+__all__ = [
+ "Package",
+ "PackageArchive",
+ "PackageDirectory",
+ "PackageSdist",
+ "PackageVcs",
+ "PackageWheel",
+ "Pylock",
+ "PylockUnsupportedVersionError",
+ "PylockValidationError",
+ "is_valid_pylock_path",
+]
+
+_T = TypeVar("_T")
+_T2 = TypeVar("_T2")
+
+
+class _FromMappingProtocol(Protocol): # pragma: no cover
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self: ...
+
+
+_FromMappingProtocolT = TypeVar("_FromMappingProtocolT", bound=_FromMappingProtocol)
+
+
+_PYLOCK_FILE_NAME_RE = re.compile(r"^pylock\.([^.]+)\.toml$")
+
+
+def is_valid_pylock_path(path: Path) -> bool:
+ """Check if the given path is a valid pylock file path."""
+ return path.name == "pylock.toml" or bool(_PYLOCK_FILE_NAME_RE.match(path.name))
+
+
+def _toml_key(key: str) -> str:
+ return key.replace("_", "-")
+
+
+def _toml_value(key: str, value: Any) -> Any: # noqa: ANN401
+ if isinstance(value, (Version, Marker, SpecifierSet)):
+ return str(value)
+ if isinstance(value, Sequence) and key == "environments":
+ return [str(v) for v in value]
+ return value
+
+
+def _toml_dict_factory(data: list[tuple[str, Any]]) -> dict[str, Any]:
+ return {
+ _toml_key(key): _toml_value(key, value)
+ for key, value in data
+ if value is not None
+ }
+
+
+def _get(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T | None:
+ """Get a value from the dictionary and verify it's the expected type."""
+ if (value := d.get(key)) is None:
+ return None
+ if not isinstance(value, expected_type):
+ raise PylockValidationError(
+ f"Unexpected type {type(value).__name__} "
+ f"(expected {expected_type.__name__})",
+ context=key,
+ )
+ return value
+
+
+def _get_required(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T:
+ """Get a required value from the dictionary and verify it's the expected type."""
+ if (value := _get(d, expected_type, key)) is None:
+ raise _PylockRequiredKeyError(key)
+ return value
+
+
+def _get_sequence(
+ d: Mapping[str, Any], expected_item_type: type[_T], key: str
+) -> Sequence[_T] | None:
+ """Get a list value from the dictionary and verify it's the expected items type."""
+ if (value := _get(d, Sequence, key)) is None: # type: ignore[type-abstract]
+ return None
+ if isinstance(value, (str, bytes)):
+ # special case: str and bytes are Sequences, but we want to reject it
+ raise PylockValidationError(
+ f"Unexpected type {type(value).__name__} (expected Sequence)",
+ context=key,
+ )
+ for i, item in enumerate(value):
+ if not isinstance(item, expected_item_type):
+ raise PylockValidationError(
+ f"Unexpected type {type(item).__name__} "
+ f"(expected {expected_item_type.__name__})",
+ context=f"{key}[{i}]",
+ )
+ return value
+
+
+def _get_as(
+ d: Mapping[str, Any],
+ expected_type: type[_T],
+ target_type: Callable[[_T], _T2],
+ key: str,
+) -> _T2 | None:
+ """Get a value from the dictionary, verify it's the expected type,
+ and convert to the target type.
+
+ This assumes the target_type constructor accepts the value.
+ """
+ if (value := _get(d, expected_type, key)) is None:
+ return None
+ try:
+ return target_type(value)
+ except Exception as e:
+ raise PylockValidationError(e, context=key) from e
+
+
+def _get_required_as(
+ d: Mapping[str, Any],
+ expected_type: type[_T],
+ target_type: Callable[[_T], _T2],
+ key: str,
+) -> _T2:
+ """Get a required value from the dict, verify it's the expected type,
+ and convert to the target type."""
+ if (value := _get_as(d, expected_type, target_type, key)) is None:
+ raise _PylockRequiredKeyError(key)
+ return value
+
+
+def _get_sequence_as(
+ d: Mapping[str, Any],
+ expected_item_type: type[_T],
+ target_item_type: Callable[[_T], _T2],
+ key: str,
+) -> list[_T2] | None:
+ """Get list value from dictionary and verify expected items type."""
+ if (value := _get_sequence(d, expected_item_type, key)) is None:
+ return None
+ result = []
+ try:
+ for item in value:
+ typed_item = target_item_type(item)
+ result.append(typed_item)
+ except Exception as e:
+ raise PylockValidationError(e, context=f"{key}[{len(result)}]") from e
+ return result
+
+
+def _get_object(
+ d: Mapping[str, Any], target_type: type[_FromMappingProtocolT], key: str
+) -> _FromMappingProtocolT | None:
+ """Get a dictionary value from the dictionary and convert it to a dataclass."""
+ if (value := _get(d, Mapping, key)) is None: # type: ignore[type-abstract]
+ return None
+ try:
+ return target_type._from_dict(value)
+ except Exception as e:
+ raise PylockValidationError(e, context=key) from e
+
+
+def _get_sequence_of_objects(
+ d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str
+) -> list[_FromMappingProtocolT] | None:
+ """Get a list value from the dictionary and convert its items to a dataclass."""
+ if (value := _get_sequence(d, Mapping, key)) is None: # type: ignore[type-abstract]
+ return None
+ result: list[_FromMappingProtocolT] = []
+ try:
+ for item in value:
+ typed_item = target_item_type._from_dict(item)
+ result.append(typed_item)
+ except Exception as e:
+ raise PylockValidationError(e, context=f"{key}[{len(result)}]") from e
+ return result
+
+
+def _get_required_sequence_of_objects(
+ d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str
+) -> Sequence[_FromMappingProtocolT]:
+ """Get a required list value from the dictionary and convert its items to a
+ dataclass."""
+ if (result := _get_sequence_of_objects(d, target_item_type, key)) is None:
+ raise _PylockRequiredKeyError(key)
+ return result
+
+
+def _validate_normalized_name(name: str) -> NormalizedName:
+ """Validate that a string is a NormalizedName."""
+ if not is_normalized_name(name):
+ raise PylockValidationError(f"Name {name!r} is not normalized")
+ return NormalizedName(name)
+
+
+def _validate_path_url(path: str | None, url: str | None) -> None:
+ if not path and not url:
+ raise PylockValidationError("path or url must be provided")
+
+
+def _validate_hashes(hashes: Mapping[str, Any]) -> Mapping[str, Any]:
+ if not hashes:
+ raise PylockValidationError("At least one hash must be provided")
+ if not all(isinstance(hash_val, str) for hash_val in hashes.values()):
+ raise PylockValidationError("Hash values must be strings")
+ return hashes
+
+
+class PylockValidationError(Exception):
+ """Raised when when input data is not spec-compliant."""
+
+ context: str | None = None
+ message: str
+
+ def __init__(
+ self,
+ cause: str | Exception,
+ *,
+ context: str | None = None,
+ ) -> None:
+ if isinstance(cause, PylockValidationError):
+ if cause.context:
+ self.context = (
+ f"{context}.{cause.context}" if context else cause.context
+ )
+ else:
+ self.context = context
+ self.message = cause.message
+ else:
+ self.context = context
+ self.message = str(cause)
+
+ def __str__(self) -> str:
+ if self.context:
+ return f"{self.message} in {self.context!r}"
+ return self.message
+
+
+class _PylockRequiredKeyError(PylockValidationError):
+ def __init__(self, key: str) -> None:
+ super().__init__("Missing required value", context=key)
+
+
+class PylockUnsupportedVersionError(PylockValidationError):
+ """Raised when encountering an unsupported `lock_version`."""
+
+
+@dataclass(frozen=True, init=False)
+class PackageVcs:
+ type: str
+ url: str | None = None
+ path: str | None = None
+ requested_revision: str | None = None
+ commit_id: str # type: ignore[misc]
+ subdirectory: str | None = None
+
+ def __init__(
+ self,
+ *,
+ type: str,
+ url: str | None = None,
+ path: str | None = None,
+ requested_revision: str | None = None,
+ commit_id: str,
+ subdirectory: str | None = None,
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "type", type)
+ object.__setattr__(self, "url", url)
+ object.__setattr__(self, "path", path)
+ object.__setattr__(self, "requested_revision", requested_revision)
+ object.__setattr__(self, "commit_id", commit_id)
+ object.__setattr__(self, "subdirectory", subdirectory)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ package_vcs = cls(
+ type=_get_required(d, str, "type"),
+ url=_get(d, str, "url"),
+ path=_get(d, str, "path"),
+ requested_revision=_get(d, str, "requested-revision"),
+ commit_id=_get_required(d, str, "commit-id"),
+ subdirectory=_get(d, str, "subdirectory"),
+ )
+ _validate_path_url(package_vcs.path, package_vcs.url)
+ return package_vcs
+
+
+@dataclass(frozen=True, init=False)
+class PackageDirectory:
+ path: str
+ editable: bool | None = None
+ subdirectory: str | None = None
+
+ def __init__(
+ self,
+ *,
+ path: str,
+ editable: bool | None = None,
+ subdirectory: str | None = None,
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "path", path)
+ object.__setattr__(self, "editable", editable)
+ object.__setattr__(self, "subdirectory", subdirectory)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ return cls(
+ path=_get_required(d, str, "path"),
+ editable=_get(d, bool, "editable"),
+ subdirectory=_get(d, str, "subdirectory"),
+ )
+
+
+@dataclass(frozen=True, init=False)
+class PackageArchive:
+ url: str | None = None
+ path: str | None = None
+ size: int | None = None
+ upload_time: datetime | None = None
+ hashes: Mapping[str, str] # type: ignore[misc]
+ subdirectory: str | None = None
+
+ def __init__(
+ self,
+ *,
+ url: str | None = None,
+ path: str | None = None,
+ size: int | None = None,
+ upload_time: datetime | None = None,
+ hashes: Mapping[str, str],
+ subdirectory: str | None = None,
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "url", url)
+ object.__setattr__(self, "path", path)
+ object.__setattr__(self, "size", size)
+ object.__setattr__(self, "upload_time", upload_time)
+ object.__setattr__(self, "hashes", hashes)
+ object.__setattr__(self, "subdirectory", subdirectory)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ package_archive = cls(
+ url=_get(d, str, "url"),
+ path=_get(d, str, "path"),
+ size=_get(d, int, "size"),
+ upload_time=_get(d, datetime, "upload-time"),
+ hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract]
+ subdirectory=_get(d, str, "subdirectory"),
+ )
+ _validate_path_url(package_archive.path, package_archive.url)
+ return package_archive
+
+
+@dataclass(frozen=True, init=False)
+class PackageSdist:
+ name: str | None = None
+ upload_time: datetime | None = None
+ url: str | None = None
+ path: str | None = None
+ size: int | None = None
+ hashes: Mapping[str, str] # type: ignore[misc]
+
+ def __init__(
+ self,
+ *,
+ name: str | None = None,
+ upload_time: datetime | None = None,
+ url: str | None = None,
+ path: str | None = None,
+ size: int | None = None,
+ hashes: Mapping[str, str],
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "name", name)
+ object.__setattr__(self, "upload_time", upload_time)
+ object.__setattr__(self, "url", url)
+ object.__setattr__(self, "path", path)
+ object.__setattr__(self, "size", size)
+ object.__setattr__(self, "hashes", hashes)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ package_sdist = cls(
+ name=_get(d, str, "name"),
+ upload_time=_get(d, datetime, "upload-time"),
+ url=_get(d, str, "url"),
+ path=_get(d, str, "path"),
+ size=_get(d, int, "size"),
+ hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract]
+ )
+ _validate_path_url(package_sdist.path, package_sdist.url)
+ return package_sdist
+
+
+@dataclass(frozen=True, init=False)
+class PackageWheel:
+ name: str | None = None
+ upload_time: datetime | None = None
+ url: str | None = None
+ path: str | None = None
+ size: int | None = None
+ hashes: Mapping[str, str] # type: ignore[misc]
+
+ def __init__(
+ self,
+ *,
+ name: str | None = None,
+ upload_time: datetime | None = None,
+ url: str | None = None,
+ path: str | None = None,
+ size: int | None = None,
+ hashes: Mapping[str, str],
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "name", name)
+ object.__setattr__(self, "upload_time", upload_time)
+ object.__setattr__(self, "url", url)
+ object.__setattr__(self, "path", path)
+ object.__setattr__(self, "size", size)
+ object.__setattr__(self, "hashes", hashes)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ package_wheel = cls(
+ name=_get(d, str, "name"),
+ upload_time=_get(d, datetime, "upload-time"),
+ url=_get(d, str, "url"),
+ path=_get(d, str, "path"),
+ size=_get(d, int, "size"),
+ hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract]
+ )
+ _validate_path_url(package_wheel.path, package_wheel.url)
+ return package_wheel
+
+
+@dataclass(frozen=True, init=False)
+class Package:
+ name: NormalizedName
+ version: Version | None = None
+ marker: Marker | None = None
+ requires_python: SpecifierSet | None = None
+ dependencies: Sequence[Mapping[str, Any]] | None = None
+ vcs: PackageVcs | None = None
+ directory: PackageDirectory | None = None
+ archive: PackageArchive | None = None
+ index: str | None = None
+ sdist: PackageSdist | None = None
+ wheels: Sequence[PackageWheel] | None = None
+ attestation_identities: Sequence[Mapping[str, Any]] | None = None
+ tool: Mapping[str, Any] | None = None
+
+ def __init__(
+ self,
+ *,
+ name: NormalizedName,
+ version: Version | None = None,
+ marker: Marker | None = None,
+ requires_python: SpecifierSet | None = None,
+ dependencies: Sequence[Mapping[str, Any]] | None = None,
+ vcs: PackageVcs | None = None,
+ directory: PackageDirectory | None = None,
+ archive: PackageArchive | None = None,
+ index: str | None = None,
+ sdist: PackageSdist | None = None,
+ wheels: Sequence[PackageWheel] | None = None,
+ attestation_identities: Sequence[Mapping[str, Any]] | None = None,
+ tool: Mapping[str, Any] | None = None,
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "name", name)
+ object.__setattr__(self, "version", version)
+ object.__setattr__(self, "marker", marker)
+ object.__setattr__(self, "requires_python", requires_python)
+ object.__setattr__(self, "dependencies", dependencies)
+ object.__setattr__(self, "vcs", vcs)
+ object.__setattr__(self, "directory", directory)
+ object.__setattr__(self, "archive", archive)
+ object.__setattr__(self, "index", index)
+ object.__setattr__(self, "sdist", sdist)
+ object.__setattr__(self, "wheels", wheels)
+ object.__setattr__(self, "attestation_identities", attestation_identities)
+ object.__setattr__(self, "tool", tool)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ package = cls(
+ name=_get_required_as(d, str, _validate_normalized_name, "name"),
+ version=_get_as(d, str, Version, "version"),
+ requires_python=_get_as(d, str, SpecifierSet, "requires-python"),
+ dependencies=_get_sequence(d, Mapping, "dependencies"), # type: ignore[type-abstract]
+ marker=_get_as(d, str, Marker, "marker"),
+ vcs=_get_object(d, PackageVcs, "vcs"),
+ directory=_get_object(d, PackageDirectory, "directory"),
+ archive=_get_object(d, PackageArchive, "archive"),
+ index=_get(d, str, "index"),
+ sdist=_get_object(d, PackageSdist, "sdist"),
+ wheels=_get_sequence_of_objects(d, PackageWheel, "wheels"),
+ attestation_identities=_get_sequence(d, Mapping, "attestation-identities"), # type: ignore[type-abstract]
+ tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract]
+ )
+ distributions = bool(package.sdist) + len(package.wheels or [])
+ direct_urls = (
+ bool(package.vcs) + bool(package.directory) + bool(package.archive)
+ )
+ if distributions > 0 and direct_urls > 0:
+ raise PylockValidationError(
+ "None of vcs, directory, archive must be set if sdist or wheels are set"
+ )
+ if distributions == 0 and direct_urls != 1:
+ raise PylockValidationError(
+ "Exactly one of vcs, directory, archive must be set "
+ "if sdist and wheels are not set"
+ )
+ try:
+ for i, attestation_identity in enumerate( # noqa: B007
+ package.attestation_identities or []
+ ):
+ _get_required(attestation_identity, str, "kind")
+ except Exception as e:
+ raise PylockValidationError(
+ e, context=f"attestation-identities[{i}]"
+ ) from e
+ return package
+
+ @property
+ def is_direct(self) -> bool:
+ return not (self.sdist or self.wheels)
+
+
+@dataclass(frozen=True, init=False)
+class Pylock:
+ """A class representing a pylock file."""
+
+ lock_version: Version
+ environments: Sequence[Marker] | None = None
+ requires_python: SpecifierSet | None = None
+ extras: Sequence[NormalizedName] | None = None
+ dependency_groups: Sequence[str] | None = None
+ default_groups: Sequence[str] | None = None
+ created_by: str # type: ignore[misc]
+ packages: Sequence[Package] # type: ignore[misc]
+ tool: Mapping[str, Any] | None = None
+
+ def __init__(
+ self,
+ *,
+ lock_version: Version,
+ environments: Sequence[Marker] | None = None,
+ requires_python: SpecifierSet | None = None,
+ extras: Sequence[NormalizedName] | None = None,
+ dependency_groups: Sequence[str] | None = None,
+ default_groups: Sequence[str] | None = None,
+ created_by: str,
+ packages: Sequence[Package],
+ tool: Mapping[str, Any] | None = None,
+ ) -> None:
+ # In Python 3.10+ make dataclass kw_only=True and remove __init__
+ object.__setattr__(self, "lock_version", lock_version)
+ object.__setattr__(self, "environments", environments)
+ object.__setattr__(self, "requires_python", requires_python)
+ object.__setattr__(self, "extras", extras)
+ object.__setattr__(self, "dependency_groups", dependency_groups)
+ object.__setattr__(self, "default_groups", default_groups)
+ object.__setattr__(self, "created_by", created_by)
+ object.__setattr__(self, "packages", packages)
+ object.__setattr__(self, "tool", tool)
+
+ @classmethod
+ def _from_dict(cls, d: Mapping[str, Any]) -> Self:
+ pylock = cls(
+ lock_version=_get_required_as(d, str, Version, "lock-version"),
+ environments=_get_sequence_as(d, str, Marker, "environments"),
+ extras=_get_sequence_as(d, str, _validate_normalized_name, "extras"),
+ dependency_groups=_get_sequence(d, str, "dependency-groups"),
+ default_groups=_get_sequence(d, str, "default-groups"),
+ created_by=_get_required(d, str, "created-by"),
+ requires_python=_get_as(d, str, SpecifierSet, "requires-python"),
+ packages=_get_required_sequence_of_objects(d, Package, "packages"),
+ tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract]
+ )
+ if not Version("1") <= pylock.lock_version < Version("2"):
+ raise PylockUnsupportedVersionError(
+ f"pylock version {pylock.lock_version} is not supported"
+ )
+ if pylock.lock_version > Version("1.0"):
+ _logger.warning(
+ "pylock minor version %s is not supported", pylock.lock_version
+ )
+ return pylock
+
+ @classmethod
+ def from_dict(cls, d: Mapping[str, Any], /) -> Self:
+ """Create and validate a Pylock instance from a TOML dictionary.
+
+ Raises :class:`PylockValidationError` if the input data is not
+ spec-compliant.
+ """
+ return cls._from_dict(d)
+
+ def to_dict(self) -> Mapping[str, Any]:
+ """Convert the Pylock instance to a TOML dictionary."""
+ return dataclasses.asdict(self, dict_factory=_toml_dict_factory)
+
+ def validate(self) -> None:
+ """Validate the Pylock instance against the specification.
+
+ Raises :class:`PylockValidationError` otherwise."""
+ self.from_dict(self.to_dict())
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py
new file mode 100644
index 0000000000000000000000000000000000000000..3079be69bf880f47e64dbf62993f0e54754b7315
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py
@@ -0,0 +1,86 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import annotations
+
+from typing import Iterator
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found, users should refer to PEP 508.
+ """
+
+
+class Requirement:
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string: str) -> None:
+ try:
+ parsed = _parse_requirement(requirement_string)
+ except ParserSyntaxError as e:
+ raise InvalidRequirement(str(e)) from e
+
+ self.name: str = parsed.name
+ self.url: str | None = parsed.url or None
+ self.extras: set[str] = set(parsed.extras or [])
+ self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
+ self.marker: Marker | None = None
+ if parsed.marker is not None:
+ self.marker = Marker.__new__(Marker)
+ self.marker._markers = _normalize_extra_values(parsed.marker)
+
+ def _iter_parts(self, name: str) -> Iterator[str]:
+ yield name
+
+ if self.extras:
+ formatted_extras = ",".join(sorted(self.extras))
+ yield f"[{formatted_extras}]"
+
+ if self.specifier:
+ yield str(self.specifier)
+
+ if self.url:
+ yield f" @ {self.url}"
+ if self.marker:
+ yield " "
+
+ if self.marker:
+ yield f"; {self.marker}"
+
+ def __str__(self) -> str:
+ return "".join(self._iter_parts(self.name))
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}('{self}')>"
+
+ def __hash__(self) -> int:
+ return hash(tuple(self._iter_parts(canonicalize_name(self.name))))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Requirement):
+ return NotImplemented
+
+ return (
+ canonicalize_name(self.name) == canonicalize_name(other.name)
+ and self.extras == other.extras
+ and self.specifier == other.specifier
+ and self.url == other.url
+ and self.marker == other.marker
+ )
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d26b0d1ae2d21b77e24b692d5a7e1fd01296edc
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py
@@ -0,0 +1,1068 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+ from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+ from packaging.version import Version
+"""
+
+from __future__ import annotations
+
+import abc
+import itertools
+import re
+from typing import Callable, Final, Iterable, Iterator, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import InvalidVersion, Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version | None:
+ if not isinstance(version, Version):
+ try:
+ version = Version(version)
+ except InvalidVersion:
+ return None
+ return version
+
+
+def _public_version(version: Version) -> Version:
+ return version.__replace__(local=None)
+
+
+def _base_version(version: Version) -> Version:
+ return version.__replace__(pre=None, post=None, dev=None, local=None)
+
+
+class InvalidSpecifier(ValueError):
+ """
+ Raised when attempting to create a :class:`Specifier` with a specifier
+ string that is invalid.
+
+ >>> Specifier("lolwat")
+ Traceback (most recent call last):
+ ...
+ packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+ """
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+ __slots__ = ()
+ __match_args__ = ("_str",)
+
+ @property
+ def _str(self) -> str:
+ """Internal property for match_args"""
+ return str(self)
+
+ @abc.abstractmethod
+ def __str__(self) -> str:
+ """
+ Returns the str representation of this Specifier-like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self) -> int:
+ """
+ Returns a hash value for this Specifier-like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other: object) -> bool:
+ """
+ Returns a boolean representing whether or not the two Specifier-like
+ objects are equal.
+
+ :param other: The other object to check against.
+ """
+
+ @property
+ @abc.abstractmethod
+ def prereleases(self) -> bool | None:
+ """Whether or not pre-releases as a whole are allowed.
+
+ This can be set to either ``True`` or ``False`` to explicitly enable or disable
+ prereleases or it can be set to ``None`` (the default) to use default semantics.
+ """
+
+ @prereleases.setter # noqa: B027
+ def prereleases(self, value: bool) -> None:
+ """Setter for :attr:`prereleases`.
+
+ :param value: The value to set.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item: str, prereleases: bool | None = None) -> bool:
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(
+ self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
+ ) -> Iterator[UnparsedVersionVar]:
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class Specifier(BaseSpecifier):
+ """This class abstracts handling of version specifiers.
+
+ .. tip::
+
+ It is generally not required to instantiate this manually. You should instead
+ prefer to work with :class:`SpecifierSet` instead, which can parse
+ comma-separated version specifiers (which is what package metadata contains).
+ """
+
+ __slots__ = ("_prereleases", "_spec", "_spec_version")
+
+ _operator_regex_str = r"""
+ (?P(~=|==|!=|<=|>=|<|>|===))
+ """
+ _version_regex_str = r"""
+ (?P
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s;)]* # The arbitrary version can be just about anything,
+ # we match everything except for whitespace, a
+ # semi-colon for marker support, and a closing paren
+ # since versions can be enclosed in them.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+
+ # You cannot use a wild card and a pre-release, post-release, a dev or
+ # local version together so group them with a | and make them optional.
+ (?:
+ \.\* # Wild card syntax of .*
+ |
+ (?: # pre release
+ [-_\.]?
+ (alpha|beta|preview|pre|a|b|c|rc)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (alpha|beta|preview|pre|a|b|c|rc)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a sub set of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
+ """Initialize a Specifier instance.
+
+ :param spec:
+ The string representation of a specifier which will be parsed and
+ normalized before use.
+ :param prereleases:
+ This tells the specifier if it should accept prerelease versions if
+ applicable or not. The default of ``None`` will autodetect it from the
+ given specifiers.
+ :raises InvalidSpecifier:
+ If the given specifier is invalid (i.e. bad syntax).
+ """
+ match = self._regex.fullmatch(spec)
+ if not match:
+ raise InvalidSpecifier(f"Invalid specifier: {spec!r}")
+
+ self._spec: tuple[str, str] = (
+ match.group("operator").strip(),
+ match.group("version").strip(),
+ )
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ # Specifier version cache
+ self._spec_version: tuple[str, Version] | None = None
+
+ def _get_spec_version(self, version: str) -> Version | None:
+ """One element cache, as only one spec Version is needed per Specifier."""
+ if self._spec_version is not None and self._spec_version[0] == version:
+ return self._spec_version[1]
+
+ version_specifier = _coerce_version(version)
+ if version_specifier is None:
+ return None
+
+ self._spec_version = (version, version_specifier)
+ return version_specifier
+
+ def _require_spec_version(self, version: str) -> Version:
+ """Get spec version, asserting it's valid (not for === operator).
+
+ This method should only be called for operators where version
+ strings are guaranteed to be valid PEP 440 versions (not ===).
+ """
+ spec_version = self._get_spec_version(version)
+ assert spec_version is not None
+ return spec_version
+
+ @property
+ def prereleases(self) -> bool | None:
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # Only the "!=" operator does not imply prereleases when
+ # the version in the specifier is a prerelease.
+ operator, version_str = self._spec
+ if operator != "!=":
+ # The == specifier with trailing .* cannot include prereleases
+ # e.g. "==1.0a1.*" is not valid.
+ if operator == "==" and version_str.endswith(".*"):
+ return False
+
+ # "===" can have arbitrary string versions, so we cannot parse
+ # those, we take prereleases as unknown (None) for those.
+ version = self._get_spec_version(version_str)
+ if version is None:
+ return None
+
+ # For all other operators, use the check if spec Version
+ # object implies pre-releases.
+ if version.is_prerelease:
+ return True
+
+ return False
+
+ @prereleases.setter
+ def prereleases(self, value: bool | None) -> None:
+ self._prereleases = value
+
+ @property
+ def operator(self) -> str:
+ """The operator of this specifier.
+
+ >>> Specifier("==1.2.3").operator
+ '=='
+ """
+ return self._spec[0]
+
+ @property
+ def version(self) -> str:
+ """The version of this specifier.
+
+ >>> Specifier("==1.2.3").version
+ '1.2.3'
+ """
+ return self._spec[1]
+
    def __repr__(self) -> str:
        """A representation of the Specifier that shows all internal state.

        >>> Specifier('>=1.0.0')
        <Specifier('>=1.0.0')>
        >>> Specifier('>=1.0.0', prereleases=False)
        <Specifier('>=1.0.0', prereleases=False)>
        >>> Specifier('>=1.0.0', prereleases=True)
        <Specifier('>=1.0.0', prereleases=True)>
        """
        # Only show prereleases when it was explicitly overridden.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+ def __str__(self) -> str:
+ """A string representation of the Specifier that can be round-tripped.
+
+ >>> str(Specifier('>=1.0.0'))
+ '>=1.0.0'
+ >>> str(Specifier('>=1.0.0', prereleases=False))
+ '>=1.0.0'
+ """
+ return "{}{}".format(*self._spec)
+
    @property
    def _canonical_spec(self) -> tuple[str, str]:
        """The (operator, canonicalized version) pair used by __eq__ and __hash__."""
        operator, version = self._spec
        # "===" takes arbitrary strings and ".*" prefixes are not valid
        # Version objects, so neither can be canonicalized; use them verbatim.
        if operator == "===" or version.endswith(".*"):
            return operator, version

        spec_version = self._require_spec_version(version)

        # "~=" is sensitive to trailing zeros (~=1.0 and ~=1.0.0 differ),
        # so only strip them for the other operators.
        canonical_version = canonicalize_version(
            spec_version, strip_trailing_zero=(operator != "~=")
        )

        return operator, canonical_version
+
    def __hash__(self) -> int:
        # Hash the canonical form so that equal specifiers (per __eq__,
        # e.g. ==1.2.3 and ==1.2.3.0) hash the same.
        return hash(self._canonical_spec)
+
    def __eq__(self, other: object) -> bool:
        """Whether or not the two Specifier-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
        True
        >>> (Specifier("==1.2.3", prereleases=False) ==
        ...  Specifier("==1.2.3", prereleases=True))
        True
        >>> Specifier("==1.2.3") == "==1.2.3"
        True
        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
        False
        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
        False
        """
        if isinstance(other, str):
            try:
                # Allow comparing directly against specifier strings.
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            # Let the other operand's __eq__ have a chance.
            return NotImplemented

        return self._canonical_spec == other._canonical_spec
+
    def _get_operator(self, op: str) -> CallableOperator:
        """Look up the _compare_* method implementing the given operator string."""
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable
+
    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
        """Implement the "~=" (compatible release) operator."""
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments (dev/pre/post releases).
        prefix = _version_join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )
+
    def _compare_equal(self, prospective: Version, spec: str) -> bool:
        """Implement the "==" operator, including ".*" prefix matching."""
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            normalized_prospective = canonicalize_version(
                _public_version(prospective), strip_trailing_zero=False
            )
            # Get the normalized version string ignoring the trailing .*
            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
            # Split the spec out by bangs and dots, and pretend that there is
            # an implicit dot in between a release segment and a pre-release segment.
            split_spec = _version_split(normalized_spec)

            # Split the prospective version out by bangs and dots, and pretend
            # that there is an implicit dot in between a release segment and
            # a pre-release segment.
            split_prospective = _version_split(normalized_prospective)

            # 0-pad the prospective version before shortening it to get the correct
            # shortened version.
            padded_prospective, _ = _pad_version(split_prospective, split_spec)

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = padded_prospective[: len(split_spec)]

            return shortened_prospective == split_spec
        else:
            # Convert our spec string into a Version
            spec_version = self._require_spec_version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = _public_version(prospective)

            return prospective == spec_version
+
    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
        """Implement the "!=" operator as the negation of "==" (incl. ".*" rules)."""
        return not self._compare_equal(prospective, spec)
+
    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
        """Implement the "<=" operator."""
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return _public_version(prospective) <= self._require_spec_version(spec)
+
    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
        """Implement the ">=" operator."""
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return _public_version(prospective) >= self._require_spec_version(spec)
+
    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
        """Implement the exclusive "<" operator with its pre-release exception."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = self._require_spec_version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # is a pre-release version, we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if (
            not spec.is_prerelease
            and prospective.is_prerelease
            and _base_version(prospective) == _base_version(spec)
        ):
            return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True
+
    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
        """Implement the exclusive ">" operator with post-release/local exceptions."""
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = self._require_spec_version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # is a post-release version, we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if (
            not spec.is_postrelease
            and prospective.is_postrelease
            and _base_version(prospective) == _base_version(spec)
        ):
            return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None and _base_version(
            prospective
        ) == _base_version(spec):
            return False

        # If we've gotten to here, it means that prospective version is
        # greater than the spec version *and* it's not a post-release or local
        # version of the same version in the spec.
        return True
+
    def _compare_arbitrary(self, prospective: Version | str, spec: str) -> bool:
        """Implement the "===" operator: case-insensitive string equality."""
        return str(prospective).lower() == str(spec).lower()
+
    def __contains__(self, item: str | Version) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in Specifier(">=1.2.3")
        True
        >>> Version("1.2.3") in Specifier(">=1.2.3")
        True
        >>> "1.0.0" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3")
        True
        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
        True
        """
        return self.contains(item)
+
    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this Specifier. If set to
            ``None`` (the default), it will follow the recommendation from
            :pep:`440` and match prereleases, as there are no other versions.

        >>> Specifier(">=1.2.3").contains("1.2.3")
        True
        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
        True
        >>> Specifier(">=1.2.3").contains("1.0.0")
        False
        >>> Specifier(">=1.2.3").contains("1.3.0a1")
        True
        >>> Specifier(">=1.2.3", prereleases=False).contains("1.3.0a1")
        False
        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
        True
        """
        # Delegate to filter() so that containment and filtering share one
        # implementation of the matching rules.
        return bool(list(self.filter([item], prereleases=prereleases)))
+
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifier.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will follow the recommendation from :pep:`440`
            and match prereleases if there are no other versions.

        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
        ['1.2.3', '1.3', <Version('1.4')>]
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        """
        prereleases_versions = []
        found_non_prereleases = False

        # Determine if to include prereleases by default
        include_prereleases = (
            prereleases if prereleases is not None else self.prereleases
        )

        # Get the matching operator
        operator_callable = self._get_operator(self.operator)

        # Filter versions
        for version in iterable:
            parsed_version = _coerce_version(version)
            if parsed_version is None:
                # === operator can match arbitrary (non-version) strings
                if self.operator == "===" and self._compare_arbitrary(
                    version, self.version
                ):
                    yield version
            elif operator_callable(parsed_version, self.version):
                # If it's not a prerelease or prereleases are allowed, yield it directly
                if not parsed_version.is_prerelease or include_prereleases:
                    # NOTE(review): despite its name, this flag means "something
                    # was already yielded" — it is also set when a prerelease is
                    # yielded because prereleases are allowed.
                    found_non_prereleases = True
                    yield version
                # Otherwise collect prereleases for potential later use
                elif prereleases is None and self._prereleases is not False:
                    prereleases_versions.append(version)

        # If no non-prereleases were found and prereleases weren't
        # explicitly forbidden, yield the collected prereleases
        if (
            not found_non_prereleases
            and prereleases is None
            and self._prereleases is not False
        ):
            yield from prereleases_versions
+
+
# Matches a numeric release segment immediately followed by a pre-release
# marker (e.g. "2rc1" -> groups ("2", "rc1")) so the two can be split apart.
_prefix_regex = re.compile(r"([0-9]+)((?:a|b|c|rc)[0-9]+)")


def _version_split(version: str) -> list[str]:
    """Split version into components.

    The split components are intended for version comparison. The logic does
    not attempt to retain the original version string, so joining the
    components back with :func:`_version_join` may not produce the original
    version string.
    """
    result: list[str] = []

    # Separate the epoch (before "!") from the rest; a missing epoch is
    # normalized to "0" so all results are comparable.
    epoch, _, rest = version.rpartition("!")
    result.append(epoch or "0")

    for item in rest.split("."):
        match = _prefix_regex.fullmatch(item)
        if match:
            # Insert the implicit dot: "1rc1" becomes "1", "rc1".
            result.extend(match.groups())
        else:
            result.append(item)
    return result
+
+
+def _version_join(components: list[str]) -> str:
+ """Join split version components into a version string.
+
+ This function assumes the input came from :func:`_version_split`, where the
+ first component must be the epoch (either empty or numeric), and all other
+ components numeric.
+ """
+ epoch, *rest = components
+ return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+ return not any(
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+ )
+
+
+def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
+
+ # Insert our padding
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+ return (
+ list(itertools.chain.from_iterable(left_split)),
+ list(itertools.chain.from_iterable(right_split)),
+ )
+
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    __slots__ = ("_prereleases", "_specs")

    def __init__(
        self,
        specifiers: str | Iterable[Specifier] = "",
        prereleases: bool | None = None,
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
            May also be an iterable of ``Specifier`` instances, which will be used
            as is.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable, then this exception
            will be raised.
        """
        if isinstance(specifiers, str):
            # Split on `,` to break each individual specifier into its own item, and
            # strip each item to remove leading/trailing whitespace.
            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

            # Make each individual specifier a Specifier and save in a frozen set
            # for later.
            self._specs = frozenset(map(Specifier, split_specifiers))
        else:
            # Save the supplied specifiers in a frozen set.
            self._specs = frozenset(specifiers)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    @property
    def prereleases(self) -> bool | None:
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases; if any of them do we'll return True, otherwise None
        # (unknown) so downstream logic can apply its own default.
        if any(s.prereleases for s in self._specs):
            return True

        return None

    @prereleases.setter
    def prereleases(self, value: bool | None) -> None:
        # Setting None restores the auto-detection in the getter above.
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        # Only show prereleases when it was explicitly overridden.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        # BUGFIX: the return value had been reduced to f"" (empty string);
        # restored to the same shape as Specifier.__repr__ above.
        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        # Sort for a deterministic representation of the (unordered) set.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        # prereleases is intentionally excluded, matching __eq__.
        return hash(self._specs)

    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Propagate the prereleases override: keep whichever side has one; if
        # both have one they must agree, otherwise the result is ambiguous.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif (
            self._prereleases is not None and other._prereleases is None
        ) or self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: bool | None = None,
        installed: bool | None = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it will follow the recommendation from :pep:`440`
            and match prereleases, as there are no other versions.
        :param installed:
            Whether or not the item is installed. If set to ``True``, it will
            accept prerelease versions even if the specifier does not allow them.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False).contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        version = _coerce_version(item)

        # An already-installed prerelease is always acceptable.
        if version is not None and installed and version.is_prerelease:
            prereleases = True

        # Non-parseable items are passed through as-is (only === can match them).
        check_item = item if version is None else version
        return bool(list(self.filter([check_item], prereleases=prereleases)))

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will follow the recommendation from :pep:`440`
            and match prereleases if there are no other versions.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None and self.prereleases is not None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            # When prereleases is None, we need to let all versions through
            # the individual filters, then decide about prereleases at the end
            # based on whether any non-prereleases matched ALL specs.
            for spec in self._specs:
                iterable = spec.filter(
                    iterable, prereleases=True if prereleases is None else prereleases
                )

            if prereleases is not None:
                # If we have a forced prereleases value,
                # we can immediately return the iterator.
                return iter(iterable)
        else:
            # Handle empty SpecifierSet cases where prereleases is not None.
            if prereleases is True:
                return iter(iterable)

            if prereleases is False:
                return (
                    item
                    for item in iterable
                    if (version := _coerce_version(item)) is None
                    or not version.is_prerelease
                )

        # Finally if prereleases is None, apply PEP 440 logic:
        # exclude prereleases unless there are no final releases that matched.
        filtered_items: list[UnparsedVersionVar] = []
        found_prereleases: list[UnparsedVersionVar] = []
        found_final_release = False

        for item in iterable:
            parsed_version = _coerce_version(item)
            # Arbitrary strings are always included as it is not
            # possible to determine if they are prereleases,
            # and they have already passed all specifiers.
            if parsed_version is None:
                filtered_items.append(item)
                found_prereleases.append(item)
            elif parsed_version.is_prerelease:
                found_prereleases.append(item)
            else:
                filtered_items.append(item)
                found_final_release = True

        return iter(filtered_items if found_final_release else found_prereleases)
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ef27c897a4df35a2a6923b608a5e04a0a38b9ee
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py
@@ -0,0 +1,651 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import logging
+import platform
+import re
+import struct
+import subprocess
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+ Any,
+ Iterable,
+ Iterator,
+ Sequence,
+ Tuple,
+ cast,
+)
+
+from . import _manylinux, _musllinux
+
+logger = logging.getLogger(__name__)
+
# A sequence of interpreter version digits, e.g. (3, 11); the minor may be absent.
PythonVersion = Sequence[int]
# A (major, minor) pair for an Apple OS release.
AppleVersion = Tuple[int, int]

# Maps long interpreter implementation names to the short forms used in
# wheel tags (PEP 425).
INTERPRETER_SHORT_NAMES: dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when the running interpreter uses 4-byte pointers (a 32-bit build,
# regardless of the OS bitness).
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
+
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so normalize to lowercase up front.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Hashing Tag instances is a hot path when matching large candidate
        # tag sets (e.g. set.isdisjoint while scanning package links), so the
        # hash is computed once here and cached.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # Compare the cached hashes first: cheapest check, and it almost
        # always distinguishes unequal tags.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"

    def __setstate__(self, state: tuple[None, dict[str, Any]]) -> None:
        # The cached _hash is wrong when unpickling (hash values are not
        # stable across interpreter runs), so restore the slots and recompute.
        _, slots = state
        for k, v in slots.items():
            setattr(self, k, v)
        self._hash = hash((self._interpreter, self._abi, self._platform))
+
+
def parse_tag(tag: str) -> frozenset[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set (each of the three parts may be a '.'-separated list,
    e.g. `py2.py3-none-any`).
    """
    tags = set()
    interpreters, abis, platforms = tag.split("-")
    # Expand the cross product of the compressed components.
    for interpreter in interpreters.split("."):
        for abi in abis.split("."):
            for platform_ in platforms.split("."):
                tags.add(Tag(interpreter, abi, platform_))
    return frozenset(tags)
+
+
def _get_config_var(name: str, warn: bool = False) -> int | str | None:
    """Read a sysconfig variable, optionally logging when it is unset."""
    value: int | str | None = sysconfig.get_config_var(name)
    if value is None and warn:
        # Debug-level only; an unset variable is common on some platforms.
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
+
+
+def _normalize_string(string: str) -> str:
+ return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
+def _is_threaded_cpython(abis: list[str]) -> bool:
+ """
+ Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+ The threaded builds are indicated by a "t" in the abiflags.
+ """
+ if len(abis) == 0:
+ return False
+ # expect e.g., cp313
+ m = re.match(r"cp\d+(.*)", abis[0])
+ if not m:
+ return False
+ abiflags = m.group(1)
+ return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
+ """
+ Determine if the Python version supports abi3.
+
+ PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+ builds do not support abi3.
+ """
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
+
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
    """Compute the candidate CPython ABI tags, most specific first."""
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    # Free-threaded builds (PEP 703) add a "t" flag from 3.13 onward.
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
+
+
def cpython_tags(
    python_version: PythonVersion | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        abis = _cpython_abis(python_version, warn) if len(python_version) > 1 else []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:  # noqa: PERF203
            pass

    platforms = list(platforms or platform_tags())
    # Most specific tags first: the real ABI(s) for the current version...
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    # ...then the stable-ABI and ABI-less tags for this version...
    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    # ...and finally abi3 tags for every older minor version back to 3.2.
    if use_abi3:
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                version = _version_nodot((python_version[0], minor_version))
                interpreter = f"cp{version}"
                yield Tag(interpreter, "abi3", platform_)
+
+
+def _generic_abi() -> list[str]:
+ """
+ Return the ABI tag based on EXT_SUFFIX.
+ """
+ # The following are examples of `EXT_SUFFIX`.
+ # We want to keep the parts which are related to the ABI and remove the
+ # parts which are related to the platform:
+ # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310
+ # - mac: '.cpython-310-darwin.so' => cp310
+ # - win: '.cp310-win_amd64.pyd' => cp310
+ # - win: '.pyd' => cp37 (uses _cpython_abis())
+ # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
+ # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
+ # => graalpy_38_native
+
+ ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
+ if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
+ raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
+ parts = ext_suffix.split(".")
+ if len(parts) < 3:
+ # CPython3.7 and earlier uses ".pyd" on Windows.
+ return _cpython_abis(sys.version_info[:2])
+ soabi = parts[1]
+ if soabi.startswith("cpython"):
+ # non-windows
+ abi = "cp" + soabi.split("-")[1]
+ elif soabi.startswith("cp"):
+ # windows
+ abi = soabi.split("-")[0]
+ elif soabi.startswith("pypy"):
+ abi = "-".join(soabi.split("-")[:2])
+ elif soabi.startswith("graalpy"):
+ abi = "-".join(soabi.split("-")[:3])
+ elif soabi:
+ # pyston, ironpython, others?
+ abi = soabi
+ else:
+ return []
+ return [_normalize_string(abi)]
+
+
+def generic_tags(
+ interpreter: str | None = None,
+ abis: Iterable[str] | None = None,
+ platforms: Iterable[str] | None = None,
+ *,
+ warn: bool = False,
+) -> Iterator[Tag]:
+ """
+ Yields the tags for a generic interpreter.
+
+ The tags consist of:
+ - --
+
+ The "none" ABI will be added if it was not explicitly provided.
+ """
+ if not interpreter:
+ interp_name = interpreter_name()
+ interp_version = interpreter_version(warn=warn)
+ interpreter = f"{interp_name}{interp_version}"
+ abis = _generic_abi() if abis is None else list(abis)
+ platforms = list(platforms or platform_tags())
+ if "none" not in abis:
+ abis.append("none")
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+
+
+def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
+ """
+ Yields Python versions in descending order.
+
+ After the latest version, the major-only version will be yielded, and then
+ all previous versions of that major version.
+ """
+ if len(py_version) > 1:
+ yield f"py{_version_nodot(py_version[:2])}"
+ yield f"py{py_version[0]}"
+ if len(py_version) > 1:
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield f"py{_version_nodot((py_version[0], minor))}"
+
+
+def compatible_tags(
+ python_version: PythonVersion | None = None,
+ interpreter: str | None = None,
+ platforms: Iterable[str] | None = None,
+) -> Iterator[Tag]:
+ """
+ Yields the sequence of tags that are compatible with a specific version of Python.
+
+ The tags consist of:
+ - py*-none-
+ - -none-any # ... if `interpreter` is provided.
+ - py*-none-any
+ """
+ if not python_version:
+ python_version = sys.version_info[:2]
+ platforms = list(platforms or platform_tags())
+ for version in _py_interpreter_range(python_version):
+ for platform_ in platforms:
+ yield Tag(version, "none", platform_)
+ if interpreter:
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(python_version):
+ yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
+ if not is_32bit:
+ return arch
+
+ if arch.startswith("ppc"):
+ return "ppc"
+
+ return "i386"
+
+
+def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:
+ formats = [cpu_arch]
+ if cpu_arch == "x86_64":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat64", "fat32"])
+
+ elif cpu_arch == "i386":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat32", "fat"])
+
+ elif cpu_arch == "ppc64":
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4):
+ return []
+ formats.append("fat64")
+
+ elif cpu_arch == "ppc":
+ if version > (10, 6):
+ return []
+ formats.extend(["fat32", "fat"])
+
+ if cpu_arch in {"arm64", "x86_64"}:
+ formats.append("universal2")
+
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
+ formats.append("universal")
+
+ return formats
+
+
+def mac_platforms(
+ version: AppleVersion | None = None, arch: str | None = None
+) -> Iterator[str]:
+ """
+ Yields the platform tags for a macOS system.
+
+ The `version` parameter is a two-item tuple specifying the macOS version to
+ generate platform tags for. The `arch` parameter is the CPU architecture to
+ generate platform tags for. Both parameters default to the appropriate value
+ for the current system.
+ """
+ version_str, _, cpu_arch = platform.mac_ver()
+ if version is None:
+ version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
+ if version == (10, 16):
+ # When built against an older macOS SDK, Python will report macOS 10.16
+ # instead of the real version.
+ version_str = subprocess.run(
+ [
+ sys.executable,
+ "-sS",
+ "-c",
+ "import platform; print(platform.mac_ver()[0])",
+ ],
+ check=True,
+ env={"SYSTEM_VERSION_COMPAT": "0"},
+ stdout=subprocess.PIPE,
+ text=True,
+ ).stdout
+ version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
+
+ if arch is None:
+ arch = _mac_arch(cpu_arch)
+
+ if (10, 0) <= version < (11, 0):
+ # Prior to Mac OS 11, each yearly release of Mac OS bumped the
+ # "minor" version number. The major version was always 10.
+ major_version = 10
+ for minor_version in range(version[1], -1, -1):
+ compat_version = major_version, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+ if version >= (11, 0):
+ # Starting with Mac OS 11, each yearly release bumps the major version
+ # number. The minor versions are now the midyear updates.
+ minor_version = 0
+ for major_version in range(version[0], 10, -1):
+ compat_version = major_version, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+ if version >= (11, 0):
+ # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
+ # Arm64 support was introduced in 11.0, so no Arm binaries from previous
+ # releases exist.
+ #
+ # However, the "universal2" binary format can have a
+ # macOS version earlier than 11.0 when the x86_64 part of the binary supports
+ # that version of macOS.
+ major_version = 10
+ if arch == "x86_64":
+ for minor_version in range(16, 3, -1):
+ compat_version = major_version, minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+ else:
+ for minor_version in range(16, 3, -1):
+ compat_version = major_version, minor_version
+ binary_format = "universal2"
+ yield f"macosx_{major_version}_{minor_version}_{binary_format}"
+
+
+def ios_platforms(
+ version: AppleVersion | None = None, multiarch: str | None = None
+) -> Iterator[str]:
+ """
+ Yields the platform tags for an iOS system.
+
+ :param version: A two-item tuple specifying the iOS version to generate
+ platform tags for. Defaults to the current iOS version.
+ :param multiarch: The CPU architecture+ABI to generate platform tags for -
+ (the value used by `sys.implementation._multiarch` e.g.,
+ `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current
+ multiarch value.
+ """
+ if version is None:
+ # if iOS is the current platform, ios_ver *must* be defined. However,
+ # it won't exist for CPython versions before 3.13, which causes a mypy
+ # error.
+ _, release, _, _ = platform.ios_ver() # type: ignore[attr-defined, unused-ignore]
+ version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))
+
+ if multiarch is None:
+ multiarch = sys.implementation._multiarch
+ multiarch = multiarch.replace("-", "_")
+
+ ios_platform_template = "ios_{major}_{minor}_{multiarch}"
+
+ # Consider any iOS major.minor version from the version requested, down to
+ # 12.0. 12.0 is the first iOS version that is known to have enough features
+ # to support CPython. Consider every possible minor release up to X.9. There
+ # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra
+ # candidates that won't ever match doesn't really hurt, and it saves us from
+ # having to keep an explicit list of known iOS versions in the code. Return
+ # the results descending order of version number.
+
+ # If the requested major version is less than 12, there won't be any matches.
+ if version[0] < 12:
+ return
+
+ # Consider the actual X.Y version that was requested.
+ yield ios_platform_template.format(
+ major=version[0], minor=version[1], multiarch=multiarch
+ )
+
+ # Consider every minor version from X.0 to the minor version prior to the
+ # version requested by the platform.
+ for minor in range(version[1] - 1, -1, -1):
+ yield ios_platform_template.format(
+ major=version[0], minor=minor, multiarch=multiarch
+ )
+
+ for major in range(version[0] - 1, 11, -1):
+ for minor in range(9, -1, -1):
+ yield ios_platform_template.format(
+ major=major, minor=minor, multiarch=multiarch
+ )
+
+
+def android_platforms(
+ api_level: int | None = None, abi: str | None = None
+) -> Iterator[str]:
+ """
+ Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on
+ non-Android platforms, the ``api_level`` and ``abi`` arguments are required.
+
+ :param int api_level: The maximum `API level
+ `__ to return. Defaults
+ to the current system's version, as returned by ``platform.android_ver``.
+ :param str abi: The `Android ABI `__,
+ e.g. ``arm64_v8a``. Defaults to the current system's ABI , as returned by
+ ``sysconfig.get_platform``. Hyphens and periods will be replaced with
+ underscores.
+ """
+ if platform.system() != "Android" and (api_level is None or abi is None):
+ raise TypeError(
+ "on non-Android platforms, the api_level and abi arguments are required"
+ )
+
+ if api_level is None:
+ # Python 3.13 was the first version to return platform.system() == "Android",
+ # and also the first version to define platform.android_ver().
+ api_level = platform.android_ver().api_level # type: ignore[attr-defined]
+
+ if abi is None:
+ abi = sysconfig.get_platform().split("-")[-1]
+ abi = _normalize_string(abi)
+
+ # 16 is the minimum API level known to have enough features to support CPython
+ # without major patching. Yield every API level from the maximum down to the
+ # minimum, inclusive.
+ min_api_level = 16
+ for ver in range(api_level, min_api_level - 1, -1):
+ yield f"android_{ver}_{abi}"
+
+
+def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
+ linux = _normalize_string(sysconfig.get_platform())
+ if not linux.startswith("linux_"):
+ # we should never be here, just yield the sysconfig one and return
+ yield linux
+ return
+ if is_32bit:
+ if linux == "linux_x86_64":
+ linux = "linux_i686"
+ elif linux == "linux_aarch64":
+ linux = "linux_armv8l"
+ _, arch = linux.split("_", 1)
+ archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+ yield from _manylinux.platform_tags(archs)
+ yield from _musllinux.platform_tags(archs)
+ for arch in archs:
+ yield f"linux_{arch}"
+
+
+def _generic_platforms() -> Iterator[str]:
+ yield _normalize_string(sysconfig.get_platform())
+
+
+def platform_tags() -> Iterator[str]:
+ """
+ Provides the platform tags for this installation.
+ """
+ if platform.system() == "Darwin":
+ return mac_platforms()
+ elif platform.system() == "iOS":
+ return ios_platforms()
+ elif platform.system() == "Android":
+ return android_platforms()
+ elif platform.system() == "Linux":
+ return _linux_platforms()
+ else:
+ return _generic_platforms()
+
+
+def interpreter_name() -> str:
+ """
+ Returns the name of the running interpreter.
+
+ Some implementations have a reserved, two-letter abbreviation which will
+ be returned when appropriate.
+ """
+ name = sys.implementation.name
+ return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def interpreter_version(*, warn: bool = False) -> str:
+ """
+ Returns the version of the running interpreter.
+ """
+ version = _get_config_var("py_version_nodot", warn=warn)
+ return str(version) if version else _version_nodot(sys.version_info[:2])
+
+
+def _version_nodot(version: PythonVersion) -> str:
+ return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+ """
+ Returns the sequence of tag triples for the running interpreter.
+
+ The order of the sequence corresponds to priority order for the
+ interpreter, from most to least important.
+ """
+
+ interp_name = interpreter_name()
+ if interp_name == "cp":
+ yield from cpython_tags(warn=warn)
+ else:
+ yield from generic_tags()
+
+ if interp_name == "pp":
+ interp = "pp3"
+ elif interp_name == "cp":
+ interp = "cp" + interpreter_version(warn=warn)
+ else:
+ interp = None
+ yield from compatible_tags(interpreter=interp)
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..c41c8137f2679a0fac21bb845596e231ae88dbd8
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py
@@ -0,0 +1,158 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import re
+from typing import NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version, _TrimmedRelease
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+ """
+ An invalid distribution name; users should refer to the packaging user guide.
+ """
+
+
+class InvalidWheelFilename(ValueError):
+ """
+ An invalid wheel filename was found, users should refer to PEP 427.
+ """
+
+
+class InvalidSdistFilename(ValueError):
+ """
+ An invalid sdist filename was found, users should refer to the packaging user guide.
+ """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(r"[A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9]", re.IGNORECASE)
+_normalized_regex = re.compile(r"[a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9]")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+ if validate and not _validate_regex.fullmatch(name):
+ raise InvalidName(f"name is invalid: {name!r}")
+ # Ensure all ``.`` and ``_`` are ``-``
+ # Emulates ``re.sub(r"[-_.]+", "-", name).lower()`` from PEP 503
+ # Much faster than re, and even faster than str.translate
+ value = name.lower().replace("_", "-").replace(".", "-")
+ # Condense repeats (faster than regex)
+ while "--" in value:
+ value = value.replace("--", "-")
+ return cast("NormalizedName", value)
+
+
+def is_normalized_name(name: str) -> bool:
+ return _normalized_regex.fullmatch(name) is not None
+
+
+def canonicalize_version(
+ version: Version | str, *, strip_trailing_zero: bool = True
+) -> str:
+ """
+ Return a canonical form of a version as a string.
+
+ >>> canonicalize_version('1.0.1')
+ '1.0.1'
+
+ Per PEP 625, versions may have multiple canonical forms, differing
+ only by trailing zeros.
+
+ >>> canonicalize_version('1.0.0')
+ '1'
+ >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
+ '1.0.0'
+
+ Invalid versions are returned unaltered.
+
+ >>> canonicalize_version('foo bar baz')
+ 'foo bar baz'
+ """
+ if isinstance(version, str):
+ try:
+ version = Version(version)
+ except InvalidVersion:
+ return str(version)
+ return str(_TrimmedRelease(version) if strip_trailing_zero else version)
+
+
+def parse_wheel_filename(
+ filename: str,
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
+ if not filename.endswith(".whl"):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
+ )
+
+ filename = filename[:-4]
+ dashes = filename.count("-")
+ if dashes not in (4, 5):
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (wrong number of parts): {filename!r}"
+ )
+
+ parts = filename.split("-", dashes - 2)
+ name_part = parts[0]
+ # See PEP 427 for the rules on escaping the project name.
+ if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+ raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
+ name = canonicalize_name(name_part)
+
+ try:
+ version = Version(parts[1])
+ except InvalidVersion as e:
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (invalid version): {filename!r}"
+ ) from e
+
+ if dashes == 5:
+ build_part = parts[2]
+ build_match = _build_tag_regex.match(build_part)
+ if build_match is None:
+ raise InvalidWheelFilename(
+ f"Invalid build number: {build_part} in {filename!r}"
+ )
+ build = cast("BuildTag", (int(build_match.group(1)), build_match.group(2)))
+ else:
+ build = ()
+ tags = parse_tag(parts[-1])
+ return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
+ if filename.endswith(".tar.gz"):
+ file_stem = filename[: -len(".tar.gz")]
+ elif filename.endswith(".zip"):
+ file_stem = filename[: -len(".zip")]
+ else:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+ f" {filename!r}"
+ )
+
+ # We are requiring a PEP 440 version, which cannot contain dashes,
+ # so we split on the last dash.
+ name_part, sep, version_part = file_stem.rpartition("-")
+ if not sep:
+ raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")
+
+ name = canonicalize_name(name_part)
+
+ try:
+ version = Version(version_part)
+ except InvalidVersion as e:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (invalid version): {filename!r}"
+ ) from e
+
+ return (name, version)
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..1206c462d4fcaa670a816e201bb88b27dfc9cf88
--- /dev/null
+++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py
@@ -0,0 +1,792 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+ from packaging.version import parse, Version
+"""
+
+from __future__ import annotations
+
+import re
+import sys
+import typing
+from typing import (
+ Any,
+ Callable,
+ Literal,
+ NamedTuple,
+ SupportsInt,
+ Tuple,
+ TypedDict,
+ Union,
+)
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+if typing.TYPE_CHECKING:
+ from typing_extensions import Self, Unpack
+
+if sys.version_info >= (3, 13): # pragma: no cover
+ from warnings import deprecated as _deprecated
+elif typing.TYPE_CHECKING:
+ from typing_extensions import deprecated as _deprecated
+else: # pragma: no cover
+ import functools
+ import warnings
+
+ def _deprecated(message: str) -> object:
+ def decorator(func: object) -> object:
+ @functools.wraps(func)
+ def wrapper(*args: object, **kwargs: object) -> object:
+ warnings.warn(
+ message,
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return decorator
+
+
+_LETTER_NORMALIZATION = {
+ "alpha": "a",
+ "beta": "b",
+ "c": "rc",
+ "pre": "rc",
+ "preview": "rc",
+ "rev": "post",
+ "r": "post",
+}
+
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+ NegativeInfinityType,
+ Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+ int,
+ Tuple[int, ...],
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _VersionReplace(TypedDict, total=False):
+ epoch: int | None
+ release: tuple[int, ...] | None
+ pre: tuple[Literal["a", "b", "rc"], int] | None
+ post: int | None
+ dev: int | None
+ local: str | None
+
+
+def parse(version: str) -> Version:
+ """Parse the given version string.
+
+ >>> parse('1.0.dev1')
+
+
+ :param version: The version string to parse.
+ :raises InvalidVersion: When the version string is not a valid version.
+ """
+ return Version(version)
+
+
+class InvalidVersion(ValueError):
+ """Raised when a version string is not a valid version.
+
+ >>> Version("invalid")
+ Traceback (most recent call last):
+ ...
+ packaging.version.InvalidVersion: Invalid version: 'invalid'
+ """
+
+
+class _BaseVersion:
+ __slots__ = ()
+
+ # This can also be a normal member (see the packaging_legacy package);
+ # we are just requiring it to be readable. Actually defining a property
+ # has runtime effect on subclasses, so it's typing only.
+ if typing.TYPE_CHECKING:
+
+ @property
+ def _key(self) -> tuple[Any, ...]: ...
+
+ def __hash__(self) -> int:
+ return hash(self._key)
+
+ # Please keep the duplicated `isinstance` check
+ # in the six comparisons hereunder
+ # unless you find a way to avoid adding overhead function calls.
+ def __lt__(self, other: _BaseVersion) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key < other._key
+
+ def __le__(self, other: _BaseVersion) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key <= other._key
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key == other._key
+
+ def __ge__(self, other: _BaseVersion) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key >= other._key
+
+ def __gt__(self, other: _BaseVersion) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key > other._key
+
+ def __ne__(self, other: object) -> bool:
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+
+# Note that ++ doesn't behave identically on CPython and PyPy, so not using it here
+_VERSION_PATTERN = r"""
+ v?+ # optional leading v
+ (?:
+ (?:(?P[0-9]+)!)?+ # epoch
+ (?P[0-9]+(?:\.[0-9]+)*+) # release segment
+ (?P # pre-release
+ [._-]?+
+ (?Palpha|a|beta|b|preview|pre|c|rc)
+ [._-]?+
+ (?P[0-9]+)?
+ )?+
+ (?P # post release
+ (?:-(?P[0-9]+))
+ |
+ (?:
+ [._-]?
+ (?Ppost|rev|r)
+ [._-]?
+ (?P[0-9]+)?
+ )
+ )?+
+ (?P # dev release
+ [._-]?+
+ (?Pdev)
+ [._-]?+
+ (?P[0-9]+)?
+ )?+
+ )
+ (?:\+
+ (?P # local version
+ [a-z0-9]+
+ (?:[._-][a-z0-9]+)*+
+ )
+ )?+
+"""
+
+_VERSION_PATTERN_OLD = _VERSION_PATTERN.replace("*+", "*").replace("?+", "?")
+
+# Possessive qualifiers were added in Python 3.11.
+# CPython 3.11.0-3.11.4 had a bug: https://github.com/python/cpython/pull/107795
+# Older PyPy also had a bug.
+VERSION_PATTERN = (
+ _VERSION_PATTERN_OLD
+ if (sys.implementation.name == "cpython" and sys.version_info < (3, 11, 5))
+ or (sys.implementation.name == "pypy" and sys.version_info < (3, 11, 13))
+ or sys.version_info < (3, 11)
+ else _VERSION_PATTERN
+)
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+# Validation pattern for local version in replace()
+_LOCAL_PATTERN = re.compile(r"[a-z0-9]+(?:[._-][a-z0-9]+)*", re.IGNORECASE)
+
+
+def _validate_epoch(value: object, /) -> int:
+ epoch = value or 0
+ if isinstance(epoch, int) and epoch >= 0:
+ return epoch
+ msg = f"epoch must be non-negative integer, got {epoch}"
+ raise InvalidVersion(msg)
+
+
+def _validate_release(value: object, /) -> tuple[int, ...]:
+ release = (0,) if value is None else value
+ if (
+ isinstance(release, tuple)
+ and len(release) > 0
+ and all(isinstance(i, int) and i >= 0 for i in release)
+ ):
+ return release
+ msg = f"release must be a non-empty tuple of non-negative integers, got {release}"
+ raise InvalidVersion(msg)
+
+
+def _validate_pre(value: object, /) -> tuple[Literal["a", "b", "rc"], int] | None:
+ if value is None:
+ return value
+ if (
+ isinstance(value, tuple)
+ and len(value) == 2
+ and value[0] in ("a", "b", "rc")
+ and isinstance(value[1], int)
+ and value[1] >= 0
+ ):
+ return value
+ msg = f"pre must be a tuple of ('a'|'b'|'rc', non-negative int), got {value}"
+ raise InvalidVersion(msg)
+
+
+def _validate_post(value: object, /) -> tuple[Literal["post"], int] | None:
+ if value is None:
+ return value
+ if isinstance(value, int) and value >= 0:
+ return ("post", value)
+ msg = f"post must be non-negative integer, got {value}"
+ raise InvalidVersion(msg)
+
+
+def _validate_dev(value: object, /) -> tuple[Literal["dev"], int] | None:
+ if value is None:
+ return value
+ if isinstance(value, int) and value >= 0:
+ return ("dev", value)
+ msg = f"dev must be non-negative integer, got {value}"
+ raise InvalidVersion(msg)
+
+
+def _validate_local(value: object, /) -> LocalType | None:
+ if value is None:
+ return value
+ if isinstance(value, str) and _LOCAL_PATTERN.fullmatch(value):
+ return _parse_local_version(value)
+ msg = f"local must be a valid version string, got {value!r}"
+ raise InvalidVersion(msg)
+
+
+# Backward compatibility for internals before 26.0. Do not use.
+class _Version(NamedTuple):
+ epoch: int
+ release: tuple[int, ...]
+ dev: tuple[str, int] | None
+ pre: tuple[str, int] | None
+ post: tuple[str, int] | None
+ local: LocalType | None
+
+
+class Version(_BaseVersion):
+ """This class abstracts handling of a project's versions.
+
+ A :class:`Version` instance is comparison aware and can be compared and
+ sorted using the standard Python interfaces.
+
+ >>> v1 = Version("1.0a5")
+ >>> v2 = Version("1.0")
+ >>> v1
+
+ >>> v2
+
+ >>> v1 < v2
+ True
+ >>> v1 == v2
+ False
+ >>> v1 > v2
+ False
+ >>> v1 >= v2
+ False
+ >>> v1 <= v2
+ True
+ """
+
+ __slots__ = ("_dev", "_epoch", "_key_cache", "_local", "_post", "_pre", "_release")
+ __match_args__ = ("_str",)
+
+ _regex = re.compile(r"\s*" + VERSION_PATTERN + r"\s*", re.VERBOSE | re.IGNORECASE)
+
+ _epoch: int
+ _release: tuple[int, ...]
+ _dev: tuple[str, int] | None
+ _pre: tuple[str, int] | None
+ _post: tuple[str, int] | None
+ _local: LocalType | None
+
+ _key_cache: CmpKey | None
+
+ def __init__(self, version: str) -> None:
+ """Initialize a Version object.
+
+ :param version:
+ The string representation of a version which will be parsed and normalized
+ before use.
+ :raises InvalidVersion:
+ If the ``version`` does not conform to PEP 440 in any way then this
+ exception will be raised.
+ """
+ # Validate the version and parse it into pieces
+ match = self._regex.fullmatch(version)
+ if not match:
+ raise InvalidVersion(f"Invalid version: {version!r}")
+ self._epoch = int(match.group("epoch")) if match.group("epoch") else 0
+ self._release = tuple(map(int, match.group("release").split(".")))
+ self._pre = _parse_letter_version(match.group("pre_l"), match.group("pre_n"))
+ self._post = _parse_letter_version(
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+ )
+ self._dev = _parse_letter_version(match.group("dev_l"), match.group("dev_n"))
+ self._local = _parse_local_version(match.group("local"))
+
+ # Key which will be used for sorting
+ self._key_cache = None
+
+ def __replace__(self, **kwargs: Unpack[_VersionReplace]) -> Self:
+ epoch = _validate_epoch(kwargs["epoch"]) if "epoch" in kwargs else self._epoch
+ release = (
+ _validate_release(kwargs["release"])
+ if "release" in kwargs
+ else self._release
+ )
+ pre = _validate_pre(kwargs["pre"]) if "pre" in kwargs else self._pre
+ post = _validate_post(kwargs["post"]) if "post" in kwargs else self._post
+ dev = _validate_dev(kwargs["dev"]) if "dev" in kwargs else self._dev
+ local = _validate_local(kwargs["local"]) if "local" in kwargs else self._local
+
+ if (
+ epoch == self._epoch
+ and release == self._release
+ and pre == self._pre
+ and post == self._post
+ and dev == self._dev
+ and local == self._local
+ ):
+ return self
+
+ new_version = self.__class__.__new__(self.__class__)
+ new_version._key_cache = None
+ new_version._epoch = epoch
+ new_version._release = release
+ new_version._pre = pre
+ new_version._post = post
+ new_version._dev = dev
+ new_version._local = local
+
+ return new_version
+
+ @property
+ def _key(self) -> CmpKey:
+ if self._key_cache is None:
+ self._key_cache = _cmpkey(
+ self._epoch,
+ self._release,
+ self._pre,
+ self._post,
+ self._dev,
+ self._local,
+ )
+ return self._key_cache
+
+ @property
+ @_deprecated("Version._version is private and will be removed soon")
+ def _version(self) -> _Version:
+ return _Version(
+ self._epoch, self._release, self._dev, self._pre, self._post, self._local
+ )
+
+ @_version.setter
+ @_deprecated("Version._version is private and will be removed soon")
+ def _version(self, value: _Version) -> None:
+ self._epoch = value.epoch
+ self._release = value.release
+ self._dev = value.dev
+ self._pre = value.pre
+ self._post = value.post
+ self._local = value.local
+ self._key_cache = None
+
+ def __repr__(self) -> str:
+ """A representation of the Version that shows all internal state.
+
+ >>> Version('1.0.0')
+
+ """
+ return f""
+
+ def __str__(self) -> str:
+ """A string representation of the version that can be round-tripped.
+
+ >>> str(Version("1.0a5"))
+ '1.0a5'
+ """
+ # This is a hot function, so not calling self.base_version
+ version = ".".join(map(str, self.release))
+
+ # Epoch
+ if self.epoch:
+ version = f"{self.epoch}!{version}"
+
+ # Pre-release
+ if self.pre is not None:
+ version += "".join(map(str, self.pre))
+
+ # Post-release
+ if self.post is not None:
+ version += f".post{self.post}"
+
+ # Development release
+ if self.dev is not None:
+ version += f".dev{self.dev}"
+
+ # Local version segment
+ if self.local is not None:
+ version += f"+{self.local}"
+
+ return version
+
+ @property
+ def _str(self) -> str:
+ """Internal property for match_args"""
+ return str(self)
+
+ @property
+ def epoch(self) -> int:
+ """The epoch of the version.
+
+ >>> Version("2.0.0").epoch
+ 0
+ >>> Version("1!2.0.0").epoch
+ 1
+ """
+ return self._epoch
+
+ @property
+ def release(self) -> tuple[int, ...]:
+ """The components of the "release" segment of the version.
+
+ >>> Version("1.2.3").release
+ (1, 2, 3)
+ >>> Version("2.0.0").release
+ (2, 0, 0)
+ >>> Version("1!2.0.0.post0").release
+ (2, 0, 0)
+
+ Includes trailing zeroes but not the epoch or any pre-release / development /
+ post-release suffixes.
+ """
+ return self._release
+
+ @property
+ def pre(self) -> tuple[str, int] | None:
+ """The pre-release segment of the version.
+
+ >>> print(Version("1.2.3").pre)
+ None
+ >>> Version("1.2.3a1").pre
+ ('a', 1)
+ >>> Version("1.2.3b1").pre
+ ('b', 1)
+ >>> Version("1.2.3rc1").pre
+ ('rc', 1)
+ """
+ return self._pre
+
+ @property
+ def post(self) -> int | None:
+ """The post-release number of the version.
+
+ >>> print(Version("1.2.3").post)
+ None
+ >>> Version("1.2.3.post1").post
+ 1
+ """
+ return self._post[1] if self._post else None
+
+ @property
+ def dev(self) -> int | None:
+ """The development number of the version.
+
+ >>> print(Version("1.2.3").dev)
+ None
+ >>> Version("1.2.3.dev1").dev
+ 1
+ """
+ return self._dev[1] if self._dev else None
+
+ @property
+ def local(self) -> str | None:
+ """The local version segment of the version.
+
+ >>> print(Version("1.2.3").local)
+ None
+ >>> Version("1.2.3+abc").local
+ 'abc'
+ """
+ if self._local:
+ return ".".join(str(x) for x in self._local)
+ else:
+ return None
+
+ @property
+ def public(self) -> str:
+ """The public portion of the version.
+
+ >>> Version("1.2.3").public
+ '1.2.3'
+ >>> Version("1.2.3+abc").public
+ '1.2.3'
+ >>> Version("1!1.2.3dev1+abc").public
+ '1!1.2.3.dev1'
+ """
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self) -> str:
+ """The "base version" of the version.
+
+ >>> Version("1.2.3").base_version
+ '1.2.3'
+ >>> Version("1.2.3+abc").base_version
+ '1.2.3'
+ >>> Version("1!1.2.3dev1+abc").base_version
+ '1!1.2.3'
+
+ The "base version" is the public version of the project without any pre or post
+ release markers.
+ """
+ release_segment = ".".join(map(str, self.release))
+ return f"{self.epoch}!{release_segment}" if self.epoch else release_segment
+
+ @property
+ def is_prerelease(self) -> bool:
+ """Whether this version is a pre-release.
+
+ >>> Version("1.2.3").is_prerelease
+ False
+ >>> Version("1.2.3a1").is_prerelease
+ True
+ >>> Version("1.2.3b1").is_prerelease
+ True
+ >>> Version("1.2.3rc1").is_prerelease
+ True
+ >>> Version("1.2.3dev1").is_prerelease
+ True
+ """
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self) -> bool:
+ """Whether this version is a post-release.
+
+ >>> Version("1.2.3").is_postrelease
+ False
+ >>> Version("1.2.3.post1").is_postrelease
+ True
+ """
+ return self.post is not None
+
+ @property
+ def is_devrelease(self) -> bool:
+ """Whether this version is a development release.
+
+ >>> Version("1.2.3").is_devrelease
+ False
+ >>> Version("1.2.3.dev1").is_devrelease
+ True
+ """
+ return self.dev is not None
+
+ @property
+ def major(self) -> int:
+ """The first item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").major
+ 1
+ """
+ return self.release[0] if len(self.release) >= 1 else 0
+
+ @property
+ def minor(self) -> int:
+ """The second item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").minor
+ 2
+ >>> Version("1").minor
+ 0
+ """
+ return self.release[1] if len(self.release) >= 2 else 0
+
+ @property
+ def micro(self) -> int:
+ """The third item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").micro
+ 3
+ >>> Version("1").micro
+ 0
+ """
+ return self.release[2] if len(self.release) >= 3 else 0
+
+
+class _TrimmedRelease(Version):
+ __slots__ = ()
+
+ def __init__(self, version: str | Version) -> None:
+ if isinstance(version, Version):
+ self._epoch = version._epoch
+ self._release = version._release
+ self._dev = version._dev
+ self._pre = version._pre
+ self._post = version._post
+ self._local = version._local
+ self._key_cache = version._key_cache
+ return
+ super().__init__(version) # pragma: no cover
+
+ @property
+ def release(self) -> tuple[int, ...]:
+ """
+ Release segment without any trailing zeros.
+
+ >>> _TrimmedRelease('1.0.0').release
+ (1,)
+ >>> _TrimmedRelease('0.0').release
+ (0,)
+ """
+ # This leaves one 0.
+ rel = super().release
+ len_release = len(rel)
+ i = len_release
+ while i > 1 and rel[i - 1] == 0:
+ i -= 1
+ return rel if i == len_release else rel[:i]
+
+
+def _parse_letter_version(
+ letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+ if letter:
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ letter = _LETTER_NORMALIZATION.get(letter, letter)
+
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ return letter, int(number or 0)
+
+ if number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ return "post", int(number)
+
+ return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+ return None
+
+
+def _cmpkey(
+ epoch: int,
+ release: tuple[int, ...],
+ pre: tuple[str, int] | None,
+ post: tuple[str, int] | None,
+ dev: tuple[str, int] | None,
+ local: LocalType | None,
+) -> CmpKey:
+ # When we compare a release version, we want to compare it with all of the
+ # trailing zeros removed. We will use this for our sorting key.
+ len_release = len(release)
+ i = len_release
+ while i and release[i - 1] == 0:
+ i -= 1
+ _release = release if i == len_release else release[:i]
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ _pre: CmpPrePostDevType = NegativeInfinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ _pre = Infinity
+ else:
+ _pre = pre
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ _post: CmpPrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ _dev: CmpPrePostDevType = Infinity
+
+ else:
+ _dev = dev
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ _local: CmpLocalType = NegativeInfinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+ )
+
+ return epoch, _release, _pre, _post, _dev, _local
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..8fc14ea2b68c0df97b52da5802a44e0083973d26
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA
@@ -0,0 +1,412 @@
+Metadata-Version: 2.4
+Name: typer
+Version: 0.24.1
+Summary: Typer, build great CLIs. Easy to code. Based on Python type hints.
+Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?=
+License-Expression: MIT
+License-File: LICENSE
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development
+Classifier: Typing :: Typed
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Project-URL: Homepage, https://github.com/fastapi/typer
+Project-URL: Documentation, https://typer.tiangolo.com
+Project-URL: Repository, https://github.com/fastapi/typer
+Project-URL: Issues, https://github.com/fastapi/typer/issues
+Project-URL: Changelog, https://typer.tiangolo.com/release-notes/
+Requires-Python: >=3.10
+Requires-Dist: click>=8.2.1
+Requires-Dist: shellingham>=1.3.0
+Requires-Dist: rich>=12.3.0
+Requires-Dist: annotated-doc>=0.0.2
+Description-Content-Type: text/markdown
+
+
+
+
+
+
+ Typer, build great CLIs. Easy to code. Based on Python type hints.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+---
+
+**Documentation**: https://typer.tiangolo.com
+
+**Source Code**: https://github.com/fastapi/typer
+
+---
+
+Typer is a library for building CLI applications that users will **love using** and developers will **love creating**. Based on Python type hints.
+
+It's also a command line tool to run scripts, automatically converting them to CLI applications.
+
+The key features are:
+
+* **Intuitive to write**: Great editor support. Completion everywhere. Less time debugging. Designed to be easy to use and learn. Less time reading docs.
+* **Easy to use**: It's easy to use for the final users. Automatic help, and automatic completion for all shells.
+* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs.
+* **Start simple**: The simplest example adds only 2 lines of code to your app: **1 import, 1 function call**.
+* **Grow large**: Grow in complexity as much as you want, create arbitrarily complex trees of commands and groups of subcommands, with options and arguments.
+* **Run scripts**: Typer includes a `typer` command/program that you can use to run scripts, automatically converting them to CLIs, even if they don't use Typer internally.
+
+## 2026 February - Typer developer survey
+
+Help us define Typer's future by filling the Typer developer survey . ✨
+
+## FastAPI of CLIs
+
+**Typer** is FastAPI 's little sibling, it's the FastAPI of CLIs.
+
+## Installation
+
+Create and activate a virtual environment and then install **Typer**:
+
+
+
+```console
+$ pip install typer
+---> 100%
+Successfully installed typer rich shellingham
+```
+
+
+
+## Example
+
+### The absolute minimum
+
+* Create a file `main.py` with:
+
+```Python
+def main(name: str):
+ print(f"Hello {name}")
+```
+
+This script doesn't even use Typer internally. But you can use the `typer` command to run it as a CLI application.
+
+### Run it
+
+Run your application with the `typer` command:
+
+
+
+```console
+// Run your application
+$ typer main.py run
+
+// You get a nice error, you are missing NAME
+Usage: typer [PATH_OR_MODULE] run [OPTIONS] NAME
+Try 'typer [PATH_OR_MODULE] run --help' for help.
+╭─ Error ───────────────────────────────────────────╮
+│ Missing argument 'NAME'. │
+╰───────────────────────────────────────────────────╯
+
+
+// You get a --help for free
+$ typer main.py run --help
+
+Usage: typer [PATH_OR_MODULE] run [OPTIONS] NAME
+
+Run the provided Typer app.
+
+╭─ Arguments ───────────────────────────────────────╮
+│ * name TEXT [default: None] [required] |
+╰───────────────────────────────────────────────────╯
+╭─ Options ─────────────────────────────────────────╮
+│ --help Show this message and exit. │
+╰───────────────────────────────────────────────────╯
+
+// Now pass the NAME argument
+$ typer main.py run Camila
+
+Hello Camila
+
+// It works! 🎉
+```
+
+
+
+This is the simplest use case, not even using Typer internally, but it can already be quite useful for simple scripts.
+
+**Note**: auto-completion works when you create a Python package and run it with `--install-completion` or when you use the `typer` command.
+
+## Use Typer in your code
+
+Now let's start using Typer in your own code, update `main.py` with:
+
+```Python
+import typer
+
+
+def main(name: str):
+ print(f"Hello {name}")
+
+
+if __name__ == "__main__":
+ typer.run(main)
+```
+
+Now you could run it with Python directly:
+
+
+
+```console
+// Run your application
+$ python main.py
+
+// You get a nice error, you are missing NAME
+Usage: main.py [OPTIONS] NAME
+Try 'main.py --help' for help.
+╭─ Error ───────────────────────────────────────────╮
+│ Missing argument 'NAME'. │
+╰───────────────────────────────────────────────────╯
+
+
+// You get a --help for free
+$ python main.py --help
+
+Usage: main.py [OPTIONS] NAME
+
+╭─ Arguments ───────────────────────────────────────╮
+│ * name TEXT [default: None] [required] |
+╰───────────────────────────────────────────────────╯
+╭─ Options ─────────────────────────────────────────╮
+│ --help Show this message and exit. │
+╰───────────────────────────────────────────────────╯
+
+// Now pass the NAME argument
+$ python main.py Camila
+
+Hello Camila
+
+// It works! 🎉
+```
+
+
+
+**Note**: you can also call this same script with the `typer` command, but you don't need to.
+
+## Example upgrade
+
+This was the simplest example possible.
+
+Now let's see one a bit more complex.
+
+### An example with two subcommands
+
+Modify the file `main.py`.
+
+Create a `typer.Typer()` app, and create two subcommands with their parameters.
+
+```Python hl_lines="3 6 11 20"
+import typer
+
+app = typer.Typer()
+
+
+@app.command()
+def hello(name: str):
+ print(f"Hello {name}")
+
+
+@app.command()
+def goodbye(name: str, formal: bool = False):
+ if formal:
+ print(f"Goodbye Ms. {name}. Have a good day.")
+ else:
+ print(f"Bye {name}!")
+
+
+if __name__ == "__main__":
+ app()
+```
+
+And that will:
+
+* Explicitly create a `typer.Typer` app.
+ * The previous `typer.run` actually creates one implicitly for you.
+* Add two subcommands with `@app.command()`.
+* Execute the `app()` itself, as if it was a function (instead of `typer.run`).
+
+### Run the upgraded example
+
+Check the new help:
+
+
+
+```console
+$ python main.py --help
+
+ Usage: main.py [OPTIONS] COMMAND [ARGS]...
+
+╭─ Options ─────────────────────────────────────────╮
+│ --install-completion Install completion │
+│ for the current │
+│ shell. │
+│ --show-completion Show completion for │
+│ the current shell, │
+│ to copy it or │
+│ customize the │
+│ installation. │
+│ --help Show this message │
+│ and exit. │
+╰───────────────────────────────────────────────────╯
+╭─ Commands ────────────────────────────────────────╮
+│ goodbye │
+│ hello │
+╰───────────────────────────────────────────────────╯
+
+// When you create a package you get ✨ auto-completion ✨ for free, installed with --install-completion
+
+// You have 2 subcommands (the 2 functions): goodbye and hello
+```
+
+
+
+Now check the help for the `hello` command:
+
+
+
+```console
+$ python main.py hello --help
+
+ Usage: main.py hello [OPTIONS] NAME
+
+╭─ Arguments ───────────────────────────────────────╮
+│ * name TEXT [default: None] [required] │
+╰───────────────────────────────────────────────────╯
+╭─ Options ─────────────────────────────────────────╮
+│ --help Show this message and exit. │
+╰───────────────────────────────────────────────────╯
+```
+
+
+
+And now check the help for the `goodbye` command:
+
+
+
+```console
+$ python main.py goodbye --help
+
+ Usage: main.py goodbye [OPTIONS] NAME
+
+╭─ Arguments ───────────────────────────────────────╮
+│ * name TEXT [default: None] [required] │
+╰───────────────────────────────────────────────────╯
+╭─ Options ─────────────────────────────────────────╮
+│ --formal --no-formal [default: no-formal] │
+│ --help Show this message │
+│ and exit. │
+╰───────────────────────────────────────────────────╯
+
+// Automatic --formal and --no-formal for the bool option 🎉
+```
+
+
+
+Now you can try out the new command line application:
+
+
+
+```console
+// Use it with the hello command
+
+$ python main.py hello Camila
+
+Hello Camila
+
+// And with the goodbye command
+
+$ python main.py goodbye Camila
+
+Bye Camila!
+
+// And with --formal
+
+$ python main.py goodbye --formal Camila
+
+Goodbye Ms. Camila. Have a good day.
+```
+
+
+
+**Note**: If your app only has one command, by default the command name is **omitted** in usage: `python main.py Camila`. However, when there are multiple commands, you must **explicitly include the command name**: `python main.py hello Camila`. See [One or Multiple Commands](https://typer.tiangolo.com/tutorial/commands/one-or-multiple/) for more details.
+
+### Recap
+
+In summary, you declare **once** the types of parameters (*CLI arguments* and *CLI options*) as function parameters.
+
+You do that with standard modern Python types.
+
+You don't have to learn a new syntax, the methods or classes of a specific library, etc.
+
+Just standard **Python**.
+
+For example, for an `int`:
+
+```Python
+total: int
+```
+
+or for a `bool` flag:
+
+```Python
+force: bool
+```
+
+And similarly for **files**, **paths**, **enums** (choices), etc. And there are tools to create **groups of subcommands**, add metadata, extra **validation**, etc.
+
+**You get**: great editor support, including **completion** and **type checks** everywhere.
+
+**Your users get**: automatic **`--help`**, **auto-completion** in their terminal (Bash, Zsh, Fish, PowerShell) when they install your package or when using the `typer` command.
+
+For a more complete example including more features, see the Tutorial - User Guide .
+
+## Dependencies
+
+**Typer** stands on the shoulders of giants. It has three required dependencies:
+
+* Click : a popular tool for building CLIs in Python. Typer is based on it.
+* rich : to show nicely formatted errors automatically.
+* shellingham : to automatically detect the current shell when installing completion.
+
+### `typer-slim`
+
+There used to be a slimmed-down version of Typer called `typer-slim`, which didn't include the dependencies `rich` and `shellingham`, nor the `typer` command.
+
+However, since version 0.22.0, we have stopped supporting this, and `typer-slim` now simply installs (all of) Typer.
+
+If you want to disable Rich globally, you can set an environmental variable `TYPER_USE_RICH` to `False` or `0`.
+
+## License
+
+This project is licensed under the terms of the MIT license.
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..52219d571f39b0935f47864ba03515bb96fb2f46
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD
@@ -0,0 +1,22 @@
+typer-0.24.1.dist-info/METADATA,sha256=V4OWoWjBhPNcoIaOxhr1cszo69nePKOHMRXERkMscKs,16057
+typer-0.24.1.dist-info/WHEEL,sha256=Wb0ASbVj8JvWHpOiIpPi7ucfIgJeCi__PzivviEAQFc,90
+typer-0.24.1.dist-info/entry_points.txt,sha256=YO13ByiqWeuas9V0JADLUARZFUe_cwU_7wmTNvxBYQ8,57
+typer-0.24.1.dist-info/licenses/LICENSE,sha256=WJks68-N-25AxOIRLtEhJsJDZm3KORKj14t-ysSFnUk,1086
+typer/__init__.py,sha256=WOelHJu4PW0hk9nfjEX0Qxssb58NCh1km_Xq5LY_33s,1596
+typer/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30
+typer/_completion_classes.py,sha256=R9v4D8pJ_-n8fLOuyxrRSu7sP5lpXIy5fsLUW8zwsDU,7039
+typer/_completion_shared.py,sha256=-uhCUIMc2S1ywdB-fBSSccH70mIBEsVTxHomcmy-klE,9129
+typer/_types.py,sha256=0lcBDLcsxqr1sxTsqObj_u0Dfa37lWJYUY4PNkX4QlA,974
+typer/_typing.py,sha256=QOw5o-B2L--C3ly2DQH6aUwag6x5brV5FhVaBZ5gzMg,1727
+typer/cli.py,sha256=icRbazvdRdbYeaidPZOmJDOzrP3RAa7vj2INVV9Zb8Q,10183
+typer/colors.py,sha256=e42j8uB520hLpX5C_0fiR3OOoIFMbhO3ADZvv6hlAV8,430
+typer/completion.py,sha256=FRTR9hP_IPdJp-4GXPOq0btXo5SvgAtLVfS3ZkAMpgQ,4793
+typer/core.py,sha256=O5NywSwHPyYbLhZkPYSfwIj7Za2hPnoPP4xPXRa97a0,27947
+typer/main.py,sha256=xyNex-QfGUi-enu9j9rl-_wofApxs5VwdpCthAUAAkk,69005
+typer/models.py,sha256=OwPG3MAXiUD5ih3p8eNVciXUsL07UIJfNWy3JiNpDfg,19843
+typer/params.py,sha256=AovViRtl-VvUIXnmKKpnxoWK9_gHUbyQgXxxv3h_7lI,59713
+typer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+typer/rich_utils.py,sha256=RTyeoxwz16ZZXYbwoEixB_LSEnqpoStG_TGCRTz6zFQ,25424
+typer/testing.py,sha256=-ovLNjUNNEFCJoau-41iTJIobsjPbqyTrRq7-8ac4z4,871
+typer/utils.py,sha256=wnJ1DWXBFMnxLHaMN_HDYntxLRby0K-rux63aokHInI,7599
+typer-0.24.1.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..18c430c1f6411c0532096b13de4384ab50a79abd
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: pdm-backend (2.4.7)
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ca44c05a071b87fe02a3724bd895838305fe4e5d
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+typer = typer.cli:main
+
+[gui_scripts]
+
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..a7694736cf37716aafec14b24aa8d6316ebe07a3
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2019 Sebastián Ramírez
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb0921c06c9c0eddcb32ef016236ea98c2a42375
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py
@@ -0,0 +1,39 @@
+"""Typer, build great CLIs. Easy to code. Based on Python type hints."""
+
+__version__ = "0.24.1"
+
+from shutil import get_terminal_size as get_terminal_size
+
+from click.exceptions import Abort as Abort
+from click.exceptions import BadParameter as BadParameter
+from click.exceptions import Exit as Exit
+from click.termui import clear as clear
+from click.termui import confirm as confirm
+from click.termui import echo_via_pager as echo_via_pager
+from click.termui import edit as edit
+from click.termui import getchar as getchar
+from click.termui import pause as pause
+from click.termui import progressbar as progressbar
+from click.termui import prompt as prompt
+from click.termui import secho as secho
+from click.termui import style as style
+from click.termui import unstyle as unstyle
+from click.utils import echo as echo
+from click.utils import format_filename as format_filename
+from click.utils import get_app_dir as get_app_dir
+from click.utils import get_binary_stream as get_binary_stream
+from click.utils import get_text_stream as get_text_stream
+from click.utils import open_file as open_file
+
+from . import colors as colors
+from .main import Typer as Typer
+from .main import launch as launch
+from .main import run as run
+from .models import CallbackParam as CallbackParam
+from .models import Context as Context
+from .models import FileBinaryRead as FileBinaryRead
+from .models import FileBinaryWrite as FileBinaryWrite
+from .models import FileText as FileText
+from .models import FileTextWrite as FileTextWrite
+from .params import Argument as Argument
+from .params import Option as Option
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e28416e104515e90fca4b69cc60d0c61fd15d61
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py
@@ -0,0 +1,3 @@
+from .cli import main
+
+main()
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py
new file mode 100644
index 0000000000000000000000000000000000000000..8548fb4d6a3e2e274811b0935276e1c0e6f2a9a0
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py
@@ -0,0 +1,199 @@
+import importlib.util
+import os
+import re
+import sys
+from typing import Any
+
+import click
+import click.parser
+import click.shell_completion
+from click.shell_completion import split_arg_string as click_split_arg_string
+
+from ._completion_shared import (
+ COMPLETION_SCRIPT_BASH,
+ COMPLETION_SCRIPT_FISH,
+ COMPLETION_SCRIPT_POWER_SHELL,
+ COMPLETION_SCRIPT_ZSH,
+ Shells,
+)
+
+
+def _sanitize_help_text(text: str) -> str:
+ """Sanitizes the help text by removing rich tags"""
+ if not importlib.util.find_spec("rich"):
+ return text
+ from . import rich_utils
+
+ return rich_utils.rich_render_text(text)
+
+
+class BashComplete(click.shell_completion.BashComplete):
+ name = Shells.bash.value
+ source_template = COMPLETION_SCRIPT_BASH
+
+ def source_vars(self) -> dict[str, Any]:
+ return {
+ "complete_func": self.func_name,
+ "autocomplete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ cwords = click_split_arg_string(os.environ["COMP_WORDS"])
+ cword = int(os.environ["COMP_CWORD"])
+ args = cwords[1:cword]
+
+ try:
+ incomplete = cwords[cword]
+ except IndexError:
+ incomplete = ""
+
+ return args, incomplete
+
+ def format_completion(self, item: click.shell_completion.CompletionItem) -> str:
+ # TODO: Explore replicating the new behavior from Click, with item types and
+ # triggering completion for files and directories
+ # return f"{item.type},{item.value}"
+ return f"{item.value}"
+
+ def complete(self) -> str:
+ args, incomplete = self.get_completion_args()
+ completions = self.get_completions(args, incomplete)
+ out = [self.format_completion(item) for item in completions]
+ return "\n".join(out)
+
+
+class ZshComplete(click.shell_completion.ZshComplete):
+ name = Shells.zsh.value
+ source_template = COMPLETION_SCRIPT_ZSH
+
+ def source_vars(self) -> dict[str, Any]:
+ return {
+ "complete_func": self.func_name,
+ "autocomplete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "")
+ cwords = click_split_arg_string(completion_args)
+ args = cwords[1:]
+ if args and not completion_args.endswith(" "):
+ incomplete = args[-1]
+ args = args[:-1]
+ else:
+ incomplete = ""
+ return args, incomplete
+
+ def format_completion(self, item: click.shell_completion.CompletionItem) -> str:
+ def escape(s: str) -> str:
+ return (
+ s.replace('"', '""')
+ .replace("'", "''")
+ .replace("$", "\\$")
+ .replace("`", "\\`")
+ .replace(":", r"\\:")
+ )
+
+ # TODO: Explore replicating the new behavior from Click, pay attention to
+ # the difference with and without escape
+ # return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}"
+ if item.help:
+ return f'"{escape(item.value)}":"{_sanitize_help_text(escape(item.help))}"'
+ else:
+ return f'"{escape(item.value)}"'
+
+ def complete(self) -> str:
+ args, incomplete = self.get_completion_args()
+ completions = self.get_completions(args, incomplete)
+ res = [self.format_completion(item) for item in completions]
+ if res:
+ args_str = "\n".join(res)
+ return f"_arguments '*: :(({args_str}))'"
+ else:
+ return "_files"
+
+
+class FishComplete(click.shell_completion.FishComplete):
+ name = Shells.fish.value
+ source_template = COMPLETION_SCRIPT_FISH
+
+ def source_vars(self) -> dict[str, Any]:
+ return {
+ "complete_func": self.func_name,
+ "autocomplete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "")
+ cwords = click_split_arg_string(completion_args)
+ args = cwords[1:]
+ if args and not completion_args.endswith(" "):
+ incomplete = args[-1]
+ args = args[:-1]
+ else:
+ incomplete = ""
+ return args, incomplete
+
+ def format_completion(self, item: click.shell_completion.CompletionItem) -> str:
+ # TODO: Explore replicating the new behavior from Click, pay attention to
+ # the difference with and without formatted help
+ # if item.help:
+ # return f"{item.type},{item.value}\t{item.help}"
+
+ # return f"{item.type},{item.value}
+ if item.help:
+ formatted_help = re.sub(r"\s", " ", item.help)
+ return f"{item.value}\t{_sanitize_help_text(formatted_help)}"
+ else:
+ return f"{item.value}"
+
+ def complete(self) -> str:
+ complete_action = os.getenv("_TYPER_COMPLETE_FISH_ACTION", "")
+ args, incomplete = self.get_completion_args()
+ completions = self.get_completions(args, incomplete)
+ show_args = [self.format_completion(item) for item in completions]
+ if complete_action == "get-args":
+ if show_args:
+ return "\n".join(show_args)
+ elif complete_action == "is-args":
+ if show_args:
+ # Activate complete args (no files)
+ sys.exit(0)
+ else:
+ # Deactivate complete args (allow files)
+ sys.exit(1)
+ return "" # pragma: no cover
+
+
+class PowerShellComplete(click.shell_completion.ShellComplete):
+ name = Shells.powershell.value
+ source_template = COMPLETION_SCRIPT_POWER_SHELL
+
+ def source_vars(self) -> dict[str, Any]:
+ return {
+ "complete_func": self.func_name,
+ "autocomplete_var": self.complete_var,
+ "prog_name": self.prog_name,
+ }
+
+ def get_completion_args(self) -> tuple[list[str], str]:
+ completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "")
+ incomplete = os.getenv("_TYPER_COMPLETE_WORD_TO_COMPLETE", "")
+ cwords = click_split_arg_string(completion_args)
+ args = cwords[1:-1] if incomplete else cwords[1:]
+ return args, incomplete
+
+ def format_completion(self, item: click.shell_completion.CompletionItem) -> str:
+ return f"{item.value}:::{_sanitize_help_text(item.help) if item.help else ' '}"
+
+
+def completion_init() -> None:
+ click.shell_completion.add_completion_class(BashComplete, Shells.bash.value)
+ click.shell_completion.add_completion_class(ZshComplete, Shells.zsh.value)
+ click.shell_completion.add_completion_class(FishComplete, Shells.fish.value)
+ click.shell_completion.add_completion_class(
+ PowerShellComplete, Shells.powershell.value
+ )
+ click.shell_completion.add_completion_class(PowerShellComplete, Shells.pwsh.value)
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a81dcf68cddda410becd393e5cf0d7e9e83042e
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py
@@ -0,0 +1,252 @@
+import os
+import re
+import subprocess
+from enum import Enum
+from pathlib import Path
+
+import click
+import shellingham
+
+
+class Shells(str, Enum):
+ bash = "bash"
+ zsh = "zsh"
+ fish = "fish"
+ powershell = "powershell"
+ pwsh = "pwsh"
+
+
+COMPLETION_SCRIPT_BASH = """
+%(complete_func)s() {
+ local IFS=$'\n'
+ COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ %(autocomplete_var)s=complete_bash $1 ) )
+ return 0
+}
+
+complete -o default -F %(complete_func)s %(prog_name)s
+"""
+
+COMPLETION_SCRIPT_ZSH = """
+#compdef %(prog_name)s
+
+%(complete_func)s() {
+ eval $(env _TYPER_COMPLETE_ARGS="${words[1,$CURRENT]}" %(autocomplete_var)s=complete_zsh %(prog_name)s)
+}
+
+compdef %(complete_func)s %(prog_name)s
+"""
+
+COMPLETION_SCRIPT_FISH = 'complete --command %(prog_name)s --no-files --arguments "(env %(autocomplete_var)s=complete_fish _TYPER_COMPLETE_FISH_ACTION=get-args _TYPER_COMPLETE_ARGS=(commandline -cp) %(prog_name)s)" --condition "env %(autocomplete_var)s=complete_fish _TYPER_COMPLETE_FISH_ACTION=is-args _TYPER_COMPLETE_ARGS=(commandline -cp) %(prog_name)s"'
+
+COMPLETION_SCRIPT_POWER_SHELL = """
+Import-Module PSReadLine
+Set-PSReadLineKeyHandler -Chord Tab -Function MenuComplete
+$scriptblock = {
+ param($wordToComplete, $commandAst, $cursorPosition)
+ $Env:%(autocomplete_var)s = "complete_powershell"
+ $Env:_TYPER_COMPLETE_ARGS = $commandAst.ToString()
+ $Env:_TYPER_COMPLETE_WORD_TO_COMPLETE = $wordToComplete
+ %(prog_name)s | ForEach-Object {
+ $commandArray = $_ -Split ":::"
+ $command = $commandArray[0]
+ $helpString = $commandArray[1]
+ [System.Management.Automation.CompletionResult]::new(
+ $command, $command, 'ParameterValue', $helpString)
+ }
+ $Env:%(autocomplete_var)s = ""
+ $Env:_TYPER_COMPLETE_ARGS = ""
+ $Env:_TYPER_COMPLETE_WORD_TO_COMPLETE = ""
+}
+Register-ArgumentCompleter -Native -CommandName %(prog_name)s -ScriptBlock $scriptblock
+"""
+
+_completion_scripts = {
+ "bash": COMPLETION_SCRIPT_BASH,
+ "zsh": COMPLETION_SCRIPT_ZSH,
+ "fish": COMPLETION_SCRIPT_FISH,
+ "powershell": COMPLETION_SCRIPT_POWER_SHELL,
+ "pwsh": COMPLETION_SCRIPT_POWER_SHELL,
+}
+
+# TODO: Probably refactor this, copied from Click 7.x
+_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]")
+
+
+def get_completion_script(*, prog_name: str, complete_var: str, shell: str) -> str:
+ cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_"))
+ script = _completion_scripts.get(shell)
+ if script is None:
+ click.echo(f"Shell {shell} not supported.", err=True)
+ raise click.exceptions.Exit(1)
+ return (
+ script
+ % {
+ "complete_func": f"_{cf_name}_completion",
+ "prog_name": prog_name,
+ "autocomplete_var": complete_var,
+ }
+ ).strip()
+
+
+def install_bash(*, prog_name: str, complete_var: str, shell: str) -> Path:
+ # Ref: https://github.com/scop/bash-completion#faq
+ # It seems bash-completion is the official completion system for bash:
+ # Ref: https://www.gnu.org/software/bash/manual/html_node/A-Programmable-Completion-Example.html
+ # But installing in the locations from the docs doesn't seem to have effect
+ completion_path = Path.home() / ".bash_completions" / f"{prog_name}.sh"
+ rc_path = Path.home() / ".bashrc"
+ rc_path.parent.mkdir(parents=True, exist_ok=True)
+ rc_content = ""
+ if rc_path.is_file():
+ rc_content = rc_path.read_text()
+ completion_init_lines = [f"source '{completion_path}'"]
+ for line in completion_init_lines:
+ if line not in rc_content: # pragma: no cover
+ rc_content += f"\n{line}"
+ rc_content += "\n"
+ rc_path.write_text(rc_content)
+ # Install completion
+ completion_path.parent.mkdir(parents=True, exist_ok=True)
+ script_content = get_completion_script(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ completion_path.write_text(script_content)
+ return completion_path
+
+
+def install_zsh(*, prog_name: str, complete_var: str, shell: str) -> Path:
+ # Setup Zsh and load ~/.zfunc
+ zshrc_path = Path.home() / ".zshrc"
+ zshrc_path.parent.mkdir(parents=True, exist_ok=True)
+ zshrc_content = ""
+ if zshrc_path.is_file():
+ zshrc_content = zshrc_path.read_text()
+ completion_line = "fpath+=~/.zfunc; autoload -Uz compinit; compinit"
+ if completion_line not in zshrc_content:
+ zshrc_content += f"\n{completion_line}\n"
+ style_line = "zstyle ':completion:*' menu select"
+ # TODO: consider setting the style only for the current program
+ # style_line = f"zstyle ':completion:*:*:{prog_name}:*' menu select"
+ # Install zstyle completion config only if the user doesn't have a customization
+ if "zstyle" not in zshrc_content:
+ zshrc_content += f"\n{style_line}\n"
+ zshrc_content = f"{zshrc_content.strip()}\n"
+ zshrc_path.write_text(zshrc_content)
+ # Install completion under ~/.zfunc/
+ path_obj = Path.home() / f".zfunc/_{prog_name}"
+ path_obj.parent.mkdir(parents=True, exist_ok=True)
+ script_content = get_completion_script(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ path_obj.write_text(script_content)
+ return path_obj
+
+
+def install_fish(*, prog_name: str, complete_var: str, shell: str) -> Path:
+ path_obj = Path.home() / f".config/fish/completions/{prog_name}.fish"
+ parent_dir: Path = path_obj.parent
+ parent_dir.mkdir(parents=True, exist_ok=True)
+ script_content = get_completion_script(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ path_obj.write_text(f"{script_content}\n")
+ return path_obj
+
+
+def install_powershell(*, prog_name: str, complete_var: str, shell: str) -> Path:
+ subprocess.run(
+ [
+ shell,
+ "-Command",
+ "Set-ExecutionPolicy",
+ "Unrestricted",
+ "-Scope",
+ "CurrentUser",
+ ]
+ )
+ result = subprocess.run(
+ [shell, "-NoProfile", "-Command", "echo", "$profile"],
+ check=True,
+ stdout=subprocess.PIPE,
+ )
+ if result.returncode != 0: # pragma: no cover
+ click.echo("Couldn't get PowerShell user profile", err=True)
+ raise click.exceptions.Exit(result.returncode)
+ path_str = ""
+ if isinstance(result.stdout, str): # pragma: no cover
+ path_str = result.stdout
+ if isinstance(result.stdout, bytes):
+ for encoding in ["windows-1252", "utf8", "cp850"]:
+ try:
+ path_str = result.stdout.decode(encoding)
+ break
+ except UnicodeDecodeError: # pragma: no cover
+ pass
+ if not path_str: # pragma: no cover
+ click.echo("Couldn't decode the path automatically", err=True)
+ raise click.exceptions.Exit(1)
+ path_obj = Path(path_str.strip())
+ parent_dir: Path = path_obj.parent
+ parent_dir.mkdir(parents=True, exist_ok=True)
+ script_content = get_completion_script(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ with path_obj.open(mode="a") as f:
+ f.write(f"{script_content}\n")
+ return path_obj
+
+
+def install(
+ shell: str | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+) -> tuple[str, Path]:
+ prog_name = prog_name or click.get_current_context().find_root().info_name
+ assert prog_name
+ if complete_var is None:
+ complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper())
+ test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION")
+ if shell is None and not test_disable_detection:
+ shell = _get_shell_name()
+ if shell == "bash":
+ installed_path = install_bash(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ return shell, installed_path
+ elif shell == "zsh":
+ installed_path = install_zsh(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ return shell, installed_path
+ elif shell == "fish":
+ installed_path = install_fish(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ return shell, installed_path
+ elif shell in {"powershell", "pwsh"}:
+ installed_path = install_powershell(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ return shell, installed_path
+ else:
+ click.echo(f"Shell {shell} is not supported.")
+ raise click.exceptions.Exit(1)
+
+
+def _get_shell_name() -> str | None:
+ """Get the current shell name, if available.
+
+ The name will always be lowercase. If the shell cannot be detected, None is
+ returned.
+ """
+ name: str | None # N.B. shellingham is untyped
+ try:
+ # N.B. detect_shell returns a tuple of (shell name, shell command).
+ # We only need the name.
+ name, _cmd = shellingham.detect_shell() # noqa: TID251
+ except shellingham.ShellDetectionFailure: # pragma: no cover
+ name = None
+
+ return name
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc9fc63220d91b5c7ecc16170bb9de7afb135fb4
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py
@@ -0,0 +1,27 @@
+from enum import Enum
+from typing import TypeVar
+
+import click
+
+ParamTypeValue = TypeVar("ParamTypeValue")
+
+
+class TyperChoice(click.Choice[ParamTypeValue]):
+ def normalize_choice(
+ self, choice: ParamTypeValue, ctx: click.Context | None
+ ) -> str:
+ # Click 8.2.0 added a new method `normalize_choice` to the `Choice` class
+ # to support enums, but it uses the enum names, while Typer has always used the
+ # enum values.
+ # This class overrides that method to maintain the previous behavior.
+ # In Click:
+ # normed_value = choice.name if isinstance(choice, Enum) else str(choice)
+ normed_value = str(choice.value) if isinstance(choice, Enum) else str(choice)
+
+ if ctx is not None and ctx.token_normalize_func is not None:
+ normed_value = ctx.token_normalize_func(normed_value)
+
+ if not self.case_sensitive:
+ normed_value = normed_value.casefold()
+
+ return normed_value
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py
new file mode 100644
index 0000000000000000000000000000000000000000..218f674c2220589738656294fdd8ec243965376f
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py
@@ -0,0 +1,73 @@
+# Copied from pydantic 1.9.2 (the latest version to support python 3.6.)
+# https://github.com/pydantic/pydantic/blob/v1.9.2/pydantic/typing.py
+# Reduced drastically to only include Typer-specific 3.9+ functionality
+# mypy: ignore-errors
+
+import types
+from collections.abc import Callable
+from typing import (
+ Annotated,
+ Any,
+ Literal,
+ Union,
+ get_args,
+ get_origin,
+ get_type_hints,
+)
+
+
+def is_union(tp: type[Any] | None) -> bool:
+ return tp is Union or tp is types.UnionType # noqa: E721
+
+
+__all__ = (
+ "NoneType",
+ "is_none_type",
+ "is_callable_type",
+ "is_literal_type",
+ "all_literal_values",
+ "is_union",
+ "Annotated",
+ "Literal",
+ "get_args",
+ "get_origin",
+ "get_type_hints",
+)
+
+
+NoneType = None.__class__
+
+
+NONE_TYPES: tuple[Any, Any, Any] = (None, NoneType, Literal[None])
+
+
+def is_none_type(type_: Any) -> bool:
+ for none_type in NONE_TYPES:
+ if type_ is none_type:
+ return True
+ return False
+
+
+def is_callable_type(type_: type[Any]) -> bool:
+ return type_ is Callable or get_origin(type_) is Callable
+
+
+def is_literal_type(type_: type[Any]) -> bool:
+ return get_origin(type_) is Literal
+
+
+def literal_values(type_: type[Any]) -> tuple[Any, ...]:
+ return get_args(type_)
+
+
+def all_literal_values(type_: type[Any]) -> tuple[Any, ...]:
+ """
+ This method is used to retrieve all Literal values as
+ Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
+ e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`
+ """
+ if not is_literal_type(type_):
+ return (type_,)
+
+ values = literal_values(type_)
+ return tuple(x for value in values for x in all_literal_values(value))
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b4356f8bdafcd5fc9e8de6c0c95f6f8f4f8bda1
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py
@@ -0,0 +1,317 @@
+import importlib.util
+import re
+import sys
+from pathlib import Path
+from typing import Any
+
+import click
+import typer
+import typer.core
+from click import Command, Group, Option
+
+from . import __version__
+from .core import HAS_RICH, MARKUP_MODE_KEY
+
+default_app_names = ("app", "cli", "main")
+default_func_names = ("main", "cli", "app")
+
+app = typer.Typer()
+utils_app = typer.Typer(help="Extra utility commands for Typer apps.")
+app.add_typer(utils_app, name="utils")
+
+
+class State:
+ def __init__(self) -> None:
+ self.app: str | None = None
+ self.func: str | None = None
+ self.file: Path | None = None
+ self.module: str | None = None
+
+
+state = State()
+
+
+def maybe_update_state(ctx: click.Context) -> None:
+ path_or_module = ctx.params.get("path_or_module")
+ if path_or_module:
+ file_path = Path(path_or_module)
+ if file_path.exists() and file_path.is_file():
+ state.file = file_path
+ else:
+ if not re.fullmatch(r"[a-zA-Z_]\w*(\.[a-zA-Z_]\w*)*", path_or_module):
+ typer.echo(
+ f"Not a valid file or Python module: {path_or_module}", err=True
+ )
+ sys.exit(1)
+ state.module = path_or_module
+ app_name = ctx.params.get("app")
+ if app_name:
+ state.app = app_name
+ func_name = ctx.params.get("func")
+ if func_name:
+ state.func = func_name
+
+
+class TyperCLIGroup(typer.core.TyperGroup):
+ def list_commands(self, ctx: click.Context) -> list[str]:
+ self.maybe_add_run(ctx)
+ return super().list_commands(ctx)
+
+ def get_command(self, ctx: click.Context, name: str) -> Command | None: # ty: ignore[invalid-method-override]
+ self.maybe_add_run(ctx)
+ return super().get_command(ctx, name)
+
+ def invoke(self, ctx: click.Context) -> Any:
+ self.maybe_add_run(ctx)
+ return super().invoke(ctx)
+
+ def maybe_add_run(self, ctx: click.Context) -> None:
+ maybe_update_state(ctx)
+ maybe_add_run_to_cli(self)
+
+
+def get_typer_from_module(module: Any) -> typer.Typer | None:
+ # Try to get defined app
+ if state.app:
+ obj = getattr(module, state.app, None)
+ if not isinstance(obj, typer.Typer):
+ typer.echo(f"Not a Typer object: --app {state.app}", err=True)
+ sys.exit(1)
+ return obj
+ # Try to get defined function
+ if state.func:
+ func_obj = getattr(module, state.func, None)
+ if not callable(func_obj):
+ typer.echo(f"Not a function: --func {state.func}", err=True)
+ raise typer.Exit(1)
+ sub_app = typer.Typer()
+ sub_app.command()(func_obj)
+ return sub_app
+ # Iterate and get a default object to use as CLI
+ local_names = dir(module)
+ local_names_set = set(local_names)
+ # Try to get a default Typer app
+ for name in default_app_names:
+ if name in local_names_set:
+ obj = getattr(module, name, None)
+ if isinstance(obj, typer.Typer):
+ return obj
+ # Try to get any Typer app
+ for name in local_names_set - set(default_app_names):
+ obj = getattr(module, name)
+ if isinstance(obj, typer.Typer):
+ return obj
+ # Try to get a default function
+ for func_name in default_func_names:
+ func_obj = getattr(module, func_name, None)
+ if callable(func_obj):
+ sub_app = typer.Typer()
+ sub_app.command()(func_obj)
+ return sub_app
+ # Try to get any func app
+ for func_name in local_names_set - set(default_func_names):
+ func_obj = getattr(module, func_name)
+ if callable(func_obj):
+ sub_app = typer.Typer()
+ sub_app.command()(func_obj)
+ return sub_app
+ return None
+
+
+def get_typer_from_state() -> typer.Typer | None:
+ spec = None
+ if state.file:
+ module_name = state.file.name
+ spec = importlib.util.spec_from_file_location(module_name, str(state.file))
+ elif state.module:
+ spec = importlib.util.find_spec(state.module)
+ if spec is None:
+ if state.file:
+ typer.echo(f"Could not import as Python file: {state.file}", err=True)
+ else:
+ typer.echo(f"Could not import as Python module: {state.module}", err=True)
+ sys.exit(1)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module) # type: ignore
+ obj = get_typer_from_module(module)
+ return obj
+
+
+def maybe_add_run_to_cli(cli: click.Group) -> None:
+ if "run" not in cli.commands:
+ if state.file or state.module:
+ obj = get_typer_from_state()
+ if obj:
+ obj._add_completion = False
+ click_obj = typer.main.get_command(obj)
+ click_obj.name = "run"
+ if not click_obj.help:
+ click_obj.help = "Run the provided Typer app."
+ cli.add_command(click_obj)
+
+
+def print_version(ctx: click.Context, param: Option, value: bool) -> None:
+ if not value or ctx.resilient_parsing:
+ return
+ typer.echo(f"Typer version: {__version__}")
+ raise typer.Exit()
+
+
+@app.callback(cls=TyperCLIGroup, no_args_is_help=True)
+def callback(
+ ctx: typer.Context,
+ *,
+ path_or_module: str = typer.Argument(None),
+ app: str = typer.Option(None, help="The typer app object/variable to use."),
+ func: str = typer.Option(None, help="The function to convert to Typer."),
+ version: bool = typer.Option(
+ False,
+ "--version",
+ help="Print version and exit.",
+ callback=print_version,
+ ),
+) -> None:
+ """
+ Run Typer scripts with completion, without having to create a package.
+
+ You probably want to install completion for the typer command:
+
+ $ typer --install-completion
+
+ https://typer.tiangolo.com/
+ """
+ maybe_update_state(ctx)
+
+
+def get_docs_for_click(
+ *,
+ obj: Command,
+ ctx: typer.Context,
+ indent: int = 0,
+ name: str = "",
+ call_prefix: str = "",
+ title: str | None = None,
+) -> str:
+ docs = "#" * (1 + indent)
+ command_name = name or obj.name
+ if call_prefix:
+ command_name = f"{call_prefix} {command_name}"
+ if not title:
+ title = f"`{command_name}`" if command_name else "CLI"
+ docs += f" {title}\n\n"
+ rich_markup_mode = None
+ if hasattr(ctx, "obj") and isinstance(ctx.obj, dict):
+ rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None)
+ to_parse: bool = bool(HAS_RICH and (rich_markup_mode == "rich"))
+ if obj.help:
+ docs += f"{_parse_html(to_parse, obj.help)}\n\n"
+ usage_pieces = obj.collect_usage_pieces(ctx)
+ if usage_pieces:
+ docs += "**Usage**:\n\n"
+ docs += "```console\n"
+ docs += "$ "
+ if command_name:
+ docs += f"{command_name} "
+ docs += f"{' '.join(usage_pieces)}\n"
+ docs += "```\n\n"
+ args = []
+ opts = []
+ for param in obj.get_params(ctx):
+ rv = param.get_help_record(ctx)
+ if rv is not None:
+ if param.param_type_name == "argument":
+ args.append(rv)
+ elif param.param_type_name == "option":
+ opts.append(rv)
+ if args:
+ docs += "**Arguments**:\n\n"
+ for arg_name, arg_help in args:
+ docs += f"* `{arg_name}`"
+ if arg_help:
+ docs += f": {_parse_html(to_parse, arg_help)}"
+ docs += "\n"
+ docs += "\n"
+ if opts:
+ docs += "**Options**:\n\n"
+ for opt_name, opt_help in opts:
+ docs += f"* `{opt_name}`"
+ if opt_help:
+ docs += f": {_parse_html(to_parse, opt_help)}"
+ docs += "\n"
+ docs += "\n"
+ if obj.epilog:
+ docs += f"{obj.epilog}\n\n"
+ if isinstance(obj, Group):
+ group = obj
+ commands = group.list_commands(ctx)
+ if commands:
+ docs += "**Commands**:\n\n"
+ for command in commands:
+ command_obj = group.get_command(ctx, command)
+ assert command_obj
+ docs += f"* `{command_obj.name}`"
+ command_help = command_obj.get_short_help_str()
+ if command_help:
+ docs += f": {_parse_html(to_parse, command_help)}"
+ docs += "\n"
+ docs += "\n"
+ for command in commands:
+ command_obj = group.get_command(ctx, command)
+ assert command_obj
+ use_prefix = ""
+ if command_name:
+ use_prefix += f"{command_name}"
+ docs += get_docs_for_click(
+ obj=command_obj, ctx=ctx, indent=indent + 1, call_prefix=use_prefix
+ )
+ return docs
+
+
+def _parse_html(to_parse: bool, input_text: str) -> str:
+ if not to_parse:
+ return input_text
+ from . import rich_utils
+
+ return rich_utils.rich_to_html(input_text)
+
+
+@utils_app.command()
+def docs(
+ ctx: typer.Context,
+ name: str = typer.Option("", help="The name of the CLI program to use in docs."),
+ output: Path | None = typer.Option(
+ None,
+ help="An output file to write docs to, like README.md.",
+ file_okay=True,
+ dir_okay=False,
+ ),
+ title: str | None = typer.Option(
+ None,
+ help="The title for the documentation page. If not provided, the name of "
+ "the program is used.",
+ ),
+) -> None:
+ """
+ Generate Markdown docs for a Typer app.
+ """
+ typer_obj = get_typer_from_state()
+ if not typer_obj:
+ typer.echo("No Typer app found", err=True)
+ raise typer.Abort()
+ if hasattr(typer_obj, "rich_markup_mode"):
+ if not hasattr(ctx, "obj") or ctx.obj is None:
+ ctx.ensure_object(dict)
+ if isinstance(ctx.obj, dict):
+ ctx.obj[MARKUP_MODE_KEY] = typer_obj.rich_markup_mode
+ click_obj = typer.main.get_command(typer_obj)
+ docs = get_docs_for_click(obj=click_obj, ctx=ctx, name=name, title=title)
+ clean_docs = f"{docs.strip()}\n"
+ if output:
+ output.write_text(clean_docs)
+ typer.echo(f"Docs saved to: {output}")
+ else:
+ typer.echo(clean_docs)
+
+
+def main() -> Any:
+ return app()
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py
new file mode 100644
index 0000000000000000000000000000000000000000..54e7b166cb1de83321a4965cc4915824b47a7f4f
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py
@@ -0,0 +1,20 @@
+# Variable names to colors, just for completion
+BLACK = "black"
+RED = "red"
+GREEN = "green"
+YELLOW = "yellow"
+BLUE = "blue"
+MAGENTA = "magenta"
+CYAN = "cyan"
+WHITE = "white"
+
+RESET = "reset"
+
+BRIGHT_BLACK = "bright_black"
+BRIGHT_RED = "bright_red"
+BRIGHT_GREEN = "bright_green"
+BRIGHT_YELLOW = "bright_yellow"
+BRIGHT_BLUE = "bright_blue"
+BRIGHT_MAGENTA = "bright_magenta"
+BRIGHT_CYAN = "bright_cyan"
+BRIGHT_WHITE = "bright_white"
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d621e411d7bab9a6d4ab14102670adbcabaf962
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py
@@ -0,0 +1,146 @@
+import os
+import sys
+from collections.abc import MutableMapping
+from typing import Any
+
+import click
+
+from ._completion_classes import completion_init
+from ._completion_shared import Shells, _get_shell_name, get_completion_script, install
+from .models import ParamMeta
+from .params import Option
+from .utils import get_params_from_function
+
+_click_patched = False
+
+
+def get_completion_inspect_parameters() -> tuple[ParamMeta, ParamMeta]:
+ completion_init()
+ test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION")
+ if not test_disable_detection:
+ parameters = get_params_from_function(_install_completion_placeholder_function)
+ else:
+ parameters = get_params_from_function(
+ _install_completion_no_auto_placeholder_function
+ )
+ install_param, show_param = parameters.values()
+ return install_param, show_param
+
+
+def install_callback(ctx: click.Context, param: click.Parameter, value: Any) -> Any:
+ if not value or ctx.resilient_parsing:
+ return value # pragma: no cover
+ if isinstance(value, str):
+ shell, path = install(shell=value)
+ else:
+ shell, path = install()
+ click.secho(f"{shell} completion installed in {path}", fg="green")
+ click.echo("Completion will take effect once you restart the terminal")
+ sys.exit(0)
+
+
+def show_callback(ctx: click.Context, param: click.Parameter, value: Any) -> Any:
+ if not value or ctx.resilient_parsing:
+ return value # pragma: no cover
+ prog_name = ctx.find_root().info_name
+ assert prog_name
+ complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper())
+ shell = ""
+ test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION")
+ if isinstance(value, str):
+ shell = value
+ elif not test_disable_detection:
+ detected_shell = _get_shell_name()
+ if detected_shell is not None:
+ shell = detected_shell
+ script_content = get_completion_script(
+ prog_name=prog_name, complete_var=complete_var, shell=shell
+ )
+ click.echo(script_content)
+ sys.exit(0)
+
+
+# Create a fake command function to extract the completion parameters
+def _install_completion_placeholder_function(
+ install_completion: bool = Option(
+ None,
+ "--install-completion",
+ callback=install_callback,
+ expose_value=False,
+ help="Install completion for the current shell.",
+ ),
+ show_completion: bool = Option(
+ None,
+ "--show-completion",
+ callback=show_callback,
+ expose_value=False,
+ help="Show completion for the current shell, to copy it or customize the installation.",
+ ),
+) -> Any:
+ pass # pragma: no cover
+
+
+def _install_completion_no_auto_placeholder_function(
+ install_completion: Shells = Option(
+ None,
+ callback=install_callback,
+ expose_value=False,
+ help="Install completion for the specified shell.",
+ ),
+ show_completion: Shells = Option(
+ None,
+ callback=show_callback,
+ expose_value=False,
+ help="Show completion for the specified shell, to copy it or customize the installation.",
+ ),
+) -> Any:
+ pass # pragma: no cover
+
+
+# Re-implement Click's shell_complete to add error message with:
+# Invalid completion instruction
+# To use 7.x instruction style for compatibility
+# And to add extra error messages, for compatibility with Typer in previous versions
+# This is only called in new Command method, only used by Click 8.x+
+def shell_complete(
+ cli: click.Command,
+ ctx_args: MutableMapping[str, Any],
+ prog_name: str,
+ complete_var: str,
+ instruction: str,
+) -> int:
+ import click
+ import click.shell_completion
+
+ if "_" not in instruction:
+ click.echo("Invalid completion instruction.", err=True)
+ return 1
+
+ # Click 8 changed the order/style of shell instructions from e.g.
+ # source_bash to bash_source
+ # Typer override to preserve the old style for compatibility
+ # Original in Click 8.x commented:
+ # shell, _, instruction = instruction.partition("_")
+ instruction, _, shell = instruction.partition("_")
+ # Typer override end
+
+ comp_cls = click.shell_completion.get_completion_class(shell)
+
+ if comp_cls is None:
+ click.echo(f"Shell {shell} not supported.", err=True)
+ return 1
+
+ comp = comp_cls(cli, ctx_args, prog_name, complete_var)
+
+ if instruction == "source":
+ click.echo(comp.source())
+ return 0
+
+ # Typer override to print the completion help msg with Rich
+ if instruction == "complete":
+ click.echo(comp.complete())
+ return 0
+ # Typer override end
+
+ click.echo(f'Completion instruction "{instruction}" not supported.', err=True)
+ return 1
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e72d839893185826dfdc9e9d6d8006a279db48d
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py
@@ -0,0 +1,821 @@
+import errno
+import inspect
+import os
+import sys
+from collections.abc import Callable, MutableMapping, Sequence
+from difflib import get_close_matches
+from enum import Enum
+from gettext import gettext as _
+from typing import (
+ Any,
+ TextIO,
+ Union,
+ cast,
+)
+
+import click
+import click.core
+import click.formatting
+import click.shell_completion
+import click.types
+import click.utils
+
+from ._typing import Literal
+from .utils import parse_boolean_env_var
+
+MarkupMode = Literal["markdown", "rich", None]
+MARKUP_MODE_KEY = "TYPER_RICH_MARKUP_MODE"
+
+HAS_RICH = parse_boolean_env_var(os.getenv("TYPER_USE_RICH"), default=True)
+
+if HAS_RICH:
+ DEFAULT_MARKUP_MODE: MarkupMode = "rich"
+else:
+ DEFAULT_MARKUP_MODE = None
+
+
+# Copy from click.parser._split_opt
+def _split_opt(opt: str) -> tuple[str, str]:
+ first = opt[:1]
+ if first.isalnum():
+ return "", opt
+ if opt[1:2] == first:
+ return opt[:2], opt[2:]
+ return first, opt[1:]
+
+
+def _typer_param_setup_autocompletion_compat(
+ self: click.Parameter,
+ *,
+ autocompletion: Callable[
+ [click.Context, list[str], str], list[tuple[str, str] | str]
+ ]
+ | None = None,
+) -> None:
+ if self._custom_shell_complete is not None:
+ import warnings
+
+ warnings.warn(
+ "In Typer, only the parameter 'autocompletion' is supported. "
+ "The support for 'shell_complete' is deprecated and will be removed in upcoming versions. ",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if autocompletion is not None:
+
+ def compat_autocompletion(
+ ctx: click.Context, param: click.core.Parameter, incomplete: str
+ ) -> list["click.shell_completion.CompletionItem"]:
+ from click.shell_completion import CompletionItem
+
+ out = []
+
+ for c in autocompletion(ctx, [], incomplete):
+ if isinstance(c, tuple):
+ use_completion = CompletionItem(c[0], help=c[1])
+ else:
+ assert isinstance(c, str)
+ use_completion = CompletionItem(c)
+
+ if use_completion.value.startswith(incomplete):
+ out.append(use_completion)
+
+ return out
+
+ self._custom_shell_complete = compat_autocompletion
+
+
+def _get_default_string(
+ obj: Union["TyperArgument", "TyperOption"],
+ *,
+ ctx: click.Context,
+ show_default_is_str: bool,
+ default_value: list[Any] | tuple[Any, ...] | str | Callable[..., Any] | Any,
+) -> str:
+ # Extracted from click.core.Option.get_help_record() to be reused by
+ # rich_utils avoiding RegEx hacks
+ if show_default_is_str:
+ default_string = f"({obj.show_default})"
+ elif isinstance(default_value, (list, tuple)):
+ default_string = ", ".join(
+ _get_default_string(
+ obj, ctx=ctx, show_default_is_str=show_default_is_str, default_value=d
+ )
+ for d in default_value
+ )
+ elif isinstance(default_value, Enum):
+ default_string = str(default_value.value)
+ elif inspect.isfunction(default_value):
+ default_string = _("(dynamic)")
+ elif isinstance(obj, TyperOption) and obj.is_bool_flag and obj.secondary_opts:
+ # For boolean flags that have distinct True/False opts,
+ # use the opt without prefix instead of the value.
+ # Typer override, original commented
+ # default_string = click.parser.split_opt(
+ # (self.opts if self.default else self.secondary_opts)[0]
+ # )[1]
+ if obj.default:
+ if obj.opts:
+ default_string = _split_opt(obj.opts[0])[1]
+ else:
+ default_string = str(default_value)
+ else:
+ default_string = _split_opt(obj.secondary_opts[0])[1]
+ # Typer override end
+ elif (
+ isinstance(obj, TyperOption)
+ and obj.is_bool_flag
+ and not obj.secondary_opts
+ and not default_value
+ ):
+ default_string = ""
+ else:
+ default_string = str(default_value)
+ return default_string
+
+
+def _extract_default_help_str(
+ obj: Union["TyperArgument", "TyperOption"], *, ctx: click.Context
+) -> Any | Callable[[], Any] | None:
+ # Extracted from click.core.Option.get_help_record() to be reused by
+ # rich_utils avoiding RegEx hacks
+ # Temporarily enable resilient parsing to avoid type casting
+ # failing for the default. Might be possible to extend this to
+ # help formatting in general.
+ resilient = ctx.resilient_parsing
+ ctx.resilient_parsing = True
+
+ try:
+ default_value = obj.get_default(ctx, call=False)
+ finally:
+ ctx.resilient_parsing = resilient
+ return default_value
+
+
+def _main(
+ self: click.Command,
+ *,
+ args: Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: bool = True,
+ windows_expand_args: bool = True,
+ rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE,
+ **extra: Any,
+) -> Any:
+ # Typer override, duplicated from click.main() to handle custom rich exceptions
+ # Verify that the environment is configured correctly, or reject
+ # further execution to avoid a broken script.
+ if args is None:
+ args = sys.argv[1:]
+
+ # Covered in Click tests
+ if os.name == "nt" and windows_expand_args: # pragma: no cover
+ args = click.utils._expand_args(args)
+ else:
+ args = list(args)
+
+ if prog_name is None:
+ prog_name = click.utils._detect_program_name()
+
+ # Process shell completion requests and exit early.
+ self._main_shell_completion(extra, prog_name, complete_var)
+
+ try:
+ try:
+ with self.make_context(prog_name, args, **extra) as ctx:
+ rv = self.invoke(ctx)
+ if not standalone_mode:
+ return rv
+ # it's not safe to `ctx.exit(rv)` here!
+ # note that `rv` may actually contain data like "1" which
+ # has obvious effects
+ # more subtle case: `rv=[None, None]` can come out of
+ # chained commands which all returned `None` -- so it's not
+ # even always obvious that `rv` indicates success/failure
+ # by its truthiness/falsiness
+ ctx.exit()
+ except EOFError as e:
+ click.echo(file=sys.stderr)
+ raise click.Abort() from e
+ except KeyboardInterrupt as e:
+ raise click.exceptions.Exit(130) from e
+ except click.ClickException as e:
+ if not standalone_mode:
+ raise
+ # Typer override
+ if HAS_RICH and rich_markup_mode is not None:
+ from . import rich_utils
+
+ rich_utils.rich_format_error(e)
+ else:
+ e.show()
+ # Typer override end
+ sys.exit(e.exit_code)
+ except OSError as e:
+ if e.errno == errno.EPIPE:
+ sys.stdout = cast(TextIO, click.utils.PacifyFlushWrapper(sys.stdout))
+ sys.stderr = cast(TextIO, click.utils.PacifyFlushWrapper(sys.stderr))
+ sys.exit(1)
+ else:
+ raise
+ except click.exceptions.Exit as e:
+ if standalone_mode:
+ sys.exit(e.exit_code)
+ else:
+ # in non-standalone mode, return the exit code
+ # note that this is only reached if `self.invoke` above raises
+ # an Exit explicitly -- thus bypassing the check there which
+ # would return its result
+ # the results of non-standalone execution may therefore be
+ # somewhat ambiguous: if there are codepaths which lead to
+ # `ctx.exit(1)` and to `return 1`, the caller won't be able to
+ # tell the difference between the two
+ return e.exit_code
+ except click.Abort:
+ if not standalone_mode:
+ raise
+ # Typer override
+ if HAS_RICH and rich_markup_mode is not None:
+ from . import rich_utils
+
+ rich_utils.rich_abort_error()
+ else:
+ click.echo(_("Aborted!"), file=sys.stderr)
+ # Typer override end
+ sys.exit(1)
+
+
+class TyperArgument(click.core.Argument):
+ def __init__(
+ self,
+ *,
+ # Parameter
+ param_decls: list[str],
+ type: Any | None = None,
+ required: bool | None = None,
+ default: Any | None = None,
+ callback: Callable[..., Any] | None = None,
+ nargs: int | None = None,
+ metavar: str | None = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: str | list[str] | None = None,
+ # Note that shell_complete is not fully supported and will be removed in future versions
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None = None,
+ autocompletion: Callable[..., Any] | None = None,
+ # TyperArgument
+ show_default: bool | str = True,
+ show_choices: bool = True,
+ show_envvar: bool = True,
+ help: str | None = None,
+ hidden: bool = False,
+ # Rich settings
+ rich_help_panel: str | None = None,
+ ):
+ self.help = help
+ self.show_default = show_default
+ self.show_choices = show_choices
+ self.show_envvar = show_envvar
+ self.hidden = hidden
+ self.rich_help_panel = rich_help_panel
+
+ super().__init__(
+ param_decls=param_decls,
+ type=type,
+ required=required,
+ default=default,
+ callback=callback,
+ nargs=nargs,
+ metavar=metavar,
+ expose_value=expose_value,
+ is_eager=is_eager,
+ envvar=envvar,
+ shell_complete=shell_complete,
+ )
+ _typer_param_setup_autocompletion_compat(self, autocompletion=autocompletion)
+
+ def _get_default_string(
+ self,
+ *,
+ ctx: click.Context,
+ show_default_is_str: bool,
+ default_value: list[Any] | tuple[Any, ...] | str | Callable[..., Any] | Any,
+ ) -> str:
+ return _get_default_string(
+ self,
+ ctx=ctx,
+ show_default_is_str=show_default_is_str,
+ default_value=default_value,
+ )
+
+ def _extract_default_help_str(
+ self, *, ctx: click.Context
+ ) -> Any | Callable[[], Any] | None:
+ return _extract_default_help_str(self, ctx=ctx)
+
+ def get_help_record(self, ctx: click.Context) -> tuple[str, str] | None:
+ # Modified version of click.core.Option.get_help_record()
+ # to support Arguments
+ if self.hidden:
+ return None
+ name = self.make_metavar(ctx=ctx)
+ help = self.help or ""
+ extra = []
+ if self.show_envvar:
+ envvar = self.envvar
+ # allow_from_autoenv is currently not supported in Typer for CLI Arguments
+ if envvar is not None:
+ var_str = (
+ ", ".join(str(d) for d in envvar)
+ if isinstance(envvar, (list, tuple))
+ else envvar
+ )
+ extra.append(f"env var: {var_str}")
+
+ # Typer override:
+ # Extracted to _extract_default_help_str() to allow re-using it in rich_utils
+ default_value = self._extract_default_help_str(ctx=ctx)
+ # Typer override end
+
+ show_default_is_str = isinstance(self.show_default, str)
+
+ if show_default_is_str or (
+ default_value is not None and (self.show_default or ctx.show_default)
+ ):
+ # Typer override:
+ # Extracted to _get_default_string() to allow re-using it in rich_utils
+ default_string = self._get_default_string(
+ ctx=ctx,
+ show_default_is_str=show_default_is_str,
+ default_value=default_value,
+ )
+ # Typer override end
+ if default_string:
+ extra.append(_("default: {default}").format(default=default_string))
+ if self.required:
+ extra.append(_("required"))
+ if extra:
+ extra_str = "; ".join(extra)
+ extra_str = f"[{extra_str}]"
+ rich_markup_mode = None
+ if hasattr(ctx, "obj") and isinstance(ctx.obj, dict):
+ rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None)
+ if HAS_RICH and rich_markup_mode == "rich":
+ # This is needed for when we want to export to HTML
+ from . import rich_utils
+
+ extra_str = rich_utils.escape_before_html_export(extra_str)
+
+ help = f"{help} {extra_str}" if help else f"{extra_str}"
+ return name, help
+
+ def make_metavar(self, ctx: click.Context | None = None) -> str:
+ # Modified version of click.core.Argument.make_metavar()
+ # to include Argument name
+ if self.metavar is not None:
+ var = self.metavar
+ if not self.required and not var.startswith("["):
+ var = f"[{var}]"
+ return var
+ var = (self.name or "").upper()
+ if not self.required:
+ var = f"[{var}]"
+ type_var = self.type.get_metavar(self, ctx=ctx) # type: ignore[arg-type]
+ # type_var = self.type.get_metavar(self, ctx=ctx)
+ if type_var:
+ var += f":{type_var}"
+ if self.nargs != 1:
+ var += "..."
+ return var
+
+ def value_is_missing(self, value: Any) -> bool:
+ return _value_is_missing(self, value)
+
+
+class TyperOption(click.core.Option):
+ def __init__(
+ self,
+ *,
+ # Parameter
+ param_decls: list[str],
+ type: click.types.ParamType | Any | None = None,
+ required: bool | None = None,
+ default: Any | None = None,
+ callback: Callable[..., Any] | None = None,
+ nargs: int | None = None,
+ metavar: str | None = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: str | list[str] | None = None,
+ # Note that shell_complete is not fully supported and will be removed in future versions
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None = None,
+ autocompletion: Callable[..., Any] | None = None,
+ # Option
+ show_default: bool | str = False,
+ prompt: bool | str = False,
+ confirmation_prompt: bool | str = False,
+ prompt_required: bool = True,
+ hide_input: bool = False,
+ is_flag: bool | None = None,
+ multiple: bool = False,
+ count: bool = False,
+ allow_from_autoenv: bool = True,
+ help: str | None = None,
+ hidden: bool = False,
+ show_choices: bool = True,
+ show_envvar: bool = False,
+ # Rich settings
+ rich_help_panel: str | None = None,
+ ):
+ super().__init__(
+ param_decls=param_decls,
+ type=type,
+ required=required,
+ default=default,
+ callback=callback,
+ nargs=nargs,
+ metavar=metavar,
+ expose_value=expose_value,
+ is_eager=is_eager,
+ envvar=envvar,
+ show_default=show_default,
+ prompt=prompt,
+ confirmation_prompt=confirmation_prompt,
+ hide_input=hide_input,
+ is_flag=is_flag,
+ multiple=multiple,
+ count=count,
+ allow_from_autoenv=allow_from_autoenv,
+ help=help,
+ hidden=hidden,
+ show_choices=show_choices,
+ show_envvar=show_envvar,
+ prompt_required=prompt_required,
+ shell_complete=shell_complete,
+ )
+ _typer_param_setup_autocompletion_compat(self, autocompletion=autocompletion)
+ self.rich_help_panel = rich_help_panel
+
+ def _get_default_string(
+ self,
+ *,
+ ctx: click.Context,
+ show_default_is_str: bool,
+ default_value: list[Any] | tuple[Any, ...] | str | Callable[..., Any] | Any,
+ ) -> str:
+ return _get_default_string(
+ self,
+ ctx=ctx,
+ show_default_is_str=show_default_is_str,
+ default_value=default_value,
+ )
+
+ def _extract_default_help_str(
+ self, *, ctx: click.Context
+ ) -> Any | Callable[[], Any] | None:
+ return _extract_default_help_str(self, ctx=ctx)
+
+ def make_metavar(self, ctx: click.Context | None = None) -> str:
+ return super().make_metavar(ctx=ctx) # type: ignore[arg-type]
+
+ def get_help_record(self, ctx: click.Context) -> tuple[str, str] | None:
+ # Duplicate all of Click's logic only to modify a single line, to allow boolean
+ # flags with only names for False values as it's currently supported by Typer
+ # Ref: https://typer.tiangolo.com/tutorial/parameter-types/bool/#only-names-for-false
+ if self.hidden:
+ return None
+
+ any_prefix_is_slash = False
+
+ def _write_opts(opts: Sequence[str]) -> str:
+ nonlocal any_prefix_is_slash
+
+ rv, any_slashes = click.formatting.join_options(opts)
+
+ if any_slashes:
+ any_prefix_is_slash = True
+
+ if not self.is_flag and not self.count:
+ rv += f" {self.make_metavar(ctx=ctx)}"
+
+ return rv
+
+ rv = [_write_opts(self.opts)]
+
+ if self.secondary_opts:
+ rv.append(_write_opts(self.secondary_opts))
+
+ help = self.help or ""
+ extra = []
+
+ if self.show_envvar:
+ envvar = self.envvar
+
+ if envvar is None:
+ if (
+ self.allow_from_autoenv
+ and ctx.auto_envvar_prefix is not None
+ and self.name is not None
+ ):
+ envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}"
+
+ if envvar is not None:
+ var_str = (
+ envvar
+ if isinstance(envvar, str)
+ else ", ".join(str(d) for d in envvar)
+ )
+ extra.append(_("env var: {var}").format(var=var_str))
+
+ # Typer override:
+ # Extracted to _extract_default() to allow re-using it in rich_utils
+ default_value = self._extract_default_help_str(ctx=ctx)
+ # Typer override end
+
+ show_default_is_str = isinstance(self.show_default, str)
+
+ if show_default_is_str or (
+ default_value is not None and (self.show_default or ctx.show_default)
+ ):
+ # Typer override:
+ # Extracted to _get_default_string() to allow re-using it in rich_utils
+ default_string = self._get_default_string(
+ ctx=ctx,
+ show_default_is_str=show_default_is_str,
+ default_value=default_value,
+ )
+ # Typer override end
+ if default_string:
+ extra.append(_("default: {default}").format(default=default_string))
+
+ if isinstance(self.type, click.types._NumberRangeBase):
+ range_str = self.type._describe_range()
+
+ if range_str:
+ extra.append(range_str)
+
+ if self.required:
+ extra.append(_("required"))
+
+ if extra:
+ extra_str = "; ".join(extra)
+ extra_str = f"[{extra_str}]"
+ rich_markup_mode = None
+ if hasattr(ctx, "obj") and isinstance(ctx.obj, dict):
+ rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None)
+ if HAS_RICH and rich_markup_mode == "rich":
+ # This is needed for when we want to export to HTML
+ from . import rich_utils
+
+ extra_str = rich_utils.escape_before_html_export(extra_str)
+
+ help = f"{help} {extra_str}" if help else f"{extra_str}"
+
+ return ("; " if any_prefix_is_slash else " / ").join(rv), help
+
+ def value_is_missing(self, value: Any) -> bool:
+ return _value_is_missing(self, value)
+
+
+def _value_is_missing(param: click.Parameter, value: Any) -> bool:
+ if value is None:
+ return True
+
+ # Click 8.3 and beyond
+ # if value is UNSET:
+ # return True
+
+ if (param.nargs != 1 or param.multiple) and value == ():
+ return True # pragma: no cover
+
+ return False
+
+
+def _typer_format_options(
+ self: click.core.Command, *, ctx: click.Context, formatter: click.HelpFormatter
+) -> None:
+ args = []
+ opts = []
+ for param in self.get_params(ctx):
+ rv = param.get_help_record(ctx)
+ if rv is not None:
+ if param.param_type_name == "argument":
+ args.append(rv)
+ elif param.param_type_name == "option":
+ opts.append(rv)
+
+ if args:
+ with formatter.section(_("Arguments")):
+ formatter.write_dl(args)
+ if opts:
+ with formatter.section(_("Options")):
+ formatter.write_dl(opts)
+
+
+def _typer_main_shell_completion(
+ self: click.core.Command,
+ *,
+ ctx_args: MutableMapping[str, Any],
+ prog_name: str,
+ complete_var: str | None = None,
+) -> None:
+ if complete_var is None:
+ complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
+
+ instruction = os.environ.get(complete_var)
+
+ if not instruction:
+ return
+
+ from .completion import shell_complete
+
+ rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction)
+ sys.exit(rv)
+
+
+class TyperCommand(click.core.Command):
+ def __init__(
+ self,
+ name: str | None,
+ *,
+ context_settings: dict[str, Any] | None = None,
+ callback: Callable[..., Any] | None = None,
+ params: list[click.Parameter] | None = None,
+ help: str | None = None,
+ epilog: str | None = None,
+ short_help: str | None = None,
+ options_metavar: str | None = "[OPTIONS]",
+ add_help_option: bool = True,
+ no_args_is_help: bool = False,
+ hidden: bool = False,
+ deprecated: bool = False,
+ # Rich settings
+ rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE,
+ rich_help_panel: str | None = None,
+ ) -> None:
+ super().__init__(
+ name=name,
+ context_settings=context_settings,
+ callback=callback,
+ params=params,
+ help=help,
+ epilog=epilog,
+ short_help=short_help,
+ options_metavar=options_metavar,
+ add_help_option=add_help_option,
+ no_args_is_help=no_args_is_help,
+ hidden=hidden,
+ deprecated=deprecated,
+ )
+ self.rich_markup_mode: MarkupMode = rich_markup_mode
+ self.rich_help_panel = rich_help_panel
+
+ def format_options(
+ self, ctx: click.Context, formatter: click.HelpFormatter
+ ) -> None:
+ _typer_format_options(self, ctx=ctx, formatter=formatter)
+
+ def _main_shell_completion(
+ self,
+ ctx_args: MutableMapping[str, Any],
+ prog_name: str,
+ complete_var: str | None = None,
+ ) -> None:
+ _typer_main_shell_completion(
+ self, ctx_args=ctx_args, prog_name=prog_name, complete_var=complete_var
+ )
+
+ def main(
+ self,
+ args: Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: bool = True,
+ windows_expand_args: bool = True,
+ **extra: Any,
+ ) -> Any:
+ return _main(
+ self,
+ args=args,
+ prog_name=prog_name,
+ complete_var=complete_var,
+ standalone_mode=standalone_mode,
+ windows_expand_args=windows_expand_args,
+ rich_markup_mode=self.rich_markup_mode,
+ **extra,
+ )
+
+ def format_help(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
+ if not HAS_RICH or self.rich_markup_mode is None:
+ if not hasattr(ctx, "obj") or ctx.obj is None:
+ ctx.ensure_object(dict)
+ if isinstance(ctx.obj, dict):
+ ctx.obj[MARKUP_MODE_KEY] = self.rich_markup_mode
+ return super().format_help(ctx, formatter)
+ from . import rich_utils
+
+ return rich_utils.rich_format_help(
+ obj=self,
+ ctx=ctx,
+ markup_mode=self.rich_markup_mode,
+ )
+
+
+class TyperGroup(click.core.Group):
+ def __init__(
+ self,
+ *,
+ name: str | None = None,
+ commands: dict[str, click.Command] | Sequence[click.Command] | None = None,
+ # Rich settings
+ rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE,
+ rich_help_panel: str | None = None,
+ suggest_commands: bool = True,
+ **attrs: Any,
+ ) -> None:
+ super().__init__(name=name, commands=commands, **attrs)
+ self.rich_markup_mode: MarkupMode = rich_markup_mode
+ self.rich_help_panel = rich_help_panel
+ self.suggest_commands = suggest_commands
+
+ def format_options(
+ self, ctx: click.Context, formatter: click.HelpFormatter
+ ) -> None:
+ _typer_format_options(self, ctx=ctx, formatter=formatter)
+ self.format_commands(ctx, formatter)
+
+ def _main_shell_completion(
+ self,
+ ctx_args: MutableMapping[str, Any],
+ prog_name: str,
+ complete_var: str | None = None,
+ ) -> None:
+ _typer_main_shell_completion(
+ self, ctx_args=ctx_args, prog_name=prog_name, complete_var=complete_var
+ )
+
+ def resolve_command(
+ self, ctx: click.Context, args: list[str]
+ ) -> tuple[str | None, click.Command | None, list[str]]:
+ try:
+ return super().resolve_command(ctx, args)
+ except click.UsageError as e:
+ if self.suggest_commands:
+ available_commands = list(self.commands.keys())
+ if available_commands and args:
+ typo = args[0]
+ matches = get_close_matches(typo, available_commands)
+ if matches:
+ suggestions = ", ".join(f"{m!r}" for m in matches)
+ message = e.message.rstrip(".")
+ e.message = f"{message}. Did you mean {suggestions}?"
+ raise
+
+ def main(
+ self,
+ args: Sequence[str] | None = None,
+ prog_name: str | None = None,
+ complete_var: str | None = None,
+ standalone_mode: bool = True,
+ windows_expand_args: bool = True,
+ **extra: Any,
+ ) -> Any:
+ return _main(
+ self,
+ args=args,
+ prog_name=prog_name,
+ complete_var=complete_var,
+ standalone_mode=standalone_mode,
+ windows_expand_args=windows_expand_args,
+ rich_markup_mode=self.rich_markup_mode,
+ **extra,
+ )
+
+ def format_help(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
+ if not HAS_RICH or self.rich_markup_mode is None:
+ return super().format_help(ctx, formatter)
+ from . import rich_utils
+
+ return rich_utils.rich_format_help(
+ obj=self,
+ ctx=ctx,
+ markup_mode=self.rich_markup_mode,
+ )
+
+ def list_commands(self, ctx: click.Context) -> list[str]:
+ """Returns a list of subcommand names.
+ Note that in Click's Group class, these are sorted.
+ In Typer, we wish to maintain the original order of creation (cf Issue #933)"""
+ return [n for n, c in self.commands.items()]
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..f4f21bb8444a80af5ad8ab82822fc972f0207b9c
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py
@@ -0,0 +1,2013 @@
+import inspect
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import traceback
+from collections.abc import Callable, Sequence
+from datetime import datetime
+from enum import Enum
+from functools import update_wrapper
+from pathlib import Path
+from traceback import FrameSummary, StackSummary
+from types import TracebackType
+from typing import Annotated, Any
+from uuid import UUID
+
+import click
+from annotated_doc import Doc
+from typer._types import TyperChoice
+
+from ._typing import get_args, get_origin, is_literal_type, is_union, literal_values
+from .completion import get_completion_inspect_parameters
+from .core import (
+ DEFAULT_MARKUP_MODE,
+ HAS_RICH,
+ MarkupMode,
+ TyperArgument,
+ TyperCommand,
+ TyperGroup,
+ TyperOption,
+)
+from .models import (
+ AnyType,
+ ArgumentInfo,
+ CommandFunctionType,
+ CommandInfo,
+ Default,
+ DefaultPlaceholder,
+ DeveloperExceptionConfig,
+ FileBinaryRead,
+ FileBinaryWrite,
+ FileText,
+ FileTextWrite,
+ NoneType,
+ OptionInfo,
+ ParameterInfo,
+ ParamMeta,
+ Required,
+ TyperInfo,
+ TyperPath,
+)
+from .utils import get_params_from_function
+
+_original_except_hook = sys.excepthook
+_typer_developer_exception_attr_name = "__typer_developer_exception__"
+
+
+def except_hook(
+ exc_type: type[BaseException], exc_value: BaseException, tb: TracebackType | None
+) -> None:
+ exception_config: DeveloperExceptionConfig | None = getattr(
+ exc_value, _typer_developer_exception_attr_name, None
+ )
+ standard_traceback = os.getenv(
+ "TYPER_STANDARD_TRACEBACK", os.getenv("_TYPER_STANDARD_TRACEBACK")
+ )
+ if (
+ standard_traceback
+ or not exception_config
+ or not exception_config.pretty_exceptions_enable
+ ):
+ _original_except_hook(exc_type, exc_value, tb)
+ return
+ typer_path = os.path.dirname(__file__)
+ click_path = os.path.dirname(click.__file__)
+ internal_dir_names = [typer_path, click_path]
+ exc = exc_value
+ if HAS_RICH:
+ from . import rich_utils
+
+ rich_tb = rich_utils.get_traceback(exc, exception_config, internal_dir_names)
+ console_stderr = rich_utils._get_rich_console(stderr=True)
+ console_stderr.print(rich_tb)
+ return
+ tb_exc = traceback.TracebackException.from_exception(exc)
+ stack: list[FrameSummary] = []
+ for frame in tb_exc.stack:
+ if any(frame.filename.startswith(path) for path in internal_dir_names):
+ if not exception_config.pretty_exceptions_short:
+ # Hide the line for internal libraries, Typer and Click
+ stack.append(
+ traceback.FrameSummary(
+ filename=frame.filename,
+ lineno=frame.lineno,
+ name=frame.name,
+ line="",
+ )
+ )
+ else:
+ stack.append(frame)
+ # Type ignore ref: https://github.com/python/typeshed/pull/8244
+ final_stack_summary = StackSummary.from_list(stack)
+ tb_exc.stack = final_stack_summary
+ for line in tb_exc.format():
+ print(line, file=sys.stderr)
+ return
+
+
+def get_install_completion_arguments() -> tuple[click.Parameter, click.Parameter]:
+ install_param, show_param = get_completion_inspect_parameters()
+ click_install_param, _ = get_click_param(install_param)
+ click_show_param, _ = get_click_param(show_param)
+ return click_install_param, click_show_param
+
+
+class Typer:
+ """
+ `Typer` main class, the main entrypoint to use Typer.
+
+ Read more in the
+ [Typer docs for First Steps](https://typer.tiangolo.com/tutorial/typer-app/).
+
+ ## Example
+
+ ```python
+ import typer
+
+ app = typer.Typer()
+ ```
+ """
+
+ def __init__(
+ self,
+ *,
+ name: Annotated[
+ str | None,
+ Doc(
+ """
+ The name of this application.
+ Mostly used to set the name for [subcommands](https://typer.tiangolo.com/tutorial/subcommands/), in which case it can be overridden by `add_typer(name=...)`.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(name="users")
+ ```
+ """
+ ),
+ ] = Default(None),
+ cls: Annotated[
+ type[TyperGroup] | None,
+ Doc(
+ """
+ The class of this app. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`.
+ Otherwise, should be a subtype of `TyperGroup`.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(cls=TyperGroup)
+ ```
+ """
+ ),
+ ] = Default(None),
+ invoke_without_command: Annotated[
+ bool,
+ Doc(
+ """
+ By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(invoke_without_command=True)
+ ```
+ """
+ ),
+ ] = Default(False),
+ no_args_is_help: Annotated[
+ bool,
+ Doc(
+ """
+ If this is set to `True`, running a command without any arguments will automatically show the help page.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(no_args_is_help=True)
+ ```
+ """
+ ),
+ ] = Default(False),
+ subcommand_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ How to represent the subcommand argument in help.
+ """
+ ),
+ ] = Default(None),
+ chain: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ Allow passing more than one subcommand argument.
+ """
+ ),
+ ] = Default(False),
+ result_callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ A function to call after the group's and subcommand's callbacks.
+ """
+ ),
+ ] = Default(None),
+ # Command
+ context_settings: Annotated[
+ dict[Any, Any] | None,
+ Doc(
+ """
+ Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/).
+ Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context).
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]})
+ ```
+ """
+ ),
+ ] = Default(None),
+ callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Add a callback to the main Typer app. Can be overridden with `@app.callback()`.
+ See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/commands/callback/) for more details.
+
+ **Example**
+
+ ```python
+ import typer
+
+ def callback():
+ print("Running a command")
+
+ app = typer.Typer(callback=callback)
+ ```
+ """
+ ),
+ ] = Default(None),
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for the main Typer app.
+ See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help,
+ and which one takes priority.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(help="Some help.")
+ ```
+ """
+ ),
+ ] = Default(None),
+ epilog: Annotated[
+ str | None,
+ Doc(
+ """
+ Text that will be printed right after the help text.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(epilog="May the force be with you")
+ ```
+ """
+ ),
+ ] = Default(None),
+ short_help: Annotated[
+ str | None,
+ Doc(
+ """
+ A shortened version of the help text that can be used e.g. in the help table listing subcommands.
+ When not defined, the normal `help` text will be used instead.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(help="A lot of explanation about user management", short_help="user management")
+ ```
+ """
+ ),
+ ] = Default(None),
+ options_metavar: Annotated[
+ str,
+ Doc(
+ """
+ In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`.
+ Set `options_metavar` to change this into a different string.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(options_metavar="[OPTS]")
+ ```
+ """
+ ),
+ ] = Default("[OPTIONS]"),
+ add_help_option: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ By default each command registers a `--help` option. This can be disabled by this parameter.
+ """
+ ),
+ ] = Default(True),
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this command from help outputs. `False` by default.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(hidden=True)
+ ```
+ """
+ ),
+ ] = Default(False),
+ deprecated: Annotated[
+ bool,
+ Doc(
+ """
+ Mark this command as being deprecated in the help text. `False` by default.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(deprecated=True)
+ ```
+ """
+ ),
+ ] = Default(False),
+ add_completion: Annotated[
+ bool,
+ Doc(
+ """
+ Toggle whether or not to add the `--install-completion` and `--show-completion` options to the app.
+ Set to `True` by default.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(add_completion=False)
+ ```
+ """
+ ),
+ ] = True,
+ # Rich settings
+ rich_markup_mode: Annotated[
+ MarkupMode,
+ Doc(
+ """
+ Enable markup text if you have Rich installed. This can be set to `"markdown"`, `"rich"`, or `None`.
+ By default, `rich_markup_mode` is `None` if Rich is not installed, and `"rich"` if it is installed.
+ See [the tutorial on help formatting](https://typer.tiangolo.com/tutorial/commands/help/#rich-markdown-and-markup) for more information.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(rich_markup_mode="rich")
+ ```
+ """
+ ),
+ ] = DEFAULT_MARKUP_MODE,
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name of the command when the help is printed with Rich.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(rich_help_panel="Utils and Configs")
+ ```
+ """
+ ),
+ ] = Default(None),
+ suggest_commands: Annotated[
+ bool,
+ Doc(
+ """
+ As of version 0.20.0, Typer provides [support for mistyped command names](https://typer.tiangolo.com/tutorial/commands/help/#suggest-commands) by printing helpful suggestions.
+ You can turn this setting off with `suggest_commands`:
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(suggest_commands=False)
+ ```
+ """
+ ),
+ ] = True,
+ pretty_exceptions_enable: Annotated[
+ bool,
+ Doc(
+ """
+ If you want to disable [pretty exceptions with Rich](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-with-rich),
+ you can set `pretty_exceptions_enable` to `False`. When doing so, you will see the usual standard exception trace.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(pretty_exceptions_enable=False)
+ ```
+ """
+ ),
+ ] = True,
+ pretty_exceptions_show_locals: Annotated[
+ bool,
+ Doc(
+ """
+ If Rich is installed, [error messages](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-and-errors)
+ will be nicely printed.
+
+ If you set `pretty_exceptions_show_locals=True` it will also include the values of local variables for easy debugging.
+
+ However, if such a variable contains delicate information, you should consider leaving `pretty_exceptions_show_locals=False`
+ (the default) to `False` to enhance security.
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(pretty_exceptions_show_locals=True)
+ ```
+ """
+ ),
+ ] = False,
+ pretty_exceptions_short: Annotated[
+ bool,
+ Doc(
+ """
+ By default, [pretty exceptions formatted with Rich](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-with-rich) hide the long stack trace.
+ If you want to show the full trace instead, you can set the parameter `pretty_exceptions_short` to `False`:
+
+ **Example**
+
+ ```python
+ import typer
+
+ app = typer.Typer(pretty_exceptions_short=False)
+ ```
+ """
+ ),
+ ] = True,
+ ):
+ self._add_completion = add_completion
+ self.rich_markup_mode: MarkupMode = rich_markup_mode
+ self.rich_help_panel = rich_help_panel
+ self.suggest_commands = suggest_commands
+ self.pretty_exceptions_enable = pretty_exceptions_enable
+ self.pretty_exceptions_show_locals = pretty_exceptions_show_locals
+ self.pretty_exceptions_short = pretty_exceptions_short
+ self.info = TyperInfo(
+ name=name,
+ cls=cls,
+ invoke_without_command=invoke_without_command,
+ no_args_is_help=no_args_is_help,
+ subcommand_metavar=subcommand_metavar,
+ chain=chain,
+ result_callback=result_callback,
+ context_settings=context_settings,
+ callback=callback,
+ help=help,
+ epilog=epilog,
+ short_help=short_help,
+ options_metavar=options_metavar,
+ add_help_option=add_help_option,
+ hidden=hidden,
+ deprecated=deprecated,
+ )
+ self.registered_groups: list[TyperInfo] = []
+ self.registered_commands: list[CommandInfo] = []
+ self.registered_callback: TyperInfo | None = None
+
+ def callback(
+ self,
+ *,
+ cls: Annotated[
+ type[TyperGroup] | None,
+ Doc(
+ """
+ The class of this app. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`.
+ Otherwise, should be a subtype of `TyperGroup`.
+ """
+ ),
+ ] = Default(None),
+ invoke_without_command: Annotated[
+ bool,
+ Doc(
+ """
+ By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided.
+ """
+ ),
+ ] = Default(False),
+ no_args_is_help: Annotated[
+ bool,
+ Doc(
+ """
+ If this is set to `True`, running a command without any arguments will automatically show the help page.
+ """
+ ),
+ ] = Default(False),
+ subcommand_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ How to represent the subcommand argument in help.
+ """
+ ),
+ ] = Default(None),
+ chain: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ Allow passing more than one subcommand argument.
+ """
+ ),
+ ] = Default(False),
+ result_callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ A function to call after the group's and subcommand's callbacks.
+ """
+ ),
+ ] = Default(None),
+ # Command
+ context_settings: Annotated[
+ dict[Any, Any] | None,
+ Doc(
+ """
+ Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/).
+ Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context).
+ """
+ ),
+ ] = Default(None),
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for the command.
+ See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help,
+ and which one takes priority.
+ """
+ ),
+ ] = Default(None),
+ epilog: Annotated[
+ str | None,
+ Doc(
+ """
+ Text that will be printed right after the help text.
+ """
+ ),
+ ] = Default(None),
+ short_help: Annotated[
+ str | None,
+ Doc(
+ """
+ A shortened version of the help text that can be used e.g. in the help table listing subcommands.
+ When not defined, the normal `help` text will be used instead.
+ """
+ ),
+ ] = Default(None),
+ options_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`.
+ Set `options_metavar` to change this into a different string. When `None`, the default value will be used.
+ """
+ ),
+ ] = Default(None),
+ add_help_option: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ By default each command registers a `--help` option. This can be disabled by this parameter.
+ """
+ ),
+ ] = Default(True),
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this command from help outputs. `False` by default.
+ """
+ ),
+ ] = Default(False),
+ deprecated: Annotated[
+ bool,
+ Doc(
+ """
+ Mark this command as deprecated in the help text. `False` by default.
+ """
+ ),
+ ] = Default(False),
+ # Rich settings
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name of the command when the help is printed with Rich.
+ """
+ ),
+ ] = Default(None),
+ ) -> Callable[[CommandFunctionType], CommandFunctionType]:
+ """
+ Using the decorator `@app.callback`, you can declare the CLI parameters for the main CLI application.
+
+ Read more in the
+ [Typer docs for Callbacks](https://typer.tiangolo.com/tutorial/commands/callback/).
+
+ ## Example
+
+ ```python
+ import typer
+
+ app = typer.Typer()
+ state = {"verbose": False}
+
+ @app.callback()
+ def main(verbose: bool = False):
+ if verbose:
+ print("Will write verbose output")
+ state["verbose"] = True
+
+ @app.command()
+ def delete(username: str):
+ # define subcommand
+ ...
+ ```
+ """
+
+ def decorator(f: CommandFunctionType) -> CommandFunctionType:
+ self.registered_callback = TyperInfo(
+ cls=cls,
+ invoke_without_command=invoke_without_command,
+ no_args_is_help=no_args_is_help,
+ subcommand_metavar=subcommand_metavar,
+ chain=chain,
+ result_callback=result_callback,
+ context_settings=context_settings,
+ callback=f,
+ help=help,
+ epilog=epilog,
+ short_help=short_help,
+ options_metavar=(
+ options_metavar or self._info_val_str("options_metavar")
+ ),
+ add_help_option=add_help_option,
+ hidden=hidden,
+ deprecated=deprecated,
+ rich_help_panel=rich_help_panel,
+ )
+ return f
+
+ return decorator
+
+ def command(
+ self,
+ name: Annotated[
+ str | None,
+ Doc(
+ """
+ The name of this command.
+ """
+ ),
+ ] = None,
+ *,
+ cls: Annotated[
+ type[TyperCommand] | None,
+ Doc(
+ """
+ The class of this command. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`.
+ Otherwise, should be a subtype of `TyperCommand`.
+ """
+ ),
+ ] = None,
+ context_settings: Annotated[
+ dict[Any, Any] | None,
+ Doc(
+ """
+ Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/).
+ Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context).
+ """
+ ),
+ ] = None,
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for the command.
+ See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help,
+ and which one takes priority.
+ """
+ ),
+ ] = None,
+ epilog: Annotated[
+ str | None,
+ Doc(
+ """
+ Text that will be printed right after the help text.
+ """
+ ),
+ ] = None,
+ short_help: Annotated[
+ str | None,
+ Doc(
+ """
+ A shortened version of the help text that can be used e.g. in the help table listing subcommands.
+ When not defined, the normal `help` text will be used instead.
+ """
+ ),
+ ] = None,
+ options_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`.
+ Set `options_metavar` to change this into a different string. When `None`, the default value will be used.
+ """
+ ),
+ ] = Default(None),
+ add_help_option: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ By default each command registers a `--help` option. This can be disabled by this parameter.
+ """
+ ),
+ ] = True,
+ no_args_is_help: Annotated[
+ bool,
+ Doc(
+ """
+ If this is set to `True`, running a command without any arguments will automatically show the help page.
+ """
+ ),
+ ] = False,
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this command from help outputs. `False` by default.
+ """
+ ),
+ ] = False,
+ deprecated: Annotated[
+ bool,
+ Doc(
+ """
+ Mark this command as deprecated in the help outputs. `False` by default.
+ """
+ ),
+ ] = False,
+ # Rich settings
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name of the command when the help is printed with Rich.
+ """
+ ),
+ ] = Default(None),
+ ) -> Callable[[CommandFunctionType], CommandFunctionType]:
+ """
+ Using the decorator `@app.command`, you can define a subcommand of the previously defined Typer app.
+
+ Read more in the
+ [Typer docs for Commands](https://typer.tiangolo.com/tutorial/commands/).
+
+ ## Example
+
+ ```python
+ import typer
+
+ app = typer.Typer()
+
+ @app.command()
+ def create():
+ print("Creating user: Hiro Hamada")
+
+ @app.command()
+ def delete():
+ print("Deleting user: Hiro Hamada")
+ ```
+ """
+ if cls is None:
+ cls = TyperCommand
+
+ def decorator(f: CommandFunctionType) -> CommandFunctionType:
+ self.registered_commands.append(
+ CommandInfo(
+ name=name,
+ cls=cls,
+ context_settings=context_settings,
+ callback=f,
+ help=help,
+ epilog=epilog,
+ short_help=short_help,
+ options_metavar=(
+ options_metavar or self._info_val_str("options_metavar")
+ ),
+ add_help_option=add_help_option,
+ no_args_is_help=no_args_is_help,
+ hidden=hidden,
+ deprecated=deprecated,
+ # Rich settings
+ rich_help_panel=rich_help_panel,
+ )
+ )
+ return f
+
+ return decorator
+
+ def add_typer(
+ self,
+ typer_instance: "Typer",
+ *,
+ name: Annotated[
+ str | None,
+ Doc(
+ """
+ The name of this subcommand.
+ See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's name,
+ and which one takes priority.
+ """
+ ),
+ ] = Default(None),
+ cls: Annotated[
+ type[TyperGroup] | None,
+ Doc(
+ """
+ The class of this subcommand. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`.
+ Otherwise, should be a subtype of `TyperGroup`.
+ """
+ ),
+ ] = Default(None),
+ invoke_without_command: Annotated[
+ bool,
+ Doc(
+ """
+ By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided.
+ """
+ ),
+ ] = Default(False),
+ no_args_is_help: Annotated[
+ bool,
+ Doc(
+ """
+ If this is set to `True`, running a command without any arguments will automatically show the help page.
+ """
+ ),
+ ] = Default(False),
+ subcommand_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ How to represent the subcommand argument in help.
+ """
+ ),
+ ] = Default(None),
+ chain: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ Allow passing more than one subcommand argument.
+ """
+ ),
+ ] = Default(False),
+ result_callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ A function to call after the group's and subcommand's callbacks.
+ """
+ ),
+ ] = Default(None),
+ # Command
+ context_settings: Annotated[
+ dict[Any, Any] | None,
+ Doc(
+ """
+ Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/).
+ Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context).
+ """
+ ),
+ ] = Default(None),
+ callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Add a callback to this app.
+ See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/commands/callback/) for more details.
+ """
+ ),
+ ] = Default(None),
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for the subcommand.
+ See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help,
+ and which one takes priority.
+ """
+ ),
+ ] = Default(None),
+ epilog: Annotated[
+ str | None,
+ Doc(
+ """
+ Text that will be printed right after the help text.
+ """
+ ),
+ ] = Default(None),
+ short_help: Annotated[
+ str | None,
+ Doc(
+ """
+ A shortened version of the help text that can be used e.g. in the help table listing subcommands.
+ When not defined, the normal `help` text will be used instead.
+ """
+ ),
+ ] = Default(None),
+ options_metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`.
+ Set `options_metavar` to change this into a different string. When `None`, the default value will be used.
+ """
+ ),
+ ] = Default(None),
+ add_help_option: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited
+ from Click and supported for compatibility.
+
+ ---
+
+ By default each command registers a `--help` option. This can be disabled by this parameter.
+ """
+ ),
+ ] = Default(True),
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this command from help outputs. `False` by default.
+ """
+ ),
+ ] = Default(False),
+ deprecated: Annotated[
+ bool,
+ Doc(
+ """
+ Mark this command as deprecated in the help outputs. `False` by default.
+ """
+ ),
+ ] = False,
+ # Rich settings
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name of the command when the help is printed with Rich.
+ """
+ ),
+ ] = Default(None),
+ ) -> None:
+ """
+ Add subcommands to the main app using `app.add_typer()`.
+ Subcommands may be defined in separate modules, ensuring clean separation of code by functionality.
+
+ Read more in the
+ [Typer docs for SubCommands](https://typer.tiangolo.com/tutorial/subcommands/add-typer/).
+
+ ## Example
+
+ ```python
+ import typer
+
+ from .add import app as add_app
+ from .delete import app as delete_app
+
+ app = typer.Typer()
+
+ app.add_typer(add_app)
+ app.add_typer(delete_app)
+ ```
+ """
+ self.registered_groups.append(
+ TyperInfo(
+ typer_instance,
+ name=name,
+ cls=cls,
+ invoke_without_command=invoke_without_command,
+ no_args_is_help=no_args_is_help,
+ subcommand_metavar=subcommand_metavar,
+ chain=chain,
+ result_callback=result_callback,
+ context_settings=context_settings,
+ callback=callback,
+ help=help,
+ epilog=epilog,
+ short_help=short_help,
+ options_metavar=(
+ options_metavar or self._info_val_str("options_metavar")
+ ),
+ add_help_option=add_help_option,
+ hidden=hidden,
+ deprecated=deprecated,
+ rich_help_panel=rich_help_panel,
+ )
+ )
+
+ def __call__(self, *args: Any, **kwargs: Any) -> Any:
+ if sys.excepthook != except_hook:
+ sys.excepthook = except_hook
+ try:
+ return get_command(self)(*args, **kwargs)
+ except Exception as e:
+ # Set a custom attribute to tell the hook to show nice exceptions for user
+ # code. An alternative/first implementation was a custom exception with
+ # raise custom_exc from e
+ # but that means the last error shown is the custom exception, not the
+ # actual error. This trick improves developer experience by showing the
+ # actual error last.
+ setattr(
+ e,
+ _typer_developer_exception_attr_name,
+ DeveloperExceptionConfig(
+ pretty_exceptions_enable=self.pretty_exceptions_enable,
+ pretty_exceptions_show_locals=self.pretty_exceptions_show_locals,
+ pretty_exceptions_short=self.pretty_exceptions_short,
+ ),
+ )
+ raise e
+
+ def _info_val_str(self, name: str) -> str:
+ val = getattr(self.info, name)
+ val_str = val.value if isinstance(val, DefaultPlaceholder) else val
+ assert isinstance(val_str, str)
+ return val_str
+
+
+def get_group(typer_instance: Typer) -> TyperGroup:
+ group = get_group_from_info(
+ TyperInfo(typer_instance),
+ pretty_exceptions_short=typer_instance.pretty_exceptions_short,
+ rich_markup_mode=typer_instance.rich_markup_mode,
+ suggest_commands=typer_instance.suggest_commands,
+ )
+ return group
+
+
+def get_command(typer_instance: Typer) -> click.Command:
+ if typer_instance._add_completion:
+ click_install_param, click_show_param = get_install_completion_arguments()
+ if (
+ typer_instance.registered_callback
+ or typer_instance.info.callback
+ or typer_instance.registered_groups
+ or len(typer_instance.registered_commands) > 1
+ ):
+ # Create a Group
+ click_command: click.Command = get_group(typer_instance)
+ if typer_instance._add_completion:
+ click_command.params.append(click_install_param)
+ click_command.params.append(click_show_param)
+ return click_command
+ elif len(typer_instance.registered_commands) == 1:
+ # Create a single Command
+ single_command = typer_instance.registered_commands[0]
+
+ if not single_command.context_settings and not isinstance(
+ typer_instance.info.context_settings, DefaultPlaceholder
+ ):
+ single_command.context_settings = typer_instance.info.context_settings
+
+ click_command = get_command_from_info(
+ single_command,
+ pretty_exceptions_short=typer_instance.pretty_exceptions_short,
+ rich_markup_mode=typer_instance.rich_markup_mode,
+ )
+ if typer_instance._add_completion:
+ click_command.params.append(click_install_param)
+ click_command.params.append(click_show_param)
+ return click_command
+ raise RuntimeError(
+ "Could not get a command for this Typer instance"
+ ) # pragma: no cover
+
+
+def solve_typer_info_help(typer_info: TyperInfo) -> str:
+ # Priority 1: Explicit value was set in app.add_typer()
+ if not isinstance(typer_info.help, DefaultPlaceholder):
+ return inspect.cleandoc(typer_info.help or "")
+ # Priority 2: Explicit value was set in sub_app.callback()
+ if typer_info.typer_instance and typer_info.typer_instance.registered_callback:
+ callback_help = typer_info.typer_instance.registered_callback.help
+ if not isinstance(callback_help, DefaultPlaceholder):
+ return inspect.cleandoc(callback_help or "")
+ # Priority 3: Explicit value was set in sub_app = typer.Typer()
+ if typer_info.typer_instance and typer_info.typer_instance.info:
+ instance_help = typer_info.typer_instance.info.help
+ if not isinstance(instance_help, DefaultPlaceholder):
+ return inspect.cleandoc(instance_help or "")
+ # Priority 4: Implicit inference from callback docstring in app.add_typer()
+ if typer_info.callback:
+ doc = inspect.getdoc(typer_info.callback)
+ if doc:
+ return doc
+ # Priority 5: Implicit inference from callback docstring in @app.callback()
+ if typer_info.typer_instance and typer_info.typer_instance.registered_callback:
+ callback = typer_info.typer_instance.registered_callback.callback
+ if not isinstance(callback, DefaultPlaceholder):
+ doc = inspect.getdoc(callback or "")
+ if doc:
+ return doc
+ # Priority 6: Implicit inference from callback docstring in typer.Typer()
+ if typer_info.typer_instance and typer_info.typer_instance.info:
+ instance_callback = typer_info.typer_instance.info.callback
+ if not isinstance(instance_callback, DefaultPlaceholder):
+ doc = inspect.getdoc(instance_callback)
+ if doc:
+ return doc
+ # Value not set, use the default
+ return typer_info.help.value
+
+
+def solve_typer_info_defaults(typer_info: TyperInfo) -> TyperInfo:
+ values: dict[str, Any] = {}
+ for name, value in typer_info.__dict__.items():
+ # Priority 1: Value was set in app.add_typer()
+ if not isinstance(value, DefaultPlaceholder):
+ values[name] = value
+ continue
+ # Priority 2: Value was set in @subapp.callback()
+ try:
+ callback_value = getattr(
+ typer_info.typer_instance.registered_callback, # type: ignore
+ name,
+ )
+ if not isinstance(callback_value, DefaultPlaceholder):
+ values[name] = callback_value
+ continue
+ except AttributeError:
+ pass
+ # Priority 3: Value set in subapp = typer.Typer()
+ try:
+ instance_value = getattr(
+ typer_info.typer_instance.info, # type: ignore
+ name,
+ )
+ if not isinstance(instance_value, DefaultPlaceholder):
+ values[name] = instance_value
+ continue
+ except AttributeError:
+ pass
+ # Value not set, use the default
+ values[name] = value.value
+ values["help"] = solve_typer_info_help(typer_info)
+ return TyperInfo(**values)
+
+
+def get_group_from_info(
+ group_info: TyperInfo,
+ *,
+ pretty_exceptions_short: bool,
+ suggest_commands: bool,
+ rich_markup_mode: MarkupMode,
+) -> TyperGroup:
+ assert group_info.typer_instance, (
+ "A Typer instance is needed to generate a Click Group"
+ )
+ commands: dict[str, click.Command] = {}
+ for command_info in group_info.typer_instance.registered_commands:
+ command = get_command_from_info(
+ command_info=command_info,
+ pretty_exceptions_short=pretty_exceptions_short,
+ rich_markup_mode=rich_markup_mode,
+ )
+ if command.name:
+ commands[command.name] = command
+ for sub_group_info in group_info.typer_instance.registered_groups:
+ sub_group = get_group_from_info(
+ sub_group_info,
+ pretty_exceptions_short=pretty_exceptions_short,
+ rich_markup_mode=rich_markup_mode,
+ suggest_commands=suggest_commands,
+ )
+ if sub_group.name:
+ commands[sub_group.name] = sub_group
+ else:
+ if sub_group.callback:
+ import warnings
+
+ warnings.warn(
+ "The 'callback' parameter is not supported by Typer when using `add_typer` without a name",
+ stacklevel=5,
+ )
+ for sub_command_name, sub_command in sub_group.commands.items():
+ commands[sub_command_name] = sub_command
+ solved_info = solve_typer_info_defaults(group_info)
+ (
+ params,
+ convertors,
+ context_param_name,
+ ) = get_params_convertors_ctx_param_name_from_function(solved_info.callback)
+ cls = solved_info.cls or TyperGroup
+ assert issubclass(cls, TyperGroup), f"{cls} should be a subclass of {TyperGroup}"
+ group = cls(
+ name=solved_info.name or "",
+ commands=commands,
+ invoke_without_command=solved_info.invoke_without_command,
+ no_args_is_help=solved_info.no_args_is_help,
+ subcommand_metavar=solved_info.subcommand_metavar,
+ chain=solved_info.chain,
+ result_callback=solved_info.result_callback,
+ context_settings=solved_info.context_settings,
+ callback=get_callback(
+ callback=solved_info.callback,
+ params=params,
+ convertors=convertors,
+ context_param_name=context_param_name,
+ pretty_exceptions_short=pretty_exceptions_short,
+ ),
+ params=params,
+ help=solved_info.help,
+ epilog=solved_info.epilog,
+ short_help=solved_info.short_help,
+ options_metavar=solved_info.options_metavar,
+ add_help_option=solved_info.add_help_option,
+ hidden=solved_info.hidden,
+ deprecated=solved_info.deprecated,
+ rich_markup_mode=rich_markup_mode,
+ # Rich settings
+ rich_help_panel=solved_info.rich_help_panel,
+ suggest_commands=suggest_commands,
+ )
+ return group
+
+
+def get_command_name(name: str) -> str:
+ return name.lower().replace("_", "-")
+
+
+def get_params_convertors_ctx_param_name_from_function(
+ callback: Callable[..., Any] | None,
+) -> tuple[list[click.Argument | click.Option], dict[str, Any], str | None]:
+ params = []
+ convertors = {}
+ context_param_name = None
+ if callback:
+ parameters = get_params_from_function(callback)
+ for param_name, param in parameters.items():
+ if lenient_issubclass(param.annotation, click.Context):
+ context_param_name = param_name
+ continue
+ click_param, convertor = get_click_param(param)
+ if convertor:
+ convertors[param_name] = convertor
+ params.append(click_param)
+ return params, convertors, context_param_name
+
+
+def get_command_from_info(
+ command_info: CommandInfo,
+ *,
+ pretty_exceptions_short: bool,
+ rich_markup_mode: MarkupMode,
+) -> click.Command:
+ assert command_info.callback, "A command must have a callback function"
+ name = command_info.name or get_command_name(command_info.callback.__name__) # ty:ignore[unresolved-attribute]
+ use_help = command_info.help
+ if use_help is None:
+ use_help = inspect.getdoc(command_info.callback)
+ else:
+ use_help = inspect.cleandoc(use_help)
+ (
+ params,
+ convertors,
+ context_param_name,
+ ) = get_params_convertors_ctx_param_name_from_function(command_info.callback)
+ cls = command_info.cls or TyperCommand
+ command = cls(
+ name=name,
+ context_settings=command_info.context_settings,
+ callback=get_callback(
+ callback=command_info.callback,
+ params=params,
+ convertors=convertors,
+ context_param_name=context_param_name,
+ pretty_exceptions_short=pretty_exceptions_short,
+ ),
+ params=params, # type: ignore
+ help=use_help,
+ epilog=command_info.epilog,
+ short_help=command_info.short_help,
+ options_metavar=command_info.options_metavar,
+ add_help_option=command_info.add_help_option,
+ no_args_is_help=command_info.no_args_is_help,
+ hidden=command_info.hidden,
+ deprecated=command_info.deprecated,
+ rich_markup_mode=rich_markup_mode,
+ # Rich settings
+ rich_help_panel=command_info.rich_help_panel,
+ )
+ return command
+
+
+def determine_type_convertor(type_: Any) -> Callable[[Any], Any] | None:
+ convertor: Callable[[Any], Any] | None = None
+ if lenient_issubclass(type_, Path):
+ convertor = param_path_convertor
+ if lenient_issubclass(type_, Enum):
+ convertor = generate_enum_convertor(type_)
+ return convertor
+
+
+def param_path_convertor(value: str | None = None) -> Path | None:
+ if value is not None:
+ # allow returning any subclass of Path created by an annotated parser without converting
+ # it back to a Path
+ return value if isinstance(value, Path) else Path(value)
+ return None
+
+
+def generate_enum_convertor(enum: type[Enum]) -> Callable[[Any], Any]:
+ val_map = {str(val.value): val for val in enum}
+
+ def convertor(value: Any) -> Any:
+ if value is not None:
+ val = str(value)
+ if val in val_map:
+ key = val_map[val]
+ return enum(key)
+
+ return convertor
+
+
+def generate_list_convertor(
+ convertor: Callable[[Any], Any] | None, default_value: Any | None
+) -> Callable[[Sequence[Any] | None], list[Any] | None]:
+ def internal_convertor(value: Sequence[Any] | None) -> list[Any] | None:
+ if (value is None) or (default_value is None and len(value) == 0):
+ return None
+ return [convertor(v) if convertor else v for v in value]
+
+ return internal_convertor
+
+
+def generate_tuple_convertor(
+ types: Sequence[Any],
+) -> Callable[[tuple[Any, ...] | None], tuple[Any, ...] | None]:
+ convertors = [determine_type_convertor(type_) for type_ in types]
+
+ def internal_convertor(
+ param_args: tuple[Any, ...] | None,
+ ) -> tuple[Any, ...] | None:
+ if param_args is None:
+ return None
+ return tuple(
+ convertor(arg) if convertor else arg
+ for (convertor, arg) in zip(convertors, param_args, strict=False)
+ )
+
+ return internal_convertor
+
+
+def get_callback(
+ *,
+ callback: Callable[..., Any] | None = None,
+ params: Sequence[click.Parameter] = [],
+ convertors: dict[str, Callable[[str], Any]] | None = None,
+ context_param_name: str | None = None,
+ pretty_exceptions_short: bool,
+) -> Callable[..., Any] | None:
+ use_convertors = convertors or {}
+ if not callback:
+ return None
+ parameters = get_params_from_function(callback)
+ use_params: dict[str, Any] = {}
+ for param_name in parameters:
+ use_params[param_name] = None
+ for param in params:
+ if param.name:
+ use_params[param.name] = param.default
+
+ def wrapper(**kwargs: Any) -> Any:
+ _rich_traceback_guard = pretty_exceptions_short # noqa: F841
+ for k, v in kwargs.items():
+ if k in use_convertors:
+ use_params[k] = use_convertors[k](v)
+ else:
+ use_params[k] = v
+ if context_param_name:
+ use_params[context_param_name] = click.get_current_context()
+ return callback(**use_params)
+
+ update_wrapper(wrapper, callback)
+ return wrapper
+
+
+def get_click_type(
+ *, annotation: Any, parameter_info: ParameterInfo
+) -> click.ParamType:
+ if parameter_info.click_type is not None:
+ return parameter_info.click_type
+
+ elif parameter_info.parser is not None:
+ return click.types.FuncParamType(parameter_info.parser)
+
+ elif annotation is str:
+ return click.STRING
+ elif annotation is int:
+ if parameter_info.min is not None or parameter_info.max is not None:
+ min_ = None
+ max_ = None
+ if parameter_info.min is not None:
+ min_ = int(parameter_info.min)
+ if parameter_info.max is not None:
+ max_ = int(parameter_info.max)
+ return click.IntRange(min=min_, max=max_, clamp=parameter_info.clamp)
+ else:
+ return click.INT
+ elif annotation is float:
+ if parameter_info.min is not None or parameter_info.max is not None:
+ return click.FloatRange(
+ min=parameter_info.min,
+ max=parameter_info.max,
+ clamp=parameter_info.clamp,
+ )
+ else:
+ return click.FLOAT
+ elif annotation is bool:
+ return click.BOOL
+ elif annotation == UUID:
+ return click.UUID
+ elif annotation == datetime:
+ return click.DateTime(formats=parameter_info.formats)
+ elif (
+ annotation == Path
+ or parameter_info.allow_dash
+ or parameter_info.path_type
+ or parameter_info.resolve_path
+ ):
+ return TyperPath(
+ exists=parameter_info.exists,
+ file_okay=parameter_info.file_okay,
+ dir_okay=parameter_info.dir_okay,
+ writable=parameter_info.writable,
+ readable=parameter_info.readable,
+ resolve_path=parameter_info.resolve_path,
+ allow_dash=parameter_info.allow_dash,
+ path_type=parameter_info.path_type,
+ )
+ elif lenient_issubclass(annotation, FileTextWrite):
+ return click.File(
+ mode=parameter_info.mode or "w",
+ encoding=parameter_info.encoding,
+ errors=parameter_info.errors,
+ lazy=parameter_info.lazy,
+ atomic=parameter_info.atomic,
+ )
+ elif lenient_issubclass(annotation, FileText):
+ return click.File(
+ mode=parameter_info.mode or "r",
+ encoding=parameter_info.encoding,
+ errors=parameter_info.errors,
+ lazy=parameter_info.lazy,
+ atomic=parameter_info.atomic,
+ )
+ elif lenient_issubclass(annotation, FileBinaryRead):
+ return click.File(
+ mode=parameter_info.mode or "rb",
+ encoding=parameter_info.encoding,
+ errors=parameter_info.errors,
+ lazy=parameter_info.lazy,
+ atomic=parameter_info.atomic,
+ )
+ elif lenient_issubclass(annotation, FileBinaryWrite):
+ return click.File(
+ mode=parameter_info.mode or "wb",
+ encoding=parameter_info.encoding,
+ errors=parameter_info.errors,
+ lazy=parameter_info.lazy,
+ atomic=parameter_info.atomic,
+ )
+ elif lenient_issubclass(annotation, Enum):
+ # The custom TyperChoice is only needed for Click < 8.2.0, to parse the
+ # command line values matching them to the enum values. Click 8.2.0 added
+ # support for enum values but reading enum names.
+ # Passing here the list of enum values (instead of just the enum) accounts for
+ # Click < 8.2.0.
+ return TyperChoice(
+ [item.value for item in annotation],
+ case_sensitive=parameter_info.case_sensitive,
+ )
+ elif is_literal_type(annotation):
+ return click.Choice(
+ literal_values(annotation),
+ case_sensitive=parameter_info.case_sensitive,
+ )
+ raise RuntimeError(f"Type not yet supported: {annotation}") # pragma: no cover
+
+
+def lenient_issubclass(cls: Any, class_or_tuple: AnyType | tuple[AnyType, ...]) -> bool:
+ return isinstance(cls, type) and issubclass(cls, class_or_tuple)
+
+
+def get_click_param(
+ param: ParamMeta,
+) -> tuple[click.Argument | click.Option, Any]:
+ # First, find out what will be:
+ # * ParamInfo (ArgumentInfo or OptionInfo)
+ # * default_value
+ # * required
+ default_value = None
+ required = False
+ if isinstance(param.default, ParameterInfo):
+ parameter_info = param.default
+ if parameter_info.default == Required:
+ required = True
+ else:
+ default_value = parameter_info.default
+ elif param.default == Required or param.default is param.empty:
+ required = True
+ parameter_info = ArgumentInfo()
+ else:
+ default_value = param.default
+ parameter_info = OptionInfo()
+ annotation: Any
+ if param.annotation is not param.empty:
+ annotation = param.annotation
+ else:
+ annotation = str
+ main_type = annotation
+ is_list = False
+ is_tuple = False
+ parameter_type: Any = None
+ is_flag = None
+ origin = get_origin(main_type)
+
+ if origin is not None:
+ # Handle SomeType | None and Optional[SomeType]
+ if is_union(origin):
+ types = []
+ for type_ in get_args(main_type):
+ if type_ is NoneType:
+ continue
+ types.append(type_)
+ assert len(types) == 1, "Typer Currently doesn't support Union types"
+ main_type = types[0]
+ origin = get_origin(main_type)
+ # Handle Tuples and Lists
+ if lenient_issubclass(origin, list):
+ main_type = get_args(main_type)[0]
+ assert not get_origin(main_type), (
+ "List types with complex sub-types are not currently supported"
+ )
+ is_list = True
+ elif lenient_issubclass(origin, tuple):
+ types = []
+ for type_ in get_args(main_type):
+ assert not get_origin(type_), (
+ "Tuple types with complex sub-types are not currently supported"
+ )
+ types.append(
+ get_click_type(annotation=type_, parameter_info=parameter_info)
+ )
+ parameter_type = tuple(types)
+ is_tuple = True
+ if parameter_type is None:
+ parameter_type = get_click_type(
+ annotation=main_type, parameter_info=parameter_info
+ )
+ convertor = determine_type_convertor(main_type)
+ if is_list:
+ convertor = generate_list_convertor(
+ convertor=convertor, default_value=default_value
+ )
+ if is_tuple:
+ convertor = generate_tuple_convertor(get_args(main_type))
+ if isinstance(parameter_info, OptionInfo):
+ if main_type is bool:
+ is_flag = True
+ # Click doesn't accept a flag of type bool, only None, and then it sets it
+ # to bool internally
+ parameter_type = None
+ default_option_name = get_command_name(param.name)
+ if is_flag:
+ default_option_declaration = (
+ f"--{default_option_name}/--no-{default_option_name}"
+ )
+ else:
+ default_option_declaration = f"--{default_option_name}"
+ param_decls = [param.name]
+ if parameter_info.param_decls:
+ param_decls.extend(parameter_info.param_decls)
+ else:
+ param_decls.append(default_option_declaration)
+ return (
+ TyperOption(
+ # Option
+ param_decls=param_decls,
+ show_default=parameter_info.show_default,
+ prompt=parameter_info.prompt,
+ confirmation_prompt=parameter_info.confirmation_prompt,
+ prompt_required=parameter_info.prompt_required,
+ hide_input=parameter_info.hide_input,
+ is_flag=is_flag,
+ multiple=is_list,
+ count=parameter_info.count,
+ allow_from_autoenv=parameter_info.allow_from_autoenv,
+ type=parameter_type,
+ help=parameter_info.help,
+ hidden=parameter_info.hidden,
+ show_choices=parameter_info.show_choices,
+ show_envvar=parameter_info.show_envvar,
+ # Parameter
+ required=required,
+ default=default_value,
+ callback=get_param_callback(
+ callback=parameter_info.callback, convertor=convertor
+ ),
+ metavar=parameter_info.metavar,
+ expose_value=parameter_info.expose_value,
+ is_eager=parameter_info.is_eager,
+ envvar=parameter_info.envvar,
+ shell_complete=parameter_info.shell_complete,
+ autocompletion=get_param_completion(parameter_info.autocompletion),
+ # Rich settings
+ rich_help_panel=parameter_info.rich_help_panel,
+ ),
+ convertor,
+ )
+ elif isinstance(parameter_info, ArgumentInfo):
+ param_decls = [param.name]
+ nargs = None
+ if is_list:
+ nargs = -1
+ return (
+ TyperArgument(
+ # Argument
+ param_decls=param_decls,
+ type=parameter_type,
+ required=required,
+ nargs=nargs,
+ # TyperArgument
+ show_default=parameter_info.show_default,
+ show_choices=parameter_info.show_choices,
+ show_envvar=parameter_info.show_envvar,
+ help=parameter_info.help,
+ hidden=parameter_info.hidden,
+ # Parameter
+ default=default_value,
+ callback=get_param_callback(
+ callback=parameter_info.callback, convertor=convertor
+ ),
+ metavar=parameter_info.metavar,
+ expose_value=parameter_info.expose_value,
+ is_eager=parameter_info.is_eager,
+ envvar=parameter_info.envvar,
+ shell_complete=parameter_info.shell_complete,
+ autocompletion=get_param_completion(parameter_info.autocompletion),
+ # Rich settings
+ rich_help_panel=parameter_info.rich_help_panel,
+ ),
+ convertor,
+ )
+ raise AssertionError("A click.Parameter should be returned") # pragma: no cover
+
+
def get_param_callback(
    *,
    callback: Callable[..., Any] | None = None,
    convertor: Callable[..., Any] | None = None,
) -> Callable[..., Any] | None:
    """Wrap a user-provided parameter callback into a Click-compatible callback.

    Click always invokes a parameter callback as ``callback(ctx, param, value)``.
    Typer lets users write callbacks declaring any subset of those parameters,
    identified either by type annotation (``click.Context`` /
    ``click.Parameter``) or positionally. This inspects the user's callback and
    returns a wrapper with the exact signature Click expects, forwarding only
    the parameters the user declared.

    Returns ``None`` when no callback was provided, so the result can be passed
    straight through to Click.
    """
    if not callback:
        return None
    parameters = get_params_from_function(callback)
    ctx_name = None
    click_param_name = None
    value_name = None
    untyped_names: list[str] = []
    # Classify parameters: those annotated as click.Context or click.Parameter
    # are matched by type; the rest are matched positionally below.
    for param_name, param_sig in parameters.items():
        if lenient_issubclass(param_sig.annotation, click.Context):
            ctx_name = param_name
        elif lenient_issubclass(param_sig.annotation, click.Parameter):
            click_param_name = param_name
        else:
            untyped_names.append(param_name)
    # Extract value param name first: the *last* untyped parameter receives the
    # value, mirroring Click's ``(ctx, param, value)`` order where value is last.
    if untyped_names:
        value_name = untyped_names.pop()
    # If context and Click param were not typed (old/Click callback style) extract them
    if untyped_names:
        if ctx_name is None:
            ctx_name = untyped_names.pop(0)
        if click_param_name is None:
            if untyped_names:
                click_param_name = untyped_names.pop(0)
    # Any leftover parameter cannot be mapped onto Click's callback API.
    if untyped_names:
        raise click.ClickException(
            "Too many CLI parameter callback function parameters"
        )

    def wrapper(ctx: click.Context, param: click.Parameter, value: Any) -> Any:
        # Build only the kwargs the user's callback declared; run the convertor
        # (e.g. list/tuple conversion) on the raw value before forwarding it.
        use_params: dict[str, Any] = {}
        if ctx_name:
            use_params[ctx_name] = ctx
        if click_param_name:
            use_params[click_param_name] = param
        if value_name:
            if convertor:
                use_value = convertor(value)
            else:
                use_value = value
            use_params[value_name] = use_value
        return callback(**use_params)

    # Preserve the original callback's metadata (__name__, __doc__, etc.).
    update_wrapper(wrapper, callback)
    return wrapper
+
+
def get_param_completion(
    callback: Callable[..., Any] | None = None,
) -> Callable[..., Any] | None:
    """Wrap a user-provided autocompletion callback into Click's completion API.

    Click calls a completion function as ``f(ctx, args, incomplete)``. Typer
    lets users declare any subset of those parameters, matched first by type
    annotation (``click.Context``, ``list``, ``str``) and then, for
    un-annotated parameters, by the conventional names ``ctx`` / ``args`` /
    ``incomplete``. The returned wrapper has the exact signature Click expects
    and forwards only what the user declared.

    Returns ``None`` when no callback was provided.
    """
    if not callback:
        return None
    parameters = get_params_from_function(callback)
    ctx_name = None
    args_name = None
    incomplete_name = None
    unassigned_params = list(parameters.values())
    # First pass: assign parameters by their type annotations. Iterate over a
    # copy so removal from the list is safe.
    for param_sig in unassigned_params[:]:
        origin = get_origin(param_sig.annotation)
        if lenient_issubclass(param_sig.annotation, click.Context):
            ctx_name = param_sig.name
            unassigned_params.remove(param_sig)
        elif lenient_issubclass(origin, list):
            args_name = param_sig.name
            unassigned_params.remove(param_sig)
        elif lenient_issubclass(param_sig.annotation, str):
            incomplete_name = param_sig.name
            unassigned_params.remove(param_sig)
    # If there are still unassigned parameters (not typed), extract by name
    for param_sig in unassigned_params[:]:
        if ctx_name is None and param_sig.name == "ctx":
            ctx_name = param_sig.name
            unassigned_params.remove(param_sig)
        elif args_name is None and param_sig.name == "args":
            args_name = param_sig.name
            unassigned_params.remove(param_sig)
        elif incomplete_name is None and param_sig.name == "incomplete":
            incomplete_name = param_sig.name
            unassigned_params.remove(param_sig)
    # Any parameter still unassigned cannot be mapped to the completion API.
    if unassigned_params:
        show_params = " ".join([param.name for param in unassigned_params])
        raise click.ClickException(
            f"Invalid autocompletion callback parameters: {show_params}"
        )

    def wrapper(ctx: click.Context, args: list[str], incomplete: str | None) -> Any:
        # Forward only the parameters the user's callback declared.
        use_params: dict[str, Any] = {}
        if ctx_name:
            use_params[ctx_name] = ctx
        if args_name:
            use_params[args_name] = args
        if incomplete_name:
            use_params[incomplete_name] = incomplete
        return callback(**use_params)

    # Preserve the original callback's metadata (__name__, __doc__, etc.).
    update_wrapper(wrapper, callback)
    return wrapper
+
+
def run(
    function: Annotated[
        Callable[..., Any],
        Doc(
            """
            The function that should power this CLI application.
            """
        ),
    ],
) -> None:
    """
    Turn a single function into a complete CLI application and execute it.

    Internally this builds a `Typer()` app (with shell-completion commands
    disabled), registers `function` as its only command, and invokes the app.

    ## Example

    ```python
    import typer

    def main(name: str):
        print(f"Hello {name}")

    if __name__ == "__main__":
        typer.run(main)
    ```
    """
    cli_app = Typer(add_completion=False)
    cli_app.command()(function)
    cli_app()
+
+
+def _is_macos() -> bool:
+ return platform.system() == "Darwin"
+
+
+def _is_linux_or_bsd() -> bool:
+ if platform.system() == "Linux":
+ return True
+
+ return "BSD" in platform.system()
+
+
def launch(
    url: Annotated[
        str,
        Doc(
            """
            URL or filename of the thing to launch.
            """
        ),
    ],
    wait: Annotated[
        bool,
        Doc(
            """
            Wait for the program to exit before returning. This only works if the launched program blocks.
            In particular, `xdg-open` on Linux does not block.
            """
        ),
    ] = False,
    locate: Annotated[
        bool,
        Doc(
            """
            If this is set to `True`, then instead of launching the application associated with the URL, it will attempt to
            launch a file manager with the file located. This might have weird effects if the URL does not point to the filesystem.
            """
        ),
    ] = False,
) -> int:
    """
    This function launches the given URL (or filename) in the default
    viewer application for this file type. If this is an executable, it
    might launch the executable in a new session. The return value is
    the exit code of the launched application. Usually, `0` indicates
    success.

    This function handles url in different operating systems separately:
    - On macOS (Darwin), it uses the `open` command.
    - On Linux and BSD, it uses `xdg-open` if available.
    - On Windows (and other OSes), it uses the standard webbrowser module.

    The function avoids, when possible, using the webbrowser module on Linux and macOS
    to prevent spammy terminal messages from some browsers (e.g., Chrome).

    Note that `wait` and `locate` only take effect for non-HTTP(S) targets,
    which are delegated to `click.launch()`.

    ## Examples
    ```python
    import typer

    typer.launch("https://typer.tiangolo.com/")
    ```

    ```python
    import typer

    typer.launch("/my/downloaded/file", locate=True)
    ```
    """

    if url.startswith("http://") or url.startswith("https://"):
        if _is_macos():
            # `open` launches the default handler; silence its output.
            return subprocess.Popen(
                ["open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
            ).wait()

        has_xdg_open = _is_linux_or_bsd() and shutil.which("xdg-open") is not None

        if has_xdg_open:
            return subprocess.Popen(
                ["xdg-open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
            ).wait()

        # Fallback (e.g. Windows): use the standard library's webbrowser module.
        import webbrowser

        webbrowser.open(url)

        return 0

    else:
        # Bug fix: `wait` and `locate` were previously dropped here, making the
        # documented parameters no-ops. Forward them to click.launch(), which
        # accepts both (see Click's launch() utility).
        return click.launch(url, wait=wait, locate=locate)
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..3285a96a2433176a22c11c2402116e5f7562218e
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py
@@ -0,0 +1,651 @@
+import inspect
+import io
+from collections.abc import Callable, Sequence
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Optional,
+ TypeVar,
+)
+
+import click
+import click.shell_completion
+
+if TYPE_CHECKING: # pragma: no cover
+ from .core import TyperCommand, TyperGroup
+ from .main import Typer
+
+
# Alias for the type of ``None``; used when stripping ``None`` out of
# Optional/Union annotations.
NoneType = type(None)

# Alias for "a class object" used in type annotations.
AnyType = type[Any]

# Sentinel default meaning "this parameter is required" (the Ellipsis convention).
Required = ...
+
+
class Context(click.Context):
    """
    Typer's alias of Click's
    [`Context`](https://click.palletsprojects.com/en/stable/api/#click.Context),
    which carries additional data about the current execution of your program.

    Declare a parameter of this type in a
    [callback](https://typer.tiangolo.com/tutorial/options/callback-and-context/)
    function to access that additional information.
    """
+
+
class FileText(io.TextIOWrapper):
    """
    A file-like object for reading text: reading or iterating yields `str`.
    The default mode of this class is `mode="r"`.

    **Example**

    ```python
    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def main(config: Annotated[typer.FileText, typer.Option()]):
        for line in config:
            print(f"Config line: {line}")

    if __name__ == "__main__":
        app()
    ```
    """
+
+
class FileTextWrite(FileText):
    """
    A file-like object for writing text (`str`). Equivalent to `FileText` with
    `mode="w"`, which is this class's default mode.

    **Example**

    ```python
    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def main(config: Annotated[typer.FileTextWrite, typer.Option()]):
        config.write("Some config written by the app")
        print("Config written")

    if __name__ == "__main__":
        app()
    ```
    """
+
+
class FileBinaryRead(io.BufferedReader):
    """
    A file-like object for reading binary data: reads yield `bytes`.
    The default mode of this class is `mode="rb"`, which makes it useful for
    binary files such as images.

    **Example**

    ```python
    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def main(file: Annotated[typer.FileBinaryRead, typer.Option()]):
        processed_total = 0
        for bytes_chunk in file:
            # Process the bytes in bytes_chunk
            processed_total += len(bytes_chunk)
            print(f"Processed bytes total: {processed_total}")

    if __name__ == "__main__":
        app()
    ```
    """
+
+
class FileBinaryWrite(io.BufferedWriter):
    """
    A file-like object for writing binary data: pass `bytes` to it, not `str`.
    The default mode of this class is `mode="wb"`, which makes it useful for
    binary files such as images.

    **Example**

    ```python
    from typing import Annotated

    import typer

    app = typer.Typer()

    @app.command()
    def main(file: Annotated[typer.FileBinaryWrite, typer.Option()]):
        # str must be encoded to bytes before writing to a binary file
        file.write("some settings\\n".encode("utf-8"))
        print("Binary file written")

    if __name__ == "__main__":
        app()
    ```
    """
+
+
class CallbackParam(click.Parameter):
    """
    Declare a function parameter of this type in a callback to receive the
    specific Click
    [`Parameter`](https://click.palletsprojects.com/en/stable/api/#click.Parameter)
    object being processed.
    """
+
+
class DefaultPlaceholder:
    """
    Internal wrapper marking a value as a "default", so Typer can later detect
    whether the user overrode it — even when the new value is `None`.

    You shouldn't use this class directly.
    """

    def __init__(self, value: Any):
        # The wrapped default value; truthiness delegates to it (see __bool__).
        self.value = value

    def __bool__(self) -> bool:
        return bool(self.value)
+
+
+DefaultType = TypeVar("DefaultType")
+
+CommandFunctionType = TypeVar("CommandFunctionType", bound=Callable[..., Any])
+
+
def Default(value: DefaultType) -> DefaultType:
    """
    You shouldn't use this function directly.

    Internal helper wrapping `value` in a `DefaultPlaceholder`, letting Typer
    detect later whether the default was overridden — even by `None`.
    """
    placeholder = DefaultPlaceholder(value)
    return placeholder  # type: ignore
+
+
class CommandInfo:
    """Configuration holder for a single Typer (sub)command.

    Stores the arguments given when a command is registered until the actual
    Click command object is built. Attributes mirror the constructor
    parameters one-to-one; see `click.Command` for the Click-level options.
    """

    def __init__(
        self,
        name: str | None = None,
        *,
        cls: type["TyperCommand"] | None = None,
        context_settings: dict[Any, Any] | None = None,
        callback: Callable[..., Any] | None = None,
        help: str | None = None,
        epilog: str | None = None,
        short_help: str | None = None,
        options_metavar: str = "[OPTIONS]",
        add_help_option: bool = True,
        no_args_is_help: bool = False,
        hidden: bool = False,
        deprecated: bool = False,
        # Rich settings
        rich_help_panel: str | None = None,
    ):
        self.name = name
        self.cls = cls
        self.context_settings = context_settings
        self.callback = callback
        self.help = help
        self.epilog = epilog
        self.short_help = short_help
        self.options_metavar = options_metavar
        self.add_help_option = add_help_option
        self.no_args_is_help = no_args_is_help
        self.hidden = hidden
        self.deprecated = deprecated
        # Rich settings
        self.rich_help_panel = rich_help_panel
+
+
class TyperInfo:
    """Configuration holder for a `Typer` app or sub-app (Click group).

    Defaults are wrapped in `Default(...)` placeholders so later code can tell
    whether a value was explicitly provided — even when it was `None`.
    Attributes mirror the constructor parameters one-to-one.
    """

    def __init__(
        self,
        typer_instance: Optional["Typer"] = Default(None),
        *,
        name: str | None = Default(None),
        cls: type["TyperGroup"] | None = Default(None),
        invoke_without_command: bool = Default(False),
        no_args_is_help: bool = Default(False),
        subcommand_metavar: str | None = Default(None),
        chain: bool = Default(False),
        result_callback: Callable[..., Any] | None = Default(None),
        # Command
        context_settings: dict[Any, Any] | None = Default(None),
        callback: Callable[..., Any] | None = Default(None),
        help: str | None = Default(None),
        epilog: str | None = Default(None),
        short_help: str | None = Default(None),
        options_metavar: str = Default("[OPTIONS]"),
        add_help_option: bool = Default(True),
        hidden: bool = Default(False),
        deprecated: bool = Default(False),
        # Rich settings
        rich_help_panel: str | None = Default(None),
    ):
        self.typer_instance = typer_instance
        self.name = name
        self.cls = cls
        self.invoke_without_command = invoke_without_command
        self.no_args_is_help = no_args_is_help
        self.subcommand_metavar = subcommand_metavar
        self.chain = chain
        self.result_callback = result_callback
        self.context_settings = context_settings
        self.callback = callback
        self.help = help
        self.epilog = epilog
        self.short_help = short_help
        self.options_metavar = options_metavar
        self.add_help_option = add_help_option
        self.hidden = hidden
        self.deprecated = deprecated
        self.rich_help_panel = rich_help_panel
+
+
class ParameterInfo:
    """Base configuration container for a CLI parameter.

    Shared by `OptionInfo` and `ArgumentInfo`, which are created via
    `typer.Option()` / `typer.Argument()`. Attributes mirror the constructor
    parameters one-to-one; groups of parameters apply only when the
    parameter's type matches (Choice, numbers, datetime, file, path).

    Raises:
        ValueError: if both `parser` and `click_type` are provided, since a
            parameter can have only one custom type mechanism.
    """

    def __init__(
        self,
        *,
        default: Any | None = None,
        param_decls: Sequence[str] | None = None,
        callback: Callable[..., Any] | None = None,
        metavar: str | None = None,
        expose_value: bool = True,
        is_eager: bool = False,
        envvar: str | list[str] | None = None,
        # Note that shell_complete is not fully supported and will be removed in future versions
        # TODO: Remove shell_complete in a future version (after 0.16.0)
        shell_complete: Callable[
            [click.Context, click.Parameter, str],
            list["click.shell_completion.CompletionItem"] | list[str],
        ]
        | None = None,
        autocompletion: Callable[..., Any] | None = None,
        default_factory: Callable[[], Any] | None = None,
        # Custom type
        parser: Callable[[str], Any] | None = None,
        click_type: click.ParamType | None = None,
        # TyperArgument
        show_default: bool | str = True,
        show_choices: bool = True,
        show_envvar: bool = True,
        help: str | None = None,
        hidden: bool = False,
        # Choice
        case_sensitive: bool = True,
        # Numbers
        min: int | float | None = None,
        max: int | float | None = None,
        clamp: bool = False,
        # DateTime
        formats: list[str] | None = None,
        # File
        mode: str | None = None,
        encoding: str | None = None,
        errors: str | None = "strict",
        lazy: bool | None = None,
        atomic: bool = False,
        # Path
        exists: bool = False,
        file_okay: bool = True,
        dir_okay: bool = True,
        writable: bool = False,
        readable: bool = True,
        resolve_path: bool = False,
        allow_dash: bool = False,
        path_type: None | type[str] | type[bytes] = None,
        # Rich settings
        rich_help_panel: str | None = None,
    ):
        # Check if user has provided multiple custom parsers
        if parser and click_type:
            raise ValueError(
                "Multiple custom type parsers provided. "
                "`parser` and `click_type` may not both be provided."
            )

        self.default = default
        self.param_decls = param_decls
        self.callback = callback
        self.metavar = metavar
        self.expose_value = expose_value
        self.is_eager = is_eager
        self.envvar = envvar
        self.shell_complete = shell_complete
        self.autocompletion = autocompletion
        self.default_factory = default_factory
        # Custom type
        self.parser = parser
        self.click_type = click_type
        # TyperArgument
        self.show_default = show_default
        self.show_choices = show_choices
        self.show_envvar = show_envvar
        self.help = help
        self.hidden = hidden
        # Choice
        self.case_sensitive = case_sensitive
        # Numbers
        self.min = min
        self.max = max
        self.clamp = clamp
        # DateTime
        self.formats = formats
        # File
        self.mode = mode
        self.encoding = encoding
        self.errors = errors
        self.lazy = lazy
        self.atomic = atomic
        # Path
        self.exists = exists
        self.file_okay = file_okay
        self.dir_okay = dir_okay
        self.writable = writable
        self.readable = readable
        self.resolve_path = resolve_path
        self.allow_dash = allow_dash
        self.path_type = path_type
        # Rich settings
        self.rich_help_panel = rich_help_panel
+
+
class OptionInfo(ParameterInfo):
    """Configuration container for a CLI option, created by `typer.Option()`.

    Extends `ParameterInfo` with option-specific settings: prompting,
    confirmation, input hiding, counting, and autoenv. The deprecated
    `is_flag` / `flag_value` parameters are accepted only to emit a
    `DeprecationWarning`; they are not stored or honored.
    """

    def __init__(
        self,
        *,
        # ParameterInfo
        default: Any | None = None,
        param_decls: Sequence[str] | None = None,
        callback: Callable[..., Any] | None = None,
        metavar: str | None = None,
        expose_value: bool = True,
        is_eager: bool = False,
        envvar: str | list[str] | None = None,
        # Note that shell_complete is not fully supported and will be removed in future versions
        # TODO: Remove shell_complete in a future version (after 0.16.0)
        shell_complete: Callable[
            [click.Context, click.Parameter, str],
            list["click.shell_completion.CompletionItem"] | list[str],
        ]
        | None = None,
        autocompletion: Callable[..., Any] | None = None,
        default_factory: Callable[[], Any] | None = None,
        # Custom type
        parser: Callable[[str], Any] | None = None,
        click_type: click.ParamType | None = None,
        # Option
        show_default: bool | str = True,
        prompt: bool | str = False,
        confirmation_prompt: bool = False,
        prompt_required: bool = True,
        hide_input: bool = False,
        # TODO: remove is_flag and flag_value in a future release
        is_flag: bool | None = None,
        flag_value: Any | None = None,
        count: bool = False,
        allow_from_autoenv: bool = True,
        help: str | None = None,
        hidden: bool = False,
        show_choices: bool = True,
        show_envvar: bool = True,
        # Choice
        case_sensitive: bool = True,
        # Numbers
        min: int | float | None = None,
        max: int | float | None = None,
        clamp: bool = False,
        # DateTime
        formats: list[str] | None = None,
        # File
        mode: str | None = None,
        encoding: str | None = None,
        errors: str | None = "strict",
        lazy: bool | None = None,
        atomic: bool = False,
        # Path
        exists: bool = False,
        file_okay: bool = True,
        dir_okay: bool = True,
        writable: bool = False,
        readable: bool = True,
        resolve_path: bool = False,
        allow_dash: bool = False,
        path_type: None | type[str] | type[bytes] = None,
        # Rich settings
        rich_help_panel: str | None = None,
    ):
        super().__init__(
            default=default,
            param_decls=param_decls,
            callback=callback,
            metavar=metavar,
            expose_value=expose_value,
            is_eager=is_eager,
            envvar=envvar,
            shell_complete=shell_complete,
            autocompletion=autocompletion,
            default_factory=default_factory,
            # Custom type
            parser=parser,
            click_type=click_type,
            # TyperArgument
            show_default=show_default,
            show_choices=show_choices,
            show_envvar=show_envvar,
            help=help,
            hidden=hidden,
            # Choice
            case_sensitive=case_sensitive,
            # Numbers
            min=min,
            max=max,
            clamp=clamp,
            # DateTime
            formats=formats,
            # File
            mode=mode,
            encoding=encoding,
            errors=errors,
            lazy=lazy,
            atomic=atomic,
            # Path
            exists=exists,
            file_okay=file_okay,
            dir_okay=dir_okay,
            writable=writable,
            readable=readable,
            resolve_path=resolve_path,
            allow_dash=allow_dash,
            path_type=path_type,
            # Rich settings
            rich_help_panel=rich_help_panel,
        )
        # Deprecated parameters: warn but do not store them.
        if is_flag is not None or flag_value is not None:
            import warnings

            warnings.warn(
                "The 'is_flag' and 'flag_value' parameters are not supported by Typer "
                "and will be removed entirely in a future release.",
                DeprecationWarning,
                stacklevel=2,
            )
        self.prompt = prompt
        self.confirmation_prompt = confirmation_prompt
        self.prompt_required = prompt_required
        self.hide_input = hide_input
        self.count = count
        self.allow_from_autoenv = allow_from_autoenv
+
class ArgumentInfo(ParameterInfo):
    """Configuration container for a positional CLI argument, created by
    `typer.Argument()`.

    Adds no attributes of its own beyond `ParameterInfo`; it exists so the
    parameter-building code can distinguish arguments from options by type.
    """

    def __init__(
        self,
        *,
        # ParameterInfo
        default: Any | None = None,
        param_decls: Sequence[str] | None = None,
        callback: Callable[..., Any] | None = None,
        metavar: str | None = None,
        expose_value: bool = True,
        is_eager: bool = False,
        envvar: str | list[str] | None = None,
        # Note that shell_complete is not fully supported and will be removed in future versions
        # TODO: Remove shell_complete in a future version (after 0.16.0)
        shell_complete: Callable[
            [click.Context, click.Parameter, str],
            list["click.shell_completion.CompletionItem"] | list[str],
        ]
        | None = None,
        autocompletion: Callable[..., Any] | None = None,
        default_factory: Callable[[], Any] | None = None,
        # Custom type
        parser: Callable[[str], Any] | None = None,
        click_type: click.ParamType | None = None,
        # TyperArgument
        show_default: bool | str = True,
        show_choices: bool = True,
        show_envvar: bool = True,
        help: str | None = None,
        hidden: bool = False,
        # Choice
        case_sensitive: bool = True,
        # Numbers
        min: int | float | None = None,
        max: int | float | None = None,
        clamp: bool = False,
        # DateTime
        formats: list[str] | None = None,
        # File
        mode: str | None = None,
        encoding: str | None = None,
        errors: str | None = "strict",
        lazy: bool | None = None,
        atomic: bool = False,
        # Path
        exists: bool = False,
        file_okay: bool = True,
        dir_okay: bool = True,
        writable: bool = False,
        readable: bool = True,
        resolve_path: bool = False,
        allow_dash: bool = False,
        path_type: None | type[str] | type[bytes] = None,
        # Rich settings
        rich_help_panel: str | None = None,
    ):
        super().__init__(
            default=default,
            param_decls=param_decls,
            callback=callback,
            metavar=metavar,
            expose_value=expose_value,
            is_eager=is_eager,
            envvar=envvar,
            shell_complete=shell_complete,
            autocompletion=autocompletion,
            default_factory=default_factory,
            # Custom type
            parser=parser,
            click_type=click_type,
            # TyperArgument
            show_default=show_default,
            show_choices=show_choices,
            show_envvar=show_envvar,
            help=help,
            hidden=hidden,
            # Choice
            case_sensitive=case_sensitive,
            # Numbers
            min=min,
            max=max,
            clamp=clamp,
            # DateTime
            formats=formats,
            # File
            mode=mode,
            encoding=encoding,
            errors=errors,
            lazy=lazy,
            atomic=atomic,
            # Path
            exists=exists,
            file_okay=file_okay,
            dir_okay=dir_okay,
            writable=writable,
            readable=readable,
            resolve_path=resolve_path,
            allow_dash=allow_dash,
            path_type=path_type,
            # Rich settings
            rich_help_panel=rich_help_panel,
        )
+
+
class ParamMeta:
    """
    Minimal stand-in for `inspect.Parameter`, carrying only what Typer needs:
    the parameter's name, default value, and type annotation.
    """

    # Sentinel meaning "no default / no annotation"; the same object as
    # inspect.Parameter.empty.
    empty = inspect.Parameter.empty

    def __init__(
        self,
        *,
        name: str,
        default: Any = inspect.Parameter.empty,
        annotation: Any = inspect.Parameter.empty,
    ) -> None:
        self.name, self.default, self.annotation = name, default, annotation
+
+
class DeveloperExceptionConfig:
    """
    Internal holder for the pretty-exceptions settings passed to `Typer(...)`,
    consulted when an unhandled exception is rendered.
    """

    def __init__(
        self,
        *,
        pretty_exceptions_enable: bool = True,
        pretty_exceptions_show_locals: bool = True,
        pretty_exceptions_short: bool = True,
    ) -> None:
        # Store the settings exactly as provided.
        (
            self.pretty_exceptions_enable,
            self.pretty_exceptions_show_locals,
            self.pretty_exceptions_short,
        ) = (
            pretty_exceptions_enable,
            pretty_exceptions_show_locals,
            pretty_exceptions_short,
        )
+
+
class TyperPath(click.Path):
    """Variant of Click's `Path` type compatible with Typer's autocompletion."""

    def shell_complete(
        self, ctx: click.Context, param: click.Parameter, incomplete: str
    ) -> list[click.shell_completion.CompletionItem]:
        """Return no completions, suppressing Click's built-in path completion
        so that Typer's own completion system works properly from the
        commandline."""
        return []
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py
new file mode 100644
index 0000000000000000000000000000000000000000..b325b273c43860f79f598c11e971f60f2c95676f
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py
@@ -0,0 +1,1831 @@
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Annotated, Any, overload
+
+import click
+from annotated_doc import Doc
+
+from .models import ArgumentInfo, OptionInfo
+
+if TYPE_CHECKING: # pragma: no cover
+ import click.shell_completion
+
+
# Overload for Option created with custom type 'parser'.
# Type-checking only: `parser` and `click_type` are mutually exclusive, so each
# custom-type style gets its own overload; the real implementation follows.
@overload
def Option(
    # Parameter
    default: Any | None = ...,
    *param_decls: str,
    callback: Callable[..., Any] | None = None,
    metavar: str | None = None,
    expose_value: bool = True,
    is_eager: bool = False,
    envvar: str | list[str] | None = None,
    # Note that shell_complete is not fully supported and will be removed in future versions
    # TODO: Remove shell_complete in a future version (after 0.16.0)
    shell_complete: Callable[
        [click.Context, click.Parameter, str],
        list["click.shell_completion.CompletionItem"] | list[str],
    ]
    | None = None,
    autocompletion: Callable[..., Any] | None = None,
    default_factory: Callable[[], Any] | None = None,
    # Custom type
    parser: Callable[[str], Any] | None = None,
    # Option
    show_default: bool | str = True,
    prompt: bool | str = False,
    confirmation_prompt: bool = False,
    prompt_required: bool = True,
    hide_input: bool = False,
    # TODO: remove is_flag and flag_value in a future release
    is_flag: bool | None = None,
    flag_value: Any | None = None,
    count: bool = False,
    allow_from_autoenv: bool = True,
    help: str | None = None,
    hidden: bool = False,
    show_choices: bool = True,
    show_envvar: bool = True,
    # Choice
    case_sensitive: bool = True,
    # Numbers
    min: int | float | None = None,
    max: int | float | None = None,
    clamp: bool = False,
    # DateTime
    formats: list[str] | None = None,
    # File
    mode: str | None = None,
    encoding: str | None = None,
    errors: str | None = "strict",
    lazy: bool | None = None,
    atomic: bool = False,
    # Path
    exists: bool = False,
    file_okay: bool = True,
    dir_okay: bool = True,
    writable: bool = False,
    readable: bool = True,
    resolve_path: bool = False,
    allow_dash: bool = False,
    path_type: None | type[str] | type[bytes] = None,
    # Rich settings
    rich_help_panel: str | None = None,
) -> Any: ...
+
+
# Overload for Option created with custom type 'click_type'.
# Type-checking only: mirrors the 'parser' overload above but accepts a Click
# ParamType instead; the real implementation follows.
@overload
def Option(
    # Parameter
    default: Any | None = ...,
    *param_decls: str,
    callback: Callable[..., Any] | None = None,
    metavar: str | None = None,
    expose_value: bool = True,
    is_eager: bool = False,
    envvar: str | list[str] | None = None,
    # Note that shell_complete is not fully supported and will be removed in future versions
    # TODO: Remove shell_complete in a future version (after 0.16.0)
    shell_complete: Callable[
        [click.Context, click.Parameter, str],
        list["click.shell_completion.CompletionItem"] | list[str],
    ]
    | None = None,
    autocompletion: Callable[..., Any] | None = None,
    default_factory: Callable[[], Any] | None = None,
    # Custom type
    click_type: click.ParamType | None = None,
    # Option
    show_default: bool | str = True,
    prompt: bool | str = False,
    confirmation_prompt: bool = False,
    prompt_required: bool = True,
    hide_input: bool = False,
    # TODO: remove is_flag and flag_value in a future release
    is_flag: bool | None = None,
    flag_value: Any | None = None,
    count: bool = False,
    allow_from_autoenv: bool = True,
    help: str | None = None,
    hidden: bool = False,
    show_choices: bool = True,
    show_envvar: bool = True,
    # Choice
    case_sensitive: bool = True,
    # Numbers
    min: int | float | None = None,
    max: int | float | None = None,
    clamp: bool = False,
    # DateTime
    formats: list[str] | None = None,
    # File
    mode: str | None = None,
    encoding: str | None = None,
    errors: str | None = "strict",
    lazy: bool | None = None,
    atomic: bool = False,
    # Path
    exists: bool = False,
    file_okay: bool = True,
    dir_okay: bool = True,
    writable: bool = False,
    readable: bool = True,
    resolve_path: bool = False,
    allow_dash: bool = False,
    path_type: None | type[str] | type[bytes] = None,
    # Rich settings
    rich_help_panel: str | None = None,
) -> Any: ...
+
+
+def Option(
+ # Parameter
+ default: Annotated[
+ Any | None,
+ Doc(
+ """
+ Usually, [CLI options](https://typer.tiangolo.com/tutorial/options/) are optional and have a default value, passed on like this:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(network: str = typer.Option("CNN")):
+ print(f"Training neural network of type: {network}")
+ ```
+
+ Note that this usage is deprecated, and we recommend to use `Annotated` instead:
+ ```
+ @app.command()
+ def main(network: Annotated[str, typer.Option()] = "CNN"):
+ print(f"Hello {name}!")
+ ```
+
+ You can also use `...` ([Ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis)) as the "default" value to clarify that this is a required CLI option.
+ """
+ ),
+ ] = ...,
+ *param_decls: Annotated[
+ str,
+ Doc(
+ """
+ Positional argument that defines how users can call this option on the command line. This may be one or multiple aliases, all strings.
+ If not defined, Typer will automatically use the function parameter as default name.
+ See [the tutorial about CLI Option Names](https://typer.tiangolo.com/tutorial/options/name/) for more details.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(user_name: Annotated[str, typer.Option("--user", "-u", "-x")]):
+ print(f"Hello {user_name}")
+ ```
+ """
+ ),
+ ],
+ callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Add a callback to this CLI Option, to execute additional logic after its value was received from the terminal.
+ See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/options/callback-and-context/) for more details.
+
+ **Example**
+
+ ```python
+ def name_callback(value: str):
+ if value != "Deadpool":
+ raise typer.BadParameter("Only Deadpool is allowed")
+ return value
+
+ @app.command()
+ def main(name: Annotated[str, typer.Option(callback=name_callback)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ Customize the name displayed in the [help text](https://typer.tiangolo.com/tutorial/options/help/) to represent this CLI option.
+ Note that this doesn't influence the way the option must be called.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(user: Annotated[str, typer.Option(metavar="User name")]):
+ print(f"Hello {user}")
+ ```
+ """
+ ),
+ ] = None,
+ expose_value: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ If this is `True` then the value is passed onwards to the command callback and stored on the context, otherwise it’s skipped.
+ """
+ ),
+ ] = True,
+ is_eager: Annotated[
+ bool,
+ Doc(
+ """
+ Mark a CLI Option to be "eager", ensuring it gets processed before other CLI parameters. This could be relevant when there are other parameters with callbacks that could exit the program early.
+ For more information and an extended example, see the documentation [here](https://typer.tiangolo.com/tutorial/options/version/#fix-with-is_eager).
+ """
+ ),
+ ] = False,
+ envvar: Annotated[
+ str | list[str] | None,
+ Doc(
+ """
+ Configure a CLI Option to read its value from an environment variable if it is not provided in the command line.
+ For more information, see the [documentation on Environment Variables](https://typer.tiangolo.com/tutorial/arguments/envvar/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(user: Annotated[str, typer.Option(envvar="ME")]):
+ print(f"Hello {user}")
+ ```
+ """
+ ),
+ ] = None,
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Annotated[
+ Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+ It is however not fully functional, and will likely be removed in future versions.
+ """
+ ),
+ ] = None,
+ autocompletion: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Provide a custom function that helps to autocomplete the values of this CLI Option.
+ See [the tutorial on parameter autocompletion](https://typer.tiangolo.com/tutorial/options-autocompletion) for more details.
+
+ **Example**
+
+ ```python
+ def complete():
+ return ["Me", "Myself", "I"]
+
+ @app.command()
+ def main(name: Annotated[str, typer.Option(autocompletion=complete)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ default_factory: Annotated[
+ Callable[[], Any] | None,
+ Doc(
+ """
+ Provide a custom function that dynamically generates a [default](https://typer.tiangolo.com/tutorial/arguments/default) for this CLI Option.
+
+ **Example**
+
+ ```python
+ def get_name():
+ return random.choice(["Me", "Myself", "I"])
+
+ @app.command()
+ def main(name: Annotated[str, typer.Option(default_factory=get_name)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ # Custom type
+ parser: Annotated[
+ Callable[[str], Any] | None,
+ Doc(
+ """
+ Use your own custom types in Typer applications by defining a `parser` function that parses input into your own types:
+
+ **Example**
+
+ ```python
+ class CustomClass:
+ def __init__(self, value: str):
+ self.value = value
+
+ def __str__(self):
+ return f""
+
+ def my_parser(value: str):
+ return CustomClass(value * 2)
+
+ @app.command()
+ def main(opt: Annotated[CustomClass, typer.Option(parser=my_parser)] = "Foo"):
+ print(f"--opt is {opt}")
+ ```
+ """
+ ),
+ ] = None,
+ click_type: Annotated[
+ click.ParamType | None,
+ Doc(
+ """
+ Define this parameter to use a [custom Click type](https://click.palletsprojects.com/en/stable/parameters/#implementing-custom-types) in your Typer applications.
+
+ **Example**
+
+ ```python
+ class MyClass:
+ def __init__(self, value: str):
+ self.value = value
+
+ def __str__(self):
+ return f""
+
+ class MyParser(click.ParamType):
+ name = "MyClass"
+
+ def convert(self, value, param, ctx):
+ return MyClass(value * 3)
+
+ @app.command()
+ def main(opt: Annotated[MyClass, typer.Option(click_type=MyParser())] = "Foo"):
+ print(f"--opt is {opt}")
+ ```
+ """
+ ),
+ ] = None,
+ # Option
+ show_default: Annotated[
+ bool | str,
+ Doc(
+ """
+ When set to `False`, don't show the default value of this CLI Option in the [help text](https://typer.tiangolo.com/tutorial/options/help/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Option(show_default=False)] = "Rick"):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = True,
+ prompt: Annotated[
+ bool | str,
+ Doc(
+ """
+ When set to `True`, a prompt will appear to ask for the value of this CLI Option if it was not provided:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: str, lastname: Annotated[str, typer.Option(prompt=True)]):
+ print(f"Hello {name} {lastname}")
+ ```
+ """
+ ),
+ ] = False,
+ confirmation_prompt: Annotated[
+ bool,
+ Doc(
+ """
+ When set to `True`, a user will need to type a prompted value twice (may be useful for passwords etc.).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(project: Annotated[str, typer.Option(prompt=True, confirmation_prompt=True)]):
+ print(f"Deleting project {project}")
+ ```
+ """
+ ),
+ ] = False,
+ prompt_required: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ If this is `False` then a prompt is only shown if the option's flag is given without a value.
+ """
+ ),
+ ] = True,
+ hide_input: Annotated[
+ bool,
+ Doc(
+ """
+ When you've configured a prompt, for instance for [querying a password](https://typer.tiangolo.com/tutorial/options/password/),
+ don't show anything on the screen while the user is typing the value.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def login(
+ name: str,
+ password: Annotated[str, typer.Option(prompt=True, hide_input=True)],
+ ):
+ print(f"Hello {name}. Doing something very secure with password.")
+ ```
+ """
+ ),
+ ] = False,
+ # TODO: remove is_flag and flag_value in a future release
+ is_flag: Annotated[
+ bool | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+ It is however not fully functional, and will likely be removed in future versions.
+ """
+ ),
+ ] = None,
+ flag_value: Annotated[
+ Any | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+ It is however not fully functional, and will likely be removed in future versions.
+ """
+ ),
+ ] = None,
+ count: Annotated[
+ bool,
+ Doc(
+ """
+ Make a CLI Option work as a [counter](https://typer.tiangolo.com/tutorial/parameter-types/number/#counter-cli-options).
+ The CLI option will have the `int` value representing the number of times the option was used on the command line.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(verbose: Annotated[int, typer.Option("--verbose", "-v", count=True)] = 0):
+ print(f"Verbose level is {verbose}")
+ ```
+ """
+ ),
+ ] = False,
+ allow_from_autoenv: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ If this is enabled then the value of this parameter will be pulled from an environment variable in case a prefix is defined on the context.
+ """
+ ),
+ ] = True,
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for this CLI Option.
+ See [the tutorial about CLI Options with help](https://typer.tiangolo.com/tutorial/options/help/) for more dedails.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def greet(name: Annotated[str, typer.Option(help="Person to greet")] = "Deadpool"):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this CLI Option from [help outputs](https://typer.tiangolo.com/tutorial/options/help). `False` by default.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def greet(name: Annotated[str, typer.Option(hidden=True)] = "Deadpool"):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = False,
+ show_choices: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ When set to `False`, this suppresses choices from being displayed inline when `prompt` is used.
+ """
+ ),
+ ] = True,
+ show_envvar: Annotated[
+ bool,
+ Doc(
+ """
+ When an ["envvar"](https://typer.tiangolo.com/tutorial/arguments/envvar) is defined, prevent it from showing up in the help text:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(user: Annotated[str, typer.Option(envvar="ME", show_envvar=False)]):
+ print(f"Hello {user}")
+ ```
+ """
+ ),
+ ] = True,
+ # Choice
+ case_sensitive: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Option representing an [Enum (choice)](https://typer.tiangolo.com/tutorial/parameter-types/enum),
+ you can allow case-insensitive matching with this parameter:
+
+ **Example**
+
+ ```python
+ from enum import Enum
+
+ class NeuralNetwork(str, Enum):
+ simple = "simple"
+ conv = "conv"
+ lstm = "lstm"
+
+ @app.command()
+ def main(
+ network: Annotated[NeuralNetwork, typer.Option(case_sensitive=False)]):
+ print(f"Training neural network of type: {network.value}")
+ ```
+
+ With this setting, "LSTM" or "lstm" will both be valid values that will be resolved to `NeuralNetwork.lstm`.
+ """
+ ),
+ ] = True,
+ # Numbers
+ min: Annotated[
+ int | float | None,
+ Doc(
+ """
+ For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`),
+ you can define numeric validations with `min` and `max` values:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Option(min=1, max=1000)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid.
+ """
+ ),
+ ] = None,
+ max: Annotated[
+ int | float | None,
+ Doc(
+ """
+ For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`),
+ you can define numeric validations with `min` and `max` values:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Option(min=1, max=1000)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid.
+ """
+ ),
+ ] = None,
+ clamp: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) and that is bounded by using `min` and/or `max`,
+ you can opt to use the closest minimum or maximum value instead of raising an error when the value is out of bounds. This is done by setting `clamp` to `True`.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Option(min=1, max=1000, clamp=True)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input 3420 for `user_id`, this will internally be converted to `1000`.
+ """
+ ),
+ ] = False,
+ # DateTime
+ formats: Annotated[
+ list[str] | None,
+ Doc(
+ """
+ For a CLI Option representing a [DateTime object](https://typer.tiangolo.com/tutorial/parameter-types/datetime),
+ you can customize the formats that can be parsed automatically:
+
+ **Example**
+
+ ```python
+ from datetime import datetime
+
+ @app.command()
+ def main(
+ birthday: Annotated[
+ datetime,
+ typer.Option(
+ formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%m/%d/%Y"]
+ ),
+ ],
+ ):
+ print(f"Birthday defined at: {birthday}")
+ ```
+ """
+ ),
+ ] = None,
+ # File
+ mode: Annotated[
+ str | None,
+ Doc(
+ """
+ For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ you can customize the mode to open the file with. If unset, Typer will set a [sensible value by default](https://typer.tiangolo.com/tutorial/parameter-types/file/#advanced-mode).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Option(mode="a")]):
+ config.write("This is a single line\\n")
+ print("Config line written")
+ ```
+ """
+ ),
+ ] = None,
+ encoding: Annotated[
+ str | None,
+ Doc(
+ """
+ Customize the encoding of this CLI Option represented by a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Option(encoding="utf-8")]):
+ config.write("All the text gets written\\n")
+ ```
+ """
+ ),
+ ] = None,
+ errors: Annotated[
+ str | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ The error handling mode.
+ """
+ ),
+ ] = "strict",
+ lazy: Annotated[
+ bool | None,
+ Doc(
+ """
+ For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ by default the file will not be created until you actually start writing to it.
+ You can change this behaviour by setting this parameter.
+ By default, it's set to `True` for writing and to `False` for reading.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Option(mode="a", lazy=False)]):
+ config.write("This is a single line\\n")
+ print("Config line written")
+ ```
+ """
+ ),
+ ] = None,
+ atomic: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ you can ensure that all write instructions first go into a temporal file, and are only moved to the final destination after completing
+ by setting `atomic` to `True`. This can be useful for files with potential concurrent access.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Option(mode="a", atomic=True)]):
+ config.write("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ # Path
+ exists: Annotated[
+ bool,
+ Doc(
+ """
+ When set to `True` for a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/),
+ additional validation is performed to check that the file or directory exists. If not, the value will be invalid.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Option(exists=True)]):
+ text = config.read_text()
+ print(f"Config file contents: {text}")
+ ```
+ """
+ ),
+ ] = False,
+ file_okay: Annotated[
+ bool,
+ Doc(
+ """
+ Determine whether or not a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ is allowed to refer to a file. When this is set to `False`, the application will raise a validation error when a path to a file is given.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Option(exists=True, file_okay=False)]):
+ print(f"Directory listing: {[x.name for x in config.iterdir()]}")
+ ```
+ """
+ ),
+ ] = True,
+ dir_okay: Annotated[
+ bool,
+ Doc(
+ """
+ Determine whether or not a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ is allowed to refer to a directory. When this is set to `False`, the application will raise a validation error when a path to a directory is given.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(exists=True, dir_okay=False)]):
+ text = config.read_text()
+ print(f"Config file contents: {text}")
+ ```
+ """
+ ),
+ ] = True,
+ writable: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to perform a writable check for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Option(writable=True)]):
+ config.write_text("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ readable: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to perform a readable check for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Option(readable=True)]):
+ config.read_text("All the text")
+ ```
+ """
+ ),
+ ] = True,
+ resolve_path: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to fully resolve the path of this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/),
+ meaning that the path becomes absolute and symlinks are resolved.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Option(resolve_path=True)]):
+ config.read_text("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ allow_dash: Annotated[
+ bool,
+ Doc(
+ """
+ When set to `True`, a single dash for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ would be a valid value, indicating standard streams. This is a more advanced use-case.
+ """
+ ),
+ ] = False,
+ path_type: Annotated[
+ None | type[str] | type[bytes],
+ Doc(
+ """
+ A string type that will be used to represent this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+ The default is `None` which means the return value will be either bytes or unicode, depending on what makes most sense given the input data.
+ This is a more advanced use-case.
+ """
+ ),
+ ] = None,
+ # Rich settings
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name where you want this CLI Option to be shown in the [help text](https://typer.tiangolo.com/tutorial/arguments/help).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ name: Annotated[str, typer.Argument(help="Who to greet")],
+ age: Annotated[str, typer.Option(help="Their age", rich_help_panel="Data")],
+ ):
+ print(f"Hello {name} of age {age}")
+ ```
+ """
+ ),
+ ] = None,
+) -> Any:
+ """
+ A [CLI Option](https://typer.tiangolo.com/tutorial/options) is a parameter to your command line application that is called with a single or double dash, something like `--verbose` or `-v`.
+
+ Often, CLI Options are optional, meaning that users can omit them from the command. However, you can set them to be required by using `Annotated`
+ and omitting a default value.
+
+ ## Example
+
+ ```python
+ @app.command()
+ def register(
+ user: Annotated[str, typer.Argument()],
+ age: Annotated[int, typer.Option(min=18)],
+ ):
+ print(f"User is {user}")
+ print(f"--age is {age}")
+ ```
+
+ Note how in this example, `--age` is a required CLI Option.
+ """
+ return OptionInfo(
+ # Parameter
+ default=default,
+ param_decls=param_decls,
+ callback=callback,
+ metavar=metavar,
+ expose_value=expose_value,
+ is_eager=is_eager,
+ envvar=envvar,
+ shell_complete=shell_complete,
+ autocompletion=autocompletion,
+ default_factory=default_factory,
+ # Custom type
+ parser=parser,
+ click_type=click_type,
+ # Option
+ show_default=show_default,
+ prompt=prompt,
+ confirmation_prompt=confirmation_prompt,
+ prompt_required=prompt_required,
+ hide_input=hide_input,
+ is_flag=is_flag,
+ flag_value=flag_value,
+ count=count,
+ allow_from_autoenv=allow_from_autoenv,
+ help=help,
+ hidden=hidden,
+ show_choices=show_choices,
+ show_envvar=show_envvar,
+ # Choice
+ case_sensitive=case_sensitive,
+ # Numbers
+ min=min,
+ max=max,
+ clamp=clamp,
+ # DateTime
+ formats=formats,
+ # File
+ mode=mode,
+ encoding=encoding,
+ errors=errors,
+ lazy=lazy,
+ atomic=atomic,
+ # Path
+ exists=exists,
+ file_okay=file_okay,
+ dir_okay=dir_okay,
+ writable=writable,
+ readable=readable,
+ resolve_path=resolve_path,
+ allow_dash=allow_dash,
+ path_type=path_type,
+ # Rich settings
+ rich_help_panel=rich_help_panel,
+ )
+
+
+# Overload for Argument created with custom type 'parser'
+@overload
+def Argument(
+ # Parameter
+ default: Any | None = ...,
+ *,
+ callback: Callable[..., Any] | None = None,
+ metavar: str | None = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: str | list[str] | None = None,
+ # Note that shell_complete is not fully supported and will be removed in future versions
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None = None,
+ autocompletion: Callable[..., Any] | None = None,
+ default_factory: Callable[[], Any] | None = None,
+ # Custom type
+ parser: Callable[[str], Any] | None = None,
+ # TyperArgument
+ show_default: bool | str = True,
+ show_choices: bool = True,
+ show_envvar: bool = True,
+ help: str | None = None,
+ hidden: bool = False,
+ # Choice
+ case_sensitive: bool = True,
+ # Numbers
+ min: int | float | None = None,
+ max: int | float | None = None,
+ clamp: bool = False,
+ # DateTime
+ formats: list[str] | None = None,
+ # File
+ mode: str | None = None,
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ lazy: bool | None = None,
+ atomic: bool = False,
+ # Path
+ exists: bool = False,
+ file_okay: bool = True,
+ dir_okay: bool = True,
+ writable: bool = False,
+ readable: bool = True,
+ resolve_path: bool = False,
+ allow_dash: bool = False,
+ path_type: None | type[str] | type[bytes] = None,
+ # Rich settings
+ rich_help_panel: str | None = None,
+) -> Any: ...
+
+
+# Overload for Argument created with custom type 'click_type'
+@overload
+def Argument(
+ # Parameter
+ default: Any | None = ...,
+ *,
+ callback: Callable[..., Any] | None = None,
+ metavar: str | None = None,
+ expose_value: bool = True,
+ is_eager: bool = False,
+ envvar: str | list[str] | None = None,
+ # Note that shell_complete is not fully supported and will be removed in future versions
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None = None,
+ autocompletion: Callable[..., Any] | None = None,
+ default_factory: Callable[[], Any] | None = None,
+ # Custom type
+ click_type: click.ParamType | None = None,
+ # TyperArgument
+ show_default: bool | str = True,
+ show_choices: bool = True,
+ show_envvar: bool = True,
+ help: str | None = None,
+ hidden: bool = False,
+ # Choice
+ case_sensitive: bool = True,
+ # Numbers
+ min: int | float | None = None,
+ max: int | float | None = None,
+ clamp: bool = False,
+ # DateTime
+ formats: list[str] | None = None,
+ # File
+ mode: str | None = None,
+ encoding: str | None = None,
+ errors: str | None = "strict",
+ lazy: bool | None = None,
+ atomic: bool = False,
+ # Path
+ exists: bool = False,
+ file_okay: bool = True,
+ dir_okay: bool = True,
+ writable: bool = False,
+ readable: bool = True,
+ resolve_path: bool = False,
+ allow_dash: bool = False,
+ path_type: None | type[str] | type[bytes] = None,
+ # Rich settings
+ rich_help_panel: str | None = None,
+) -> Any: ...
+
+
+def Argument(
+ # Parameter
+ default: Annotated[
+ Any | None,
+ Doc(
+ """
+ By default, CLI arguments are required. However, by giving them a default value they become [optional](https://typer.tiangolo.com/tutorial/arguments/optional):
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: str = typer.Argument("World")):
+ print(f"Hello {name}!")
+ ```
+
+ Note that this usage is deprecated, and we recommend to use `Annotated` instead:
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument()] = "World"):
+ print(f"Hello {name}!")
+ ```
+ """
+ ),
+ ] = ...,
+ *,
+ callback: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Add a callback to this CLI Argument, to execute additional logic with the value received from the terminal.
+ See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/options/callback-and-context/) for more details.
+
+ **Example**
+
+ ```python
+ def name_callback(value: str):
+ if value != "Deadpool":
+ raise typer.BadParameter("Only Deadpool is allowed")
+ return value
+
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(callback=name_callback)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ metavar: Annotated[
+ str | None,
+ Doc(
+ """
+ Customize the name displayed in the help text to represent this CLI Argument.
+ By default, it will be the same name you declared, in uppercase.
+ See [the tutorial about CLI Arguments with Help](https://typer.tiangolo.com/tutorial/arguments/help/#custom-help-name-metavar) for more details.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(metavar="✨username✨")]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ expose_value: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ If this is `True` then the value is passed onwards to the command callback and stored on the context, otherwise it’s skipped.
+ """
+ ),
+ ] = True,
+ is_eager: Annotated[
+ bool,
+ Doc(
+ """
+ Set an argument to "eager" to ensure it gets processed before other CLI parameters. This could be relevant when there are other parameters with callbacks that could exit the program early.
+ For more information and an extended example, see the documentation [here](https://typer.tiangolo.com/tutorial/options/version/#fix-with-is_eager).
+ """
+ ),
+ ] = False,
+ envvar: Annotated[
+ str | list[str] | None,
+ Doc(
+ """
+ Configure an argument to read a value from an environment variable if it is not provided in the command line as a CLI argument.
+ For more information, see the [documentation on Environment Variables](https://typer.tiangolo.com/tutorial/arguments/envvar/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(envvar="ME")]):
+ print(f"Hello Mr. {name}")
+ ```
+ """
+ ),
+ ] = None,
+ # TODO: Remove shell_complete in a future version (after 0.16.0)
+ shell_complete: Annotated[
+ Callable[
+ [click.Context, click.Parameter, str],
+ list["click.shell_completion.CompletionItem"] | list[str],
+ ]
+ | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+ It is however not fully functional, and will likely be removed in future versions.
+ """
+ ),
+ ] = None,
+ autocompletion: Annotated[
+ Callable[..., Any] | None,
+ Doc(
+ """
+ Provide a custom function that helps to autocomplete the values of this CLI Argument.
+ See [the tutorial on parameter autocompletion](https://typer.tiangolo.com/tutorial/options-autocompletion) for more details.
+
+ **Example**
+
+ ```python
+ def complete():
+ return ["Me", "Myself", "I"]
+
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(autocompletion=complete)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ default_factory: Annotated[
+ Callable[[], Any] | None,
+ Doc(
+ """
+ Provide a custom function that dynamically generates a [default](https://typer.tiangolo.com/tutorial/arguments/default) for this CLI Argument.
+
+ **Example**
+
+ ```python
+ def get_name():
+ return random.choice(["Me", "Myself", "I"])
+
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(default_factory=get_name)]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ # Custom type
+ parser: Annotated[
+ Callable[[str], Any] | None,
+ Doc(
+ """
+ Use your own custom types in Typer applications by defining a `parser` function that parses input into your own types:
+
+ **Example**
+
+ ```python
+ class CustomClass:
+ def __init__(self, value: str):
+ self.value = value
+
+ def __str__(self):
+ return f""
+
+ def my_parser(value: str):
+ return CustomClass(value * 2)
+
+ @app.command()
+ def main(arg: Annotated[CustomClass, typer.Argument(parser=my_parser):
+ print(f"arg is {arg}")
+ ```
+ """
+ ),
+ ] = None,
+ click_type: Annotated[
+ click.ParamType | None,
+ Doc(
+ """
+ Define this parameter to use a [custom Click type](https://click.palletsprojects.com/en/stable/parameters/#implementing-custom-types) in your Typer applications.
+
+ **Example**
+
+ ```python
+ class MyClass:
+ def __init__(self, value: str):
+ self.value = value
+
+ def __str__(self):
+ return f""
+
+ class MyParser(click.ParamType):
+ name = "MyClass"
+
+ def convert(self, value, param, ctx):
+ return MyClass(value * 3)
+
+ @app.command()
+ def main(arg: Annotated[MyClass, typer.Argument(click_type=MyParser())]):
+ print(f"arg is {arg}")
+ ```
+ """
+ ),
+ ] = None,
+ # TyperArgument
+ show_default: Annotated[
+ bool | str,
+ Doc(
+ """
+ When set to `False`, don't show the default value of this CLI Argument in the [help text](https://typer.tiangolo.com/tutorial/arguments/help/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(show_default=False)] = "Rick"):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = True,
+ show_choices: Annotated[
+ bool,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ When set to `False`, this suppresses choices from being displayed inline when `prompt` is used.
+ """
+ ),
+ ] = True,
+ show_envvar: Annotated[
+ bool,
+ Doc(
+ """
+ When an ["envvar"](https://typer.tiangolo.com/tutorial/arguments/envvar) is defined, prevent it from showing up in the help text:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(envvar="ME", show_envvar=False)]):
+ print(f"Hello Mr. {name}")
+ ```
+ """
+ ),
+ ] = True,
+ help: Annotated[
+ str | None,
+ Doc(
+ """
+ Help text for this CLI Argument.
+ See [the tutorial about CLI Arguments with help](https://typer.tiangolo.com/tutorial/arguments/help/) for more dedails.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def greet(name: Annotated[str, typer.Argument(help="Person to greet")]):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = None,
+ hidden: Annotated[
+ bool,
+ Doc(
+ """
+ Hide this CLI Argument from [help outputs](https://typer.tiangolo.com/tutorial/arguments/help). `False` by default.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument(hidden=True)] = "World"):
+ print(f"Hello {name}")
+ ```
+ """
+ ),
+ ] = False,
+ # Choice
+ case_sensitive: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Argument representing an [Enum (choice)](https://typer.tiangolo.com/tutorial/parameter-types/enum),
+ you can allow case-insensitive matching with this parameter:
+
+ **Example**
+
+ ```python
+ from enum import Enum
+
+ class NeuralNetwork(str, Enum):
+ simple = "simple"
+ conv = "conv"
+ lstm = "lstm"
+
+ @app.command()
+ def main(
+ network: Annotated[NeuralNetwork, typer.Argument(case_sensitive=False)]):
+ print(f"Training neural network of type: {network.value}")
+ ```
+
+ With this setting, "LSTM" or "lstm" will both be valid values that will be resolved to `NeuralNetwork.lstm`.
+ """
+ ),
+ ] = True,
+ # Numbers
+ min: Annotated[
+ int | float | None,
+ Doc(
+ """
+ For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`),
+ you can define numeric validations with `min` and `max` values:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Argument(min=1, max=1000)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid.
+ """
+ ),
+ ] = None,
+ max: Annotated[
+ int | float | None,
+ Doc(
+ """
+ For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`),
+ you can define numeric validations with `min` and `max` values:
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Argument(min=1, max=1000)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid.
+ """
+ ),
+ ] = None,
+ clamp: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) and that is bounded by using `min` and/or `max`,
+ you can opt to use the closest minimum or maximum value instead of raising an error. This is done by setting `clamp` to `True`.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ user: Annotated[str, typer.Argument()],
+ user_id: Annotated[int, typer.Argument(min=1, max=1000, clamp=True)],
+ ):
+ print(f"ID for {user} is {user_id}")
+ ```
+
+ If the user attempts to input 3420 for `user_id`, this will internally be converted to `1000`.
+ """
+ ),
+ ] = False,
+ # DateTime
+ formats: Annotated[
+ list[str] | None,
+ Doc(
+ """
+ For a CLI Argument representing a [DateTime object](https://typer.tiangolo.com/tutorial/parameter-types/datetime),
+ you can customize the formats that can be parsed automatically:
+
+ **Example**
+
+ ```python
+ from datetime import datetime
+
+ @app.command()
+ def main(
+ birthday: Annotated[
+ datetime,
+ typer.Argument(
+ formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%m/%d/%Y"]
+ ),
+ ],
+ ):
+ print(f"Birthday defined at: {birthday}")
+ ```
+ """
+ ),
+ ] = None,
+ # File
+ mode: Annotated[
+ str | None,
+ Doc(
+ """
+ For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ you can customize the mode to open the file with. If unset, Typer will set a [sensible value by default](https://typer.tiangolo.com/tutorial/parameter-types/file/#advanced-mode).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Argument(mode="a")]):
+ config.write("This is a single line\\n")
+ print("Config line written")
+ ```
+ """
+ ),
+ ] = None,
+ encoding: Annotated[
+ str | None,
+ Doc(
+ """
+ Customize the encoding of this CLI Argument represented by a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Argument(encoding="utf-8")]):
+ config.write("All the text gets written\\n")
+ ```
+ """
+ ),
+ ] = None,
+ errors: Annotated[
+ str | None,
+ Doc(
+ """
+ **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility.
+
+ ---
+
+ The error handling mode.
+ """
+ ),
+ ] = "strict",
+ lazy: Annotated[
+ bool | None,
+ Doc(
+ """
+ For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ by default the file will not be created until you actually start writing to it.
+ You can change this behaviour by setting this parameter.
+ By default, it's set to `True` for writing and to `False` for reading.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Argument(mode="a", lazy=False)]):
+ config.write("This is a single line\\n")
+ print("Config line written")
+ ```
+ """
+ ),
+ ] = None,
+ atomic: Annotated[
+ bool,
+ Doc(
+ """
+ For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/),
+ you can ensure that all write instructions first go into a temporal file, and are only moved to the final destination after completing
+ by setting `atomic` to `True`. This can be useful for files with potential concurrent access.
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(config: Annotated[typer.FileText, typer.Argument(mode="a", atomic=True)]):
+ config.write("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ # Path
+ exists: Annotated[
+ bool,
+ Doc(
+ """
+ When set to `True` for a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/),
+ additional validation is performed to check that the file or directory exists. If not, the value will be invalid.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(exists=True)]):
+ text = config.read_text()
+ print(f"Config file contents: {text}")
+ ```
+ """
+ ),
+ ] = False,
+ file_okay: Annotated[
+ bool,
+ Doc(
+ """
+ Determine whether or not a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ is allowed to refer to a file. When this is set to `False`, the application will raise a validation error when a path to a file is given.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(exists=True, file_okay=False)]):
+ print(f"Directory listing: {[x.name for x in config.iterdir()]}")
+ ```
+ """
+ ),
+ ] = True,
+ dir_okay: Annotated[
+ bool,
+ Doc(
+ """
+ Determine whether or not a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ is allowed to refer to a directory. When this is set to `False`, the application will raise a validation error when a path to a directory is given.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(exists=True, dir_okay=False)]):
+ text = config.read_text()
+ print(f"Config file contents: {text}")
+ ```
+ """
+ ),
+ ] = True,
+ writable: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to perform a writable check for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(writable=True)]):
+ config.write_text("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ readable: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to perform a readable check for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(readable=True)]):
+ config.read_text("All the text")
+ ```
+ """
+ ),
+ ] = True,
+ resolve_path: Annotated[
+ bool,
+ Doc(
+ """
+ Whether or not to fully resolve the path of this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/),
+ meaning that the path becomes absolute and symlinks are resolved.
+
+ **Example**
+
+ ```python
+ from pathlib import Path
+
+ @app.command()
+ def main(config: Annotated[Path, typer.Argument(resolve_path=True)]):
+ config.read_text("All the text")
+ ```
+ """
+ ),
+ ] = False,
+ allow_dash: Annotated[
+ bool,
+ Doc(
+ """
+ When set to `True`, a single dash for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/)
+ would be a valid value, indicating standard streams. This is a more advanced use-case.
+ """
+ ),
+ ] = False,
+ path_type: Annotated[
+ None | type[str] | type[bytes],
+ Doc(
+ """
+ A string type that will be used to represent this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/).
+ The default is `None` which means the return value will be either bytes or unicode, depending on what makes most sense given the input data.
+ This is a more advanced use-case.
+ """
+ ),
+ ] = None,
+ # Rich settings
+ rich_help_panel: Annotated[
+ str | None,
+ Doc(
+ """
+ Set the panel name where you want this CLI Argument to be shown in the [help text](https://typer.tiangolo.com/tutorial/arguments/help).
+
+ **Example**
+
+ ```python
+ @app.command()
+ def main(
+ name: Annotated[str, typer.Argument(help="Who to greet")],
+ age: Annotated[str, typer.Option(help="Their age", rich_help_panel="Data")],
+ ):
+ print(f"Hello {name} of age {age}")
+ ```
+ """
+ ),
+ ] = None,
+) -> Any:
+ """
+ A [CLI Argument](https://typer.tiangolo.com/tutorial/arguments) is a positional parameter to your command line application.
+
+ Often, CLI Arguments are required, meaning that users have to specify them. However, you can set them to be optional by defining a default value:
+
+ ## Example
+
+ ```python
+ @app.command()
+ def main(name: Annotated[str, typer.Argument()] = "World"):
+ print(f"Hello {name}!")
+ ```
+
+ Note how in this example, if `name` is not specified on the command line, the application will still execute normally and print "Hello World!".
+ """
+ return ArgumentInfo(
+ # Parameter
+ default=default,
+ # Arguments can only have one param declaration
+ # it will be generated from the param name
+ param_decls=None,
+ callback=callback,
+ metavar=metavar,
+ expose_value=expose_value,
+ is_eager=is_eager,
+ envvar=envvar,
+ shell_complete=shell_complete,
+ autocompletion=autocompletion,
+ default_factory=default_factory,
+ # Custom type
+ parser=parser,
+ click_type=click_type,
+ # TyperArgument
+ show_default=show_default,
+ show_choices=show_choices,
+ show_envvar=show_envvar,
+ help=help,
+ hidden=hidden,
+ # Choice
+ case_sensitive=case_sensitive,
+ # Numbers
+ min=min,
+ max=max,
+ clamp=clamp,
+ # DateTime
+ formats=formats,
+ # File
+ mode=mode,
+ encoding=encoding,
+ errors=errors,
+ lazy=lazy,
+ atomic=atomic,
+ # Path
+ exists=exists,
+ file_okay=file_okay,
+ dir_okay=dir_okay,
+ writable=writable,
+ readable=readable,
+ resolve_path=resolve_path,
+ allow_dash=allow_dash,
+ path_type=path_type,
+ # Rich settings
+ rich_help_panel=rich_help_panel,
+ )
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/py.typed b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d85043238c3d47a89cd6d5127fce198c0564f0b7
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py
@@ -0,0 +1,753 @@
+# Extracted and modified from https://github.com/ewels/rich-click
+
+import inspect
+import io
+from collections import defaultdict
+from collections.abc import Iterable
+from gettext import gettext as _
+from os import getenv
+from typing import Any, Literal
+
+import click
+from rich import box
+from rich.align import Align
+from rich.columns import Columns
+from rich.console import Console, RenderableType, group
+from rich.emoji import Emoji
+from rich.highlighter import RegexHighlighter
+from rich.markdown import Markdown
+from rich.markup import escape
+from rich.padding import Padding
+from rich.panel import Panel
+from rich.table import Table
+from rich.text import Text
+from rich.theme import Theme
+from rich.traceback import Traceback
+from typer.models import DeveloperExceptionConfig
+
+# Default styles
+STYLE_OPTION = "bold cyan"
+STYLE_SWITCH = "bold green"
+STYLE_NEGATIVE_OPTION = "bold magenta"
+STYLE_NEGATIVE_SWITCH = "bold red"
+STYLE_METAVAR = "bold yellow"
+STYLE_METAVAR_SEPARATOR = "dim"
+STYLE_USAGE = "yellow"
+STYLE_USAGE_COMMAND = "bold"
+STYLE_DEPRECATED = "red"
+STYLE_DEPRECATED_COMMAND = "dim"
+STYLE_HELPTEXT_FIRST_LINE = ""
+STYLE_HELPTEXT = "dim"
+STYLE_OPTION_HELP = ""
+STYLE_OPTION_DEFAULT = "dim"
+STYLE_OPTION_ENVVAR = "dim yellow"
+STYLE_REQUIRED_SHORT = "red"
+STYLE_REQUIRED_LONG = "dim red"
+STYLE_OPTIONS_PANEL_BORDER = "dim"
+ALIGN_OPTIONS_PANEL: Literal["left", "center", "right"] = "left"
+STYLE_OPTIONS_TABLE_SHOW_LINES = False
+STYLE_OPTIONS_TABLE_LEADING = 0
+STYLE_OPTIONS_TABLE_PAD_EDGE = False
+STYLE_OPTIONS_TABLE_PADDING = (0, 1)
+STYLE_OPTIONS_TABLE_BOX = ""
+STYLE_OPTIONS_TABLE_ROW_STYLES = None
+STYLE_OPTIONS_TABLE_BORDER_STYLE = None
+STYLE_COMMANDS_PANEL_BORDER = "dim"
+ALIGN_COMMANDS_PANEL: Literal["left", "center", "right"] = "left"
+STYLE_COMMANDS_TABLE_SHOW_LINES = False
+STYLE_COMMANDS_TABLE_LEADING = 0
+STYLE_COMMANDS_TABLE_PAD_EDGE = False
+STYLE_COMMANDS_TABLE_PADDING = (0, 1)
+STYLE_COMMANDS_TABLE_BOX = ""
+STYLE_COMMANDS_TABLE_ROW_STYLES = None
+STYLE_COMMANDS_TABLE_BORDER_STYLE = None
+STYLE_COMMANDS_TABLE_FIRST_COLUMN = "bold cyan"
+STYLE_ERRORS_PANEL_BORDER = "red"
+ALIGN_ERRORS_PANEL: Literal["left", "center", "right"] = "left"
+STYLE_ERRORS_SUGGESTION = "dim"
+STYLE_ABORTED = "red"
+_TERMINAL_WIDTH = getenv("TERMINAL_WIDTH")
+MAX_WIDTH = int(_TERMINAL_WIDTH) if _TERMINAL_WIDTH else None
+COLOR_SYSTEM: Literal["auto", "standard", "256", "truecolor", "windows"] | None = (
+ "auto" # Set to None to disable colors
+)
+_TYPER_FORCE_DISABLE_TERMINAL = getenv("_TYPER_FORCE_DISABLE_TERMINAL")
+FORCE_TERMINAL = (
+ True
+ if getenv("GITHUB_ACTIONS") or getenv("FORCE_COLOR") or getenv("PY_COLORS")
+ else None
+)
+if _TYPER_FORCE_DISABLE_TERMINAL:
+ FORCE_TERMINAL = False
+
+# Fixed strings
+DEPRECATED_STRING = _("(deprecated) ")
+DEFAULT_STRING = _("[default: {}]")
+ENVVAR_STRING = _("[env var: {}]")
+REQUIRED_SHORT_STRING = "*"
+REQUIRED_LONG_STRING = _("[required]")
+RANGE_STRING = " [{}]"
+ARGUMENTS_PANEL_TITLE = _("Arguments")
+OPTIONS_PANEL_TITLE = _("Options")
+COMMANDS_PANEL_TITLE = _("Commands")
+ERRORS_PANEL_TITLE = _("Error")
+ABORTED_TEXT = _("Aborted.")
+RICH_HELP = _("Try [blue]'{command_path} {help_option}'[/] for help.")
+
+MARKUP_MODE_MARKDOWN = "markdown"
+MARKUP_MODE_RICH = "rich"
+_RICH_HELP_PANEL_NAME = "rich_help_panel"
+ANSI_PREFIX = "\033["
+
+MarkupModeStrict = Literal["markdown", "rich"]
+
+
+# Rich regex highlighter
+class OptionHighlighter(RegexHighlighter):
+ """Highlights our special options."""
+
+ highlights = [
+ r"(^|\W)(?P\-\w+)(?![a-zA-Z0-9])",
+ r"(^|\W)(?P\-\-[\w\-]+)(?![a-zA-Z0-9])",
+ r"(?P\<[^\>]+\>)",
+ r"(?PUsage: )",
+ ]
+
+
+class NegativeOptionHighlighter(RegexHighlighter):
+ highlights = [
+ r"(^|\W)(?P\-\w+)(?![a-zA-Z0-9])",
+ r"(^|\W)(?P\-\-[\w\-]+)(?![a-zA-Z0-9])",
+ ]
+
+
+# Highlighter to make [ | ] and <> dim
+class MetavarHighlighter(RegexHighlighter):
+ highlights = [
+ r"^(?P(\[|<))",
+ r"(?P\|)",
+ r"(?P(\]|>))(\.\.\.)?$",
+ ]
+
+
+highlighter = OptionHighlighter()
+negative_highlighter = NegativeOptionHighlighter()
+metavar_highlighter = MetavarHighlighter()
+
+
+def _has_ansi_character(text: str) -> bool:
+ return ANSI_PREFIX in text
+
+
+def _get_rich_console(stderr: bool = False) -> Console:
+ return Console(
+ theme=Theme(
+ {
+ "option": STYLE_OPTION,
+ "switch": STYLE_SWITCH,
+ "negative_option": STYLE_NEGATIVE_OPTION,
+ "negative_switch": STYLE_NEGATIVE_SWITCH,
+ "metavar": STYLE_METAVAR,
+ "metavar_sep": STYLE_METAVAR_SEPARATOR,
+ "usage": STYLE_USAGE,
+ },
+ ),
+ highlighter=highlighter,
+ color_system=COLOR_SYSTEM,
+ force_terminal=FORCE_TERMINAL,
+ width=MAX_WIDTH,
+ stderr=stderr,
+ )
+
+
+def _make_rich_text(
+ *, text: str, style: str = "", markup_mode: MarkupModeStrict
+) -> Markdown | Text:
+ """Take a string, remove indentations, and return styled text.
+
+ If `markup_mode` is `"rich"`, the text is parsed for Rich markup strings.
+ If `markup_mode` is `"markdown"`, parse as Markdown.
+ """
+ # Remove indentations from input text
+ text = inspect.cleandoc(text)
+ if markup_mode == MARKUP_MODE_MARKDOWN:
+ text = Emoji.replace(text)
+ return Markdown(text, style=style)
+ else:
+ assert markup_mode == MARKUP_MODE_RICH
+ if _has_ansi_character(text):
+ return highlighter(Text.from_ansi(text, style=style))
+ else:
+ return highlighter(Text.from_markup(text, style=style))
+
+
+@group()
+def _get_help_text(
+    *,
+    obj: click.Command | click.Group,
+    markup_mode: MarkupModeStrict,
+) -> Iterable[Markdown | Text]:
+    """Build primary help text for a click command or group.
+
+    Returns the prose help text for a command or group, rendered either as a
+    Rich Text object or as Markdown.
+    If the command is marked as deprecated, the deprecated string will be prepended.
+
+    Yields, in order: the deprecation marker (if any), the first paragraph,
+    and then the remaining paragraphs styled as dim text.
+    """
+    # Prepend deprecated status
+    if obj.deprecated:
+        yield Text(DEPRECATED_STRING, style=STYLE_DEPRECATED)
+
+    # Fetch and dedent the help text
+    help_text = inspect.cleandoc(obj.help or "")
+
+    # Trim off anything that comes after \f on its own line
+    # (\f is click's marker for "truncate help output here").
+    help_text = help_text.partition("\f")[0]
+
+    # Get the first paragraph
+    first_line, *remaining_paragraphs = help_text.split("\n\n")
+
+    # Remove single linebreaks (a leading \b marks a click no-rewrap paragraph)
+    if markup_mode != MARKUP_MODE_MARKDOWN and not first_line.startswith("\b"):
+        first_line = first_line.replace("\n", " ")
+    yield _make_rich_text(
+        text=first_line.strip(),
+        style=STYLE_HELPTEXT_FIRST_LINE,
+        markup_mode=markup_mode,
+    )
+
+    # Get remaining lines, remove single line breaks and format as dim
+    if remaining_paragraphs:
+        # Add a newline inbetween the header and the remaining paragraphs
+        yield Text("")
+        # Join with double linebreaks for markdown and Rich markup
+        remaining_lines = "\n\n".join(remaining_paragraphs)
+
+        yield _make_rich_text(
+            text=remaining_lines,
+            style=STYLE_HELPTEXT,
+            markup_mode=markup_mode,
+        )
+
+
+def _get_parameter_help(
+    *,
+    param: click.Option | click.Argument | click.Parameter,
+    ctx: click.Context,
+    markup_mode: MarkupModeStrict,
+) -> Columns:
+    """Build primary help text for a click option or argument.
+
+    Returns the prose help text for an option or argument, rendered either
+    as a Rich Text object or as Markdown.
+    Additional elements are appended to show the default and required status if
+    applicable.
+    """
+    # import here to avoid cyclic imports
+    from .core import TyperArgument, TyperOption
+
+    items: list[Text | Markdown] = []
+
+    # Get the environment variable first
+
+    envvar = getattr(param, "envvar", None)
+    var_str = ""
+    # Fall back to click's auto_envvar naming scheme when no explicit envvar is set:
+    # https://github.com/pallets/click/blob/0aec1168ac591e159baf6f61026d6ae322c53aaf/src/click/core.py#L2720-L2726
+    if envvar is None:
+        if (
+            getattr(param, "allow_from_autoenv", None)
+            and getattr(ctx, "auto_envvar_prefix", None) is not None
+            and param.name is not None
+        ):
+            envvar = f"{ctx.auto_envvar_prefix}_{param.name.upper()}"
+    if envvar is not None:
+        # envvar may be a single name or a sequence of names
+        var_str = (
+            envvar if isinstance(envvar, str) else ", ".join(str(d) for d in envvar)
+        )
+
+    # Main help text
+    help_value: str | None = getattr(param, "help", None)
+    if help_value:
+        paragraphs = help_value.split("\n\n")
+        # Remove single linebreaks; a leading \b marks a click no-rewrap
+        # paragraph whose internal line breaks must be kept.
+        if markup_mode != MARKUP_MODE_MARKDOWN:
+            paragraphs = [
+                x.replace("\n", " ").strip()
+                if not x.startswith("\b")
+                else "{}\n".format(x.strip("\b\n"))
+                for x in paragraphs
+            ]
+        items.append(
+            _make_rich_text(
+                text="\n".join(paragraphs).strip(),
+                style=STYLE_OPTION_HELP,
+                markup_mode=markup_mode,
+            )
+        )
+
+    # Environment variable AFTER help text
+    if envvar and getattr(param, "show_envvar", None):
+        items.append(Text(ENVVAR_STRING.format(var_str), style=STYLE_OPTION_ENVVAR))
+
+    # Default value
+    # This uses Typer's specific param._get_default_string
+    if isinstance(param, (TyperOption, TyperArgument)):
+        default_value = param._extract_default_help_str(ctx=ctx)
+        # show_default may be a string to display instead of the actual value
+        show_default_is_str = isinstance(param.show_default, str)
+        if show_default_is_str or (
+            default_value is not None and (param.show_default or ctx.show_default)
+        ):
+            default_str = param._get_default_string(
+                ctx=ctx,
+                show_default_is_str=show_default_is_str,
+                default_value=default_value,
+            )
+            if default_str:
+                items.append(
+                    Text(
+                        DEFAULT_STRING.format(default_str),
+                        style=STYLE_OPTION_DEFAULT,
+                    )
+                )
+
+    # Required?
+    if param.required:
+        items.append(Text(REQUIRED_LONG_STRING, style=STYLE_REQUIRED_LONG))
+
+    # Use Columns - this allows us to group different renderable types
+    # (Text, Markdown) onto a single line.
+    return Columns(items)
+
+
+def _make_command_help(
+    *,
+    help_text: str,
+    markup_mode: MarkupModeStrict,
+) -> Text | Markdown:
+    """Build cli help text for a click group command.
+
+    That is, when calling help on groups with multiple subcommands
+    (not the main help text when calling the subcommand help).
+
+    Returns the first paragraph of help text for a command, rendered either as a
+    Rich Text object or as Markdown.
+    Ignores single newlines as paragraph markers, looks for double only.
+    """
+    paragraphs = inspect.cleandoc(help_text).split("\n\n")
+    # Remove single linebreaks
+    # NOTE(review): unlike _get_help_text, this compares against
+    # MARKUP_MODE_RICH (so rewrapping happens in markdown mode, not rich
+    # mode) -- looks deliberate but worth confirming against upstream.
+    if markup_mode != MARKUP_MODE_RICH and not paragraphs[0].startswith("\b"):
+        paragraphs[0] = paragraphs[0].replace("\n", " ")
+    elif paragraphs[0].startswith("\b"):
+        paragraphs[0] = paragraphs[0].replace("\b\n", "")
+    return _make_rich_text(
+        text=paragraphs[0].strip(),
+        style=STYLE_OPTION_HELP,
+        markup_mode=markup_mode,
+    )
+
+
+def _print_options_panel(
+    *,
+    name: str,
+    params: list[click.Option] | list[click.Argument],
+    ctx: click.Context,
+    markup_mode: MarkupModeStrict,
+    console: Console,
+) -> None:
+    """Render one help panel (a table of parameters inside a rich Panel).
+
+    Used for both the "Arguments" and "Options" panels as well as custom
+    `rich_help_panel` groups. Prints nothing when ``params`` is empty.
+    """
+    options_rows: list[list[RenderableType]] = []
+    required_rows: list[str | Text] = []
+    for param in params:
+        # Short and long form
+        opt_long_strs = []
+        opt_short_strs = []
+        secondary_opt_long_strs = []
+        secondary_opt_short_strs = []
+        for opt_str in param.opts:
+            if "--" in opt_str:
+                opt_long_strs.append(opt_str)
+            else:
+                opt_short_strs.append(opt_str)
+        for opt_str in param.secondary_opts:
+            if "--" in opt_str:
+                secondary_opt_long_strs.append(opt_str)
+            else:
+                secondary_opt_short_strs.append(opt_str)
+
+        # Column for a metavar, if we have one
+        metavar = Text(style=STYLE_METAVAR, overflow="fold")
+        metavar_str = param.make_metavar(ctx=ctx)
+        # Do it ourselves if this is a positional argument
+        # (click defaults the metavar to the upper-cased param name).
+        if (
+            isinstance(param, click.Argument)
+            and param.name
+            and metavar_str == param.name.upper()
+        ):
+            metavar_str = param.type.name.upper()
+
+        # Don't display a metavar for plain boolean flags
+        if metavar_str != "BOOLEAN":
+            metavar.append(metavar_str)
+
+        # Range - from
+        # https://github.com/pallets/click/blob/c63c70dabd3f86ca68678b4f00951f78f52d0270/src/click/core.py#L2698-L2706 # noqa: E501
+        # skip count with default range type
+        if (
+            isinstance(param.type, click.types._NumberRangeBase)
+            and isinstance(param, click.Option)
+            and not (param.count and param.type.min == 0 and param.type.max is None)
+        ):
+            range_str = param.type._describe_range()
+            if range_str:
+                metavar.append(RANGE_STRING.format(range_str))
+
+        # Required asterisk
+        required: str | Text = ""
+        if param.required:
+            required = Text(REQUIRED_SHORT_STRING, style=STYLE_REQUIRED_SHORT)
+
+        required_rows.append(required)
+        options_rows.append(
+            [
+                highlighter(",".join(opt_long_strs)),
+                highlighter(",".join(opt_short_strs)),
+                negative_highlighter(",".join(secondary_opt_long_strs)),
+                negative_highlighter(",".join(secondary_opt_short_strs)),
+                metavar_highlighter(metavar),
+                _get_parameter_help(
+                    param=param,
+                    ctx=ctx,
+                    markup_mode=markup_mode,
+                ),
+            ]
+        )
+    rows_with_required: list[list[RenderableType]] = []
+    # Only prepend the required-marker column when at least one param needs it.
+    if any(required_rows):
+        for required, row in zip(required_rows, options_rows, strict=True):
+            rows_with_required.append([required, *row])
+    else:
+        rows_with_required = options_rows
+    if options_rows:
+        t_styles: dict[str, Any] = {
+            "show_lines": STYLE_OPTIONS_TABLE_SHOW_LINES,
+            "leading": STYLE_OPTIONS_TABLE_LEADING,
+            "box": STYLE_OPTIONS_TABLE_BOX,
+            "border_style": STYLE_OPTIONS_TABLE_BORDER_STYLE,
+            "row_styles": STYLE_OPTIONS_TABLE_ROW_STYLES,
+            "pad_edge": STYLE_OPTIONS_TABLE_PAD_EDGE,
+            "padding": STYLE_OPTIONS_TABLE_PADDING,
+        }
+        # "box" is stored as a string constant; resolve it to a rich box object.
+        box_style = getattr(box, t_styles.pop("box"), None)
+
+        options_table = Table(
+            highlight=True,
+            show_header=False,
+            expand=True,
+            box=box_style,
+            **t_styles,
+        )
+        for row in rows_with_required:
+            options_table.add_row(*row)
+        console.print(
+            Panel(
+                options_table,
+                border_style=STYLE_OPTIONS_PANEL_BORDER,
+                title=name,
+                title_align=ALIGN_OPTIONS_PANEL,
+            )
+        )
+
+
+def _print_commands_panel(
+    *,
+    name: str,
+    commands: list[click.Command],
+    markup_mode: MarkupModeStrict,
+    console: Console,
+    cmd_len: int,
+) -> None:
+    """Render one panel listing subcommands with their short help.
+
+    ``cmd_len`` fixes the width of the command-name column so that multiple
+    panels stay aligned with each other. Prints nothing when there are no rows.
+    """
+    t_styles: dict[str, Any] = {
+        "show_lines": STYLE_COMMANDS_TABLE_SHOW_LINES,
+        "leading": STYLE_COMMANDS_TABLE_LEADING,
+        "box": STYLE_COMMANDS_TABLE_BOX,
+        "border_style": STYLE_COMMANDS_TABLE_BORDER_STYLE,
+        "row_styles": STYLE_COMMANDS_TABLE_ROW_STYLES,
+        "pad_edge": STYLE_COMMANDS_TABLE_PAD_EDGE,
+        "padding": STYLE_COMMANDS_TABLE_PADDING,
+    }
+    # "box" is stored as a string constant; resolve it to a rich box object.
+    box_style = getattr(box, t_styles.pop("box"), None)
+
+    commands_table = Table(
+        highlight=False,
+        show_header=False,
+        expand=True,
+        box=box_style,
+        **t_styles,
+    )
+    # Define formatting in first column, as commands don't match highlighter
+    # regex
+    commands_table.add_column(
+        style=STYLE_COMMANDS_TABLE_FIRST_COLUMN,
+        no_wrap=True,
+        width=cmd_len,
+    )
+
+    # A big ratio makes the description column be greedy and take all the space
+    # available instead of allowing the command column to grow and misalign with
+    # other panels.
+    commands_table.add_column("Description", justify="left", no_wrap=False, ratio=10)
+    rows: list[list[RenderableType | None]] = []
+    deprecated_rows: list[RenderableType | None] = []
+    for command in commands:
+        helptext = command.short_help or command.help or ""
+        command_name = command.name or ""
+        if command.deprecated:
+            command_name_text = Text(f"{command_name}", style=STYLE_DEPRECATED_COMMAND)
+            deprecated_rows.append(Text(DEPRECATED_STRING, style=STYLE_DEPRECATED))
+        else:
+            command_name_text = Text(command_name)
+            deprecated_rows.append(None)
+        rows.append(
+            [
+                command_name_text,
+                _make_command_help(
+                    help_text=helptext,
+                    markup_mode=markup_mode,
+                ),
+            ]
+        )
+    rows_with_deprecated = rows
+    # Only add the trailing "(deprecated)" column when some command needs it.
+    if any(deprecated_rows):
+        rows_with_deprecated = []
+        for row, deprecated_text in zip(rows, deprecated_rows, strict=True):
+            rows_with_deprecated.append([*row, deprecated_text])
+    for row in rows_with_deprecated:
+        commands_table.add_row(*row)
+    if commands_table.row_count:
+        console.print(
+            Panel(
+                commands_table,
+                border_style=STYLE_COMMANDS_PANEL_BORDER,
+                title=name,
+                title_align=ALIGN_COMMANDS_PANEL,
+            )
+        )
+
+
+def rich_format_help(
+    *,
+    obj: click.Command | click.Group,
+    ctx: click.Context,
+    markup_mode: MarkupModeStrict,
+) -> None:
+    """Print nicely formatted help text using rich.
+
+    Based on original code from rich-cli, by @willmcgugan.
+    https://github.com/Textualize/rich-cli/blob/8a2767c7a340715fc6fbf4930ace717b9b2fc5e5/src/rich_cli/__main__.py#L162-L236
+
+    Replacement for the click function format_help().
+    Takes a command or group and builds the help text output.
+
+    Print order: usage line, command/group help, argument panels, option
+    panels, command panels (for groups), then the epilog.
+    """
+    console = _get_rich_console()
+
+    # Print usage
+    console.print(
+        Padding(highlighter(obj.get_usage(ctx)), 1), style=STYLE_USAGE_COMMAND
+    )
+
+    # Print command / group help if we have some
+    if obj.help:
+        # Print with some padding
+        console.print(
+            Padding(
+                Align(
+                    _get_help_text(
+                        obj=obj,
+                        markup_mode=markup_mode,
+                    ),
+                    pad=False,
+                ),
+                (0, 1, 1, 1),
+            )
+        )
+    # Group parameters by their rich_help_panel name (or the default titles).
+    panel_to_arguments: defaultdict[str, list[click.Argument]] = defaultdict(list)
+    panel_to_options: defaultdict[str, list[click.Option]] = defaultdict(list)
+    for param in obj.get_params(ctx):
+        # Skip if option is hidden
+        if getattr(param, "hidden", False):
+            continue
+        if isinstance(param, click.Argument):
+            panel_name = (
+                getattr(param, _RICH_HELP_PANEL_NAME, None) or ARGUMENTS_PANEL_TITLE
+            )
+            panel_to_arguments[panel_name].append(param)
+        elif isinstance(param, click.Option):
+            panel_name = (
+                getattr(param, _RICH_HELP_PANEL_NAME, None) or OPTIONS_PANEL_TITLE
+            )
+            panel_to_options[panel_name].append(param)
+    # The default panel is always printed first, followed by custom panels.
+    default_arguments = panel_to_arguments.get(ARGUMENTS_PANEL_TITLE, [])
+    _print_options_panel(
+        name=ARGUMENTS_PANEL_TITLE,
+        params=default_arguments,
+        ctx=ctx,
+        markup_mode=markup_mode,
+        console=console,
+    )
+    for panel_name, arguments in panel_to_arguments.items():
+        if panel_name == ARGUMENTS_PANEL_TITLE:
+            # Already printed above
+            continue
+        _print_options_panel(
+            name=panel_name,
+            params=arguments,
+            ctx=ctx,
+            markup_mode=markup_mode,
+            console=console,
+        )
+    default_options = panel_to_options.get(OPTIONS_PANEL_TITLE, [])
+    _print_options_panel(
+        name=OPTIONS_PANEL_TITLE,
+        params=default_options,
+        ctx=ctx,
+        markup_mode=markup_mode,
+        console=console,
+    )
+    for panel_name, options in panel_to_options.items():
+        if panel_name == OPTIONS_PANEL_TITLE:
+            # Already printed above
+            continue
+        _print_options_panel(
+            name=panel_name,
+            params=options,
+            ctx=ctx,
+            markup_mode=markup_mode,
+            console=console,
+        )
+
+    if isinstance(obj, click.Group):
+        panel_to_commands: defaultdict[str, list[click.Command]] = defaultdict(list)
+        for command_name in obj.list_commands(ctx):
+            command = obj.get_command(ctx, command_name)
+            if command and not command.hidden:
+                panel_name = (
+                    getattr(command, _RICH_HELP_PANEL_NAME, None)
+                    or COMMANDS_PANEL_TITLE
+                )
+                panel_to_commands[panel_name].append(command)
+
+        # Identify the longest command name in all panels
+        max_cmd_len = max(
+            [
+                len(command.name or "")
+                for commands in panel_to_commands.values()
+                for command in commands
+            ],
+            default=0,
+        )
+
+        # Print each command group panel
+        default_commands = panel_to_commands.get(COMMANDS_PANEL_TITLE, [])
+        _print_commands_panel(
+            name=COMMANDS_PANEL_TITLE,
+            commands=default_commands,
+            markup_mode=markup_mode,
+            console=console,
+            cmd_len=max_cmd_len,
+        )
+        for panel_name, commands in panel_to_commands.items():
+            if panel_name == COMMANDS_PANEL_TITLE:
+                # Already printed above
+                continue
+            _print_commands_panel(
+                name=panel_name,
+                commands=commands,
+                markup_mode=markup_mode,
+                console=console,
+                cmd_len=max_cmd_len,
+            )
+
+    # Epilogue if we have it
+    if obj.epilog:
+        # Remove single linebreaks, replace double with single
+        lines = obj.epilog.split("\n\n")
+        epilogue = "\n".join([x.replace("\n", " ").strip() for x in lines])
+        epilogue_text = _make_rich_text(text=epilogue, markup_mode=markup_mode)
+        console.print(Padding(Align(epilogue_text, pad=False), 1))
+
+
+def rich_format_error(self: click.ClickException) -> None:
+    """Print richly formatted click errors.
+
+    Called by custom exception handler to print richly formatted click errors.
+    Mimics original click.ClickException.echo() function but with rich formatting.
+    """
+    # Don't do anything when it's a NoArgsIsHelpError (without importing it, cf. #1278)
+    if self.__class__.__name__ == "NoArgsIsHelpError":
+        return
+
+    # Errors go to stderr.
+    console = _get_rich_console(stderr=True)
+    ctx: click.Context | None = getattr(self, "ctx", None)
+    if ctx is not None:
+        console.print(ctx.get_usage())
+
+    # Suggest the help option when the command exposes one.
+    if ctx is not None and ctx.command.get_help_option(ctx) is not None:
+        console.print(
+            RICH_HELP.format(
+                command_path=ctx.command_path, help_option=ctx.help_option_names[0]
+            ),
+            style=STYLE_ERRORS_SUGGESTION,
+        )
+
+    console.print(
+        Panel(
+            highlighter(self.format_message()),
+            border_style=STYLE_ERRORS_PANEL_BORDER,
+            title=ERRORS_PANEL_TITLE,
+            title_align=ALIGN_ERRORS_PANEL,
+        )
+    )
+
+
+def rich_abort_error() -> None:
+ """Print richly formatted abort error."""
+ console = _get_rich_console(stderr=True)
+ console.print(ABORTED_TEXT, style=STYLE_ABORTED)
+
+
+def escape_before_html_export(input_text: str) -> str:
+ """Ensure that the input string can be used for HTML export."""
+ return escape(input_text).strip()
+
+
+def rich_to_html(input_text: str) -> str:
+ """Print the HTML version of a rich-formatted input string.
+
+ This function does not provide a full HTML page, but can be used to insert
+ HTML-formatted text spans into a markdown file.
+ """
+ console = Console(record=True, highlight=False, file=io.StringIO())
+
+ console.print(input_text, overflow="ignore", crop=False)
+
+ return console.export_html(inline_styles=True, code_format="{code}").strip()
+
+
+def rich_render_text(text: str) -> str:
+ """Remove rich tags and render a pure text representation"""
+ console = _get_rich_console()
+ return "".join(segment.text for segment in console.render(text)).rstrip("\n")
+
+
+def get_traceback(
+ exc: BaseException,
+ exception_config: DeveloperExceptionConfig,
+ internal_dir_names: list[str],
+) -> Traceback:
+ rich_tb = Traceback.from_exception(
+ type(exc),
+ exc,
+ exc.__traceback__,
+ show_locals=exception_config.pretty_exceptions_show_locals,
+ suppress=internal_dir_names,
+ width=MAX_WIDTH,
+ )
+ return rich_tb
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/testing.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/testing.py
new file mode 100644
index 0000000000000000000000000000000000000000..09711e66fd82bb7214f6d6a43ba5bd0ae8602865
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/testing.py
@@ -0,0 +1,30 @@
+from collections.abc import Mapping, Sequence
+from typing import IO, Any
+
+from click.testing import CliRunner as ClickCliRunner # noqa
+from click.testing import Result
+from typer.main import Typer
+from typer.main import get_command as _get_command
+
+
+class CliRunner(ClickCliRunner):
+ def invoke( # type: ignore
+ self,
+ app: Typer,
+ args: str | Sequence[str] | None = None,
+ input: bytes | str | IO[Any] | None = None,
+ env: Mapping[str, str | None] | None = None,
+ catch_exceptions: bool = True,
+ color: bool = False,
+ **extra: Any,
+ ) -> Result:
+ use_cli = _get_command(app)
+ return super().invoke(
+ use_cli,
+ args=args,
+ input=input,
+ env=env,
+ catch_exceptions=catch_exceptions,
+ color=color,
+ **extra,
+ )
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/utils.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..addf9334d4210a9cddc9e5608ae446417d372eb0
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/utils.py
@@ -0,0 +1,197 @@
+import inspect
+from collections.abc import Callable
+from copy import copy
+from typing import Any, cast
+
+from ._typing import Annotated, get_args, get_origin, get_type_hints
+from .models import ArgumentInfo, OptionInfo, ParameterInfo, ParamMeta
+
+
+def _param_type_to_user_string(param_type: type[ParameterInfo]) -> str:
+ # Render a `ParameterInfo` subclass for use in error messages.
+ # User code doesn't call `*Info` directly, so errors should present the classes how
+ # they were (probably) defined in the user code.
+ if param_type is OptionInfo:
+ return "`Option`"
+ elif param_type is ArgumentInfo:
+ return "`Argument`"
+ # This line shouldn't be reachable during normal use.
+ return f"`{param_type.__name__}`" # pragma: no cover
+
+
+class AnnotatedParamWithDefaultValueError(Exception):
+ argument_name: str
+ param_type: type[ParameterInfo]
+
+ def __init__(self, argument_name: str, param_type: type[ParameterInfo]):
+ self.argument_name = argument_name
+ self.param_type = param_type
+
+ def __str__(self) -> str:
+ param_type_str = _param_type_to_user_string(self.param_type)
+ return (
+ f"{param_type_str} default value cannot be set in `Annotated`"
+ f" for {self.argument_name!r}. Set the default value with `=` instead."
+ )
+
+
+class MixedAnnotatedAndDefaultStyleError(Exception):
+ argument_name: str
+ annotated_param_type: type[ParameterInfo]
+ default_param_type: type[ParameterInfo]
+
+ def __init__(
+ self,
+ argument_name: str,
+ annotated_param_type: type[ParameterInfo],
+ default_param_type: type[ParameterInfo],
+ ):
+ self.argument_name = argument_name
+ self.annotated_param_type = annotated_param_type
+ self.default_param_type = default_param_type
+
+ def __str__(self) -> str:
+ annotated_param_type_str = _param_type_to_user_string(self.annotated_param_type)
+ default_param_type_str = _param_type_to_user_string(self.default_param_type)
+ msg = f"Cannot specify {annotated_param_type_str} in `Annotated` and"
+ if self.annotated_param_type is self.default_param_type:
+ msg += " default value"
+ else:
+ msg += f" {default_param_type_str} as a default value"
+ msg += f" together for {self.argument_name!r}"
+ return msg
+
+
+class MultipleTyperAnnotationsError(Exception):
+ argument_name: str
+
+ def __init__(self, argument_name: str):
+ self.argument_name = argument_name
+
+ def __str__(self) -> str:
+ return (
+ "Cannot specify multiple `Annotated` Typer arguments"
+ f" for {self.argument_name!r}"
+ )
+
+
+class DefaultFactoryAndDefaultValueError(Exception):
+ argument_name: str
+ param_type: type[ParameterInfo]
+
+ def __init__(self, argument_name: str, param_type: type[ParameterInfo]):
+ self.argument_name = argument_name
+ self.param_type = param_type
+
+ def __str__(self) -> str:
+ param_type_str = _param_type_to_user_string(self.param_type)
+ return (
+ "Cannot specify `default_factory` and a default value together"
+ f" for {param_type_str}"
+ )
+
+
+def _split_annotation_from_typer_annotations(
+ base_annotation: type[Any],
+) -> tuple[type[Any], list[ParameterInfo]]:
+ if get_origin(base_annotation) is not Annotated:
+ return base_annotation, []
+ base_annotation, *maybe_typer_annotations = get_args(base_annotation)
+ return base_annotation, [
+ annotation
+ for annotation in maybe_typer_annotations
+ if isinstance(annotation, ParameterInfo)
+ ]
+
+
+def get_params_from_function(func: Callable[..., Any]) -> dict[str, ParamMeta]:
+ signature = inspect.signature(func, eval_str=True)
+ type_hints = get_type_hints(func)
+ params = {}
+ for param in signature.parameters.values():
+ annotation, typer_annotations = _split_annotation_from_typer_annotations(
+ param.annotation,
+ )
+ if len(typer_annotations) > 1:
+ raise MultipleTyperAnnotationsError(param.name)
+
+ default = param.default
+ if typer_annotations:
+ # It's something like `my_param: Annotated[str, Argument()]`
+ [parameter_info] = typer_annotations
+
+ # Forbid `my_param: Annotated[str, Argument()] = Argument("...")`
+ if isinstance(param.default, ParameterInfo):
+ raise MixedAnnotatedAndDefaultStyleError(
+ argument_name=param.name,
+ annotated_param_type=type(parameter_info),
+ default_param_type=type(param.default),
+ )
+
+ parameter_info = copy(parameter_info)
+
+ # When used as a default, `Option` takes a default value and option names
+ # as positional arguments:
+ # `Option(some_value, "--some-argument", "-s")`
+ # When used in `Annotated` (ie, what this is handling), `Option` just takes
+ # option names as positional arguments:
+ # `Option("--some-argument", "-s")`
+ # In this case, the `default` attribute of `parameter_info` is actually
+ # meant to be the first item of `param_decls`.
+ if (
+ isinstance(parameter_info, OptionInfo)
+ and parameter_info.default is not ...
+ ):
+ parameter_info.param_decls = (
+ cast(str, parameter_info.default),
+ *(parameter_info.param_decls or ()),
+ )
+ parameter_info.default = ...
+
+ # Forbid `my_param: Annotated[str, Argument('some-default')]`
+ if parameter_info.default is not ...:
+ raise AnnotatedParamWithDefaultValueError(
+ param_type=type(parameter_info),
+ argument_name=param.name,
+ )
+ if param.default is not param.empty:
+ # Put the parameter's default (set by `=`) into `parameter_info`, where
+ # typer can find it.
+ parameter_info.default = param.default
+
+ default = parameter_info
+ elif param.name in type_hints:
+ # Resolve forward references.
+ annotation = type_hints[param.name]
+
+ if isinstance(default, ParameterInfo):
+ parameter_info = copy(default)
+ # Click supports `default` as either
+ # - an actual value; or
+ # - a factory function (returning a default value.)
+ # The two are not interchangeable for static typing, so typer allows
+ # specifying `default_factory`. Move the `default_factory` into `default`
+ # so click can find it.
+ if parameter_info.default is ... and parameter_info.default_factory:
+ parameter_info.default = parameter_info.default_factory
+ elif parameter_info.default_factory:
+ raise DefaultFactoryAndDefaultValueError(
+ argument_name=param.name, param_type=type(parameter_info)
+ )
+ default = parameter_info
+
+ params[param.name] = ParamMeta(
+ name=param.name, default=default, annotation=annotation
+ )
+ return params
+
+
+def parse_boolean_env_var(env_var_value: str | None, default: bool) -> bool:
+ if env_var_value is None:
+ return default
+ value = env_var_value.lower()
+ if value in ("y", "yes", "t", "true", "on", "1"):
+ return True
+ if value in ("n", "no", "f", "false", "off", "0"):
+ return False
+ return default
diff --git a/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/METADATA b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..b09cb50e1f9a42a8eb58a4339cbdb26250368375
--- /dev/null
+++ b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/METADATA
@@ -0,0 +1,72 @@
+Metadata-Version: 2.4
+Name: typing_extensions
+Version: 4.15.0
+Summary: Backported and Experimental Type Hints for Python 3.9+
+Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
+Author-email: "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee"
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-Expression: PSF-2.0
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Software Development
+License-File: LICENSE
+Project-URL: Bug Tracker, https://github.com/python/typing_extensions/issues
+Project-URL: Changes, https://github.com/python/typing_extensions/blob/main/CHANGELOG.md
+Project-URL: Documentation, https://typing-extensions.readthedocs.io/
+Project-URL: Home, https://github.com/python/typing_extensions
+Project-URL: Q & A, https://github.com/python/typing/discussions
+Project-URL: Repository, https://github.com/python/typing_extensions
+
+# Typing Extensions
+
+[](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+ `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+ users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+ added to the `typing` module.
+
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions ~=x.y`,
+where `x.y` is the first version that includes all features you need.
+[This](https://packaging.python.org/en/latest/specifications/version-specifiers/#compatible-release)
+is equivalent to `typing_extensions >=x.y, <(x+1)`. Do not depend on `~= x.y.z`
+unless you really know what you're doing; that defeats the purpose of
+semantic versioning.
+
+## Included items
+
+See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
+complete listing of module contents.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
+for how to contribute to `typing_extensions`.
+
diff --git a/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/RECORD b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..29fd02e54b8d77045f9d2a1ee424bc80ef6ace7c
--- /dev/null
+++ b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/RECORD
@@ -0,0 +1,5 @@
+typing_extensions.py,sha256=Qz0R0XDTok0usGXrwb_oSM6n49fOaFZ6tSvqLUwvftg,160429
+typing_extensions-4.15.0.dist-info/licenses/LICENSE,sha256=Oy-B_iHRgcSZxZolbI4ZaEVdZonSaaqFNzv7avQdo78,13936
+typing_extensions-4.15.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+typing_extensions-4.15.0.dist-info/METADATA,sha256=wTg3j-jxiTSsmd4GBTXFPsbBOu7WXpTDJkHafuMZKnI,3259
+typing_extensions-4.15.0.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/WHEEL b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37
--- /dev/null
+++ b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.12.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..f26bcf4d2de6eb136e31006ca3ab447d5e488adf
--- /dev/null
+++ b/.cache/uv/archive-v0/_qp7s-2UQBrykFVv616Yc/typing_extensions-4.15.0.dist-info/licenses/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__init__.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8081f77b8812f3b42d7949daa4195d2c35dc70ac
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__init__.py
@@ -0,0 +1,38 @@
+from ._monitor import TMonitor, TqdmSynchronisationWarning
+from ._tqdm_pandas import tqdm_pandas
+from .cli import main # TODO: remove in v5.0.0
+from .gui import tqdm as tqdm_gui # TODO: remove in v5.0.0
+from .gui import trange as tgrange # TODO: remove in v5.0.0
+from .std import (
+ TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning,
+ TqdmTypeError, TqdmWarning, tqdm, trange)
+from .version import __version__
+
+__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
+ 'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
+ 'TqdmTypeError', 'TqdmKeyError',
+ 'TqdmWarning', 'TqdmDeprecationWarning',
+ 'TqdmExperimentalWarning',
+ 'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
+ '__version__']
+
+
+def tqdm_notebook(*args, **kwargs): # pragma: no cover
+ """See tqdm.notebook.tqdm for full documentation"""
+ from warnings import warn
+
+ from .notebook import tqdm as _tqdm_notebook
+ warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`",
+ TqdmDeprecationWarning, stacklevel=2)
+ return _tqdm_notebook(*args, **kwargs)
+
+
+def tnrange(*args, **kwargs): # pragma: no cover
+ """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
+ from warnings import warn
+
+ from .notebook import trange as _tnrange
+ warn("Please use `tqdm.notebook.trange` instead of `tqdm.tnrange`",
+ TqdmDeprecationWarning, stacklevel=2)
+ return _tnrange(*args, **kwargs)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__main__.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e28416e104515e90fca4b69cc60d0c61fd15d61
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/__main__.py
@@ -0,0 +1,3 @@
+from .cli import main
+
+main()
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_main.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_main.py
new file mode 100644
index 0000000000000000000000000000000000000000..04fdeeff17b5cc84b210f445b54b87d5b99e3748
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_main.py
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .cli import * # NOQA
+from .cli import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.cli.*` instead of `tqdm._main.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_monitor.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_monitor.py
new file mode 100644
index 0000000000000000000000000000000000000000..f71aa56817ca77eba5df4a2dd11cb0c4a9a7ea1c
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_monitor.py
@@ -0,0 +1,95 @@
+import atexit
+from threading import Event, Thread, current_thread
+from time import time
+from warnings import warn
+
+__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
+
+
+class TqdmSynchronisationWarning(RuntimeWarning):
+ """tqdm multi-thread/-process errors which may cause incorrect nesting
+ but otherwise no adverse effects"""
+ pass
+
+
+class TMonitor(Thread):
+ """
+ Monitoring thread for tqdm bars.
+ Monitors if tqdm bars are taking too much time to display
+ and readjusts miniters automatically if necessary.
+
+ Parameters
+ ----------
+ tqdm_cls : class
+ tqdm class to use (can be core tqdm or a submodule).
+ sleep_interval : float
+ Time to sleep between monitoring checks.
+ """
+ _test = {} # internal vars for unit testing
+
+ def __init__(self, tqdm_cls, sleep_interval):
+ Thread.__init__(self)
+ self.daemon = True # kill thread when main killed (KeyboardInterrupt)
+ self.woken = 0 # last time woken up, to sync with monitor
+ self.tqdm_cls = tqdm_cls
+ self.sleep_interval = sleep_interval
+ self._time = self._test.get("time", time)
+ self.was_killed = self._test.get("Event", Event)()
+ atexit.register(self.exit)
+ self.start()
+
+ def exit(self):
+ self.was_killed.set()
+ if self is not current_thread():
+ self.join()
+ return self.report()
+
+ def get_instances(self):
+ # returns a copy of started `tqdm_cls` instances
+ return [i for i in self.tqdm_cls._instances.copy()
+ # Avoid race by checking that the instance started
+ if hasattr(i, 'start_t')]
+
+ def run(self):
+ cur_t = self._time()
+ while True:
+ # After processing and before sleeping, notify that we woke
+ # Need to be done just before sleeping
+ self.woken = cur_t
+ # Sleep some time...
+ self.was_killed.wait(self.sleep_interval)
+ # Quit if killed
+ if self.was_killed.is_set():
+ return
+ # Then monitor!
+ # Acquire lock (to access _instances)
+ with self.tqdm_cls.get_lock():
+ cur_t = self._time()
+ # Check tqdm instances are waiting too long to print
+ instances = self.get_instances()
+ for instance in instances:
+ # Check event in loop to reduce blocking time on exit
+ if self.was_killed.is_set():
+ return
+ # Only if mininterval > 1 (else iterations are just slow)
+ # and last refresh exceeded maxinterval
+ if (
+ instance.miniters > 1
+ and (cur_t - instance.last_print_t) >= instance.maxinterval
+ ):
+ # force bypassing miniters on next iteration
+ # (dynamic_miniters adjusts mininterval automatically)
+ instance.miniters = 1
+ # Refresh now! (works only for manual tqdm)
+ instance.refresh(nolock=True)
+ # Remove accidental long-lived strong reference
+ del instance
+ if instances != self.get_instances(): # pragma: nocover
+ warn("Set changed size during iteration" +
+ " (see https://github.com/tqdm/tqdm/issues/481)",
+ TqdmSynchronisationWarning, stacklevel=2)
+ # Remove accidental long-lived strong references
+ del instances
+
+ def report(self):
+ return not self.was_killed.is_set()
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fc4962774a4651db7a739a3f143633b6215a9bd
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm.py
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .std import * # NOQA
+from .std import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.std.*` instead of `tqdm._tqdm.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_gui.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_gui.py
new file mode 100644
index 0000000000000000000000000000000000000000..f32aa894f54b3a5b47a0fbf4263c2fd20df56c9d
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_gui.py
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .gui import * # NOQA
+from .gui import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.gui.*` instead of `tqdm._tqdm_gui.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_notebook.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_notebook.py
new file mode 100644
index 0000000000000000000000000000000000000000..f225fbf5b52d04987ccf68f4d5ee4b735e3158b0
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_notebook.py
@@ -0,0 +1,9 @@
+from warnings import warn
+
+from .notebook import * # NOQA
+from .notebook import __all__ # NOQA
+from .std import TqdmDeprecationWarning
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.notebook.*` instead of `tqdm._tqdm_notebook.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_pandas.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_pandas.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4fe6efdc603579e7f8acfa27ac10dccdf3e94ce
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_tqdm_pandas.py
@@ -0,0 +1,24 @@
+import sys
+
+__author__ = "github.com/casperdcl"
+__all__ = ['tqdm_pandas']
+
+
+def tqdm_pandas(tclass, **tqdm_kwargs):
+ """
+ Registers the given `tqdm` instance with
+ `pandas.core.groupby.DataFrameGroupBy.progress_apply`.
+ """
+ from tqdm import TqdmDeprecationWarning
+
+ if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
+ 'tqdm_')): # delayed adapter case
+ TqdmDeprecationWarning(
+ "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.",
+ fp_write=getattr(tqdm_kwargs.get('file', None), 'write', sys.stderr.write))
+ tclass.pandas(**tqdm_kwargs)
+ else:
+ TqdmDeprecationWarning(
+ "Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.",
+ fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
+ type(tclass).pandas(deprecated_t=tclass)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_utils.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..385e849e106d1319fe21045f14eb0aa6552fb153
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/_utils.py
@@ -0,0 +1,11 @@
+from warnings import warn
+
+from .std import TqdmDeprecationWarning
+from .utils import ( # NOQA, pylint: disable=unused-import
+ CUR_OS, IS_NIX, IS_WIN, RE_ANSI, Comparable, FormatReplace, SimpleTextIOWrapper,
+ _environ_cols_wrapper, _is_ascii, _is_utf, _screen_shape_linux, _screen_shape_tput,
+ _screen_shape_windows, _screen_shape_wrapper, _supports_unicode, _term_move_up, colorama)
+
+warn("This function will be removed in tqdm==5.0.0\n"
+ "Please use `tqdm.utils.*` instead of `tqdm._utils.*`",
+ TqdmDeprecationWarning, stacklevel=2)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/asyncio.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/asyncio.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d00a0a2e755f36068d079ccc12ca84d86ff42be
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/asyncio.py
@@ -0,0 +1,93 @@
+"""
+Asynchronous progressbar decorator for iterators.
+Includes a default `range` iterator printing to `stderr`.
+
+Usage:
+>>> from tqdm.asyncio import trange, tqdm
+>>> async for i in trange(10):
+... ...
+"""
+import asyncio
+from sys import version_info
+
+from .std import tqdm as std_tqdm
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['tqdm_asyncio', 'tarange', 'tqdm', 'trange']
+
+
+class tqdm_asyncio(std_tqdm):
+ """
+ Asynchronous-friendly version of tqdm.
+ """
+ def __init__(self, iterable=None, *args, **kwargs):
+ super().__init__(iterable, *args, **kwargs)
+ self.iterable_awaitable = False
+ if iterable is not None:
+ if hasattr(iterable, "__anext__"):
+ self.iterable_next = iterable.__anext__
+ self.iterable_awaitable = True
+ elif hasattr(iterable, "__next__"):
+ self.iterable_next = iterable.__next__
+ else:
+ self.iterable_iterator = iter(iterable)
+ self.iterable_next = self.iterable_iterator.__next__
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ try:
+ if self.iterable_awaitable:
+ res = await self.iterable_next()
+ else:
+ res = self.iterable_next()
+ self.update()
+ return res
+ except StopIteration:
+ self.close()
+ raise StopAsyncIteration
+ except BaseException:
+ self.close()
+ raise
+
+ def send(self, *args, **kwargs):
+ return self.iterable.send(*args, **kwargs)
+
+ @classmethod
+ def as_completed(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs):
+ """
+ Wrapper for `asyncio.as_completed`.
+ """
+ if total is None:
+ total = len(fs)
+ kwargs = {}
+ if version_info[:2] < (3, 10):
+ kwargs['loop'] = loop
+ yield from cls(asyncio.as_completed(fs, timeout=timeout, **kwargs),
+ total=total, **tqdm_kwargs)
+
+ @classmethod
+ async def gather(cls, *fs, loop=None, timeout=None, total=None, **tqdm_kwargs):
+ """
+ Wrapper for `asyncio.gather`.
+ """
+ async def wrap_awaitable(i, f):
+ return i, await f
+
+ ifs = [wrap_awaitable(i, f) for i, f in enumerate(fs)]
+ res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
+ total=total, **tqdm_kwargs)]
+ return [i for _, i in sorted(res)]
+
+
+def tarange(*args, **kwargs):
+ """
+ A shortcut for `tqdm.asyncio.tqdm(range(*args), **kwargs)`.
+ """
+ return tqdm_asyncio(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_asyncio
+trange = tarange
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/auto.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/auto.py
new file mode 100644
index 0000000000000000000000000000000000000000..206c4409d5269594bdbab3a092ef6e09e7c01947
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/auto.py
@@ -0,0 +1,40 @@
+"""
+Enables multiple commonly used features.
+
+Method resolution order:
+
+- `tqdm.autonotebook` without import warnings
+- `tqdm.asyncio`
+- `tqdm.std` base class
+
+Usage:
+>>> from tqdm.auto import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import warnings
+
+from .std import TqdmExperimentalWarning
+
+with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=TqdmExperimentalWarning)
+ from .autonotebook import tqdm as notebook_tqdm
+
+from .asyncio import tqdm as asyncio_tqdm
+from .std import tqdm as std_tqdm
+
+if notebook_tqdm != std_tqdm:
+ class tqdm(notebook_tqdm, asyncio_tqdm): # pylint: disable=inconsistent-mro
+ pass
+else:
+ tqdm = asyncio_tqdm
+
+
+def trange(*args, **kwargs):
+ """
+ A shortcut for `tqdm.auto.tqdm(range(*args), **kwargs)`.
+ """
+ return tqdm(range(*args), **kwargs)
+
+
+__all__ = ["tqdm", "trange"]
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/autonotebook.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/autonotebook.py
new file mode 100644
index 0000000000000000000000000000000000000000..a09f2ec4b8c95f12b8c7b7774f84d5ec55826334
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/autonotebook.py
@@ -0,0 +1,29 @@
+"""
+Automatically choose between `tqdm.notebook` and `tqdm.std`.
+
+Usage:
+>>> from tqdm.autonotebook import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+import sys
+from warnings import warn
+
+try:
+ get_ipython = sys.modules['IPython'].get_ipython
+ if 'IPKernelApp' not in get_ipython().config: # pragma: no cover
+ raise ImportError("console")
+ from .notebook import WARN_NOIPYW, IProgress
+ if IProgress is None:
+ from .std import TqdmWarning
+ warn(WARN_NOIPYW, TqdmWarning, stacklevel=2)
+ raise ImportError('ipywidgets')
+except Exception:
+ from .std import tqdm, trange
+else: # pragma: no cover
+ from .notebook import tqdm, trange
+ from .std import TqdmExperimentalWarning
+ warn("Using `tqdm.autonotebook.tqdm` in notebook mode."
+ " Use `tqdm.tqdm` instead to force console mode"
+ " (e.g. in jupyter console)", TqdmExperimentalWarning, stacklevel=2)
+__all__ = ["tqdm", "trange"]
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/cli.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/cli.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dd08f6053b992274dc8242a4e81ec7e66ddd937
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/cli.py
@@ -0,0 +1,324 @@
+"""
+Module version for monitoring CLI pipes (`... | python -m tqdm | ...`).
+"""
+import logging
+import re
+import sys
+from ast import literal_eval as numeric
+from textwrap import indent
+
+from .std import TqdmKeyError, TqdmTypeError, tqdm
+from .version import __version__
+
+__all__ = ["main"]
+log = logging.getLogger(__name__)
+
+
+def cast(val, typ):
+ log.debug((val, typ))
+ if " or " in typ:
+ for t in typ.split(" or "):
+ try:
+ return cast(val, t)
+ except TqdmTypeError:
+ pass
+ raise TqdmTypeError(f"{val} : {typ}")
+
+ # sys.stderr.write('\ndebug | `val:type`: `' + val + ':' + typ + '`.\n')
+ if typ == 'bool':
+ if (val == 'True') or (val == ''):
+ return True
+ if val == 'False':
+ return False
+ raise TqdmTypeError(val + ' : ' + typ)
+ if typ == 'chr':
+ if len(val) == 1:
+ return val.encode()
+ if re.match(r"^\\\w+$", val):
+ return eval(f'"{val}"').encode()
+ raise TqdmTypeError(f"{val} : {typ}")
+ if typ == 'str':
+ return val
+ if typ == 'int':
+ try:
+ return int(val)
+ except ValueError as exc:
+ raise TqdmTypeError(f"{val} : {typ}") from exc
+ if typ == 'float':
+ try:
+ return float(val)
+ except ValueError as exc:
+ raise TqdmTypeError(f"{val} : {typ}") from exc
+ raise TqdmTypeError(f"{val} : {typ}")
+
+
+def posix_pipe(fin, fout, delim=b'\\n', buf_size=256,
+ callback=lambda float: None, callback_len=True):
+ """
+ Params
+ ------
+ fin : binary file with `read(buf_size : int)` method
+ fout : binary file with `write` (and optionally `flush`) methods.
+ callback : function(float), e.g.: `tqdm.update`
+ callback_len : If (default: True) do `callback(len(buffer))`.
+ Otherwise, do `callback(data) for data in buffer.split(delim)`.
+ """
+ fp_write = fout.write
+
+ if not delim:
+ while True:
+ tmp = fin.read(buf_size)
+
+ # flush at EOF
+ if not tmp:
+ getattr(fout, 'flush', lambda: None)()
+ return
+
+ fp_write(tmp)
+ callback(len(tmp))
+ # return
+
+ buf = b''
+ len_delim = len(delim)
+ # n = 0
+ while True:
+ tmp = fin.read(buf_size)
+
+ # flush at EOF
+ if not tmp:
+ if buf:
+ fp_write(buf)
+ if callback_len:
+ # n += 1 + buf.count(delim)
+ callback(1 + buf.count(delim))
+ else:
+ for i in buf.split(delim):
+ callback(i)
+ getattr(fout, 'flush', lambda: None)()
+ return # n
+
+ while True:
+ i = tmp.find(delim)
+ if i < 0:
+ buf += tmp
+ break
+ fp_write(buf + tmp[:i + len(delim)])
+ # n += 1
+ callback(1 if callback_len else (buf + tmp[:i]))
+ buf = b''
+ tmp = tmp[i + len_delim:]
+
+
+# ((opt, type), ... )
+RE_OPTS = re.compile(r'\n {4}(\S+)\s{2,}:\s*([^,]+)')
+# better split method assuming no positional args
+RE_SHLEX = re.compile(r'\s*(? : \2', d)
+ split = RE_OPTS.split(d)
+ opt_types_desc = zip(split[1::3], split[2::3], split[3::3])
+ d = ''.join(('\n --{0} : {2}{3}' if otd[1] == 'bool' else
+ '\n --{0}=<{1}> : {2}{3}').format(
+ otd[0].replace('_', '-'), otd[0], *otd[1:])
+ for otd in opt_types_desc if otd[0] not in UNSUPPORTED_OPTS)
+
+ help_short = "Usage:\n tqdm [--help | options]\n"
+ d = help_short + """
+Options:
+ -h, --help Print this help and exit.
+ -v, --version Print version and exit.
+""" + d.strip('\n') + '\n'
+
+ # opts = docopt(d, version=__version__)
+ if any(v in argv for v in ('-v', '--version')):
+ sys.stdout.write(__version__ + '\n')
+ sys.exit(0)
+ elif any(v in argv for v in ('-h', '--help')):
+ sys.stdout.write(d + '\n')
+ sys.exit(0)
+ elif argv and argv[0][:2] != '--':
+ sys.stderr.write(f"Error:Unknown argument:{argv[0]}\n{help_short}")
+
+ argv = RE_SHLEX.split(' '.join(["tqdm"] + argv))
+ opts = dict(zip(argv[1::3], argv[3::3]))
+
+ log.debug(opts)
+ opts.pop('log', True)
+
+ tqdm_args = {'file': fp}
+ try:
+ for (o, v) in opts.items():
+ o = o.replace('-', '_')
+ try:
+ tqdm_args[o] = cast(v, opt_types[o])
+ except KeyError as e:
+ raise TqdmKeyError(str(e))
+ log.debug('args:' + str(tqdm_args))
+
+ delim_per_char = tqdm_args.pop('bytes', False)
+ update = tqdm_args.pop('update', False)
+ update_to = tqdm_args.pop('update_to', False)
+ if sum((delim_per_char, update, update_to)) > 1:
+ raise TqdmKeyError("Can only have one of --bytes --update --update_to")
+ except Exception:
+ fp.write("\nError:\n" + help_short)
+ stdin, stdout_write = sys.stdin, sys.stdout.write
+ for i in stdin:
+ stdout_write(i)
+ raise
+ else:
+ buf_size = tqdm_args.pop('buf_size', 256)
+ delim = tqdm_args.pop('delim', b'\\n')
+ tee = tqdm_args.pop('tee', False)
+ manpath = tqdm_args.pop('manpath', None)
+ comppath = tqdm_args.pop('comppath', None)
+ if tqdm_args.pop('null', False):
+ class stdout:
+ @staticmethod
+ def write(_):
+ pass
+ else:
+ stdout = sys.stdout
+ stdout = getattr(stdout, 'buffer', stdout)
+ stdin = getattr(sys.stdin, 'buffer', sys.stdin)
+ if manpath or comppath:
+ try: # py<3.9
+ import importlib_resources as resources
+ except ImportError:
+ from importlib import resources
+ from pathlib import Path
+
+ def cp(name, dst):
+ """copy resource `name` to `dst`"""
+ fi = resources.files('tqdm') / name
+ dst.write_bytes(fi.read_bytes())
+ log.info("written:%s", dst)
+ if manpath is not None:
+ cp('tqdm.1', Path(manpath) / 'tqdm.1')
+ if comppath is not None:
+ cp('completion.sh', Path(comppath) / 'tqdm_completion.sh')
+ sys.exit(0)
+ if tee:
+ stdout_write = stdout.write
+ fp_write = getattr(fp, 'buffer', fp).write
+
+ class stdout: # pylint: disable=function-redefined
+ @staticmethod
+ def write(x):
+ with tqdm.external_write_mode(file=fp):
+ fp_write(x)
+ stdout_write(x)
+ if delim_per_char:
+ tqdm_args.setdefault('unit', 'B')
+ tqdm_args.setdefault('unit_scale', True)
+ tqdm_args.setdefault('unit_divisor', 1024)
+ log.debug(tqdm_args)
+ with tqdm(**tqdm_args) as t:
+ posix_pipe(stdin, stdout, '', buf_size, t.update)
+ elif delim == b'\\n':
+ log.debug(tqdm_args)
+ write = stdout.write
+ if update or update_to:
+ with tqdm(**tqdm_args) as t:
+ if update:
+ def callback(i):
+ t.update(numeric(i.decode()))
+ else: # update_to
+ def callback(i):
+ t.update(numeric(i.decode()) - t.n)
+ for i in stdin:
+ write(i)
+ callback(i)
+ else:
+ for i in tqdm(stdin, **tqdm_args):
+ write(i)
+ else:
+ log.debug(tqdm_args)
+ with tqdm(**tqdm_args) as t:
+ callback_len = False
+ if update:
+ def callback(i):
+ t.update(numeric(i.decode()))
+ elif update_to:
+ def callback(i):
+ t.update(numeric(i.decode()) - t.n)
+ else:
+ callback = t.update
+ callback_len = True
+ posix_pipe(stdin, stdout, delim, buf_size, callback, callback_len)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/completion.sh b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/completion.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9f61c7f14bb8c1f6099b9eb75dce28ece6a7ae96
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/completion.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+_tqdm(){
+ local cur prv
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ prv="${COMP_WORDS[COMP_CWORD - 1]}"
+
+ case ${prv} in
+ --bar_format|--buf_size|--colour|--comppath|--delay|--delim|--desc|--initial|--lock_args|--manpath|--maxinterval|--mininterval|--miniters|--ncols|--nrows|--position|--postfix|--smoothing|--total|--unit|--unit_divisor)
+ # await user input
+ ;;
+ "--log")
+ COMPREPLY=($(compgen -W 'CRITICAL FATAL ERROR WARN WARNING INFO DEBUG NOTSET' -- ${cur}))
+ ;;
+ *)
+ COMPREPLY=($(compgen -W '--ascii --bar_format --buf_size --bytes --colour --comppath --delay --delim --desc --disable --dynamic_ncols --help --initial --leave --lock_args --log --manpath --maxinterval --mininterval --miniters --ncols --nrows --null --position --postfix --smoothing --tee --total --unit --unit_divisor --unit_scale --update --update_to --version --write_bytes -h -v' -- ${cur}))
+ ;;
+ esac
+}
+complete -F _tqdm tqdm
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/contrib/bells.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/contrib/bells.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b8f4b9ecd894f1edfaa08d9fe730b8d7c8b93e0
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/contrib/bells.py
@@ -0,0 +1,26 @@
+"""
+Even more features than `tqdm.auto` (all the bells & whistles):
+
+- `tqdm.auto`
+- `tqdm.tqdm.pandas`
+- `tqdm.contrib.telegram`
+ + uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}`
+- `tqdm.contrib.discord`
+ + uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
+"""
+__all__ = ['tqdm', 'trange']
+import warnings
+from os import getenv
+
+if getenv("TQDM_SLACK_TOKEN") and getenv("TQDM_SLACK_CHANNEL"):
+ from .slack import tqdm, trange
+elif getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"):
+ from .telegram import tqdm, trange
+elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"):
+ from .discord import tqdm, trange
+else:
+ from ..auto import tqdm, trange
+
+with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=FutureWarning)
+ tqdm.pandas()
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/dask.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/dask.py
new file mode 100644
index 0000000000000000000000000000000000000000..57f1b668f59dc5991019eee34c7df3232a2c2cd7
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/dask.py
@@ -0,0 +1,44 @@
+from functools import partial
+
+from dask.callbacks import Callback
+
+from .auto import tqdm as tqdm_auto
+
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['TqdmCallback']
+
+
+class TqdmCallback(Callback):
+ """Dask callback for task progress."""
+ def __init__(self, start=None, pretask=None, tqdm_class=tqdm_auto,
+ **tqdm_kwargs):
+ """
+ Parameters
+ ----------
+ tqdm_class : optional
+ `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
+ tqdm_kwargs : optional
+ Any other arguments used for all bars.
+ """
+ super().__init__(start=start, pretask=pretask)
+ if tqdm_kwargs:
+ tqdm_class = partial(tqdm_class, **tqdm_kwargs)
+ self.tqdm_class = tqdm_class
+
+ def _start_state(self, _, state):
+ self.pbar = self.tqdm_class(total=sum(
+ len(state[k]) for k in ['ready', 'waiting', 'running', 'finished']))
+
+ def _posttask(self, *_, **__):
+ self.pbar.update()
+
+ def _finish(self, *_, **__):
+ self.pbar.close()
+
+ def display(self):
+ """Displays in the current cell in Notebooks."""
+ container = getattr(self.bar, 'container', None)
+ if container is None:
+ return
+ from .notebook import display
+ display(container)
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/gui.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/gui.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb52fb91a8661f4c73edd352bbc6f21b877dcfee
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/gui.py
@@ -0,0 +1,179 @@
+"""
+Matplotlib GUI progressbar decorator for iterators.
+
+Usage:
+>>> from tqdm.gui import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+# future division is important to divide integers and get as
+# a result precise floating numbers (instead of truncated int)
+import re
+from warnings import warn
+
+# to inherit from the tqdm class
+from .std import TqdmExperimentalWarning
+from .std import tqdm as std_tqdm
+
+# import compatibility functions and utilities
+
+__author__ = {"github.com/": ["casperdcl", "lrq3000"]}
+__all__ = ['tqdm_gui', 'tgrange', 'tqdm', 'trange']
+
+
+class tqdm_gui(std_tqdm): # pragma: no cover
+ """Experimental Matplotlib GUI version of tqdm!"""
+ # TODO: @classmethod: write() on GUI?
+ def __init__(self, *args, **kwargs):
+ from collections import deque
+
+ import matplotlib as mpl
+ import matplotlib.pyplot as plt
+ kwargs = kwargs.copy()
+ kwargs['gui'] = True
+ colour = kwargs.pop('colour', 'g')
+ super().__init__(*args, **kwargs)
+
+ if self.disable:
+ return
+
+ warn("GUI is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
+ self.mpl = mpl
+ self.plt = plt
+
+ # Remember if external environment uses toolbars
+ self.toolbar = self.mpl.rcParams['toolbar']
+ self.mpl.rcParams['toolbar'] = 'None'
+
+ self.mininterval = max(self.mininterval, 0.5)
+ self.fig, ax = plt.subplots(figsize=(9, 2.2))
+ # self.fig.subplots_adjust(bottom=0.2)
+ total = self.__len__() # avoids TypeError on None #971
+ if total is not None:
+ self.xdata = []
+ self.ydata = []
+ self.zdata = []
+ else:
+ self.xdata = deque([])
+ self.ydata = deque([])
+ self.zdata = deque([])
+ self.line1, = ax.plot(self.xdata, self.ydata, color='b')
+ self.line2, = ax.plot(self.xdata, self.zdata, color='k')
+ ax.set_ylim(0, 0.001)
+ if total is not None:
+ ax.set_xlim(0, 100)
+ ax.set_xlabel("percent")
+ self.fig.legend((self.line1, self.line2), ("cur", "est"),
+ loc='center right')
+ # progressbar
+ self.hspan = plt.axhspan(0, 0.001, xmin=0, xmax=0, color=colour)
+ else:
+ # ax.set_xlim(-60, 0)
+ ax.set_xlim(0, 60)
+ ax.invert_xaxis()
+ ax.set_xlabel("seconds")
+ ax.legend(("cur", "est"), loc='lower left')
+ ax.grid()
+ # ax.set_xlabel('seconds')
+ ax.set_ylabel((self.unit if self.unit else "it") + "/s")
+ if self.unit_scale:
+ plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
+ ax.yaxis.get_offset_text().set_x(-0.15)
+
+ # Remember if external environment is interactive
+ self.wasion = plt.isinteractive()
+ plt.ion()
+ self.ax = ax
+
+ def close(self):
+ if self.disable:
+ return
+
+ self.disable = True
+
+ with self.get_lock():
+ self._instances.remove(self)
+
+ # Restore toolbars
+ self.mpl.rcParams['toolbar'] = self.toolbar
+ # Return to non-interactive mode
+ if not self.wasion:
+ self.plt.ioff()
+ if self.leave:
+ self.display()
+ else:
+ self.plt.close(self.fig)
+
+ def clear(self, *_, **__):
+ pass
+
+ def display(self, *_, **__):
+ n = self.n
+ cur_t = self._time()
+ elapsed = cur_t - self.start_t
+ delta_it = n - self.last_print_n
+ delta_t = cur_t - self.last_print_t
+
+ # Inline due to multiple calls
+ total = self.total
+ xdata = self.xdata
+ ydata = self.ydata
+ zdata = self.zdata
+ ax = self.ax
+ line1 = self.line1
+ line2 = self.line2
+ hspan = getattr(self, 'hspan', None)
+ # instantaneous rate
+ y = delta_it / delta_t
+ # overall rate
+ z = n / elapsed
+ # update line data
+ xdata.append(n * 100.0 / total if total else cur_t)
+ ydata.append(y)
+ zdata.append(z)
+
+ # Discard old values
+ # xmin, xmax = ax.get_xlim()
+ # if (not total) and elapsed > xmin * 1.1:
+ if (not total) and elapsed > 66:
+ xdata.popleft()
+ ydata.popleft()
+ zdata.popleft()
+
+ ymin, ymax = ax.get_ylim()
+ if y > ymax or z > ymax:
+ ymax = 1.1 * y
+ ax.set_ylim(ymin, ymax)
+ ax.figure.canvas.draw()
+
+ if total:
+ line1.set_data(xdata, ydata)
+ line2.set_data(xdata, zdata)
+ if hspan:
+ hspan.set_xy((0, ymin))
+ hspan.set_height(ymax - ymin)
+ hspan.set_width(n / total)
+ else:
+ t_ago = [cur_t - i for i in xdata]
+ line1.set_data(t_ago, ydata)
+ line2.set_data(t_ago, zdata)
+
+ d = self.format_dict
+ # remove {bar}
+ d['bar_format'] = (d['bar_format'] or "{l_bar} {r_bar}").replace(
+ "{bar}", " ")
+ msg = self.format_meter(**d)
+ if ' ' in msg:
+ msg = "".join(re.split(r'\|? \|?', msg, maxsplit=1))
+ ax.set_title(msg, fontname="DejaVu Sans Mono", fontsize=11)
+ self.plt.pause(1e-9)
+
+
+def tgrange(*args, **kwargs):
+ """Shortcut for `tqdm.gui.tqdm(range(*args), **kwargs)`."""
+ return tqdm_gui(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_gui
+trange = tgrange
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/keras.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/keras.py
new file mode 100644
index 0000000000000000000000000000000000000000..cce9467c51a95388aaa502d1da9a42f3ebf0af24
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/keras.py
@@ -0,0 +1,122 @@
+from copy import copy
+from functools import partial
+
+from .auto import tqdm as tqdm_auto
+
+try:
+ import keras
+except (ImportError, AttributeError) as e:
+ try:
+ from tensorflow import keras
+ except ImportError:
+ raise e
+__author__ = {"github.com/": ["casperdcl"]}
+__all__ = ['TqdmCallback']
+
+
+class TqdmCallback(keras.callbacks.Callback):
+ """Keras callback for epoch and batch progress."""
+ @staticmethod
+ def bar2callback(bar, pop=None, delta=(lambda logs: 1)):
+ def callback(_, logs=None):
+ n = delta(logs)
+ if logs:
+ if pop:
+ logs = copy(logs)
+ [logs.pop(i, 0) for i in pop]
+ bar.set_postfix(logs, refresh=False)
+ bar.update(n)
+
+ return callback
+
+ def __init__(self, epochs=None, data_size=None, batch_size=None, verbose=1,
+ tqdm_class=tqdm_auto, **tqdm_kwargs):
+ """
+ Parameters
+ ----------
+ epochs : int, optional
+ data_size : int, optional
+ Number of training pairs.
+ batch_size : int, optional
+ Number of training pairs per batch.
+ verbose : int
+ 0: epoch, 1: batch (transient), 2: batch. [default: 1].
+ Will be set to `0` unless both `data_size` and `batch_size`
+ are given.
+ tqdm_class : optional
+ `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
+ tqdm_kwargs : optional
+ Any other arguments used for all bars.
+ """
+ if tqdm_kwargs:
+ tqdm_class = partial(tqdm_class, **tqdm_kwargs)
+ self.tqdm_class = tqdm_class
+ self.epoch_bar = tqdm_class(total=epochs, unit='epoch')
+ self.on_epoch_end = self.bar2callback(self.epoch_bar)
+ if data_size and batch_size:
+ self.batches = batches = (data_size + batch_size - 1) // batch_size
+ else:
+ self.batches = batches = None
+ self.verbose = verbose
+ if verbose == 1:
+ self.batch_bar = tqdm_class(total=batches, unit='batch', leave=False)
+ self.on_batch_end = self.bar2callback(
+ self.batch_bar, pop=['batch', 'size'],
+ delta=lambda logs: logs.get('size', 1))
+
+ def on_train_begin(self, *_, **__):
+ params = self.params.get
+ auto_total = params('epochs', params('nb_epoch', None))
+ if auto_total is not None and auto_total != self.epoch_bar.total:
+ self.epoch_bar.reset(total=auto_total)
+
+ def on_epoch_begin(self, epoch, *_, **__):
+ if self.epoch_bar.n < epoch:
+ ebar = self.epoch_bar
+ ebar.n = ebar.last_print_n = ebar.initial = epoch
+ if self.verbose:
+ params = self.params.get
+ total = params('samples', params(
+ 'nb_sample', params('steps', None))) or self.batches
+ if self.verbose == 2:
+ if hasattr(self, 'batch_bar'):
+ self.batch_bar.close()
+ self.batch_bar = self.tqdm_class(
+ total=total, unit='batch', leave=True,
+ unit_scale=1 / (params('batch_size', 1) or 1))
+ self.on_batch_end = self.bar2callback(
+ self.batch_bar, pop=['batch', 'size'],
+ delta=lambda logs: logs.get('size', 1))
+ elif self.verbose == 1:
+ self.batch_bar.unit_scale = 1 / (params('batch_size', 1) or 1)
+ self.batch_bar.reset(total=total)
+ else:
+ raise KeyError('Unknown verbosity')
+
+ def on_train_end(self, *_, **__):
+ if hasattr(self, 'batch_bar'):
+ self.batch_bar.close()
+ self.epoch_bar.close()
+
+ def display(self):
+ """Displays in the current cell in Notebooks."""
+ container = getattr(self.epoch_bar, 'container', None)
+ if container is None:
+ return
+ from .notebook import display
+ display(container)
+ batch_bar = getattr(self, 'batch_bar', None)
+ if batch_bar is not None:
+ display(batch_bar.container)
+
+ @staticmethod
+ def _implements_train_batch_hooks():
+ return True
+
+ @staticmethod
+ def _implements_test_batch_hooks():
+ return True
+
+ @staticmethod
+ def _implements_predict_batch_hooks():
+ return True
diff --git a/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/notebook.py b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/notebook.py
new file mode 100644
index 0000000000000000000000000000000000000000..83178e981f46c0784a36bee1f9e347022c7678a4
--- /dev/null
+++ b/.cache/uv/archive-v0/_ucjKFNpNyxFllhqaeOlI/tqdm/notebook.py
@@ -0,0 +1,315 @@
+"""
+IPython/Jupyter Notebook progressbar decorator for iterators.
+Includes a default `range` iterator printing to `stderr`.
+
+Usage:
+>>> from tqdm.notebook import trange, tqdm
+>>> for i in trange(10):
+... ...
+"""
+# import compatibility functions and utilities
+import re
+import sys
+from html import escape
+from weakref import proxy
+
+# to inherit from the tqdm class
+from .std import tqdm as std_tqdm
+
+if True: # pragma: no cover
+ # import IPython/Jupyter base widget and display utilities
+ IPY = 0
+ try: # IPython 4.x
+ import ipywidgets
+ IPY = 4
+ except ImportError: # IPython 3.x / 2.x
+ IPY = 32
+ import warnings
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ 'ignore', message=".*The `IPython.html` package has been deprecated.*")
+ try:
+ import IPython.html.widgets as ipywidgets # NOQA: F401
+ except ImportError:
+ pass
+
+ try: # IPython 4.x / 3.x
+ if IPY == 32:
+ from IPython.html.widgets import HTML
+ from IPython.html.widgets import FloatProgress as IProgress
+ from IPython.html.widgets import HBox
+ IPY = 3
+ else:
+ from ipywidgets import HTML
+ from ipywidgets import FloatProgress as IProgress
+ from ipywidgets import HBox
+ except ImportError:
+ try: # IPython 2.x
+ from IPython.html.widgets import HTML
+ from IPython.html.widgets import ContainerWidget as HBox
+ from IPython.html.widgets import FloatProgressWidget as IProgress
+ IPY = 2
+ except ImportError:
+ IPY = 0
+ IProgress = None
+ HBox = object
+
+ try:
+ from IPython.display import display # , clear_output
+ except ImportError:
+ pass
+
+__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
+__all__ = ['tqdm_notebook', 'tnrange', 'tqdm', 'trange']
+WARN_NOIPYW = ("IProgress not found. Please update jupyter and ipywidgets."
+ " See https://ipywidgets.readthedocs.io/en/stable"
+ "/user_install.html")
+
+
+class TqdmHBox(HBox):
+ """`ipywidgets.HBox` with a pretty representation"""
+ def _json_(self, pretty=None):
+ pbar = getattr(self, 'pbar', None)
+ if pbar is None:
+ return {}
+ d = pbar.format_dict
+ if pretty is not None:
+ d["ascii"] = not pretty
+ return d
+
+ def __repr__(self, pretty=False):
+ pbar = getattr(self, 'pbar', None)
+ if pbar is None:
+ return super().__repr__()
+ return pbar.format_meter(**self._json_(pretty))
+
+ def _repr_pretty_(self, pp, *_, **__):
+ pp.text(self.__repr__(True))
+
+
+class tqdm_notebook(std_tqdm):
+ """
+ Experimental IPython/Jupyter Notebook widget using tqdm!
+ """
+ @staticmethod
+ def status_printer(_, total=None, desc=None, ncols=None):
+ """
+ Manage the printing of an IPython/Jupyter Notebook progress bar widget.
+ """
+ # Fallback to text bar if there's no total
+ # DEPRECATED: replaced with an 'info' style bar
+ # if not total:
+ # return super(tqdm_notebook, tqdm_notebook).status_printer(file)
+
+ # fp = file
+
+ # Prepare IPython progress bar
+ if IProgress is None: # #187 #451 #558 #872
+ raise ImportError(WARN_NOIPYW)
+ if total:
+ pbar = IProgress(min=0, max=total)
+ else: # No total? Show info style bar with no progress tqdm status
+ pbar = IProgress(min=0, max=1)
+ pbar.value = 1
+ pbar.bar_style = 'info'
+ if ncols is None:
+ pbar.layout.width = "20px"
+
+ ltext = HTML()
+ rtext = HTML()
+ if desc:
+ ltext.value = desc
+ container = TqdmHBox(children=[ltext, pbar, rtext])
+ # Prepare layout
+ if ncols is not None: # use default style of ipywidgets
+ # ncols could be 100, "100px", "100%"
+ ncols = str(ncols) # ipywidgets only accepts string
+ try:
+ if int(ncols) > 0: # isnumeric and positive
+ ncols += 'px'
+ except ValueError:
+ pass
+ pbar.layout.flex = '2'
+ container.layout.width = ncols
+ container.layout.display = 'inline-flex'
+ container.layout.flex_flow = 'row wrap'
+
+ return container
+
+ def display(self, msg=None, pos=None,
+ # additional signals
+ close=False, bar_style=None, check_delay=True):
+ # Note: contrary to native tqdm, msg='' does NOT clear bar
+ # goal is to keep all infos if error happens so user knows
+ # at which iteration the loop failed.
+
+ # Clear previous output (really necessary?)
+ # clear_output(wait=1)
+
+ if not msg and not close:
+ d = self.format_dict
+ # remove {bar}
+ d['bar_format'] = (d['bar_format'] or "{l_bar} {r_bar}").replace(
+ "{bar}", " ")
+ msg = self.format_meter(**d)
+
+ ltext, pbar, rtext = self.container.children
+ pbar.value = self.n
+
+ if msg:
+ msg = msg.replace(' ', '\u2007') # fix html space padding
+ # html escape special characters (like '&')
+ if ' ' in msg:
+ left, right = map(escape, re.split(r'\|? \|?', msg, maxsplit=1))
+ else:
+ left, right = '', escape(msg)
+
+ # Update description
+ ltext.value = left
+ # never clear the bar (signal: msg='')
+ if right:
+ rtext.value = right
+
+ # Change bar style
+ if bar_style:
+ # Hack-ish way to avoid the danger bar_style being overridden by
+ # success because the bar gets closed after the error...
+ if pbar.bar_style != 'danger' or bar_style != 'success':
+ pbar.bar_style = bar_style
+
+ # Special signal to close the bar
+ if close and pbar.bar_style != 'danger': # hide only if no error
+ try:
+ self.container.close()
+ except AttributeError:
+ self.container.visible = False
+ self.container.layout.visibility = 'hidden' # IPYW>=8
+
+ if check_delay and self.delay > 0 and not self.displayed:
+ display(self.container)
+ self.displayed = True
+
+ @property
+ def colour(self):
+ if hasattr(self, 'container'):
+ return self.container.children[-2].style.bar_color
+
+ @colour.setter
+ def colour(self, bar_color):
+ if hasattr(self, 'container'):
+ self.container.children[-2].style.bar_color = bar_color
+
+ def __init__(self, *args, **kwargs):
+ """
+ Supports the usual `tqdm.tqdm` parameters as well as those listed below.
+
+ Parameters
+ ----------
+ display : Whether to call `display(self.container)` immediately
+ [default: True].
+ """
+ kwargs = kwargs.copy()
+ # Setup default output
+ file_kwarg = kwargs.get('file', sys.stderr)
+ if file_kwarg is sys.stderr or file_kwarg is None:
+ kwargs['file'] = sys.stdout # avoid the red block in IPython
+
+ # Initialize parent class + avoid printing by using gui=True
+ kwargs['gui'] = True
+ # convert disable = None to False
+ kwargs['disable'] = bool(kwargs.get('disable', False))
+ colour = kwargs.pop('colour', None)
+ display_here = kwargs.pop('display', True)
+ super().__init__(*args, **kwargs)
+ if self.disable or not kwargs['gui']:
+ self.disp = lambda *_, **__: None
+ return
+
+ # Get bar width
+ self.ncols = '100%' if self.dynamic_ncols else kwargs.get("ncols", None)
+
+ # Replace with IPython progress bar display (with correct total)
+ unit_scale = 1 if self.unit_scale is True else self.unit_scale or 1
+ total = self.total * unit_scale if self.total else self.total
+ self.container = self.status_printer(self.fp, total, self.desc, self.ncols)
+ self.container.pbar = proxy(self)
+ self.displayed = False
+ if display_here and self.delay <= 0:
+ display(self.container)
+ self.displayed = True
+ self.disp = self.display
+ self.colour = colour
+
+ # Print initial bar state
+ if not self.disable:
+ self.display(check_delay=False)
+
+ def __iter__(self):
+ try:
+ it = super().__iter__()
+ yield from it
+ # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
+ except: # NOQA
+ self.disp(bar_style='danger')
+ raise
+ # NB: don't `finally: close()`
+ # since this could be a shared bar which the user will `reset()`
+
+ def update(self, n=1):
+ try:
+ return super().update(n=n)
+ # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
+ except: # NOQA
+ # cannot catch KeyboardInterrupt when using manual tqdm
+ # as the interrupt will most likely happen on another statement
+ self.disp(bar_style='danger')
+ raise
+ # NB: don't `finally: close()`
+ # since this could be a shared bar which the user will `reset()`
+
+ def close(self):
+ if self.disable:
+ return
+ super().close()
+ # Try to detect if there was an error or KeyboardInterrupt
+ # in manual mode: if n < total, things probably got wrong
+ if self.total and self.n < self.total:
+ self.disp(bar_style='danger', check_delay=False)
+ else:
+ if self.leave:
+ self.disp(bar_style='success', check_delay=False)
+ else:
+ self.disp(close=True, check_delay=False)
+
+ def clear(self, *_, **__):
+ pass
+
+ def reset(self, total=None):
+ """
+ Resets to 0 iterations for repeated use.
+
+ Consider combining with `leave=True`.
+
+ Parameters
+ ----------
+ total : int or float, optional. Total to use for the new bar.
+ """
+ if self.disable:
+ return super().reset(total=total)
+ _, pbar, _ = self.container.children
+ pbar.bar_style = ''
+ if total is not None:
+ pbar.max = total
+ if not self.total and self.ncols is None: # no longer unknown total
+ pbar.layout.width = None # reset width
+ return super().reset(total=total)
+
+
+def tnrange(*args, **kwargs):
+ """Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
+ return tqdm_notebook(range(*args), **kwargs)
+
+
+# Aliases
+tqdm = tqdm_notebook
+trange = tnrange
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/METADATA b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..3d005485a7d2453cdc329b7221e5fb07cd121057
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/METADATA
@@ -0,0 +1,38 @@
+Metadata-Version: 2.4
+Name: filelock
+Version: 3.25.2
+Summary: A platform independent file lock.
+Project-URL: Documentation, https://py-filelock.readthedocs.io
+Project-URL: Homepage, https://github.com/tox-dev/py-filelock
+Project-URL: Source, https://github.com/tox-dev/py-filelock
+Project-URL: Tracker, https://github.com/tox-dev/py-filelock/issues
+Maintainer-email: Bernát Gábor
+License-Expression: MIT
+License-File: LICENSE
+Keywords: application,cache,directory,log,user
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: System
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+
+# filelock
+
+[](https://pypi.org/project/filelock/)
+[](https://pypi.org/project/filelock/)
+[](https://py-filelock.readthedocs.io/en/latest/?badge=latest)
+[](https://pepy.tech/project/filelock)
+[](https://github.com/tox-dev/py-filelock/actions/workflows/check.yaml)
+
+For more information checkout the [official documentation](https://py-filelock.readthedocs.io/en/latest/index.html).
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/RECORD b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..d22634d0de620923036676c5d1d7b02c363f8a16
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/RECORD
@@ -0,0 +1,16 @@
+filelock/__init__.py,sha256=-eghH1rpZitcXkHLNqZS1w-3_rmb-zsYFyVA8n1MK44,2305
+filelock/_api.py,sha256=l8P7bqosgleI3vPn8OVGQ0-sa2d3WVhvM_j9SCCcTx4,21159
+filelock/_async_read_write.py,sha256=f6QwJ5ENUwircnNhvdkTm4I2nYV5nY-rvnYgs7NZBLc,7545
+filelock/_error.py,sha256=mnelOh0EVyVeskG3rksL4kW3OArL4TMb2-PwmzQWIFg,788
+filelock/_read_write.py,sha256=5tVPm1TDXvAu0sgva_LQJ4DH-SDo0rVxuYAFLEQGZw4,15323
+filelock/_soft.py,sha256=Qar4H767G4zbkJFadMvtn5m0uBCYpHcVk2BfNmLrA1o,4677
+filelock/_unix.py,sha256=pbTsIi29Qv8zBtLsXqc3PzlS3wz2db53M1MIqVo9790,4579
+filelock/_util.py,sha256=wPf-LvzmdHWHw4DinDeVJlB0GNVOYGNUeNKSnHBQuUU,1716
+filelock/_windows.py,sha256=kJFOenaffOP6Qza2b3-yxgiPiDcyroWnzz9STGkCrgM,3943
+filelock/asyncio.py,sha256=NvrDsqS095NZc16l_OjBQcTY-D6xB4Vy7AK3ni8tr8A,13943
+filelock/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+filelock/version.py,sha256=p1u3I6A9nX3mbHkvyJTo-_u506UHBhmP9NaOUwHULRE,706
+filelock-3.25.2.dist-info/METADATA,sha256=CHiP5bKcITUrdDKmfCvMspEdKUj5vuPq3udl5JA3Yj4,1977
+filelock-3.25.2.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
+filelock-3.25.2.dist-info/licenses/LICENSE,sha256=YIyJ1QYK6ZIa3M8yNmlbxlSplG4SMj72wCHfoE4pTUg,1088
+filelock-3.25.2.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/WHEEL b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..b1b94fd58e7e9ed0ef3449473bc48de68afcc3fe
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.29.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..291919c0b6f41d014767f6c877af9f7595fcff99
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock-3.25.2.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Bernát Gábor and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/__init__.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..eabba41f4c697d1218b04a132923eae0d9ce6f1e
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/__init__.py
@@ -0,0 +1,82 @@
+"""
+A platform independent file lock that supports the with-statement.
+
+.. autodata:: filelock.__version__
+ :no-value:
+
+"""
+
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import TYPE_CHECKING
+
+from ._api import AcquireReturnProxy, BaseFileLock
+from ._error import Timeout
+
+try:
+ from ._async_read_write import AsyncAcquireReadWriteReturnProxy, AsyncReadWriteLock
+ from ._read_write import ReadWriteLock
+except ImportError: # sqlite3 may be unavailable if Python was built without it or the C library is missing
+ AsyncAcquireReadWriteReturnProxy = None # type: ignore[assignment, misc]
+ AsyncReadWriteLock = None # type: ignore[assignment, misc]
+ ReadWriteLock = None # type: ignore[assignment, misc]
+
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock, has_fcntl
+from ._windows import WindowsFileLock
+from .asyncio import (
+ AsyncAcquireReturnProxy,
+ AsyncSoftFileLock,
+ AsyncUnixFileLock,
+ AsyncWindowsFileLock,
+ BaseAsyncFileLock,
+)
+from .version import version
+
+#: version of the project as a string
+__version__: str = version
+
+
+if sys.platform == "win32": # pragma: win32 cover
+ _FileLock: type[BaseFileLock] = WindowsFileLock
+ _AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
+else: # pragma: win32 no cover # noqa: PLR5501
+ if has_fcntl:
+ _FileLock: type[BaseFileLock] = UnixFileLock
+ _AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
+ else:
+ _FileLock = SoftFileLock
+ _AsyncFileLock = AsyncSoftFileLock
+ if warnings is not None:
+ warnings.warn("only soft file lock is available", stacklevel=2)
+
+if TYPE_CHECKING:
+ FileLock = SoftFileLock
+ AsyncFileLock = AsyncSoftFileLock
+else:
+ #: Alias for the lock, which should be used for the current platform.
+ FileLock = _FileLock
+ AsyncFileLock = _AsyncFileLock
+
+
+__all__ = [
+ "AcquireReturnProxy",
+ "AsyncAcquireReadWriteReturnProxy",
+ "AsyncAcquireReturnProxy",
+ "AsyncFileLock",
+ "AsyncReadWriteLock",
+ "AsyncSoftFileLock",
+ "AsyncUnixFileLock",
+ "AsyncWindowsFileLock",
+ "BaseAsyncFileLock",
+ "BaseFileLock",
+ "FileLock",
+ "ReadWriteLock",
+ "SoftFileLock",
+ "Timeout",
+ "UnixFileLock",
+ "WindowsFileLock",
+ "__version__",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_api.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bcc94cdf258f808e3f637186182c05dc961203c
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_api.py
@@ -0,0 +1,578 @@
+from __future__ import annotations
+
+import contextlib
+import inspect
+import logging
+import os
+import pathlib
+import sys
+import time
+import warnings
+from abc import ABCMeta, abstractmethod
+from dataclasses import dataclass
+from threading import local
+from typing import TYPE_CHECKING, Any, cast
+from weakref import WeakValueDictionary
+
+from ._error import Timeout
+
+#: Sentinel indicating that no explicit file permission mode was passed.
+#: When used, lock files are created with 0o666 (letting umask and default ACLs control the final permissions)
+#: and fchmod is skipped so that POSIX default ACL inheritance is preserved.
+_UNSET_FILE_MODE: int = -1
+
+if TYPE_CHECKING:
+ from collections.abc import Callable
+ from types import TracebackType
+
+ from ._read_write import ReadWriteLock
+
+ if sys.version_info >= (3, 11): # pragma: no cover (py311+)
+ from typing import Self
+ else: # pragma: no cover ( None:
+ super().__init__()
+ self.held: dict[str, int] = {}
+
+
+_registry = _ThreadLocalRegistry()
+
+
+# This is a helper class which is returned by :meth:`BaseFileLock.acquire` and wraps the lock to make sure __enter__
+# is not called twice when entering the with statement. If we would simply return *self*, the lock would be acquired
+# again in the *__enter__* method of the BaseFileLock, but not released again automatically. issue #37 (memory leak)
+class AcquireReturnProxy:
+ """A context-aware object that will release the lock file when exiting."""
+
+ def __init__(self, lock: BaseFileLock | ReadWriteLock) -> None:
+ self.lock: BaseFileLock | ReadWriteLock = lock
+
+ def __enter__(self) -> BaseFileLock | ReadWriteLock:
+ return self.lock
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ self.lock.release()
+
+
+@dataclass
+class FileLockContext:
+ """A dataclass which holds the context for a ``BaseFileLock`` object."""
+
+ # The context is held in a separate class to allow optional use of thread local storage via the
+ # ThreadLocalFileContext class.
+
+ #: The path to the lock file.
+ lock_file: str
+
+ #: The default timeout value.
+ timeout: float
+
+ #: The mode for the lock files
+ mode: int
+
+ #: Whether the lock should be blocking or not
+ blocking: bool
+
+ #: The default polling interval value.
+ poll_interval: float
+
+ #: The lock lifetime in seconds; ``None`` means the lock never expires.
+ lifetime: float | None = None
+
+ #: The file descriptor for the *_lock_file* as it is returned by the os.open() function, not None when lock held
+ lock_file_fd: int | None = None
+
+ #: The lock counter is used for implementing the nested locking mechanism.
+ lock_counter: int = 0 # When the lock is acquired is increased and the lock is only released, when this value is 0
+
+
+class ThreadLocalFileContext(FileLockContext, local):
+ """A thread local version of the ``FileLockContext`` class."""
+
+
+class FileLockMeta(ABCMeta):
+ _instances: WeakValueDictionary[str, BaseFileLock]
+
+ def __call__( # noqa: PLR0913
+ cls,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = _UNSET_FILE_MODE,
+ thread_local: bool = True, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ poll_interval: float = 0.05,
+ lifetime: float | None = None,
+ **kwargs: Any, # capture remaining kwargs for subclasses # noqa: ANN401
+ ) -> BaseFileLock:
+ if is_singleton:
+ instance = cls._instances.get(str(lock_file))
+ if instance:
+ params_to_check = {
+ "thread_local": (thread_local, instance.is_thread_local()),
+ "timeout": (timeout, instance.timeout),
+ "mode": (mode, instance._context.mode), # noqa: SLF001
+ "blocking": (blocking, instance.blocking),
+ "poll_interval": (poll_interval, instance.poll_interval),
+ "lifetime": (lifetime, instance.lifetime),
+ }
+
+ non_matching_params = {
+ name: (passed_param, set_param)
+ for name, (passed_param, set_param) in params_to_check.items()
+ if passed_param != set_param
+ }
+ if not non_matching_params:
+ return cast("BaseFileLock", instance)
+
+ # parameters do not match; raise error
+ msg = "Singleton lock instances cannot be initialized with differing arguments"
+ msg += "\nNon-matching arguments: "
+ for param_name, (passed_param, set_param) in non_matching_params.items():
+ msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
+ raise ValueError(msg)
+
+ # Workaround to make `__init__`'s params optional in subclasses
+ # E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant
+ # (https://github.com/tox-dev/filelock/pull/340)
+
+ all_params = {
+ "timeout": timeout,
+ "mode": mode,
+ "thread_local": thread_local,
+ "blocking": blocking,
+ "is_singleton": is_singleton,
+ "poll_interval": poll_interval,
+ "lifetime": lifetime,
+ **kwargs,
+ }
+
+ present_params = inspect.signature(cls.__init__).parameters
+ init_params = {key: value for key, value in all_params.items() if key in present_params}
+
+ instance = super().__call__(lock_file, **init_params)
+
+ if is_singleton:
+ cls._instances[str(lock_file)] = instance
+
+ return cast("BaseFileLock", instance)
+
+
+class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta):
+ """
+ Abstract base class for a file lock object.
+
+ Provides a reentrant, cross-process exclusive lock backed by OS-level primitives. Subclasses implement the actual
+ locking mechanism (:class:`UnixFileLock `, :class:`WindowsFileLock
+ `, :class:`SoftFileLock `).
+
+ """
+
+ _instances: WeakValueDictionary[str, BaseFileLock]
+
+ def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
+ """Setup unique state for lock subclasses."""
+ super().__init_subclass__(**kwargs)
+ cls._instances = WeakValueDictionary()
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = _UNSET_FILE_MODE,
+ thread_local: bool = True, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ poll_interval: float = 0.05,
+ lifetime: float | None = None,
+ ) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+ :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in the
+ acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it to a
+ negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+ :param mode: file permissions for the lockfile. When not specified, the OS controls permissions via umask and
+ default ACLs, preserving POSIX default ACL inheritance in shared directories.
+ :param thread_local: Whether this object's internal context should be thread local or not. If this is set to
+ ``False`` then the lock will be reentrant across threads.
+ :param blocking: whether the lock should be blocking or not
+ :param is_singleton: If this is set to ``True`` then only one instance of this class will be created per lock
+ file. This is useful if you want to use the lock object for reentrant locking without needing to pass the
+ same object around.
+ :param poll_interval: default interval for polling the lock file, in seconds. It will be used as fallback value
+ in the acquire method, if no poll_interval value (``None``) is given.
+ :param lifetime: maximum time in seconds a lock can be held before it is considered expired. When set, a waiting
+ process will break a lock whose file modification time is older than ``lifetime`` seconds. ``None`` (the
+ default) means locks never expire.
+
+ """
+ self._is_thread_local = thread_local
+ self._is_singleton = is_singleton
+
+ # Create the context. Note that external code should not work with the context directly and should instead use
+ # properties of this class.
+ kwargs: dict[str, Any] = {
+ "lock_file": os.fspath(lock_file),
+ "timeout": timeout,
+ "mode": mode,
+ "blocking": blocking,
+ "poll_interval": poll_interval,
+ "lifetime": lifetime,
+ }
+ self._context: FileLockContext = (ThreadLocalFileContext if thread_local else FileLockContext)(**kwargs)
+
+ def is_thread_local(self) -> bool:
+ """:returns: a flag indicating if this lock is thread local or not"""
+ return self._is_thread_local
+
+ @property
+ def is_singleton(self) -> bool:
+ """
+ :returns: a flag indicating if this lock is singleton or not
+
+ .. versionadded:: 3.13.0
+
+ """
+ return self._is_singleton
+
+ @property
+ def lock_file(self) -> str:
+ """:returns: path to the lock file"""
+ return self._context.lock_file
+
+ @property
+ def timeout(self) -> float:
+ """
+ :returns: the default timeout value, in seconds
+
+ .. versionadded:: 2.0.0
+
+ """
+ return self._context.timeout
+
+ @timeout.setter
+ def timeout(self, value: float | str) -> None:
+ """
+ Change the default timeout value.
+
+ :param value: the new value, in seconds
+
+ """
+ self._context.timeout = float(value)
+
+ @property
+ def blocking(self) -> bool:
+ """
+ :returns: whether the locking is blocking or not
+
+ .. versionadded:: 3.14.0
+
+ """
+ return self._context.blocking
+
+ @blocking.setter
+ def blocking(self, value: bool) -> None:
+ """
+ Change the default blocking value.
+
+ :param value: the new value as bool
+
+ """
+ self._context.blocking = value
+
+ @property
+ def poll_interval(self) -> float:
+ """
+ :returns: the default polling interval, in seconds
+
+ .. versionadded:: 3.24.0
+
+ """
+ return self._context.poll_interval
+
+ @poll_interval.setter
+ def poll_interval(self, value: float) -> None:
+ """
+ Change the default polling interval.
+
+ :param value: the new value, in seconds
+
+ """
+ self._context.poll_interval = value
+
+ @property
+ def lifetime(self) -> float | None:
+ """
+ :returns: the lock lifetime in seconds, or ``None`` if the lock never expires
+
+ .. versionadded:: 3.24.0
+
+ """
+ return self._context.lifetime
+
+ @lifetime.setter
+ def lifetime(self, value: float | None) -> None:
+ """
+ Change the lock lifetime.
+
+ :param value: the new value in seconds, or ``None`` to disable expiration
+
+ """
+ self._context.lifetime = value
+
+ @property
+ def mode(self) -> int:
+ """:returns: the file permissions for the lockfile"""
+ return 0o644 if self._context.mode == _UNSET_FILE_MODE else self._context.mode
+
+ @property
+ def has_explicit_mode(self) -> bool:
+ """:returns: whether the file permissions were explicitly set"""
+ return self._context.mode != _UNSET_FILE_MODE
+
+ def _open_mode(self) -> int:
+ """:returns: the mode for os.open() — 0o666 when unset (let umask/ACLs decide), else the explicit mode"""
+ return 0o666 if self._context.mode == _UNSET_FILE_MODE else self._context.mode
+
+ def _try_break_expired_lock(self) -> None:
+ """Remove the lock file if its modification time exceeds the configured :attr:`lifetime`."""
+ if (lifetime := self._context.lifetime) is None:
+ return
+ with contextlib.suppress(OSError):
+ if time.time() - pathlib.Path(self.lock_file).stat().st_mtime < lifetime:
+ return
+ break_path = f"{self.lock_file}.break.{os.getpid()}"
+ pathlib.Path(self.lock_file).rename(break_path)
+ pathlib.Path(break_path).unlink()
+
+ @abstractmethod
+ def _acquire(self) -> None:
+ """If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def _release(self) -> None:
+ """Releases the lock and sets self._context.lock_file_fd to None."""
+ raise NotImplementedError
+
+ @property
+ def is_locked(self) -> bool:
+ """
+ :returns: A boolean indicating if the lock file is holding the lock currently.
+
+ .. versionchanged:: 2.0.0
+
+ This was previously a method and is now a property.
+
+ """
+ return self._context.lock_file_fd is not None
+
+ @property
+ def lock_counter(self) -> int:
+ """:returns: The number of times this lock has been acquired (but not yet released)."""
+ return self._context.lock_counter
+
+ @staticmethod
+ def _check_give_up( # noqa: PLR0913
+ lock_id: int,
+ lock_filename: str,
+ *,
+ blocking: bool,
+ cancel_check: Callable[[], bool] | None,
+ timeout: float,
+ start_time: float,
+ ) -> bool:
+ if blocking is False:
+ _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
+ return True
+ if cancel_check is not None and cancel_check():
+ _LOGGER.debug("Cancellation requested for lock %s on %s", lock_id, lock_filename)
+ return True
+ if 0 <= timeout < time.perf_counter() - start_time:
+ _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
+ return True
+ return False
+
+ def acquire( # noqa: C901
+ self,
+ timeout: float | None = None,
+ poll_interval: float | None = None,
+ *,
+ poll_intervall: float | None = None,
+ blocking: bool | None = None,
+ cancel_check: Callable[[], bool] | None = None,
+ ) -> AcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+ :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and
+ if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired
+ :param poll_interval: interval of trying to acquire the lock file, ``None`` means use the default
+ :attr:`~poll_interval`
+ :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
+ :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+ first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
+ :param cancel_check: a callable returning ``True`` when the acquisition should be canceled. Checked on each poll
+ iteration. When triggered, raises :class:`~Timeout` just like an expired timeout.
+
+ :returns: a context object that will unlock the file when the context is exited
+
+ :raises Timeout: if fails to acquire lock within the timeout period
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ .. versionchanged:: 2.0.0
+
+ This method returns now a *proxy* object instead of *self*, so that it can be used in a with statement
+ without side effects.
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self._context.timeout
+
+ if blocking is None:
+ blocking = self._context.blocking
+
+ if poll_intervall is not None:
+ msg = "use poll_interval instead of poll_intervall"
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ poll_interval = poll_intervall
+
+ poll_interval = poll_interval if poll_interval is not None else self._context.poll_interval
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ self._context.lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self.lock_file
+ canonical = _canonical(lock_filename)
+
+ would_block = self._context.lock_counter == 1 and not self.is_locked and timeout < 0 and blocking
+ if would_block and (existing := _registry.held.get(canonical)) is not None and existing != lock_id:
+ self._context.lock_counter -= 1
+ msg = (
+ f"Deadlock: lock '{lock_filename}' is already held by a different "
+ f"FileLock instance in this thread. Use is_singleton=True to "
+ f"enable reentrant locking across instances."
+ )
+ raise RuntimeError(msg)
+
+ start_time = time.perf_counter()
+ try:
+ while True:
+ if not self.is_locked:
+ self._try_break_expired_lock()
+ _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+ self._acquire()
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+ break
+ if self._check_give_up(
+ lock_id,
+ lock_filename,
+ blocking=blocking,
+ cancel_check=cancel_check,
+ timeout=timeout,
+ start_time=start_time,
+ ):
+ raise Timeout(lock_filename) # noqa: TRY301
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ time.sleep(poll_interval)
+ except BaseException:
+ self._context.lock_counter = max(0, self._context.lock_counter - 1)
+ if self._context.lock_counter == 0:
+ _registry.held.pop(canonical, None)
+ raise
+ if self._context.lock_counter == 1:
+ _registry.held[canonical] = lock_id
+ return AcquireReturnProxy(lock=self)
+
+ def release(self, force: bool = False) -> None: # noqa: FBT001, FBT002
+ """
+ Release the file lock. The lock is only completely released when the lock counter reaches 0. The lock file
+ itself is not automatically deleted.
+
+ :param force: If true, the lock counter is ignored and the lock is released in every case.
+
+ """
+ if self.is_locked:
+ self._context.lock_counter -= 1
+
+ if self._context.lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self.lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+ self._release()
+ self._context.lock_counter = 0
+ _registry.held.pop(_canonical(lock_filename), None)
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ def __enter__(self) -> Self:
+ """
+ Acquire the lock.
+
+ :returns: the lock object
+
+ """
+ self.acquire()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+
+ """
+ self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ self.release(force=True)
+
+
+__all__ = [
+ "_UNSET_FILE_MODE",
+ "AcquireReturnProxy",
+ "BaseFileLock",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_async_read_write.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_async_read_write.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bba315233eeaff8a355aa31fc775c5c1e881e26
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_async_read_write.py
@@ -0,0 +1,203 @@
+"""Async wrapper around :class:`ReadWriteLock` for use with ``asyncio``."""
+
+from __future__ import annotations
+
+import asyncio
+import functools
+from contextlib import asynccontextmanager
+from typing import TYPE_CHECKING
+
+from ._read_write import ReadWriteLock
+
+if TYPE_CHECKING:
+ import os
+ from collections.abc import AsyncGenerator, Callable
+ from concurrent import futures
+ from types import TracebackType
+
+
+class AsyncAcquireReadWriteReturnProxy:
+ """Context-aware object that releases the async read/write lock on exit."""
+
+ def __init__(self, lock: AsyncReadWriteLock) -> None:
+ self.lock = lock
+
+ async def __aenter__(self) -> AsyncReadWriteLock:
+ return self.lock
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ await self.lock.release()
+
+
+class AsyncReadWriteLock:
+ """
+ Async wrapper around :class:`ReadWriteLock` for use in ``asyncio`` applications.
+
+ Because Python's :mod:`sqlite3` module has no async API, all blocking SQLite operations are dispatched to a thread
+ pool via ``loop.run_in_executor()``. Reentrancy, upgrade/downgrade rules, and singleton behavior are delegated
+ to the underlying :class:`ReadWriteLock`.
+
+ :param lock_file: path to the SQLite database file used as the lock
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+ :param is_singleton: if ``True``, reuse existing :class:`ReadWriteLock` instances for the same resolved path
+ :param loop: event loop for ``run_in_executor``; ``None`` uses the running loop
+ :param executor: executor for ``run_in_executor``; ``None`` uses the default executor
+
+ .. versionadded:: 3.21.0
+
+ """
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ *,
+ blocking: bool = True,
+ is_singleton: bool = True,
+ loop: asyncio.AbstractEventLoop | None = None,
+ executor: futures.Executor | None = None,
+ ) -> None:
+ self._lock = ReadWriteLock(lock_file, timeout, blocking=blocking, is_singleton=is_singleton)
+ self._loop = loop
+ self._executor = executor
+
+ @property
+ def lock_file(self) -> str:
+ """:returns: the path to the lock file."""
+ return self._lock.lock_file
+
+ @property
+ def timeout(self) -> float:
+ """:returns: the default timeout."""
+ return self._lock.timeout
+
+ @property
+ def blocking(self) -> bool:
+ """:returns: whether blocking is enabled by default."""
+ return self._lock.blocking
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """:returns: the event loop (or ``None`` for the running loop)."""
+ return self._loop
+
+ @property
+ def executor(self) -> futures.Executor | None:
+ """:returns: the executor (or ``None`` for the default)."""
+ return self._executor
+
+ async def _run(self, func: Callable[..., object], *args: object, **kwargs: object) -> object:
+ loop = self._loop or asyncio.get_running_loop()
+ return await loop.run_in_executor(self._executor, functools.partial(func, *args, **kwargs))
+
+ async def acquire_read(self, timeout: float = -1, *, blocking: bool = True) -> AsyncAcquireReadWriteReturnProxy:
+ """
+ Acquire a shared read lock.
+
+ See :meth:`ReadWriteLock.acquire_read` for full semantics.
+
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+
+ :returns: a proxy that can be used as an async context manager to release the lock
+
+ :raises RuntimeError: if a write lock is already held on this instance
+ :raises Timeout: if the lock cannot be acquired within *timeout* seconds
+
+ """
+ await self._run(self._lock.acquire_read, timeout, blocking=blocking)
+ return AsyncAcquireReadWriteReturnProxy(lock=self)
+
+ async def acquire_write(self, timeout: float = -1, *, blocking: bool = True) -> AsyncAcquireReadWriteReturnProxy:
+ """
+ Acquire an exclusive write lock.
+
+ See :meth:`ReadWriteLock.acquire_write` for full semantics.
+
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+
+ :returns: a proxy that can be used as an async context manager to release the lock
+
+ :raises RuntimeError: if a read lock is already held, or a write lock is held by a different thread
+ :raises Timeout: if the lock cannot be acquired within *timeout* seconds
+
+ """
+ await self._run(self._lock.acquire_write, timeout, blocking=blocking)
+ return AsyncAcquireReadWriteReturnProxy(lock=self)
+
+ async def release(self, *, force: bool = False) -> None:
+ """
+ Release one level of the current lock.
+
+ See :meth:`ReadWriteLock.release` for full semantics.
+
+ :param force: if ``True``, release the lock completely regardless of the current lock level
+
+ :raises RuntimeError: if no lock is currently held and *force* is ``False``
+
+ """
+ await self._run(self._lock.release, force=force)
+
+ @asynccontextmanager
+ async def read_lock(self, timeout: float | None = None, *, blocking: bool | None = None) -> AsyncGenerator[None]:
+ """
+ Async context manager that acquires and releases a shared read lock.
+
+ Falls back to instance defaults for *timeout* and *blocking* when ``None``.
+
+ :param timeout: maximum wait time in seconds, or ``None`` to use the instance default
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately; ``None`` uses the instance default
+
+ """
+ if timeout is None:
+ timeout = self._lock.timeout
+ if blocking is None:
+ blocking = self._lock.blocking
+ await self.acquire_read(timeout, blocking=blocking)
+ try:
+ yield
+ finally:
+ await self.release()
+
+ @asynccontextmanager
+ async def write_lock(self, timeout: float | None = None, *, blocking: bool | None = None) -> AsyncGenerator[None]:
+ """
+ Async context manager that acquires and releases an exclusive write lock.
+
+ Falls back to instance defaults for *timeout* and *blocking* when ``None``.
+
+ :param timeout: maximum wait time in seconds, or ``None`` to use the instance default
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately; ``None`` uses the instance default
+
+ """
+ if timeout is None:
+ timeout = self._lock.timeout
+ if blocking is None:
+ blocking = self._lock.blocking
+ await self.acquire_write(timeout, blocking=blocking)
+ try:
+ yield
+ finally:
+ await self.release()
+
+ async def close(self) -> None:
+ """
+ Release the lock (if held) and close the underlying SQLite connection.
+
+ After calling this method, the lock instance is no longer usable.
+
+ """
+ await self._run(self._lock.close)
+
+
+__all__ = [
+ "AsyncAcquireReadWriteReturnProxy",
+ "AsyncReadWriteLock",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_error.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_error.py
new file mode 100644
index 0000000000000000000000000000000000000000..7aaac6b005091938e4907fedb6889a00475c082e
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_error.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from typing import Any
+
+
+class Timeout(TimeoutError): # noqa: N818
+ """Raised when the lock could not be acquired in *timeout* seconds."""
+
+ def __init__(self, lock_file: str) -> None:
+ super().__init__()
+ self._lock_file = lock_file
+
+ def __reduce__(self) -> str | tuple[Any, ...]:
+ return self.__class__, (self._lock_file,) # Properly pickle the exception
+
+ def __str__(self) -> str:
+ return f"The file lock '{self._lock_file}' could not be acquired."
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.lock_file!r})"
+
+ @property
+ def lock_file(self) -> str:
+ """:returns: The path of the file lock."""
+ return self._lock_file
+
+
+__all__ = [
+ "Timeout",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_read_write.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_read_write.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e26cbf3dba1b5bd84ab34d6a781ed1f826496be
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_read_write.py
@@ -0,0 +1,364 @@
+from __future__ import annotations
+
+import atexit
+import logging
+import os
+import pathlib
+import sqlite3
+import threading
+import time
+from contextlib import contextmanager, suppress
+from typing import TYPE_CHECKING, Literal
+from weakref import WeakValueDictionary
+
+from ._api import AcquireReturnProxy
+from ._error import Timeout
+
+if TYPE_CHECKING:
+ from collections.abc import Generator
+
+_LOGGER = logging.getLogger("filelock")
+
+_all_connections: set[sqlite3.Connection] = set()
+_all_connections_lock = threading.Lock()
+
+
+def _cleanup_connections() -> None:
+ with _all_connections_lock:
+ for con in list(_all_connections):
+ with suppress(Exception):
+ con.close()
+ _all_connections.clear()
+
+
+atexit.register(_cleanup_connections)
+
+# sqlite3_busy_timeout() accepts a C int, max 2_147_483_647 on 32-bit. Use a lower value to be safe (~23 days).
+_MAX_SQLITE_TIMEOUT_MS = 2_000_000_000 - 1
+
+
+def timeout_for_sqlite(timeout: float, *, blocking: bool, already_waited: float) -> int:
+ if blocking is False:
+ return 0
+
+ if timeout == -1:
+ return _MAX_SQLITE_TIMEOUT_MS
+
+ if timeout < 0:
+ msg = "timeout must be a non-negative number or -1"
+ raise ValueError(msg)
+
+ remaining = max(timeout - already_waited, 0) if timeout > 0 else timeout
+ timeout_ms = int(remaining * 1000)
+ if timeout_ms > _MAX_SQLITE_TIMEOUT_MS or timeout_ms < 0:
+ _LOGGER.warning("timeout %s is too large for SQLite, using %s ms instead", timeout, _MAX_SQLITE_TIMEOUT_MS)
+ return _MAX_SQLITE_TIMEOUT_MS
+ return timeout_ms
+
+
+class _ReadWriteLockMeta(type):
+ """
+ Metaclass that handles singleton resolution when is_singleton=True.
+
+ Singleton logic lives here rather than in ReadWriteLock.get_lock so that ``ReadWriteLock(path)`` transparently
+ returns cached instances without a 2-arg ``super()`` call that type checkers cannot verify.
+
+ """
+
+ _instances: WeakValueDictionary[pathlib.Path, ReadWriteLock]
+ _instances_lock: threading.Lock
+
+ def __call__(
+ cls,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ *,
+ blocking: bool = True,
+ is_singleton: bool = True,
+ ) -> ReadWriteLock:
+ if not is_singleton:
+ return super().__call__(lock_file, timeout, blocking=blocking, is_singleton=is_singleton)
+
+ normalized = pathlib.Path(lock_file).resolve()
+ with cls._instances_lock:
+ if normalized not in cls._instances:
+ instance = super().__call__(lock_file, timeout, blocking=blocking, is_singleton=is_singleton)
+ cls._instances[normalized] = instance
+ else:
+ instance = cls._instances[normalized]
+
+ if instance.timeout != timeout or instance.blocking != blocking:
+ msg = (
+ f"Singleton lock created with timeout={instance.timeout}, blocking={instance.blocking},"
+ f" cannot be changed to timeout={timeout}, blocking={blocking}"
+ )
+ raise ValueError(msg)
+ return instance
+
+
+class ReadWriteLock(metaclass=_ReadWriteLockMeta):
+ """
+ Cross-process read-write lock backed by SQLite.
+
+ Allows concurrent shared readers or a single exclusive writer. The lock is reentrant within the same mode (multiple
+ ``acquire_read`` calls nest, as do multiple ``acquire_write`` calls from the same thread), but upgrading from read
+ to write or downgrading from write to read raises :class:`RuntimeError`. Write locks are pinned to the thread that
+ acquired them.
+
+ By default, ``is_singleton=True``: calling ``ReadWriteLock(path)`` with the same resolved path returns the same
+ instance. The lock file must use a ``.db`` extension (SQLite database).
+
+ :param lock_file: path to the SQLite database file used as the lock
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+ :param is_singleton: if ``True``, reuse existing instances for the same resolved path
+
+ .. versionadded:: 3.21.0
+
+ """
+
+ _instances: WeakValueDictionary[pathlib.Path, ReadWriteLock] = WeakValueDictionary()
+ _instances_lock = threading.Lock()
+
+ @classmethod
+ def get_lock(
+ cls, lock_file: str | os.PathLike[str], timeout: float = -1, *, blocking: bool = True
+ ) -> ReadWriteLock:
+ """
+ Return the singleton :class:`ReadWriteLock` for *lock_file*.
+
+ :param lock_file: path to the SQLite database file used as the lock
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+
+ :returns: the singleton lock instance
+
+ :raises ValueError: if an instance already exists for this path with different *timeout* or *blocking* values
+
+ """
+ return cls(lock_file, timeout, blocking=blocking)
+
+ def __init__(
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ *,
+ blocking: bool = True,
+ is_singleton: bool = True, # noqa: ARG002 # consumed by _ReadWriteLockMeta.__call__
+ ) -> None:
+ self.lock_file = os.fspath(lock_file)
+ self.timeout = timeout
+ self.blocking = blocking
+ self._transaction_lock = threading.Lock() # serializes the (possibly blocking) SQLite transaction work
+ self._internal_lock = threading.Lock() # protects _lock_level / _current_mode updates and rollback
+ self._lock_level = 0
+ self._current_mode: Literal["read", "write"] | None = None
+ self._write_thread_id: int | None = None
+ self._con = sqlite3.connect(self.lock_file, check_same_thread=False)
+ with _all_connections_lock:
+ _all_connections.add(self._con)
+
+ def _acquire_transaction_lock(self, *, blocking: bool, timeout: float) -> None:
+ if not blocking:
+ acquired = self._transaction_lock.acquire(blocking=False)
+ elif timeout == -1:
+ acquired = self._transaction_lock.acquire(blocking=True)
+ else:
+ acquired = self._transaction_lock.acquire(blocking=True, timeout=timeout)
+ if not acquired:
+ raise Timeout(self.lock_file) from None
+
+ def _validate_reentrant(self, mode: Literal["read", "write"], opposite: str, direction: str) -> AcquireReturnProxy:
+ if self._current_mode != mode:
+ msg = (
+ f"Cannot acquire {mode} lock on {self.lock_file} (lock id: {id(self)}): "
+ f"already holding a {opposite} lock ({direction} not allowed)"
+ )
+ raise RuntimeError(msg)
+ if mode == "write" and (cur := threading.get_ident()) != self._write_thread_id:
+ msg = (
+ f"Cannot acquire write lock on {self.lock_file} (lock id: {id(self)}) "
+ f"from thread {cur} while it is held by thread {self._write_thread_id}"
+ )
+ raise RuntimeError(msg)
+ self._lock_level += 1
+ return AcquireReturnProxy(lock=self)
+
+ def _configure_and_begin(
+ self, mode: Literal["read", "write"], timeout: float, *, blocking: bool, start_time: float
+ ) -> None:
+ waited = time.perf_counter() - start_time
+ timeout_ms = timeout_for_sqlite(timeout, blocking=blocking, already_waited=waited)
+ self._con.execute(f"PRAGMA busy_timeout={timeout_ms};").close()
+ # Use legacy journal mode (not WAL) because WAL does not block readers when a concurrent EXCLUSIVE
+ # write transaction is active, making read-write locking impossible without modifying table data.
+ # MEMORY is safe here since no actual writes happen — crashes cannot corrupt the DB.
+ # See https://sqlite.org/lang_transaction.html#deferred_immediate_and_exclusive_transactions
+ #
+ # Set here (not in __init__) because this pragma itself may block on a locked database,
+ # so it must run after busy_timeout is configured above.
+ self._con.execute("PRAGMA journal_mode=MEMORY;").close()
+ # Recompute remaining timeout after the potentially blocking journal_mode pragma.
+ waited = time.perf_counter() - start_time
+ if (recomputed := timeout_for_sqlite(timeout, blocking=blocking, already_waited=waited)) != timeout_ms:
+ self._con.execute(f"PRAGMA busy_timeout={recomputed};").close()
+ stmt = "BEGIN EXCLUSIVE TRANSACTION;" if mode == "write" else "BEGIN TRANSACTION;"
+ self._con.execute(stmt).close()
+ if mode == "read":
+ # A SELECT is needed to force SQLite to actually acquire the SHARED lock on the database.
+ # https://www.sqlite.org/lockingv3.html#transaction_control
+ self._con.execute("SELECT name FROM sqlite_schema LIMIT 1;").close()
+
+ def _acquire(self, mode: Literal["read", "write"], timeout: float, *, blocking: bool) -> AcquireReturnProxy:
+ opposite = "write" if mode == "read" else "read"
+ direction = "downgrade" if mode == "read" else "upgrade"
+
+ with self._internal_lock:
+ if self._lock_level > 0:
+ return self._validate_reentrant(mode, opposite, direction)
+
+ start_time = time.perf_counter()
+ self._acquire_transaction_lock(blocking=blocking, timeout=timeout)
+ try:
+ # Double-check: another thread may have acquired the lock while we waited on _transaction_lock.
+ with self._internal_lock:
+ if self._lock_level > 0:
+ return self._validate_reentrant(mode, opposite, direction)
+
+ self._configure_and_begin(mode, timeout, blocking=blocking, start_time=start_time)
+
+ with self._internal_lock:
+ self._current_mode = mode
+ self._lock_level = 1
+ if mode == "write":
+ self._write_thread_id = threading.get_ident()
+
+ return AcquireReturnProxy(lock=self)
+
+ except sqlite3.OperationalError as exc:
+ if "database is locked" not in str(exc):
+ raise
+ raise Timeout(self.lock_file) from None
+ finally:
+ self._transaction_lock.release()
+
+ def acquire_read(self, timeout: float = -1, *, blocking: bool = True) -> AcquireReturnProxy:
+ """
+ Acquire a shared read lock.
+
+ If this instance already holds a read lock, the lock level is incremented (reentrant). Attempting to acquire a
+ read lock while holding a write lock raises :class:`RuntimeError` (downgrade not allowed).
+
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+
+ :returns: a proxy that can be used as a context manager to release the lock
+
+ :raises RuntimeError: if a write lock is already held on this instance
+ :raises Timeout: if the lock cannot be acquired within *timeout* seconds
+
+ """
+ return self._acquire("read", timeout, blocking=blocking)
+
+ def acquire_write(self, timeout: float = -1, *, blocking: bool = True) -> AcquireReturnProxy:
+ """
+ Acquire an exclusive write lock.
+
+ If this instance already holds a write lock from the same thread, the lock level is incremented (reentrant).
+ Attempting to acquire a write lock while holding a read lock raises :class:`RuntimeError` (upgrade not allowed).
+ Write locks are pinned to the acquiring thread: a different thread trying to re-enter also raises
+ :class:`RuntimeError`.
+
+ :param timeout: maximum wait time in seconds; ``-1`` means block indefinitely
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately when the lock is unavailable
+
+ :returns: a proxy that can be used as a context manager to release the lock
+
+ :raises RuntimeError: if a read lock is already held, or a write lock is held by a different thread
+ :raises Timeout: if the lock cannot be acquired within *timeout* seconds
+
+ """
+ return self._acquire("write", timeout, blocking=blocking)
+
+ def release(self, *, force: bool = False) -> None:
+ """
+ Release one level of the current lock.
+
+ When the lock level reaches zero the underlying SQLite transaction is rolled back, releasing the database lock.
+
+ :param force: if ``True``, release the lock completely regardless of the current lock level
+
+ :raises RuntimeError: if no lock is currently held and *force* is ``False``
+
+ """
+ should_rollback = False
+ with self._internal_lock:
+ if self._lock_level == 0:
+ if force:
+ return
+ msg = f"Cannot release a lock on {self.lock_file} (lock id: {id(self)}) that is not held"
+ raise RuntimeError(msg)
+ if force:
+ self._lock_level = 0
+ else:
+ self._lock_level -= 1
+ if self._lock_level == 0:
+ self._current_mode = None
+ self._write_thread_id = None
+ should_rollback = True
+ if should_rollback:
+ self._con.rollback()
+
+ @contextmanager
+ def read_lock(self, timeout: float | None = None, *, blocking: bool | None = None) -> Generator[None]:
+ """
+ Context manager that acquires and releases a shared read lock.
+
+ Falls back to instance defaults for *timeout* and *blocking* when ``None``.
+
+ :param timeout: maximum wait time in seconds, or ``None`` to use the instance default
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately; ``None`` uses the instance default
+
+ """
+ if timeout is None:
+ timeout = self.timeout
+ if blocking is None:
+ blocking = self.blocking
+ self.acquire_read(timeout, blocking=blocking)
+ try:
+ yield
+ finally:
+ self.release()
+
+ @contextmanager
+ def write_lock(self, timeout: float | None = None, *, blocking: bool | None = None) -> Generator[None]:
+ """
+ Context manager that acquires and releases an exclusive write lock.
+
+ Falls back to instance defaults for *timeout* and *blocking* when ``None``.
+
+ :param timeout: maximum wait time in seconds, or ``None`` to use the instance default
+ :param blocking: if ``False``, raise :class:`~filelock.Timeout` immediately; ``None`` uses the instance default
+
+ """
+ if timeout is None:
+ timeout = self.timeout
+ if blocking is None:
+ blocking = self.blocking
+ self.acquire_write(timeout, blocking=blocking)
+ try:
+ yield
+ finally:
+ self.release()
+
+ def close(self) -> None:
+ """
+ Release the lock (if held) and close the underlying SQLite connection.
+
+ After calling this method, the lock instance is no longer usable.
+
+ """
+ self.release(force=True)
+ self._con.close()
+ with _all_connections_lock:
+ _all_connections.discard(self._con)
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_soft.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_soft.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e2cde5673239b4e827c93a61aae55828a7221db
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_soft.py
@@ -0,0 +1,127 @@
+from __future__ import annotations
+
+import os
+import socket
+import sys
+import time
+from contextlib import suppress
+from errno import EACCES, EEXIST, EPERM, ESRCH
+from pathlib import Path
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists, raise_on_not_writable_file
+
+_WIN_SYNCHRONIZE = 0x100000
+_WIN_ERROR_INVALID_PARAMETER = 87
+
+
+class SoftFileLock(BaseFileLock):
+ """
+ Portable file lock based on file existence.
+
+ Unlike :class:`UnixFileLock ` and :class:`WindowsFileLock `, this
+ lock does not use OS-level locking primitives. Instead, it creates the lock file with ``O_CREAT | O_EXCL`` and
+ treats its existence as the lock indicator. This makes it work on any filesystem but leaves stale lock files behind
+ if the process crashes without releasing the lock.
+
+ To mitigate stale locks, the lock file contains the PID and hostname of the holding process. On contention, if the
+ holder is on the same host and its PID no longer exists, the stale lock is broken automatically.
+
+ """
+
+ def _acquire(self) -> None:
+ raise_on_not_writable_file(self.lock_file)
+ ensure_directory_exists(self.lock_file)
+ flags = (
+ os.O_WRONLY # open for writing only
+ | os.O_CREAT
+ | os.O_EXCL # together with above raise EEXIST if the file specified by filename exists
+ | os.O_TRUNC # truncate the file to zero byte
+ )
+ if (o_nofollow := getattr(os, "O_NOFOLLOW", None)) is not None:
+ flags |= o_nofollow
+ try:
+ file_handler = os.open(self.lock_file, flags, self._open_mode())
+ except OSError as exception:
+ if not (
+ exception.errno == EEXIST or (exception.errno == EACCES and sys.platform == "win32")
+ ): # pragma: win32 no cover
+ raise
+ if exception.errno == EEXIST and sys.platform != "win32": # pragma: win32 no cover
+ self._try_break_stale_lock()
+ else:
+ self._write_lock_info(file_handler)
+ self._context.lock_file_fd = file_handler
+
+ def _try_break_stale_lock(self) -> None:
+ with suppress(OSError, ValueError):
+ content = Path(self.lock_file).read_text(encoding="utf-8")
+ lines = content.strip().splitlines()
+ if len(lines) != 2: # noqa: PLR2004
+ return
+ pid_str, hostname = lines
+ if hostname != socket.gethostname():
+ return
+ pid = int(pid_str)
+ if self._is_process_alive(pid):
+ return
+ break_path = f"{self.lock_file}.break.{os.getpid()}"
+ Path(self.lock_file).rename(break_path)
+ Path(break_path).unlink()
+
+ @staticmethod
+ def _is_process_alive(pid: int) -> bool:
+ if sys.platform == "win32": # pragma: win32 cover
+ import ctypes # noqa: PLC0415
+
+ kernel32 = ctypes.windll.kernel32
+ handle = kernel32.OpenProcess(_WIN_SYNCHRONIZE, 0, pid)
+ if handle:
+ kernel32.CloseHandle(handle)
+ return True
+ return kernel32.GetLastError() != _WIN_ERROR_INVALID_PARAMETER
+ try:
+ os.kill(pid, 0)
+ except OSError as exc:
+ if exc.errno == ESRCH:
+ return False
+ if exc.errno == EPERM:
+ return True
+ raise
+ return True
+
+ @staticmethod
+ def _write_lock_info(fd: int) -> None:
+ with suppress(OSError):
+ os.write(fd, f"{os.getpid()}\n{socket.gethostname()}\n".encode())
+
+ def _release(self) -> None:
+ assert self._context.lock_file_fd is not None # noqa: S101
+ os.close(self._context.lock_file_fd)
+ self._context.lock_file_fd = None
+ if sys.platform == "win32":
+ self._windows_unlink_with_retry()
+ else:
+ with suppress(OSError):
+ Path(self.lock_file).unlink()
+
+ def _windows_unlink_with_retry(self) -> None:
+ max_retries = 10
+ retry_delay = 0.001
+ for attempt in range(max_retries):
+ # Windows doesn't immediately release file handles after close, causing EACCES/EPERM on unlink
+ try:
+ Path(self.lock_file).unlink()
+ except OSError as exc: # noqa: PERF203
+ if exc.errno not in {EACCES, EPERM}:
+ return
+ if attempt < max_retries - 1:
+ time.sleep(retry_delay)
+ retry_delay *= 2
+ else:
+ return
+
+
+__all__ = [
+ "SoftFileLock",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_unix.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_unix.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac5f26a9e05fa2e11834224b979ecb2d97b29101
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_unix.py
@@ -0,0 +1,116 @@
+from __future__ import annotations
+
+import os
+import sys
+import warnings
+from contextlib import suppress
+from errno import EAGAIN, ENOSYS, EWOULDBLOCK
+from pathlib import Path
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists
+
+#: a flag to indicate if the fcntl API is available
+has_fcntl = False
+if sys.platform == "win32": # pragma: win32 cover
+
+ class UnixFileLock(BaseFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+else: # pragma: win32 no cover
+ try:
+ import fcntl
+
+ _ = (fcntl.flock, fcntl.LOCK_EX, fcntl.LOCK_NB, fcntl.LOCK_UN)
+ except (ImportError, AttributeError):
+ pass
+ else:
+ has_fcntl = True
+
+ class UnixFileLock(BaseFileLock):
+ """
+ Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems.
+
+ Lock file cleanup: Unix and macOS delete the lock file reliably after release, even in
+ multi-threaded scenarios. Unlike Windows, Unix allows unlinking files that other processes
+ have open.
+ """
+
+ def _acquire(self) -> None: # noqa: C901, PLR0912
+ ensure_directory_exists(self.lock_file)
+ open_flags = os.O_RDWR | os.O_TRUNC
+ o_nofollow = getattr(os, "O_NOFOLLOW", None)
+ if o_nofollow is not None:
+ open_flags |= o_nofollow
+ open_flags |= os.O_CREAT
+ open_mode = self._open_mode()
+ try:
+ fd = os.open(self.lock_file, open_flags, open_mode)
+ except FileNotFoundError:
+ # On FUSE/NFS, os.open(O_CREAT) is not atomic: LOOKUP + CREATE can be split, allowing a concurrent
+ # unlink() to delete the file between them. For valid paths, treat ENOENT as transient contention.
+ # For invalid paths (e.g., empty string), re-raise to avoid infinite retry loops.
+ if self.lock_file and Path(self.lock_file).parent.exists():
+ return
+ raise
+ except PermissionError:
+ # Sticky-bit dirs (e.g. /tmp): O_CREAT fails if the file is owned by another user (#317).
+ # Fall back to opening the existing file without O_CREAT.
+ if not Path(self.lock_file).exists():
+ raise
+ try:
+ fd = os.open(self.lock_file, open_flags & ~os.O_CREAT, open_mode)
+ except FileNotFoundError:
+ return
+ if self.has_explicit_mode:
+ with suppress(PermissionError):
+ os.fchmod(fd, self._context.mode)
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except OSError as exception:
+ os.close(fd)
+ if exception.errno == ENOSYS:
+ with suppress(OSError):
+ Path(self.lock_file).unlink()
+ self._fallback_to_soft_lock()
+ self._acquire()
+ return
+ if exception.errno not in {EAGAIN, EWOULDBLOCK}:
+ raise
+ else:
+ # The file may have been unlinked by a concurrent _release() between our open() and flock().
+ # A lock on an unlinked inode is useless — discard and let the retry loop start fresh.
+ if os.fstat(fd).st_nlink == 0:
+ os.close(fd)
+ else:
+ self._context.lock_file_fd = fd
+
+ def _fallback_to_soft_lock(self) -> None:
+ from ._soft import SoftFileLock # noqa: PLC0415
+
+ warnings.warn("flock not supported on this filesystem, falling back to SoftFileLock", stacklevel=2)
+ from .asyncio import AsyncSoftFileLock, BaseAsyncFileLock # noqa: PLC0415
+
+ self.__class__ = AsyncSoftFileLock if isinstance(self, BaseAsyncFileLock) else SoftFileLock
+
+ def _release(self) -> None:
+ fd = cast("int", self._context.lock_file_fd)
+ self._context.lock_file_fd = None
+ with suppress(OSError):
+ Path(self.lock_file).unlink()
+ fcntl.flock(fd, fcntl.LOCK_UN)
+ with suppress(OSError): # close can raise EIO on FUSE/Docker bind-mount filesystems after unlink
+ os.close(fd)
+
+
+__all__ = [
+ "UnixFileLock",
+ "has_fcntl",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_util.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..670152393ed76d2cecf4cf5e774360af4380f319
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_util.py
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+import os
+import stat
+import sys
+from errno import EACCES, EISDIR
+from pathlib import Path
+
+
+def raise_on_not_writable_file(filename: str) -> None:
+ """
+ Raise an exception if attempting to open the file for writing would fail.
+
+ This is done so files that will never be writable can be separated from files that are writable but currently
+ locked.
+
+ :param filename: file to check
+
+ :raises OSError: as if the file was opened for writing.
+
+ """
+ try: # use stat to do exists + can write to check without race condition
+ file_stat = os.stat(filename) # noqa: PTH116
+ except OSError:
+ return # swallow does not exist or other errors
+
+ if file_stat.st_mtime != 0: # if os.stat returns but modification is zero that's an invalid os.stat - ignore it
+ if not (file_stat.st_mode & stat.S_IWUSR):
+ raise PermissionError(EACCES, "Permission denied", filename)
+
+ if stat.S_ISDIR(file_stat.st_mode):
+ if sys.platform == "win32": # pragma: win32 cover
+ # On Windows, this is PermissionError
+ raise PermissionError(EACCES, "Permission denied", filename)
+ else: # pragma: win32 no cover # noqa: RET506
+ # On linux / macOS, this is IsADirectoryError
+ raise IsADirectoryError(EISDIR, "Is a directory", filename)
+
+
+def ensure_directory_exists(filename: Path | str) -> None:
+ """
+ Ensure the directory containing the file exists (create it if necessary).
+
+ :param filename: file.
+
+ """
+ Path(filename).parent.mkdir(parents=True, exist_ok=True)
+
+
+__all__ = [
+ "ensure_directory_exists",
+ "raise_on_not_writable_file",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_windows.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_windows.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe4903aed7b156e7a72ff4ee4ed5c99dac3f354c
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/_windows.py
@@ -0,0 +1,111 @@
+from __future__ import annotations
+
+import os
+import sys
+from contextlib import suppress
+from errno import EACCES
+from pathlib import Path
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists, raise_on_not_writable_file
+
+if sys.platform == "win32": # pragma: win32 cover
+ import ctypes
+ import msvcrt
+ from ctypes import wintypes
+
+ # Windows API constants for reparse point detection
+ FILE_ATTRIBUTE_REPARSE_POINT = 0x00000400
+ INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
+
+ # Load kernel32.dll
+ _kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
+ _kernel32.GetFileAttributesW.argtypes = [wintypes.LPCWSTR]
+ _kernel32.GetFileAttributesW.restype = wintypes.DWORD
+
+ def _is_reparse_point(path: str) -> bool:
+ """
+ Check if a path is a reparse point (symlink, junction, etc.) on Windows.
+
+ :param path: Path to check
+
+ :returns: True if path is a reparse point, False otherwise
+
+ :raises OSError: If GetFileAttributesW fails for reasons other than file-not-found
+
+ """
+ attrs = _kernel32.GetFileAttributesW(path)
+ if attrs == INVALID_FILE_ATTRIBUTES:
+ # File doesn't exist yet - that's fine, we'll create it
+ err = ctypes.get_last_error()
+ if err == 2: # noqa: PLR2004 # ERROR_FILE_NOT_FOUND
+ return False
+ if err == 3: # noqa: PLR2004 # ERROR_PATH_NOT_FOUND
+ return False
+ # Some other error - let caller handle it
+ return False
+ return bool(attrs & FILE_ATTRIBUTE_REPARSE_POINT)
+
+ class WindowsFileLock(BaseFileLock):
+ """
+ Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems.
+
+ Lock file cleanup: Windows attempts to delete the lock file after release, but deletion is
+ not guaranteed in multi-threaded scenarios where another thread holds an open handle. The lock
+ file may persist on disk, which does not affect lock correctness.
+ """
+
+ def _acquire(self) -> None:
+ raise_on_not_writable_file(self.lock_file)
+ ensure_directory_exists(self.lock_file)
+
+ # Security check: Refuse to open reparse points (symlinks, junctions)
+ # This prevents TOCTOU symlink attacks (CVE-TBD)
+ if _is_reparse_point(self.lock_file):
+ msg = f"Lock file is a reparse point (symlink/junction): {self.lock_file}"
+ raise OSError(msg)
+
+ flags = (
+ os.O_RDWR # open for read and write
+ | os.O_CREAT # create file if not exists
+ )
+ try:
+ fd = os.open(self.lock_file, flags, self._open_mode())
+ except OSError as exception:
+ if exception.errno != EACCES: # has no access to this lock
+ raise
+ else:
+ try:
+ msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+ except OSError as exception:
+ os.close(fd) # close file first
+ if exception.errno != EACCES: # file is already locked
+ raise
+ else:
+ self._context.lock_file_fd = fd
+
+ def _release(self) -> None:
+ fd = cast("int", self._context.lock_file_fd)
+ self._context.lock_file_fd = None
+ msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
+ os.close(fd)
+
+ with suppress(OSError):
+ Path(self.lock_file).unlink()
+
+else: # pragma: win32 no cover
+
+ class WindowsFileLock(BaseFileLock):
+ """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+
+__all__ = [
+ "WindowsFileLock",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/asyncio.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/asyncio.py
new file mode 100644
index 0000000000000000000000000000000000000000..81743adff7e9684ff3d3ce6c23a0220bc95e7a68
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/asyncio.py
@@ -0,0 +1,376 @@
+"""An asyncio-based implementation of the file lock."""
+
+from __future__ import annotations
+
+import asyncio
+import contextlib
+import logging
+import os
+import time
+from dataclasses import dataclass
+from inspect import iscoroutinefunction
+from threading import local
+from typing import TYPE_CHECKING, Any, NoReturn, cast
+
+from ._api import _UNSET_FILE_MODE, BaseFileLock, FileLockContext, FileLockMeta
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock
+from ._windows import WindowsFileLock
+
+if TYPE_CHECKING:
+ import sys
+ from collections.abc import Callable
+ from concurrent import futures
+ from types import TracebackType
+
+ if sys.version_info >= (3, 11): # pragma: no cover (py311+)
+ from typing import Self
+ else: # pragma: no cover ( None: # noqa: D107
+ self.lock = lock
+
+ async def __aenter__(self) -> BaseAsyncFileLock: # noqa: D105
+ return self.lock
+
+ async def __aexit__( # noqa: D105
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ await self.lock.release()
+
+
+class AsyncFileLockMeta(FileLockMeta):
+ def __call__( # ty: ignore[invalid-method-override] # noqa: PLR0913
+ cls, # noqa: N805
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = _UNSET_FILE_MODE,
+ thread_local: bool = False, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ poll_interval: float = 0.05,
+ lifetime: float | None = None,
+ loop: asyncio.AbstractEventLoop | None = None,
+ run_in_executor: bool = True,
+ executor: futures.Executor | None = None,
+ ) -> BaseAsyncFileLock:
+ if thread_local and run_in_executor:
+ msg = "run_in_executor is not supported when thread_local is True"
+ raise ValueError(msg)
+ instance = super().__call__(
+ lock_file=lock_file,
+ timeout=timeout,
+ mode=mode,
+ thread_local=thread_local,
+ blocking=blocking,
+ is_singleton=is_singleton,
+ poll_interval=poll_interval,
+ lifetime=lifetime,
+ loop=loop,
+ run_in_executor=run_in_executor,
+ executor=executor,
+ )
+ return cast("BaseAsyncFileLock", instance)
+
+
+class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta):
+ """
+ Base class for asynchronous file locks.
+
+ .. versionadded:: 3.15.0
+
+ """
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = _UNSET_FILE_MODE,
+ thread_local: bool = False, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ poll_interval: float = 0.05,
+ lifetime: float | None = None,
+ loop: asyncio.AbstractEventLoop | None = None,
+ run_in_executor: bool = True,
+ executor: futures.Executor | None = None,
+ ) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+ :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in the
+ acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it to a
+ negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+ :param mode: file permissions for the lockfile. When not specified, the OS controls permissions via umask and
+ default ACLs, preserving POSIX default ACL inheritance in shared directories.
+ :param thread_local: Whether this object's internal context should be thread local or not. If this is set to
+ ``False`` then the lock will be reentrant across threads.
+ :param blocking: whether the lock should be blocking or not
+ :param is_singleton: If this is set to ``True`` then only one instance of this class will be created per lock
+ file. This is useful if you want to use the lock object for reentrant locking without needing to pass the
+ same object around.
+ :param poll_interval: default interval for polling the lock file, in seconds. It will be used as fallback value
+ in the acquire method, if no poll_interval value (``None``) is given.
+ :param lifetime: maximum time in seconds a lock can be held before it is considered expired. When set, a waiting
+ process will break a lock whose file modification time is older than ``lifetime`` seconds. ``None`` (the
+ default) means locks never expire.
+ :param loop: The event loop to use. If not specified, the running event loop will be used.
+ :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
+ :param executor: The executor to use. If not specified, the default executor will be used.
+
+ """
+ self._is_thread_local = thread_local
+ self._is_singleton = is_singleton
+
+ # Create the context. Note that external code should not work with the context directly and should instead use
+ # properties of this class.
+ kwargs: dict[str, Any] = {
+ "lock_file": os.fspath(lock_file),
+ "timeout": timeout,
+ "mode": mode,
+ "blocking": blocking,
+ "poll_interval": poll_interval,
+ "lifetime": lifetime,
+ "loop": loop,
+ "run_in_executor": run_in_executor,
+ "executor": executor,
+ }
+ self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
+ **kwargs
+ )
+
+ @property
+ def run_in_executor(self) -> bool:
+ """:returns: whether run in executor."""
+ return self._context.run_in_executor
+
+ @property
+ def executor(self) -> futures.Executor | None:
+ """:returns: the executor."""
+ return self._context.executor
+
+ @executor.setter
+ def executor(self, value: futures.Executor | None) -> None: # pragma: no cover
+ """
+ Change the executor.
+
+ :param futures.Executor | None value: the new executor or ``None``
+
+ """
+ self._context.executor = value
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """:returns: the event loop."""
+ return self._context.loop
+
+ async def acquire( # ty: ignore[invalid-method-override]
+ self,
+ timeout: float | None = None,
+ poll_interval: float | None = None,
+ *,
+ blocking: bool | None = None,
+ cancel_check: Callable[[], bool] | None = None,
+ ) -> AsyncAcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+ :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
+ :attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and this method will block
+ until the lock could be acquired
+ :param poll_interval: interval of trying to acquire the lock file, ``None`` means use the default
+ :attr:`~BaseFileLock.poll_interval`
+ :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+ first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
+ :param cancel_check: a callable returning ``True`` when the acquisition should be canceled. Checked on each poll
+ iteration. When triggered, raises :class:`~Timeout` just like an expired timeout.
+
+ :returns: a context object that will unlock the file when the context is exited
+
+ :raises Timeout: if fails to acquire lock within the timeout period
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self._context.timeout
+
+ if blocking is None:
+ blocking = self._context.blocking
+
+ if poll_interval is None:
+ poll_interval = self._context.poll_interval
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ self._context.lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self.lock_file
+ start_time = time.perf_counter()
+ try:
+ while True:
+ if not self.is_locked:
+ self._try_break_expired_lock()
+ _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+ await self._run_internal_method(self._acquire)
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+ break
+ if self._check_give_up(
+ lock_id,
+ lock_filename,
+ blocking=blocking,
+ cancel_check=cancel_check,
+ timeout=timeout,
+ start_time=start_time,
+ ):
+ raise Timeout(lock_filename) # noqa: TRY301
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ await asyncio.sleep(poll_interval)
+ except BaseException: # Something did go wrong, so decrement the counter.
+ self._context.lock_counter = max(0, self._context.lock_counter - 1)
+ raise
+ return AsyncAcquireReturnProxy(lock=self)
+
+ async def release(self, force: bool = False) -> None: # ty: ignore[invalid-method-override] # noqa: FBT001, FBT002
+ """
+ Release the file lock. The lock is only completely released when the lock counter reaches 0. The lock file
+ itself is not automatically deleted.
+
+ :param force: If true, the lock counter is ignored and the lock is released in every case.
+
+ """
+ if self.is_locked:
+ self._context.lock_counter -= 1
+
+ if self._context.lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self.lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+ await self._run_internal_method(self._release)
+ self._context.lock_counter = 0
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ async def _run_internal_method(self, method: Callable[[], Any]) -> None:
+ if iscoroutinefunction(method):
+ await method()
+ elif self.run_in_executor:
+ loop = self.loop or asyncio.get_running_loop()
+ await loop.run_in_executor(self.executor, method)
+ else:
+ method()
+
+ def __enter__(self) -> NoReturn:
+ """
+ Replace old __enter__ method to avoid using it.
+
+ NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.
+
+ :returns: none
+ :rtype: NoReturn
+
+ """
+ msg = "Do not use `with` for asyncio locks, use `async with` instead."
+ raise NotImplementedError(msg)
+
+ async def __aenter__(self) -> Self:
+ """
+ Acquire the lock.
+
+ :returns: the lock object
+
+ """
+ await self.acquire()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+
+ """
+ await self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ with contextlib.suppress(RuntimeError):
+ loop = self.loop or asyncio.get_running_loop()
+ if not loop.is_running(): # pragma: no cover
+ loop.run_until_complete(self.release(force=True))
+ else:
+ loop.create_task(self.release(force=True))
+
+
+class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
+ """Simply watches the existence of the lock file."""
+
+
+class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+
+class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
+ """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows systems."""
+
+
+__all__ = [
+ "AsyncAcquireReturnProxy",
+ "AsyncSoftFileLock",
+ "AsyncUnixFileLock",
+ "AsyncWindowsFileLock",
+ "BaseAsyncFileLock",
+]
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/py.typed b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/version.py b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..83fa43ba446b6fdf3b7f05e105a0fba787808bbc
--- /dev/null
+++ b/.cache/uv/archive-v0/aw8qBy6dKG6WH1Wd4zXN0/filelock/version.py
@@ -0,0 +1,34 @@
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = [
+ "__version__",
+ "__version_tuple__",
+ "version",
+ "version_tuple",
+ "__commit_id__",
+ "commit_id",
+]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Tuple
+ from typing import Union
+
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
+ COMMIT_ID = Union[str, None]
+else:
+ VERSION_TUPLE = object
+ COMMIT_ID = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
+
+__version__ = version = '3.25.2'
+__version_tuple__ = version_tuple = (3, 25, 2)
+
+__commit_id__ = commit_id = None
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/LICENSE b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..2a920c59d8abdd485a774087915986448495fd7c
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/LICENSE
@@ -0,0 +1,46 @@
+Copyright (c) 2015 Vitaly Puzrin, Alex Kocharin.
+Copyright (c) 2021 Taneli Hukkinen
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+--------------------------------------------------------------------------------
+
+.parse() is based on Joyent's node.js `url` code:
+
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/METADATA b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..b4670e86b6dc207c944c55c5d3b84911fb41157a
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/METADATA
@@ -0,0 +1,32 @@
+Metadata-Version: 2.1
+Name: mdurl
+Version: 0.1.2
+Summary: Markdown URL utilities
+Keywords: markdown,commonmark
+Author-email: Taneli Hukkinen
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: MacOS
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Typing :: Typed
+Project-URL: Homepage, https://github.com/executablebooks/mdurl
+
+# mdurl
+
+[](https://github.com/executablebooks/mdurl/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush)
+[](https://codecov.io/gh/executablebooks/mdurl)
+[](https://pypi.org/project/mdurl)
+
+This is a Python port of the JavaScript [mdurl](https://www.npmjs.com/package/mdurl) package.
+See the [upstream README.md file](https://github.com/markdown-it/mdurl/blob/master/README.md) for API documentation.
+
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/RECORD b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..0d8cca744563db4cf5383af099d8375c8eca116e
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/RECORD
@@ -0,0 +1,11 @@
+mdurl/__init__.py,sha256=1vpE89NyXniIRZNC_4f6BPm3Ub4bPntjfyyhLRR7opU,547
+mdurl/_decode.py,sha256=3Q_gDQqU__TvDbu7x-b9LjbVl4QWy5g_qFwljcuvN_Y,3004
+mdurl/_encode.py,sha256=goJLUFt1h4rVZNqqm9t15Nw2W-bFXYQEy3aR01ImWvs,2602
+mdurl/_format.py,sha256=xZct0mdePXA0H3kAqxjGtlB5O86G35DAYMGkA44CmB4,626
+mdurl/_parse.py,sha256=ezZSkM2_4NQ2Zx047sEdcJG7NYQRFHiZK7Y8INHFzwY,11374
+mdurl/_url.py,sha256=5kQnRQN2A_G4svLnRzZcG0bfoD9AbBrYDXousDHZ3z0,284
+mdurl/py.typed,sha256=8PjyZ1aVoQpRVvt71muvuq5qE-jTFZkK-GLHkhdebmc,26
+mdurl-0.1.2.dist-info/LICENSE,sha256=fGBd9uKGZ6lgMRjpgnT2SknOPu0NJvzM6VNKNF4O-VU,2338
+mdurl-0.1.2.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81
+mdurl-0.1.2.dist-info/METADATA,sha256=tTsp1I9Jk2cFP9o8gefOJ9JVg4Drv4PmYCOwLrfd0l0,1638
+mdurl-0.1.2.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/WHEEL b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..668ba4d0151c5c76ed6e758061daa8c1b0bf5d21
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl-0.1.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.7.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/__init__.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdbb640e004cef0e950a656a53d92d89d82c7472
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/__init__.py
@@ -0,0 +1,18 @@
+__all__ = (
+ "decode",
+ "DECODE_DEFAULT_CHARS",
+ "DECODE_COMPONENT_CHARS",
+ "encode",
+ "ENCODE_DEFAULT_CHARS",
+ "ENCODE_COMPONENT_CHARS",
+ "format",
+ "parse",
+ "URL",
+)
+__version__ = "0.1.2" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from mdurl._decode import DECODE_COMPONENT_CHARS, DECODE_DEFAULT_CHARS, decode
+from mdurl._encode import ENCODE_COMPONENT_CHARS, ENCODE_DEFAULT_CHARS, encode
+from mdurl._format import format
+from mdurl._parse import url_parse as parse
+from mdurl._url import URL
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_decode.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_decode.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b50a2dde976a6d43491ec6f20d12e60f6f6597f
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_decode.py
@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+import functools
+import re
+
+DECODE_DEFAULT_CHARS = ";/?:@&=+$,#"
+DECODE_COMPONENT_CHARS = ""
+
+decode_cache: dict[str, list[str]] = {}
+
+
+def get_decode_cache(exclude: str) -> Sequence[str]:
+ if exclude in decode_cache:
+ return decode_cache[exclude]
+
+ cache: list[str] = []
+ decode_cache[exclude] = cache
+
+ for i in range(128):
+ ch = chr(i)
+ cache.append(ch)
+
+ for i in range(len(exclude)):
+ ch_code = ord(exclude[i])
+ cache[ch_code] = "%" + ("0" + hex(ch_code)[2:].upper())[-2:]
+
+ return cache
+
+
+# Decode percent-encoded string.
+#
+def decode(string: str, exclude: str = DECODE_DEFAULT_CHARS) -> str:
+ cache = get_decode_cache(exclude)
+ repl_func = functools.partial(repl_func_with_cache, cache=cache)
+ return re.sub(r"(%[a-f0-9]{2})+", repl_func, string, flags=re.IGNORECASE)
+
+
+def repl_func_with_cache(match: re.Match, cache: Sequence[str]) -> str:
+ seq = match.group()
+ result = ""
+
+ i = 0
+ l = len(seq) # noqa: E741
+ while i < l:
+ b1 = int(seq[i + 1 : i + 3], 16)
+
+ if b1 < 0x80:
+ result += cache[b1]
+ i += 3 # emulate JS for loop statement3
+ continue
+
+ if (b1 & 0xE0) == 0xC0 and (i + 3 < l):
+ # 110xxxxx 10xxxxxx
+ b2 = int(seq[i + 4 : i + 6], 16)
+
+ if (b2 & 0xC0) == 0x80:
+ all_bytes = bytes((b1, b2))
+ try:
+ result += all_bytes.decode()
+ except UnicodeDecodeError:
+ result += "\ufffd" * 2
+
+ i += 3
+ i += 3 # emulate JS for loop statement3
+ continue
+
+ if (b1 & 0xF0) == 0xE0 and (i + 6 < l):
+ # 1110xxxx 10xxxxxx 10xxxxxx
+ b2 = int(seq[i + 4 : i + 6], 16)
+ b3 = int(seq[i + 7 : i + 9], 16)
+
+ if (b2 & 0xC0) == 0x80 and (b3 & 0xC0) == 0x80:
+ all_bytes = bytes((b1, b2, b3))
+ try:
+ result += all_bytes.decode()
+ except UnicodeDecodeError:
+ result += "\ufffd" * 3
+
+ i += 6
+ i += 3 # emulate JS for loop statement3
+ continue
+
+ if (b1 & 0xF8) == 0xF0 and (i + 9 < l):
+ # 111110xx 10xxxxxx 10xxxxxx 10xxxxxx
+ b2 = int(seq[i + 4 : i + 6], 16)
+ b3 = int(seq[i + 7 : i + 9], 16)
+ b4 = int(seq[i + 10 : i + 12], 16)
+
+ if (b2 & 0xC0) == 0x80 and (b3 & 0xC0) == 0x80 and (b4 & 0xC0) == 0x80:
+ all_bytes = bytes((b1, b2, b3, b4))
+ try:
+ result += all_bytes.decode()
+ except UnicodeDecodeError:
+ result += "\ufffd" * 4
+
+ i += 9
+ i += 3 # emulate JS for loop statement3
+ continue
+
+ result += "\ufffd"
+ i += 3 # emulate JS for loop statement3
+
+ return result
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_encode.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_encode.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc2e5b917afe9e9ecaa6f11af7a9ac82704d3914
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_encode.py
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+from string import ascii_letters, digits, hexdigits
+from urllib.parse import quote as encode_uri_component
+
+ASCII_LETTERS_AND_DIGITS = ascii_letters + digits
+
+ENCODE_DEFAULT_CHARS = ";/?:@&=+$,-_.!~*'()#"
+ENCODE_COMPONENT_CHARS = "-_.!~*'()"
+
+encode_cache: dict[str, list[str]] = {}
+
+
+# Create a lookup array where anything but characters in `chars` string
+# and alphanumeric chars is percent-encoded.
+def get_encode_cache(exclude: str) -> Sequence[str]:
+ if exclude in encode_cache:
+ return encode_cache[exclude]
+
+ cache: list[str] = []
+ encode_cache[exclude] = cache
+
+ for i in range(128):
+ ch = chr(i)
+
+ if ch in ASCII_LETTERS_AND_DIGITS:
+ # always allow unencoded alphanumeric characters
+ cache.append(ch)
+ else:
+ cache.append("%" + ("0" + hex(i)[2:].upper())[-2:])
+
+ for i in range(len(exclude)):
+ cache[ord(exclude[i])] = exclude[i]
+
+ return cache
+
+
+# Encode unsafe characters with percent-encoding, skipping already
+# encoded sequences.
+#
+# - string - string to encode
+# - exclude - list of characters to ignore (in addition to a-zA-Z0-9)
+# - keepEscaped - don't encode '%' in a correct escape sequence (default: true)
+def encode(
+ string: str, exclude: str = ENCODE_DEFAULT_CHARS, *, keep_escaped: bool = True
+) -> str:
+ result = ""
+
+ cache = get_encode_cache(exclude)
+
+ l = len(string) # noqa: E741
+ i = 0
+ while i < l:
+ code = ord(string[i])
+
+ # %
+ if keep_escaped and code == 0x25 and i + 2 < l:
+ if all(c in hexdigits for c in string[i + 1 : i + 3]):
+ result += string[i : i + 3]
+ i += 2
+ i += 1 # JS for loop statement3
+ continue
+
+ if code < 128:
+ result += cache[code]
+ i += 1 # JS for loop statement3
+ continue
+
+ if code >= 0xD800 and code <= 0xDFFF:
+ if code >= 0xD800 and code <= 0xDBFF and i + 1 < l:
+ next_code = ord(string[i + 1])
+ if next_code >= 0xDC00 and next_code <= 0xDFFF:
+ result += encode_uri_component(string[i] + string[i + 1])
+ i += 1
+ i += 1 # JS for loop statement3
+ continue
+ result += "%EF%BF%BD"
+ i += 1 # JS for loop statement3
+ continue
+
+ result += encode_uri_component(string[i])
+ i += 1 # JS for loop statement3
+
+ return result
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_format.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..12524ca626065183ec9974f3d7d08dadd4a7d3e8
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_format.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from mdurl._url import URL
+
+
+def format(url: URL) -> str: # noqa: A001
+ result = ""
+
+ result += url.protocol or ""
+ result += "//" if url.slashes else ""
+ result += url.auth + "@" if url.auth else ""
+
+ if url.hostname and ":" in url.hostname:
+ # ipv6 address
+ result += "[" + url.hostname + "]"
+ else:
+ result += url.hostname or ""
+
+ result += ":" + url.port if url.port else ""
+ result += url.pathname or ""
+ result += url.search or ""
+ result += url.hash or ""
+
+ return result
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_parse.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_parse.py
new file mode 100644
index 0000000000000000000000000000000000000000..ffeeac768dca3bff60c55c9b1f0bc0fbb4cec7b1
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_parse.py
@@ -0,0 +1,304 @@
+# Copyright Joyent, Inc. and other Node contributors.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the
+# following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+# USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+# Changes from joyent/node:
+#
+# 1. No leading slash in paths,
+# e.g. in `url.parse('http://foo?bar')` pathname is ``, not `/`
+#
+# 2. Backslashes are not replaced with slashes,
+# so `http:\\example.org\` is treated like a relative path
+#
+# 3. Trailing colon is treated like a part of the path,
+# i.e. in `http://example.org:foo` pathname is `:foo`
+#
+# 4. Nothing is URL-encoded in the resulting object,
+# (in joyent/node some chars in auth and paths are encoded)
+#
+# 5. `url.parse()` does not have `parseQueryString` argument
+#
+# 6. Removed extraneous result properties: `host`, `path`, `query`, etc.,
+# which can be constructed using other parts of the url.
+
+from __future__ import annotations
+
+from collections import defaultdict
+import re
+
+from mdurl._url import URL
+
+# Reference: RFC 3986, RFC 1808, RFC 2396
+
+# define these here so at least they only have to be
+# compiled once on the first module load.
+PROTOCOL_PATTERN = re.compile(r"^([a-z0-9.+-]+:)", flags=re.IGNORECASE)
+PORT_PATTERN = re.compile(r":[0-9]*$")
+
+# Special case for a simple path URL
+SIMPLE_PATH_PATTERN = re.compile(r"^(//?(?!/)[^?\s]*)(\?[^\s]*)?$")
+
+# RFC 2396: characters reserved for delimiting URLs.
+# We actually just auto-escape these.
+DELIMS = ("<", ">", '"', "`", " ", "\r", "\n", "\t")
+
+# RFC 2396: characters not allowed for various reasons.
+UNWISE = ("{", "}", "|", "\\", "^", "`") + DELIMS
+
+# Allowed by RFCs, but cause of XSS attacks. Always escape these.
+AUTO_ESCAPE = ("'",) + UNWISE
+# Characters that are never ever allowed in a hostname.
+# Note that any invalid chars are also handled, but these
+# are the ones that are *expected* to be seen, so we fast-path
+# them.
+NON_HOST_CHARS = ("%", "/", "?", ";", "#") + AUTO_ESCAPE
+HOST_ENDING_CHARS = ("/", "?", "#")
+HOSTNAME_MAX_LEN = 255
+HOSTNAME_PART_PATTERN = re.compile(r"^[+a-z0-9A-Z_-]{0,63}$")
+HOSTNAME_PART_START = re.compile(r"^([+a-z0-9A-Z_-]{0,63})(.*)$")
+# protocols that can allow "unsafe" and "unwise" chars.
+
+# protocols that never have a hostname.
+HOSTLESS_PROTOCOL = defaultdict(
+ bool,
+ {
+ "javascript": True,
+ "javascript:": True,
+ },
+)
+# protocols that always contain a // bit.
+SLASHED_PROTOCOL = defaultdict(
+ bool,
+ {
+ "http": True,
+ "https": True,
+ "ftp": True,
+ "gopher": True,
+ "file": True,
+ "http:": True,
+ "https:": True,
+ "ftp:": True,
+ "gopher:": True,
+ "file:": True,
+ },
+)
+
+
+class MutableURL:
+ def __init__(self) -> None:
+ self.protocol: str | None = None
+ self.slashes: bool = False
+ self.auth: str | None = None
+ self.port: str | None = None
+ self.hostname: str | None = None
+ self.hash: str | None = None
+ self.search: str | None = None
+ self.pathname: str | None = None
+
+ def parse(self, url: str, slashes_denote_host: bool) -> "MutableURL":
+ lower_proto = ""
+ slashes = False
+ rest = url
+
+ # trim before proceeding.
+ # This is to support parse stuff like " http://foo.com \n"
+ rest = rest.strip()
+
+ if not slashes_denote_host and len(url.split("#")) == 1:
+ # Try fast path regexp
+ simple_path = SIMPLE_PATH_PATTERN.match(rest)
+ if simple_path:
+ self.pathname = simple_path.group(1)
+ if simple_path.group(2):
+ self.search = simple_path.group(2)
+ return self
+
+ proto = ""
+ proto_match = PROTOCOL_PATTERN.match(rest)
+ if proto_match:
+ proto = proto_match.group()
+ lower_proto = proto.lower()
+ self.protocol = proto
+ rest = rest[len(proto) :]
+
+ # figure out if it's got a host
+ # user@server is *always* interpreted as a hostname, and url
+ # resolution will treat //foo/bar as host=foo,path=bar because that's
+ # how the browser resolves relative URLs.
+ if slashes_denote_host or proto or re.search(r"^//[^@/]+@[^@/]+", rest):
+ slashes = rest.startswith("//")
+ if slashes and not (proto and HOSTLESS_PROTOCOL[proto]):
+ rest = rest[2:]
+ self.slashes = True
+
+ if not HOSTLESS_PROTOCOL[proto] and (
+ slashes or (proto and not SLASHED_PROTOCOL[proto])
+ ):
+
+ # there's a hostname.
+ # the first instance of /, ?, ;, or # ends the host.
+ #
+ # If there is an @ in the hostname, then non-host chars *are* allowed
+ # to the left of the last @ sign, unless some host-ending character
+ # comes *before* the @-sign.
+ # URLs are obnoxious.
+ #
+ # ex:
+ # http://a@b@c/ => user:a@b host:c
+ # http://a@b?@c => user:a host:c path:/?@c
+
+ # v0.12 TODO(isaacs): This is not quite how Chrome does things.
+ # Review our test case against browsers more comprehensively.
+
+ # find the first instance of any hostEndingChars
+ host_end = -1
+ for i in range(len(HOST_ENDING_CHARS)):
+ hec = rest.find(HOST_ENDING_CHARS[i])
+ if hec != -1 and (host_end == -1 or hec < host_end):
+ host_end = hec
+
+ # at this point, either we have an explicit point where the
+ # auth portion cannot go past, or the last @ char is the decider.
+ if host_end == -1:
+ # atSign can be anywhere.
+ at_sign = rest.rfind("@")
+ else:
+ # atSign must be in auth portion.
+ # http://a@b/c@d => host:b auth:a path:/c@d
+ at_sign = rest.rfind("@", 0, host_end + 1)
+
+ # Now we have a portion which is definitely the auth.
+ # Pull that off.
+ if at_sign != -1:
+ auth = rest[:at_sign]
+ rest = rest[at_sign + 1 :]
+ self.auth = auth
+
+ # the host is the remaining to the left of the first non-host char
+ host_end = -1
+ for i in range(len(NON_HOST_CHARS)):
+ hec = rest.find(NON_HOST_CHARS[i])
+ if hec != -1 and (host_end == -1 or hec < host_end):
+ host_end = hec
+ # if we still have not hit it, then the entire thing is a host.
+ if host_end == -1:
+ host_end = len(rest)
+
+ if host_end > 0 and rest[host_end - 1] == ":":
+ host_end -= 1
+ host = rest[:host_end]
+ rest = rest[host_end:]
+
+ # pull out port.
+ self.parse_host(host)
+
+ # we've indicated that there is a hostname,
+ # so even if it's empty, it has to be present.
+ self.hostname = self.hostname or ""
+
+ # if hostname begins with [ and ends with ]
+ # assume that it's an IPv6 address.
+ ipv6_hostname = self.hostname.startswith("[") and self.hostname.endswith(
+ "]"
+ )
+
+ # validate a little.
+ if not ipv6_hostname:
+ hostparts = self.hostname.split(".")
+ l = len(hostparts) # noqa: E741
+ i = 0
+ while i < l:
+ part = hostparts[i]
+ if not part:
+ i += 1 # emulate statement3 in JS for loop
+ continue
+ if not HOSTNAME_PART_PATTERN.search(part):
+ newpart = ""
+ k = len(part)
+ j = 0
+ while j < k:
+ if ord(part[j]) > 127:
+ # we replace non-ASCII char with a temporary placeholder
+ # we need this to make sure size of hostname is not
+ # broken by replacing non-ASCII by nothing
+ newpart += "x"
+ else:
+ newpart += part[j]
+ j += 1 # emulate statement3 in JS for loop
+
+ # we test again with ASCII char only
+ if not HOSTNAME_PART_PATTERN.search(newpart):
+ valid_parts = hostparts[:i]
+ not_host = hostparts[i + 1 :]
+ bit = HOSTNAME_PART_START.search(part)
+ if bit:
+ valid_parts.append(bit.group(1))
+ not_host.insert(0, bit.group(2))
+ if not_host:
+ rest = ".".join(not_host) + rest
+ self.hostname = ".".join(valid_parts)
+ break
+ i += 1 # emulate statement3 in JS for loop
+
+ if len(self.hostname) > HOSTNAME_MAX_LEN:
+ self.hostname = ""
+
+ # strip [ and ] from the hostname
+ # the host field still retains them, though
+ if ipv6_hostname:
+ self.hostname = self.hostname[1:-1]
+
+ # chop off from the tail first.
+ hash = rest.find("#") # noqa: A001
+ if hash != -1:
+ # got a fragment string.
+ self.hash = rest[hash:]
+ rest = rest[:hash]
+ qm = rest.find("?")
+ if qm != -1:
+ self.search = rest[qm:]
+ rest = rest[:qm]
+ if rest:
+ self.pathname = rest
+ if SLASHED_PROTOCOL[lower_proto] and self.hostname and not self.pathname:
+ self.pathname = ""
+
+ return self
+
+ def parse_host(self, host: str) -> None:
+ port_match = PORT_PATTERN.search(host)
+ if port_match:
+ port = port_match.group()
+ if port != ":":
+ self.port = port[1:]
+ host = host[: -len(port)]
+ if host:
+ self.hostname = host
+
+
+def url_parse(url: URL | str, *, slashes_denote_host: bool = False) -> URL:
+ if isinstance(url, URL):
+ return url
+ u = MutableURL()
+ u.parse(url, slashes_denote_host)
+ return URL(
+ u.protocol, u.slashes, u.auth, u.port, u.hostname, u.hash, u.search, u.pathname
+ )
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_url.py b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_url.py
new file mode 100644
index 0000000000000000000000000000000000000000..f866e7a179c8854e37c9bba6294f48681e5d99d7
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/_url.py
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from typing import NamedTuple
+
+
+class URL(NamedTuple):
+ protocol: str | None
+ slashes: bool
+ auth: str | None
+ port: str | None
+ hostname: str | None
+ hash: str | None # noqa: A003
+ search: str | None
+ pathname: str | None
diff --git a/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/py.typed b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..7632ecf77545c5e5501cb3fc5719df0761104ca2
--- /dev/null
+++ b/.cache/uv/archive-v0/mqencNre-Vow0WZx4ww9r/mdurl/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/METADATA b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..9bf7a9e800778c5a8c3f1357450ab0849d13d953
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/METADATA
@@ -0,0 +1,145 @@
+Metadata-Version: 2.4
+Name: annotated-doc
+Version: 0.0.4
+Summary: Document parameters, class attributes, return types, and variables inline, with Annotated.
+Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?=
+License-Expression: MIT
+License-File: LICENSE
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development
+Classifier: Typing :: Typed
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Project-URL: Homepage, https://github.com/fastapi/annotated-doc
+Project-URL: Documentation, https://github.com/fastapi/annotated-doc
+Project-URL: Repository, https://github.com/fastapi/annotated-doc
+Project-URL: Issues, https://github.com/fastapi/annotated-doc/issues
+Project-URL: Changelog, https://github.com/fastapi/annotated-doc/release-notes.md
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+
+# Annotated Doc
+
+Document parameters, class attributes, return types, and variables inline, with `Annotated`.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+## Installation
+
+```bash
+pip install annotated-doc
+```
+
+Or with `uv`:
+
+```Python
+uv add annotated-doc
+```
+
+## Usage
+
+Import `Doc` and pass a single literal string with the documentation for the specific parameter, class attribute, return type, or variable.
+
+For example, to document a parameter `name` in a function `hi` you could do:
+
+```Python
+from typing import Annotated
+
+from annotated_doc import Doc
+
+def hi(name: Annotated[str, Doc("Who to say hi to")]) -> None:
+ print(f"Hi, {name}!")
+```
+
+You can also use it to document class attributes:
+
+```Python
+from typing import Annotated
+
+from annotated_doc import Doc
+
+class User:
+ name: Annotated[str, Doc("The user's name")]
+ age: Annotated[int, Doc("The user's age")]
+```
+
+The same way, you could document return types and variables, or anything that could have a type annotation with `Annotated`.
+
+## Who Uses This
+
+`annotated-doc` was made for:
+
+* [FastAPI](https://fastapi.tiangolo.com/)
+* [Typer](https://typer.tiangolo.com/)
+* [SQLModel](https://sqlmodel.tiangolo.com/)
+* [Asyncer](https://asyncer.tiangolo.com/)
+
+`annotated-doc` is supported by [griffe-typingdoc](https://github.com/mkdocstrings/griffe-typingdoc), which powers reference documentation like the one in the [FastAPI Reference](https://fastapi.tiangolo.com/reference/).
+
+## Reasons not to use `annotated-doc`
+
+You are already comfortable with one of the existing docstring formats, like:
+
+* Sphinx
+* numpydoc
+* Google
+* Keras
+
+Your team is already comfortable using them.
+
+You prefer having the documentation about parameters all together in a docstring, separated from the code defining them.
+
+You care about a specific set of users, using one specific editor, and that editor already has support for the specific docstring format you use.
+
+## Reasons to use `annotated-doc`
+
+* No micro-syntax to learn for newcomers, it’s **just Python** syntax.
+* **Editing** would be already fully supported by default by any editor (current or future) supporting Python syntax, including syntax errors, syntax highlighting, etc.
+* **Rendering** would be relatively straightforward to implement by static tools (tools that don't need runtime execution), as the information can be extracted from the AST they normally already create.
+* **Deduplication of information**: the name of a parameter would be defined in a single place, not duplicated inside of a docstring.
+* **Elimination** of the possibility of having **inconsistencies** when removing a parameter or class variable and **forgetting to remove** its documentation.
+* **Minimization** of the probability of adding a new parameter or class variable and **forgetting to add its documentation**.
+* **Elimination** of the possibility of having **inconsistencies** between the **name** of a parameter in the **signature** and the name in the docstring when it is renamed.
+* **Access** to the documentation string for each symbol at **runtime**, including existing (older) Python versions.
+* A more formalized way to document other symbols, like type aliases, that could use Annotated.
+* **Support** for apps using FastAPI, Typer and others.
+* **AI Accessibility**: AI tools will have an easier way understanding each parameter as the distance from documentation to parameter is much closer.
+
+## History
+
+I ([@tiangolo](https://github.com/tiangolo)) originally wanted for this to be part of the Python standard library (in [PEP 727](https://peps.python.org/pep-0727/)), but the proposal was withdrawn as there was a fair amount of negative feedback and opposition.
+
+The conclusion was that this was better done as an external effort, in a third-party library.
+
+So, here it is, with a simpler approach, as a third-party library, in a way that can be used by others, starting with FastAPI and friends.
+
+## License
+
+This project is licensed under the terms of the MIT license.
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/RECORD b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..68849e8669b1cafe91c7ac7b66a7ba947e4751d4
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/RECORD
@@ -0,0 +1,8 @@
+annotated_doc-0.0.4.dist-info/METADATA,sha256=Irm5KJua33dY2qKKAjJ-OhKaVBVIfwFGej_dSe3Z1TU,6566
+annotated_doc-0.0.4.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+annotated_doc-0.0.4.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+annotated_doc-0.0.4.dist-info/licenses/LICENSE,sha256=__Fwd5pqy_ZavbQFwIfxzuF4ZpHkqWpANFF-SlBKDN8,1086
+annotated_doc/__init__.py,sha256=VuyxxUe80kfEyWnOrCx_Bk8hybo3aKo6RYBlkBBYW8k,52
+annotated_doc/main.py,sha256=5Zfvxv80SwwLqpRW73AZyZyiM4bWma9QWRbp_cgD20s,1075
+annotated_doc/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+annotated_doc-0.0.4.dist-info/RECORD,,
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/WHEEL b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..045c8acdea31cbca5be986e915f784c1aafc720f
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: pdm-backend (2.4.5)
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/entry_points.txt b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c3ad4726d437022e5c606a4206ffb6007347a008
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+
+[gui_scripts]
+
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..7a254464cc78ccea32b3ded00513c44c4e4da412
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc-0.0.4.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2025 Sebastián Ramírez
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/__init__.py b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0152a7d12abc2db37fb26e764a61e0c894a43f3
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/__init__.py
@@ -0,0 +1,3 @@
+from .main import Doc as Doc
+
+__version__ = "0.0.4"
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/main.py b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..7063c59e4500a1d02bfc9b41887f9e95f8163507
--- /dev/null
+++ b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/main.py
@@ -0,0 +1,36 @@
+class Doc:
+ """Define the documentation of a type annotation using `Annotated`, to be
+ used in class attributes, function and method parameters, return values,
+ and variables.
+
+ The value should be a positional-only string literal to allow static tools
+ like editors and documentation generators to use it.
+
+ This complements docstrings.
+
+ The string value passed is available in the attribute `documentation`.
+
+ Example:
+
+ ```Python
+ from typing import Annotated
+ from annotated_doc import Doc
+
+ def hi(name: Annotated[str, Doc("Who to say hi to")]) -> None:
+ print(f"Hi, {name}!")
+ ```
+ """
+
+ def __init__(self, documentation: str, /) -> None:
+ self.documentation = documentation
+
+ def __repr__(self) -> str:
+ return f"Doc({self.documentation!r})"
+
+ def __hash__(self) -> int:
+ return hash(self.documentation)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Doc):
+ return NotImplemented
+ return self.documentation == other.documentation
diff --git a/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/py.typed b/.cache/uv/archive-v0/yH6nUu9ihS7kLaquI29zs/annotated_doc/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.cache/uv/simple-v18/pypi/annotated-doc.rkyv b/.cache/uv/simple-v18/pypi/annotated-doc.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..20c861941b61746cde2e138ee277b2c10f37ad74
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/annotated-doc.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/anyio.rkyv b/.cache/uv/simple-v18/pypi/anyio.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..ee2946c0593efc1cd1997e17dc54ed8cea0d0b05
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/anyio.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/certifi.rkyv b/.cache/uv/simple-v18/pypi/certifi.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..8be74ef66334e50a2c8f9478d870b58a76147aad
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/certifi.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/click.rkyv b/.cache/uv/simple-v18/pypi/click.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..d03f8bca184299fa2081ecd97076d95d0720ee37
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/click.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/filelock.rkyv b/.cache/uv/simple-v18/pypi/filelock.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..818f431040971c7ebbdccbe35c07c397ff453350
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/filelock.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/fsspec.rkyv b/.cache/uv/simple-v18/pypi/fsspec.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..cc72325ebb535221bfb88fc88bb46652486ae8be
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/fsspec.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/h11.rkyv b/.cache/uv/simple-v18/pypi/h11.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..e6cf728256baa9428e9557f45f58ec4cf8158fc9
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/h11.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/httpcore.rkyv b/.cache/uv/simple-v18/pypi/httpcore.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..abfc9743570ded62a03ded947bc8969f91d9b62a
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/httpcore.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/httpx.rkyv b/.cache/uv/simple-v18/pypi/httpx.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..2fe76a27859478ca0df7df9a0b21f57e8730a02d
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/httpx.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/idna.rkyv b/.cache/uv/simple-v18/pypi/idna.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..c2de8c6033ae9b28eb9b0ab554139c94fb7d9d11
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/idna.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/markdown-it-py.rkyv b/.cache/uv/simple-v18/pypi/markdown-it-py.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..b75a9b6eddba344611fb7deca845b7b925e57757
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/markdown-it-py.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/mdurl.rkyv b/.cache/uv/simple-v18/pypi/mdurl.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..a18de700ed1f37300ea3db18c7b1b3b00309266b
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/mdurl.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/packaging.rkyv b/.cache/uv/simple-v18/pypi/packaging.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..e2b145fc5411b7e0f3d514f117e13a1f50a99628
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/packaging.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/pygments.rkyv b/.cache/uv/simple-v18/pypi/pygments.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..f6c205c2403485178af13b2840cd3ab5a896be35
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/pygments.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/shellingham.rkyv b/.cache/uv/simple-v18/pypi/shellingham.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..1d3d9e1a240103fff415966f8a93c72d352e984d
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/shellingham.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/typer.rkyv b/.cache/uv/simple-v18/pypi/typer.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..6f718dc1dc1ac5e5b513ab5ced858b912f975b6a
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/typer.rkyv differ
diff --git a/.cache/uv/simple-v18/pypi/typing-extensions.rkyv b/.cache/uv/simple-v18/pypi/typing-extensions.rkyv
new file mode 100644
index 0000000000000000000000000000000000000000..4107cd29e6b23b48f7d7565736a6455be0a8ded0
Binary files /dev/null and b/.cache/uv/simple-v18/pypi/typing-extensions.rkyv differ
diff --git a/.cache/uv/wheels-v5/pypi/annotated-doc/0.0.4-py3-none-any.http b/.cache/uv/wheels-v5/pypi/annotated-doc/0.0.4-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..04451f60fabeef2da86cac7bf0fa6f92f2b5b574
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/annotated-doc/0.0.4-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/anyio/4.12.1-py3-none-any.http b/.cache/uv/wheels-v5/pypi/anyio/4.12.1-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..7081daa0cfeeca320ff640327c547658f647c617
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/anyio/4.12.1-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/certifi/2026.2.25-py3-none-any.http b/.cache/uv/wheels-v5/pypi/certifi/2026.2.25-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..1371cd5baa8829701d410f23abdd1ddb933a1942
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/certifi/2026.2.25-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/click/8.3.1-py3-none-any.http b/.cache/uv/wheels-v5/pypi/click/8.3.1-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..8ba396c31904adb8d465d9cfbf2f5d5e32cb1e10
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/click/8.3.1-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/filelock/3.25.2-py3-none-any.http b/.cache/uv/wheels-v5/pypi/filelock/3.25.2-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..014feeda77c2377a2f3b1427140978e1bf5faad8
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/filelock/3.25.2-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/fsspec/2026.2.0-py3-none-any.http b/.cache/uv/wheels-v5/pypi/fsspec/2026.2.0-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..ca1fd2d3e8ae46d667ec20fa989c02ba94f9abdb
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/fsspec/2026.2.0-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/h11/0.16.0-py3-none-any.http b/.cache/uv/wheels-v5/pypi/h11/0.16.0-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..4fe3282a3fe09a022331e5f0dff034d9e4f35787
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/h11/0.16.0-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/hf-xet/1.4.2-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.http b/.cache/uv/wheels-v5/pypi/hf-xet/1.4.2-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.http
new file mode 100644
index 0000000000000000000000000000000000000000..ad70c62f3592d02b36391099961a572f3b469e22
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/hf-xet/1.4.2-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.http differ
diff --git a/.cache/uv/wheels-v5/pypi/httpcore/1.0.9-py3-none-any.http b/.cache/uv/wheels-v5/pypi/httpcore/1.0.9-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..1e535f470921ff974f460748df1984e38a06b689
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/httpcore/1.0.9-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/httpx/0.28.1-py3-none-any.http b/.cache/uv/wheels-v5/pypi/httpx/0.28.1-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..1ea585b9146a0610b081aa8444245b49fce59f9d
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/httpx/0.28.1-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/huggingface-hub/1.7.2-py3-none-any.http b/.cache/uv/wheels-v5/pypi/huggingface-hub/1.7.2-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..da7f1d41ba020db5df223f5ad74b684e5731d64b
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/huggingface-hub/1.7.2-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/idna/3.11-py3-none-any.http b/.cache/uv/wheels-v5/pypi/idna/3.11-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..80350ba2182bf982f78601007edab56085d9de50
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/idna/3.11-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/markdown-it-py/4.0.0-py3-none-any.http b/.cache/uv/wheels-v5/pypi/markdown-it-py/4.0.0-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..3398a0e385fbedee7c07f0167f6f971cb44938ea
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/markdown-it-py/4.0.0-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/mdurl/0.1.2-py3-none-any.http b/.cache/uv/wheels-v5/pypi/mdurl/0.1.2-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..b612b38cf75d7845ae94b0f542c8dc8e27f4cca1
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/mdurl/0.1.2-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/packaging/26.0-py3-none-any.http b/.cache/uv/wheels-v5/pypi/packaging/26.0-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..46e398d2940d4760a9c5be04dc686e22cf43e416
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/packaging/26.0-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/pygments/2.19.2-py3-none-any.http b/.cache/uv/wheels-v5/pypi/pygments/2.19.2-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..8a5d8a5b041ff5aac5fe6c658f581606d5a03db6
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/pygments/2.19.2-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/pyyaml/6.0.3-00b06ae7f1096870.http b/.cache/uv/wheels-v5/pypi/pyyaml/6.0.3-00b06ae7f1096870.http
new file mode 100644
index 0000000000000000000000000000000000000000..65103bc107ea684ab36bdfceca468b1a43cab8e6
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/pyyaml/6.0.3-00b06ae7f1096870.http differ
diff --git a/.cache/uv/wheels-v5/pypi/rich/14.3.3-py3-none-any.http b/.cache/uv/wheels-v5/pypi/rich/14.3.3-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..45f20be0af5d59327aa04c1bb1c1a0cb89fe1b74
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/rich/14.3.3-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/shellingham/1.5.4-py2.py3-none-any.http b/.cache/uv/wheels-v5/pypi/shellingham/1.5.4-py2.py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..f787def30cdbd67f8bd76f9a3dea1c601ac09e02
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/shellingham/1.5.4-py2.py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/tqdm/4.67.3-py3-none-any.http b/.cache/uv/wheels-v5/pypi/tqdm/4.67.3-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..eb088084b926d0856ecdcdff8272c9ac3885c337
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/tqdm/4.67.3-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/typer/0.24.1-py3-none-any.http b/.cache/uv/wheels-v5/pypi/typer/0.24.1-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..1733bb5f7e94c4f63a9d88f9b5d6ad3b45929e9b
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/typer/0.24.1-py3-none-any.http differ
diff --git a/.cache/uv/wheels-v5/pypi/typing-extensions/4.15.0-py3-none-any.http b/.cache/uv/wheels-v5/pypi/typing-extensions/4.15.0-py3-none-any.http
new file mode 100644
index 0000000000000000000000000000000000000000..6535f5d9d4ab722dabbe2609a8bd6cfa5fd368bc
Binary files /dev/null and b/.cache/uv/wheels-v5/pypi/typing-extensions/4.15.0-py3-none-any.http differ
diff --git a/.gitattributes b/.gitattributes
index fa9fe687d73578018d1da32a3ab40d8a48e55126..bc7c959ff6a79021292fcf665dfed105277e09f9 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -67,3 +67,5 @@ __pycache__/Changed_MLR_All_Strategies.cpython-312.pyc filter=lfs diff=lfs merge
.cache/pip/http-v2/a/7/a/1/2/a7a12c025695d568f5d73aa1b3adb9e9b040c3d15606f125747a2f15.body filter=lfs diff=lfs merge=lfs -text
.cache/pip/http-v2/9/b/5/1/6/9b51619e20a708f99372afefd7dbe20a1b5e42ae213f02bf27267976.body filter=lfs diff=lfs merge=lfs -text
.cache/uv/archive-v0/8pe70yNTw5Y7O7ZqMsYi-/yaml/_yaml.cpython-312-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6.body filter=lfs diff=lfs merge=lfs -text
+.cache/pip/http-v2/c/d/5/d/e/cd5de44b899cbe1869765c21b55cffc53c0c0ffa9d3c6f1fd40a42a2.body filter=lfs diff=lfs merge=lfs -text
diff --git a/10%_Strategy2_latest.pt b/10%_Strategy2_latest.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ef8dc3d8cb18318553d9c3bb3e51d609f317dd55
--- /dev/null
+++ b/10%_Strategy2_latest.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f87fb2ae852d2c103c442bc6d80be28a5889cb50cf405d757278748b5bfb7b2b
+size 72258089
diff --git a/100%_Strategy2_latest.pt b/100%_Strategy2_latest.pt
new file mode 100644
index 0000000000000000000000000000000000000000..e1ecf3af53d7f1a7a6de35b76d29caff4e7e6ff3
--- /dev/null
+++ b/100%_Strategy2_latest.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a08d5e110968e2f9d26bddcafe11cbe6c773809427d95a482f71c6828e692fb6
+size 72258089
diff --git a/50%_Strategt2_latest.pt b/50%_Strategt2_latest.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2e776b7373038c9d2e415a0a10da3b7e19f87cdd
--- /dev/null
+++ b/50%_Strategt2_latest.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:033ea6e08ae9484cecbaa9b34db2a811532857cd2d9a660be5961c0679c9bb04
+size 72258089
diff --git a/BUSI_with_classes/all_images/BUSI_0045_M.png b/BUSI_with_classes/all_images/BUSI_0045_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..8da351f004187ad9ee127bf33fdb46872508a385
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0045_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f12662dc1b4d1de30db9d6f35d5f5690efd66242ad0a2ec4775138f9e65dac2
+size 259401
diff --git a/BUSI_with_classes/all_images/BUSI_0046_M.png b/BUSI_with_classes/all_images/BUSI_0046_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..cd49c1e769b37ce63efcfa88e77b6feda28f3bde
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0046_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:356f255b87b34d245f05a721a7b3ef29ddfa430c79123465947797c949fad759
+size 250851
diff --git a/BUSI_with_classes/all_images/BUSI_0048_M.png b/BUSI_with_classes/all_images/BUSI_0048_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..bc0dcf6f7d6a43c2209241a623b1bb6b1fe5a2b7
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0048_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b74fc81705bb773f28390a0e23b03cd33548acc2e4cb10cd5b4ed1d54e51599c
+size 171232
diff --git a/BUSI_with_classes/all_images/BUSI_0049_B.png b/BUSI_with_classes/all_images/BUSI_0049_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..f456e8adb98ceae3c041ab128ca52d9df44dba8a
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0049_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2dc4ec35fed3590eb2430ef498822417149fe42933970c3a39a192ce202b856
+size 235980
diff --git a/BUSI_with_classes/all_images/BUSI_0049_M.png b/BUSI_with_classes/all_images/BUSI_0049_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..fd22a79786748566c5f3355e3944429ff4be827e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0049_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f50384b97f1b0f20d557a4216afa2a40afb3254aa89c8619d94bc083c467fb1
+size 185204
diff --git a/BUSI_with_classes/all_images/BUSI_0050_B.png b/BUSI_with_classes/all_images/BUSI_0050_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e627a5d53fde086bc2b58b06fd67548ef0cb407b
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0050_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60a78dcdce7ef9286396bc2ab1c04e395754bf4b685af5fd14bd0ca8df6dfafc
+size 285182
diff --git a/BUSI_with_classes/all_images/BUSI_0050_M.png b/BUSI_with_classes/all_images/BUSI_0050_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..684855517363dfce3f4085c57e9cfa3743549155
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0050_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c586c6ec8e9070e6baaeeba2e7c71d37f6fd5aabc56c2d8ec75f514e5b75afb8
+size 152560
diff --git a/BUSI_with_classes/all_images/BUSI_0051_B.png b/BUSI_with_classes/all_images/BUSI_0051_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7caed827beeb5f3a6f171f7e04af65a123d4228e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0051_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d590ffc178bad70844b9525adf0daeaf84c1e1571134acfde7cc75a23907d088
+size 84572
diff --git a/BUSI_with_classes/all_images/BUSI_0051_M.png b/BUSI_with_classes/all_images/BUSI_0051_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..cdc58290f52804e4dcc40319f36d644b35f2f00a
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0051_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e6b43dc12d575229a5c3b44ca36b00a6c1e1dbfc38edf421312507ba29ef6155
+size 151787
diff --git a/BUSI_with_classes/all_images/BUSI_0052_B.png b/BUSI_with_classes/all_images/BUSI_0052_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..cb223e33e6b4428a2732378146ffc62bfc485589
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0052_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d57dfcc97dcbd7d61c7638d71d106471371a0af218c206142db7db0d1fa134fd
+size 90271
diff --git a/BUSI_with_classes/all_images/BUSI_0052_M.png b/BUSI_with_classes/all_images/BUSI_0052_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..101409b5a00a1e2f338f6190fdc3057cadbcee47
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0052_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82c005dadfd80356f66a2575aae446e02a1e1c32906e48e40cdae3e16d773181
+size 160723
diff --git a/BUSI_with_classes/all_images/BUSI_0053_B.png b/BUSI_with_classes/all_images/BUSI_0053_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..51576e1313a6d91842b02d8b7d230ebc57012ae2
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0053_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0554d253769e5670e3c758f6845bc55a474bda8b970bffa06bc128c7de198fba
+size 162321
diff --git a/BUSI_with_classes/all_images/BUSI_0053_M.png b/BUSI_with_classes/all_images/BUSI_0053_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..63c937d71f91b1f657a7eefb18109bc9656a6afd
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0053_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba7403c349498cb31380a3460fc0afbb79520d33dd1d0b4d475f931766436b21
+size 176369
diff --git a/BUSI_with_classes/all_images/BUSI_0054_B.png b/BUSI_with_classes/all_images/BUSI_0054_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5af5ff21e84f322d4d9af9192f52c9a7ffee2bee
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0054_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5244b4beb1204a581cc3851b38ec3973dcd8b9d4634af53c547f56b7081c379a
+size 176858
diff --git a/BUSI_with_classes/all_images/BUSI_0054_M.png b/BUSI_with_classes/all_images/BUSI_0054_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..5ce5612ef976f54a0e344f21a130526d28c93b7c
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0054_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ade77914ef37fdc3c87afb3cf1cf895fb1877a757c839be3cf6e1c706ee15a3
+size 244357
diff --git a/BUSI_with_classes/all_images/BUSI_0055_B.png b/BUSI_with_classes/all_images/BUSI_0055_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5fb70045e71cd1544a0026740b7b0cbb53a66450
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0055_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce885ae20c233964cc4eee3c4141b3c5f317f22569351f4bb4b88398b7ea4bd8
+size 264590
diff --git a/BUSI_with_classes/all_images/BUSI_0055_M.png b/BUSI_with_classes/all_images/BUSI_0055_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..334c95b46e50ab03fbc3a2d9fccf2dc5df4a83c3
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0055_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e964c55e67f41de407551a7a984eb413c1cbfc4534d4572070a9208ac8f9a41
+size 241674
diff --git a/BUSI_with_classes/all_images/BUSI_0056_B.png b/BUSI_with_classes/all_images/BUSI_0056_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..582f384f3f734066e0097d1677208bbce9733ed4
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0056_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:990ed5c945715dbb506ed51295b765c80ed7bbffedd30ca35b99f32664003bd1
+size 303478
diff --git a/BUSI_with_classes/all_images/BUSI_0056_M.png b/BUSI_with_classes/all_images/BUSI_0056_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..058aabda18d54a4377c29552e0791ce3c42d6dcd
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0056_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a3b5461a3b72d70578818fba0e6cd79428a7d777e879b244005a64f72b46113
+size 197033
diff --git a/BUSI_with_classes/all_images/BUSI_0057_B.png b/BUSI_with_classes/all_images/BUSI_0057_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..09e704f774b2eb3f45b2541359603937f6b227f8
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0057_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb776ad51abba7d5dd97bb1fbf3e2a73621ea186f4351aa6830264db9d23f760
+size 156103
diff --git a/BUSI_with_classes/all_images/BUSI_0057_M.png b/BUSI_with_classes/all_images/BUSI_0057_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..c691bd6fb3f9d41c355df3290909929a777c8bfe
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0057_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6bb6ce375ea3dd2096f24d611b3dbab1fd05084f97963fe05b12c41d56490df
+size 205528
diff --git a/BUSI_with_classes/all_images/BUSI_0058_B.png b/BUSI_with_classes/all_images/BUSI_0058_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..15320d3217f61b222756b1aabcad0f2a8cca7db9
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0058_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e955972534c5c1c8236e8b2c22fb1d037736c42841fd1517585ac52935d9e4a
+size 247941
diff --git a/BUSI_with_classes/all_images/BUSI_0058_M.png b/BUSI_with_classes/all_images/BUSI_0058_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..4172bf279f71437b66d0a3a560d4a1832ae499bf
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0058_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ef950f3f873940cb4caf5982fefe4ffbd35d52c5b73a4e37739b13556bc62eb
+size 208204
diff --git a/BUSI_with_classes/all_images/BUSI_0059_B.png b/BUSI_with_classes/all_images/BUSI_0059_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c01bf877df90420face654e9f56455dcdbfaef89
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0059_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e676c1e5887f30e929a18d01b2537ba4144f9e6c340a58966f470ef40a54eecb
+size 158646
diff --git a/BUSI_with_classes/all_images/BUSI_0059_M.png b/BUSI_with_classes/all_images/BUSI_0059_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..f899c914e9532ba3082c9c84eb9c96c0f9cdf924
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0059_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d7d6ac021facd411a595c4016cbf05bf2b98de069a3ed07749e5ccb0508262e1
+size 201552
diff --git a/BUSI_with_classes/all_images/BUSI_0060_B.png b/BUSI_with_classes/all_images/BUSI_0060_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..3722a5ea5a9ce53cb7f1e4b129a3f6305fa0c85d
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0060_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59589be94cbb88d5e3e958799c15df82a54241837c07361e2db3a0d0eb49f23e
+size 249559
diff --git a/BUSI_with_classes/all_images/BUSI_0060_M.png b/BUSI_with_classes/all_images/BUSI_0060_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..1295f925cced530d7b2aa6f8b7e16a37b1cd0aab
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0060_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb8c28ed11d6967a003202cc00f21a87d8a698fc21fac2cf6a22b13fd02388ee
+size 209177
diff --git a/BUSI_with_classes/all_images/BUSI_0061_B.png b/BUSI_with_classes/all_images/BUSI_0061_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2e2e7273f76d4f6c658682f481bb273e88b4013f
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0061_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5180f1b3fff08a550197ee969b2b534c49629ae944e526b047015d54e669b0c0
+size 243487
diff --git a/BUSI_with_classes/all_images/BUSI_0061_M.png b/BUSI_with_classes/all_images/BUSI_0061_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..e3b69d40e1343d1964ba4683a530665fec737295
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0061_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e16b97d9c675e76982460d07235dd5a5dc0f91afb79c512cca24d8b44947bfd4
+size 166665
diff --git a/BUSI_with_classes/all_images/BUSI_0062_B.png b/BUSI_with_classes/all_images/BUSI_0062_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..574a968ab74a97432fdba9dc30cc136952be74a4
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0062_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:337fe83f3919fe5035be81e2bf05b3c1ea57755d258a32a34cfb50ccb91d11d1
+size 138623
diff --git a/BUSI_with_classes/all_images/BUSI_0062_M.png b/BUSI_with_classes/all_images/BUSI_0062_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..45db43219cd0a29c0548075a8f054e86d3d056df
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0062_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4918877dbc1a9efd616d945827d9c75d7a46f0c2ebe02920af54a9f5cc46490
+size 220167
diff --git a/BUSI_with_classes/all_images/BUSI_0063_B.png b/BUSI_with_classes/all_images/BUSI_0063_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0b24f2bf55c430dcb89dd4d38df41bc09a29e649
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0063_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e79a9a55b7d4f11fd87df490dc706e772af6a9df5b9f51b11e8b62270eadfd0
+size 216382
diff --git a/BUSI_with_classes/all_images/BUSI_0063_M.png b/BUSI_with_classes/all_images/BUSI_0063_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..72d604eb84721d8dc94b02db7b437ed85ed59dd9
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0063_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a73aebe385be8d798aaed99970b9c9b5d5750dd3e04cb9474fe56083a22c0ed
+size 243616
diff --git a/BUSI_with_classes/all_images/BUSI_0064_B.png b/BUSI_with_classes/all_images/BUSI_0064_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..d301b4765cc44b9251d01b58f819b03ae039b57a
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0064_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:702486aa165ceb7e48b6d967dd63116b90e01c2597e71dcd24fa471a259bdd18
+size 179894
diff --git a/BUSI_with_classes/all_images/BUSI_0064_M.png b/BUSI_with_classes/all_images/BUSI_0064_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..aaf82fc8e13e65ebd54a0a61cdecdaef2b02379c
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0064_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36ccca8f83b7265ddc7d18d3b06baf4829f32ce1de9e6a3b9c14cfaa58f013ed
+size 200072
diff --git a/BUSI_with_classes/all_images/BUSI_0065_B.png b/BUSI_with_classes/all_images/BUSI_0065_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a67a736e539e7dda32b7c004b1786d543ed8f7d5
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0065_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:136d18b52e5dafe24e6ed19804db7bac4afb1cb30bcff695b697053c5f96c988
+size 160918
diff --git a/BUSI_with_classes/all_images/BUSI_0065_M.png b/BUSI_with_classes/all_images/BUSI_0065_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..b7ddf87312a2fb1820adc253ce060cc7c95eafee
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0065_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32edaab301082424dfd6b8af7a40381905044e1888ad55cb92870b2a7fec98b6
+size 293482
diff --git a/BUSI_with_classes/all_images/BUSI_0066_B.png b/BUSI_with_classes/all_images/BUSI_0066_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..fd155eab7ce9114d34e4026553312be9fa03e8e9
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0066_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd1724a4d7e376cd1dab754329283facc8a12daab160fec6a9fd2fe3706a6140
+size 170305
diff --git a/BUSI_with_classes/all_images/BUSI_0066_M.png b/BUSI_with_classes/all_images/BUSI_0066_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..4c6e2483538a8cd5d3f3651f533f1997fe29f3ed
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0066_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:16c4b4faa4940575c71e6bb91dd63a21af4b2be47bba2f7522312483134c28d0
+size 219837
diff --git a/BUSI_with_classes/all_images/BUSI_0067_B.png b/BUSI_with_classes/all_images/BUSI_0067_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..dd5120428ed788a3ec57cd11a5fb862aca375e5f
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0067_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aaba47722fbae0cd76861d25e544826216774bbe07a96c56c50cdb8db4eb6523
+size 154430
diff --git a/BUSI_with_classes/all_images/BUSI_0067_M.png b/BUSI_with_classes/all_images/BUSI_0067_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..4a037ab5c393db50dfd50344b1150c6f9b9bba69
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0067_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34cf99976b6e1d1bdb42ef10309efd13904926c29338a25fc6ee747ad63f585c
+size 315603
diff --git a/BUSI_with_classes/all_images/BUSI_0068_B.png b/BUSI_with_classes/all_images/BUSI_0068_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..468aa1e24889db284ccc95ffcf63355231484bec
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0068_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48448d4cf6cc82a2e9273b0a0e27367e36e141f655d4ad8c7fa402abbd64824f
+size 227064
diff --git a/BUSI_with_classes/all_images/BUSI_0068_M.png b/BUSI_with_classes/all_images/BUSI_0068_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..241ab3287909be1c0373dcbca4cc5a21b40205de
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0068_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:357fb54776f62fba6930130c273257bcc6fc721f2715643a7673e8a73dc927d8
+size 159908
diff --git a/BUSI_with_classes/all_images/BUSI_0069_B.png b/BUSI_with_classes/all_images/BUSI_0069_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6715fc7011aa5c022a3545de6d943b04b4178843
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0069_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2a914a3ba4e967f2c6de84a0b2d0e788fdbb392705aa41f5472a830ca83c51c
+size 316505
diff --git a/BUSI_with_classes/all_images/BUSI_0069_M.png b/BUSI_with_classes/all_images/BUSI_0069_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..e9f5db43091c2946264a731e1cdfcc0209a75332
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0069_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:925e7488817888344ad17393a0b3ef2cdc6faccfa5dc77794f8ae17f80e54bbf
+size 159564
diff --git a/BUSI_with_classes/all_images/BUSI_0070_B.png b/BUSI_with_classes/all_images/BUSI_0070_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..06d8c1d01aa4d15e0a87c61b00ed6b9e3b76b1af
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0070_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1102f4034e6bb0e9439faa40b660234df0503e1eded853190431848dba8dbcee
+size 191785
diff --git a/BUSI_with_classes/all_images/BUSI_0070_M.png b/BUSI_with_classes/all_images/BUSI_0070_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..2c3eb70565a60bab38d4eafcd396399022d5bdaa
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0070_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b583f883ce61e93e74c0e3bab5be463402d25d8dd4abbb0714e9243899af8418
+size 163771
diff --git a/BUSI_with_classes/all_images/BUSI_0071_B.png b/BUSI_with_classes/all_images/BUSI_0071_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6679e216ae4351fcf6bbd88e44ffa0e22d254f8c
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0071_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb4c4715ed4e021cb1cbe4d845e4094122df0513d9e69497fee3b6f663ac6920
+size 155563
diff --git a/BUSI_with_classes/all_images/BUSI_0071_M.png b/BUSI_with_classes/all_images/BUSI_0071_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..f7242490ab0dcbf292491a353648c763f8f0962e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0071_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:088462c53eb2eee0698979a69406e9818aff7c4766966a4ec234617a314b2750
+size 180563
diff --git a/BUSI_with_classes/all_images/BUSI_0072_B.png b/BUSI_with_classes/all_images/BUSI_0072_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4f113476a42e6b8dc16db3192d990039b3bd6504
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0072_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c988a2420afe5a72dda36cff7d881371a3f1632a421b3b73d51984d459f0dad9
+size 168059
diff --git a/BUSI_with_classes/all_images/BUSI_0072_M.png b/BUSI_with_classes/all_images/BUSI_0072_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..98bb76052f08e32befa3a1355d749302f9ccf81e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0072_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddce046d1959a536a1b04678ac7051febbed141eeb18fc597a869ad094705783
+size 178390
diff --git a/BUSI_with_classes/all_images/BUSI_0073_B.png b/BUSI_with_classes/all_images/BUSI_0073_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a7b9941afb4ef194cf0297bb5783fbb9c9c9807e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0073_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94ecb04ea9b767cc23cef27196ce6e780f3af3b6ebbcf3ae15a369a28107c77d
+size 161126
diff --git a/BUSI_with_classes/all_images/BUSI_0073_M.png b/BUSI_with_classes/all_images/BUSI_0073_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..9f9ca586678f70d1e56ce78aff4a853bbee4754a
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0073_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6569484601a3c77d6c107832d84484794d8cd2ef1659beda1d8bef92d384349b
+size 166626
diff --git a/BUSI_with_classes/all_images/BUSI_0074_B.png b/BUSI_with_classes/all_images/BUSI_0074_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ae7ab337642810b91dac7be0fb2299e4af853312
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0074_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbded2f75980e780c21409f3acb21aa1c80bfa14ef9d71bb831ce75929824b6f
+size 164994
diff --git a/BUSI_with_classes/all_images/BUSI_0074_M.png b/BUSI_with_classes/all_images/BUSI_0074_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..a93c1369ffa860dedbb4a09c003a3cb56e269098
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0074_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff738572bc4387b461f3fe7f02b0c3090667f6b245f57eb4640efa23c2df10c4
+size 165283
diff --git a/BUSI_with_classes/all_images/BUSI_0075_B.png b/BUSI_with_classes/all_images/BUSI_0075_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..49cd9aface0844bc3440fff4a0c50029cb1c1b3e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0075_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92e5f11c24191d37969670779d22ad188707a30db5b3267f5931aa4a1a07e2c2
+size 243014
diff --git a/BUSI_with_classes/all_images/BUSI_0075_M.png b/BUSI_with_classes/all_images/BUSI_0075_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..0beb2ac04435d7caa0cc8c0779fd208c8a4437d8
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0075_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e3ea60ae677607b3c5499285cdf9c4c16c69cc100ba55db76a9e714f3d918a7
+size 106255
diff --git a/BUSI_with_classes/all_images/BUSI_0076_B.png b/BUSI_with_classes/all_images/BUSI_0076_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..831a9938d2b3876e78cf1d0e2f235221cbb217e5
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0076_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc17b2e8fdb142c3745791e8feaab48085b7c4e6b7236d3b8d7661ef74ae539d
+size 308501
diff --git a/BUSI_with_classes/all_images/BUSI_0076_M.png b/BUSI_with_classes/all_images/BUSI_0076_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..e64353d554aadd033e4c213b745b554022e95123
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0076_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69acb50cfb84d47c94462139dd53f11e5b9d61300561ea41cedf57c481f135b0
+size 178459
diff --git a/BUSI_with_classes/all_images/BUSI_0077_B.png b/BUSI_with_classes/all_images/BUSI_0077_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..3db4843592d478682b87d4d95b7cf847eebc8db2
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0077_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f3d6bde60119e06361a1e822efb9af6a8059bc3f5cbca580fa1334572e01ddb
+size 124977
diff --git a/BUSI_with_classes/all_images/BUSI_0077_M.png b/BUSI_with_classes/all_images/BUSI_0077_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..56a786d71ebc76a906885f56ff93ff14d533847e
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0077_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a5d406c18cf1c6e20cb9ef553d494e1bd7e5cdf0e74cb2292421c2b30381b46
+size 266333
diff --git a/BUSI_with_classes/all_images/BUSI_0078_B.png b/BUSI_with_classes/all_images/BUSI_0078_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c613ea8ce5ba2d37399f6bd7a42fdaed3a011e19
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0078_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e6d67c89dda04cf0de12817b054853e17f146c3cff4d3a9840cb08ae853e8a7
+size 252527
diff --git a/BUSI_with_classes/all_images/BUSI_0078_M.png b/BUSI_with_classes/all_images/BUSI_0078_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7f046baa877fc17662ffa3d214886a052bd9525f
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0078_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25579bee76896463454c8d18264b9a2dc85fd8e26770c59e24728c25085c1e1a
+size 281945
diff --git a/BUSI_with_classes/all_images/BUSI_0079_M.png b/BUSI_with_classes/all_images/BUSI_0079_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..82c14bf4c78aa308b1542a2af61d6b0c116b0a67
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0079_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3375087928b83b2695d65dc83abda4c17b7539951c3010fff6c63631bcca57a0
+size 159600
diff --git a/BUSI_with_classes/all_images/BUSI_0080_B.png b/BUSI_with_classes/all_images/BUSI_0080_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4a8690c19893b609fa015e1df35152ea162cc335
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0080_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa0e405fedf832b77082d71c0cb35d94383aa4d1c5eb66d9cbc2ae725f41c1bf
+size 158286
diff --git a/BUSI_with_classes/all_images/BUSI_0080_M.png b/BUSI_with_classes/all_images/BUSI_0080_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7c3d8883aceb1bf78ad8801bc8ac528a61e684ae
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0080_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1751a8d75c275306ace27d8644b9830b80ee3f7601d71739d30be47e94626295
+size 187247
diff --git a/BUSI_with_classes/all_images/BUSI_0081_B.png b/BUSI_with_classes/all_images/BUSI_0081_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..156b3b199cea0343a7d59c2ec9d60a3984bfabe0
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0081_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5628241d09669326261d6b90fcd8990aad4861e87e50390e8143027f66bc8a06
+size 167883
diff --git a/BUSI_with_classes/all_images/BUSI_0081_M.png b/BUSI_with_classes/all_images/BUSI_0081_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..8f9404a0daae9ed341a81a994fb36c900fd14118
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0081_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb0f68efbf6d3c94343fde8a935ebb98989977a58c3a31df1d6cda1238b8e880
+size 221605
diff --git a/BUSI_with_classes/all_images/BUSI_0082_B.png b/BUSI_with_classes/all_images/BUSI_0082_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..d7669c34951a0ddfd2d14c1af2646ac43dcbf60f
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0082_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71beff17c57be85072ca0e76c1fef8c53877f47c988855a18ccc2f46db6da887
+size 167900
diff --git a/BUSI_with_classes/all_images/BUSI_0082_M.png b/BUSI_with_classes/all_images/BUSI_0082_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..14fc5916b3910b25b9c7c53a01324ee602790506
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0082_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2575b9a5099c70c0771d6b546daa62ef64e1661231cf656e7f2a78c420e2c914
+size 135898
diff --git a/BUSI_with_classes/all_images/BUSI_0083_M.png b/BUSI_with_classes/all_images/BUSI_0083_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..dd930c3790cf5fbf4d6231161218d91d7f978f17
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0083_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b893da18d8417c19c2a5f900eb4263dd86e1bd1a439360063cbe2aa9ea2dbf50
+size 230916
diff --git a/BUSI_with_classes/all_images/BUSI_0084_B.png b/BUSI_with_classes/all_images/BUSI_0084_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9d473a0b2783cb80df0a2e5c5de267e874c30669
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0084_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8467b5b55eca9f8bedbb9a2ec5350a4436fdcfabfdcea1334308d92ffeec35d
+size 228135
diff --git a/BUSI_with_classes/all_images/BUSI_0084_M.png b/BUSI_with_classes/all_images/BUSI_0084_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..29f07779ff5198b9b7f92963805638b2d9f051dd
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0084_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19184e20cb3e3a23c45f4b079a7b8af8be7db8eec09aa9668c6b6597b87ba55f
+size 221889
diff --git a/BUSI_with_classes/all_images/BUSI_0085_B.png b/BUSI_with_classes/all_images/BUSI_0085_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..bb54c7258cbfbc0d73fcdd6b64ec29b6169c2276
--- /dev/null
+++ b/BUSI_with_classes/all_images/BUSI_0085_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97401a8589578e63beaf814532939209f1c0970ecad0d62bde23da722379e07a
+size 163721
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/alpha.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/alpha.png
new file mode 100644
index 0000000000000000000000000000000000000000..a4f5856927d2cd703595a9be15b36499452db946
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/alpha.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e17d6beaae15304f2d55906f68123a1cbc2a82544e3dc6e35a0953b8cd905138
+size 26791
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/ce_loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/ce_loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..e971420d0d269dd601b988d97d3d4d4377c7964a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/ce_loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2df2fd17d0dffbb5009a6ca952db8b51ac94f45b7e3d58965ae2ba847e6ccba5
+size 27855
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice.png
new file mode 100644
index 0000000000000000000000000000000000000000..59118ccba50ff348b110e3bdef52eafb36506841
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc42e423e67f6300f602f7a3d4d42363aa3a9d528485b39947deff82a9838dcd
+size 27246
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice_loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice_loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..a17bf540a3c7e106f1fe6d6de075d80ee7d8391c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/dice_loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85389a1354d9c2957a6302fd13043722613fda730088c7f83827a00246c576b9
+size 30524
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/entropy.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/entropy.png
new file mode 100644
index 0000000000000000000000000000000000000000..6d7e203e6a14def3a0a56142fc22a0f003c8bb0f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/entropy.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac58e854efbfd17df53ae66255e7a2f849239a47c5535bbfaf8217aa79da5536
+size 28092
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/iou.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/iou.png
new file mode 100644
index 0000000000000000000000000000000000000000..d806f4502d9f1b10f57f67062b83c502ad0960c8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/iou.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b558966139710fec092e196291f2adcd8e29db607092b66a3445a93d3ac95f93
+size 27877
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..0b9ca9b38f0ee95125aae4e6d012360387881a26
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d8c39d2524ec68f4de08116800a4b772926f7f3016c6cf33595550ba920ad26
+size 30964
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/lr.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/lr.png
new file mode 100644
index 0000000000000000000000000000000000000000..aa2110e8c4217cd419756830e6e2d5e0495d9663
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/lr.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e05aa15408f7355e397a268fd45876d979e55dc95e00b57e7751498b722d93d9
+size 42152
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/reward.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/reward.png
new file mode 100644
index 0000000000000000000000000000000000000000..dcf4b989d7c3c0a19ca234af1150745daecc0409
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/plots/reward.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f743aca25a0ba2f9c1f731fb42de7ce066e311f222472972f388d723ce5c39e
+size 35832
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0020_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0020_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..016c918afd65e0b36d33d397d14dce0b409035d9
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0020_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5755131f5bc1a8d99d62f9d896a3e505161de890fcf6f1b750c398f341d0ad58
+size 243
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0023_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0023_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..3184cf3936b6054b68151ff9cf31e13e688e6614
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0023_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91ec73c203e0fca8b67e99feb0272d69a52c4592b897f0abd4b00babbd56d0be
+size 426
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0055_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0055_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7b0f6324d1edc0a9225855b92afeeecafa866edd
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0055_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40a656018805004b5ffd087d8eee3cc608b0253eafa76e1b9b99b1fc601bcfc8
+size 283
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0062_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0062_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..397237b268ed1ae4a15aef7db9465ab1735e2477
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0062_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:daca5838f5526e8bd8cb84cd6ef7e09daf7a72fa82294f6552f1d780dda0652f
+size 460
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0073_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0073_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..b508a4c2a520321bea19ac0b8fbf303f01312d8f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0073_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d94d4de63c43e003440c1848da8f4f5340c9a22598cbfa0088701c2abc26d95
+size 273
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0075_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0075_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..37be1fcdbda4cc658d72c6da6734b42d143a2b4c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0075_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c92334bc13ef101000e44469268e4ee0ef81575dd2b1254e32eb4971687d19c
+size 223
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0099_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0099_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..d57493d9d94ba761528e30167533370d720786c8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0099_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:173247764a2016828034c874f8b3ebed484a302e08bc5f2e5dfc6d89ffcfb0c8
+size 232
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0100_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0100_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7a02e8b1594f1dc0d891acc513205b9c3935a407
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0100_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0468923d9abfcf2f18c5cc1e75755bb1a51323447e0db86c764916a556bf52e0
+size 273
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0104_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0104_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c50d80b918b6a33ebb99688e71b29eee3874e2c3
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0104_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7bf25f5433f8bae73e6ece33bdef21a387c99dec14876b4a3be42b80153ad7af
+size 155
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0121_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0121_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5a770be1be71fcd1b9f41b9995b35b9f0ac7a7a3
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0121_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:643f0a668e36e7018aa2e72f6d1ce6e20a621194324924de7eb775409cffb779
+size 232
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0129_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0129_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..36e790b432e820ed73803fd1ad2ed0169f03a323
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0129_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ded881d2078623f8f988b2032dd8db63427d291eab9d27d75f6a0eefae08459e
+size 550
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0153_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0153_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..58201b1f91a03d1ee61e03e43362fe6a4954b6f4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0153_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f821c2b529dd97174ddfc216f24272a5562da451d34c73498a88051476019bd7
+size 525
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0154_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0154_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9d1e15e676d6279ed3b5bb44e9006427de3cfa6c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0154_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:510e034f626305b09e0753bbefc4c370537a359c519812afbefcced402e48b47
+size 156
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0156_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0156_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..238abb460a53cf1f29e2b14f2ed8b34fde9ce886
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0156_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:747c569d4f77bbc9aa095cdab9e180237b89e137cb977bf88df7f03f142e91a7
+size 171
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0163_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0163_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e275c756b8739eac357261c4c5b0a2d15817df3a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0163_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07e17acb0e86c57f23c6711b70617ea07c28f9a78e490e454d48f9f96d9d9432
+size 240
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0207_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0207_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..68c9dff63558ca64c3870d60a26aac3a4f212328
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0207_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9826f13493d931d1c69b12dd827a50b6dcc12e501a6fc8204fa343f8cd0ef57
+size 173
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0214_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0214_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5ed3ce581600feec61c201ca4a70566f57bdb780
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0214_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9af077335f1324f5c574b9e107a9e6c2d972ea217e20af8cd122aafff4d35458
+size 240
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0229_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0229_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..30f280105f5ed823e2b8d114e56e21436363e2e8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0229_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cec72bb17d1ddd3402330b8287bdbf1169a992b39847734aeae1ce51b131f2df
+size 245
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0270_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0270_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..31c3ad049782a27c18489a665f58c20bf6d32e14
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0270_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec19530f0b0dd7e736dc2843bee07e82336009db36efc282fcbba92e5ce943f1
+size 226
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0320_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0320_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..77ddd9afdeda648398f48d94965e98f50a21c000
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0320_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:029df6f067de6d0052fb5dbcde7688744dfbf743f825780a7543e769a53aa702
+size 226
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0340_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0340_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..90fbf3fc21f6dccb1a3d144f9a804617395f2f02
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0340_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e29713b570fe19a58c48f0b8f681d9968bebd32ffd1383c7ca52867cbf947180
+size 441
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0357_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0357_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2d0b2def2f802018ac94d1b703320e5e0ba1fa7b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0357_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adc4c944a7905f975196abd891811b3bfed0779593bed675d0f45210dc1398f1
+size 238
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0358_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0358_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5ddd94a196d1d7ddc52a13a8e28e02191154d09f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0358_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e87942baed5954d1bcdd06fd3bbd7f4fd7fe0ae21117b625eebb3822ee51d333
+size 267
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0412_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0412_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..f598c6ef2fcef596ff35f3d32b02568af9176f93
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions/BUSI_0412_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46cc858bcff3b6dcb0d1a0969995bbb6752c206e390d97b568386305a15fdd21
+size 590
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..a8ceaf9b92c52d0f8e2342d95a4bff9bc6c762d0
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9426cc661678f5de81c0dc0d94006c46483dab2ea723064825296268b2b56d63
+size 249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..3346c2bdc1b355fdb26ca150b7298f9c21d72752
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3161d6a68b77b683a0def527cdc0fa3dfc243f3f3fecabac3049c450254bc5da
+size 154
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..8a5aa464cefde1d301ad2fd5da42ee12080992c7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a0880204f8cf82a7704f664746e2c5e6b37882ace8400be8a4295a846f0c2bf
+size 228
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..d141c1939d430dc597b1660ebbfb3f6c65bd8fa3
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92696f7dd3c51e4c1b6b7633e727c5dd2e2a2e5080059d841ed5d01d61a4e5ab
+size 388
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..09a10d3b433c3376c5346570a57ae3fa5641b93a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5b95525f820862084cf69019f260cb2172df4f4157bb70c409cec51285a4d04
+size 234
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..293b01c2656900ea6defd9d4090a6a6a84c9b57f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8eedd27c9b5cc9189e68475a853f157d3411297c6f953c5bccc2b8cd6ac99c9
+size 160
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..c60e04692fe07006eca2980507f021c6703215fc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2ff4e8455256464d183c57b818819fc034478c8bc529d546b69f7830d0323ff
+size 429
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7d00f7b63d54ed3e118cdae3cbc09ba2c2531473
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b13db6fc76bdb14a2f7463480d0dbb4810e57804e4dc6a2ba28b8b22b40f1f1
+size 239
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..38b9b7b9c4a958e4d1b492505a045fda32812c06
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32a087d3429030c9b949f8152583da019677af6dadc386fa0076b62c14bf7140
+size 284
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..73751bfe954e3c9c3dc5ffef3e4392b76be14f6b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02327b0442be6160ea99ac2d37f38900d13a04e07f08acf0bd0186e86c8e3484
+size 179
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..cbfd06497f20b23d30acb9e6368c6bf05cb5b92d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b30a7122e8f2a7e841920fccd82b1fb68d3020cbe3b1b7a3dfd310de1f93614
+size 478
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..40d4d25339d00f6a31bad491305063986855522c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d34cac8a6a9c3653ac5d1752225824af4dc002bb01030defceaacf83d39fa58
+size 462
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..66e58ca06203965f066215f646595c74c2b63f64
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:483e7be73f46a609ddcd9c822cb609aa37140fd613208402003d5d03f60fc2c4
+size 341
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..aad8d74b5bcb9ba88a8253edb03a02fdce36f67f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bfde6123593700d180c9c702438d688e69ed9b74b9f755baf3763419cda4f8bc
+size 470
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..5123ff2b4d0f9df3e775de02fac695fb3eb93138
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d34642634716a226ae9ddf9e379097c9277c026f5472dede79083bf2912b48e6
+size 276
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..1fe024dbf742109ac610aa81fff8bd74937835fa
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1e793bd38704745438c9602de4448e1a9b70dcca88dfeb121b949827c50f1dc
+size 225
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..2aadfb0bbf4389201020734a99d2fb95877e189f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:896e84852f46d4ec1e9522be519d083fd7072cb55814bdf2e23915fa1b1f2eac
+size 417
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5e918ad202598aa39b0f1262d36bff36064e219
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e332790a51f9f2a1b2c11efec328d9adaf84837e37b9beea784f53373fc3f05
+size 167
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..80c3b5b1d83f59ffb2f4579146758c3d8184ff0a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2e1029b628a0749ae33367c14167e9427cdb9eefa34098eeb1e1a011d327c31
+size 347
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..757192d91dd1864410791d9a81aa643734eb2b02
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e931aa34f15c1323e756f0a39af5e25c2a407349a82ac4b573dea0bfe542495c
+size 231
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..89cc540bc4d7e249214afbb6753ee4bde7134442
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed021b5c7a50049c9bd395cb4f888089e26e8e116cfff6253178eb2f3bd840ff
+size 267
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..16d2890e9279609839895ccb8bf6caeac622f864
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:978abdabb5c77a1d17860ba3868722e8885d9910f2bcf6b04fb2b7120d22e80f
+size 277
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ca105ea94a544f7df85add48e758e43b4d7142d2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18b126208e18c3d6e1bda0aac136294596b53b60591d758c5c1ec141113b8d3f
+size 155
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ac44c7f28ffd33122080692cd579b5e2d09aed7b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8aedb332d1612d28a439fcfc91322b90069dbecf3de8b828af2dcf262552fd99
+size 250
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7a0c90d460cc07550e6aca717a2d41f16806edf1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76fd97753b5c132888963b963a4f241ebd964657fea41ab4cd94c9a3cebe6644
+size 403
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..66fee24dd34ada972af7fd1245ededcdfdcf1d6c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24e85efeca626c6be9817be1700b94e00bddeebc648e3547b7f79d3e95a9420d
+size 234
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..6dd893a9eea65d6e6d7eaaca76192dd52fd1ff0d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19b3ace03eeca1c7ba52042339ecd966858a8ba64f3f8b21b61221c20222ce9a
+size 561
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..61b500f90ef08bc00c6a58d96ce5499141f6d7a8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a9c81b2e6820ed0f2175df74525abe3e0e3b8d147b5066dd90afa6f45d918ad
+size 258
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8a82b0377415948d902a4a359b52a8f7d4fd875a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e2ab720f0658f45ddfa8ce4fd79e0edfb762191ea2209cfc7fc8c58e76c7d5c
+size 532
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ed41284dea80e04c5abd602ae1b7111bebcfda88
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5f73c87e43a20b156c51a4e142c117548457424c7d799d11a07e842f83ebc9b
+size 528
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5107c180d63e93f4078e4024c5bd5f58b51ce0ac
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6599ed46ff3ff41a11b81164b17232e347ab54e9a11dfb6053fa94a01ccff529
+size 153
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9bdde12c989a4ffec7342eea23b4444578920e18
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbb84e7306b67f25b91682dd965abe6cadd5d3cfed7c09a63c59e2815f132a7c
+size 169
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..df16dd14dee48fe7dee2b400c33ff51687f57db8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6bc86fce519a6388d8a1c74ddf8575aa357fd9c106660d95690e3cab767ee76
+size 468
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..fd593e37483f041ca639a9de39ed4bc4a19f655f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2da2f9c51eff475f2360a269a06ba7f11afeb67ef682b8ad1a71bdfa7ed447c
+size 242
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..a27b1ad2eee196cd887ed2c73f237735bab1856f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9567642b2d5b6da43b381bacaeef68fa7be5391a21e7bb2f5c5fb6dbd02282a8
+size 509
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..05bac17012540610846b2e079a4e9da445f8f232
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:215ed90565dafef246af1ffbfdeae56eb2bb70c648e0e4330292a93b2a70da3d
+size 278
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..f671050d2c92694a8a20a622c55bf3f4deafd9fd
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55dd07385146ec2bf170a045725fc1c6bb04a37b023661f92e734e10c50d171f
+size 444
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..a4a0fe803bdb5d61789d7c8c155311383fbf5102
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8731a8a0ef004eca8d7f14560ee8244922d510f43a991353c3be1f1a071118cd
+size 363
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6381a884f79d714225a22ae4c19c932d700e39c2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb279e390755e766c39abc7137635cf0af3042988145af3f13758e02d9e07472
+size 290
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..d4127433f995f8fb16db893b1f62875798c3d9ae
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef9671c44dcfc195071db570af364aabf7ca9dbb1858051f2bcbe3141f2ce0f0
+size 143
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ae41b10b2171e78368132a4e38bac2f554c8b2ff
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ba6c754ba9585b6516445edd0187e2319f090b60a43f9ac534081d9bf159cff
+size 179
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0679923fbc338d628ff674d9e9c900a740619782
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7f23c7026e9abf303dd9a3678295b0ca8e48887e8cf598bc6f65fc5d1c61244f
+size 158
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..b8da802c7eaa03ec4d3af0b94f49df7f9a4cd2d7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9690f4e10b0eaa6a3d091a941e133dcfc3e3e6aaffbe2de92f8dfcaa92984ef
+size 237
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..cdb67109ddc52026697f83e5d9e356f1cbc14bd2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d14a06c43c3ca72ddfe74c465d69d5f08892b41712e684ee39989f798539e4cb
+size 366
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..fe3dcd96136ecb28661c614381324145d320a1a3
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b54935ee1c4b14492261434e6dcd20129684e3b7cc954335befdf757388b40d4
+size 245
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..78f3ca5cbfe12734ea4bd99b4b71de6d9cd4a40f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06089907e3f8b562e14df1f01e923c0c90dfebc2ec5433aa32d4025d97815fab
+size 207
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..452e12692ae04f608c985e3489915f4a98d0de64
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd227318cdba6926f244a9a40f4259d7da0d4a142a1a8351bf6147f29b1ba5e7
+size 220
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5cbd0401c0f98bc373f5885969224ff71810f8cc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:903572f36154f4377bfd01a33baf14bc1a5d693f5edafd3b9731a95324118aca
+size 278
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2a3e99564d60eabd764f089b9a869f630aa096d5
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9aebc44372e6de5754a8b75c67c73edc6800894ce463e788f1b1259c0bcb9792
+size 361
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..3ed2ce2129ba65fefc5804c98f8d0f55dc0330b0
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:566b5bba9137017515a552eeaff0a6c1cd23b7760e2a9b0cad350efc21d2e91c
+size 206
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..93549719822dbdce2ca03ed215dbfc176c9f8fb9
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:184afc249cb9856374cae2149e383f185487ff686a61782e9501f7d89b34eaeb
+size 365
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e8dcb0402d10bce3198350f7bf694b6483457b8d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9f53c44adcd866ed3647d76a0982042aa47d2dc395547148704a0a5d2bbb0b7
+size 402
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..24212fdb41be7dd336940e91ed986007f38e3e0f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1d4cd8338941c1eab73c0ca9b9d649ab590e5d38177a21ab5098af4ee07aed3
+size 225
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..188d2a8e4a62524a8fc14bb40c4ed3e0cf35800a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7f3a70ed23b97ec9f5506b6ab1bd77943e62955e854c8f14026a34bfc77bd115
+size 335
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..091f6ec01995ecf71dc9a5cf449d9d1b0da71e4d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9aa00fdc08300a1c9ad12d1131e3a89f964796cb1180c0728d3d1a44ebf9b95
+size 439
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e2ccbb0de6cbd28cc9e69cfbeefbe72ff9522fe6
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8c0890d64150ed508cbc5842bc4b1c9f94b38c6344d5d0e84d1729553e0d95dc
+size 332
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6b3b3f26dbd4afd6a8156edc97247fcc5e77c5b9
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab15fef5720456489ca7d3d80ad92fe8e78e0e1699620949ab212cacd923b0b3
+size 238
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8c482f05a50d62ca33c1eac179e60f090a2aade4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf66d6a85a828ca333d408f0d5e0262a9fdfba8be66ded100d903bc0a8ae459e
+size 272
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..392e8f610b7c4cd71a4f80e4b1ab83664578fc72
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f1d05b2f9560083d1bfb77c2d532b9fcfef3e0581395374088c68d24dc2c780
+size 280
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..1ffc94daf216bb94f6d6ad31edfe8f27c4c5a09b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6bdccda7630b459f996d4a685d3f767de7c0fedf1ac0a4290be452aa8ae062a
+size 301
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..91ca423bd8f31187c86c13b2b14c7f7e206b03e6
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e17b309efde6914583402b74f6b5470fc60c65382eca9caa4ad9de8324e0c29
+size 356
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2221afb289e91de5f61d184a837e93fe6a3c7340
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b24d4e472656a6a7ef1306dd5e5a2bf3916c57aebb2c37a530725726f9a877c4
+size 339
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4fb08b8de911c688538a25abc9223b8f0f7db607
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:020db7cfffb26ee90b7af05c20b53879f494e56ff9ea5d9289f87be40e391f11
+size 346
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c809f71dd97d5c0389eca5100141b09775e03327
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:529b3a6384cb04b4447b69b39975dd7d9d8ce1202422bc2331f5ab8a12207293
+size 592
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..b9637f08f45caa41dd717d1e5f874d76245fdfa8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e13277e3ffb15f1709de07269fb982f777c75542389dc288e33bc0a6f45e4ab
+size 645
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_panel.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_panel.png
new file mode 100644
index 0000000000000000000000000000000000000000..ea799de685855b357bdf5469c8554210beb62bdc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_panel.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fe994f1c8997c303b77b819ed85549b86e585c437fc934cd5b6252e24313ac7
+size 57852
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_prediction.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_prediction.png
new file mode 100644
index 0000000000000000000000000000000000000000..65181471c262c7392f5628ac908c968d18c9ae2b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=32/pct_50/strategy_3/smoke_test/smoke_prediction.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99ff0b3aac9b9f3f257ac04938f62d1817c6fdadd2c6a4040830dc3965598968
+size 95
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/best.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/best.pt
new file mode 100644
index 0000000000000000000000000000000000000000..f706dcf46e941d3f04d8a5bdf5e15b1a8bee8c38
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/best.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:23ed604cf17c4f3aa4954e038be288e7317a6f5d0417ecc3fc36967072ec2cd5
+size 117728661
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0010.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0010.pt
new file mode 100644
index 0000000000000000000000000000000000000000..bc641d9e3a942693aa7c2eb7962da8194434fe08
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0010.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b735f1c9e74d3ff3882fff883f101eea526f8b88ab1f8711979d6cf35e94a3e2
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0020.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0020.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fc804da6ca10e97800eca517f52b71e60703ca77
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0020.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:065a1045d59015f354b8320be372e4d82eb751c5cc710af6aad1bf7d5adfa4e3
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0030.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0030.pt
new file mode 100644
index 0000000000000000000000000000000000000000..efa3b3a1fdef792fbb0d4ff8c1d75b8c7bc3fa83
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0030.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c115cc76b300bb9d7413df462537b57761cbc188ccb1049b0dbc42e94bf4a8c1
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0040.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0040.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0e90319c6029ea01a3d5680f6305d9eec8066188
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0040.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53337fe66801a147b228074c50db33d68a0f608f9660efe968305003a3327665
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0050.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0050.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ae9361ec8eff71e3d909ccff88b748206b2e7613
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0050.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2dade9b8559560c5c1fd74682c282cbf6b3a9ffc42d7b929628cdafa449204f8
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0060.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0060.pt
new file mode 100644
index 0000000000000000000000000000000000000000..174d4e034ca011188a57f191356027a555694567
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0060.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f08ad0c6d17473f7c563c31f9071ee90a3cc3ffa49f8376ff5869b5cee2ce433
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0070.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0070.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c3e48af83e374ef471e2a22e2f22c05a133e7c77
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0070.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d715279b978f1e9ecc0a4522ed2f7899283f7bd80b095c19207eb5ad9f19a9a5
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0080.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0080.pt
new file mode 100644
index 0000000000000000000000000000000000000000..15ce1c24df71704ce5ea2b1f9233e1a3f2c781b4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0080.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5dfc808b6a1eeb9a7ea834cdde8657e93d1d1c424929d32d757501f841eae517
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0090.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0090.pt
new file mode 100644
index 0000000000000000000000000000000000000000..8134ef8fb3da29ccc1cb60371314d789cf26a0f0
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0090.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e3196c9b97da5ccf6e2ffd19070e8129420d7fa93e65a780d6652087def2ed9
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0100.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0100.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0068016db4e0a84c2ae033c7a4dfe689ac217b4c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/epoch_0100.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3d3e1245d75bc234edbbd5fb9d07be83a996f451c214e5b552ad40b37f6da50
+size 117785249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/latest.pt b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/latest.pt
new file mode 100644
index 0000000000000000000000000000000000000000..69c4d1ba954a72c02111c02978b710b05c3f9129
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/checkpoints/latest.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c26a1e311b4c2f3625939181ca0ba0cd92ebcbd8f8ddee854c3c6444f55da97
+size 117775321
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/alpha.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/alpha.png
new file mode 100644
index 0000000000000000000000000000000000000000..8b5e99114dc03ea529f56935147e0d7160017aea
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/alpha.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c6b7a875bd325d1a68350b2d4524321926bcba5360080d6f830346f8e60f9414
+size 34549
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/ce_loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/ce_loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..01fcc4deba328e846889c9f057a9496e4d2e11d7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/ce_loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cacfa77bf9ad4f1e8355c5d82ce78722299e04cbdd540a05ef9a35cd583f754
+size 30312
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice.png
new file mode 100644
index 0000000000000000000000000000000000000000..c24dc346f2afd4b6175cc8f98c41ddc56a8666af
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:41f806afa20aa724cdc167a33fa74d7149dca082a54008b781ad11b8f635142f
+size 27463
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice_loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice_loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..3d5400010a8eefaf45f9f4736f60c5ec6af315d2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/dice_loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6c40d897d10d74469ec8c826f60dd1b80f7dec47266af4a9c54c666774b4fe9c
+size 36414
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/entropy.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/entropy.png
new file mode 100644
index 0000000000000000000000000000000000000000..9275860511b4ce90a3ec82fb8eace322a21403c5
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/entropy.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:979a223fac6da6e49f72e35059a5b8a2af19f5bd0c4c0721c6324324da36f930
+size 38524
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/iou.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/iou.png
new file mode 100644
index 0000000000000000000000000000000000000000..71853a8bbfc73a187183c48cb6ff7fb513ceec62
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/iou.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b6eec1625a4918601bd7039d72efaa08168aad1a445eabe24c7ae812900918e
+size 27784
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/loss.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..d363d5f93167cc7cf3200a21b2aaf79b78c1fef1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/loss.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99cc046b074306dba4f8b12f9fa4e203eeeb8580e5263ac73f7e70e6eb7a4cff
+size 27375
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/lr.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/lr.png
new file mode 100644
index 0000000000000000000000000000000000000000..aa2110e8c4217cd419756830e6e2d5e0495d9663
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/lr.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e05aa15408f7355e397a268fd45876d979e55dc95e00b57e7751498b722d93d9
+size 42152
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/reward.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/reward.png
new file mode 100644
index 0000000000000000000000000000000000000000..ab6ceb11e9c00936d039757806bc42b6ef6d771c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/plots/reward.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1d385ab3ec6b54e20009b26eb4ad39767b42b911fc8666d4d393e20bbed21dc7
+size 35737
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0003_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0003_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..3eff8ebff82db0fc89ad0cf57904770efc1721c3
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0003_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32738f2ab03e0b0cf3b2284c0cf8e96a95cce07faea68d4c880f14bdf2d7f938
+size 233
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0007_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0007_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..c12ab62d0a060e5818dd1cb2d23a68ee405e5e42
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0007_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c031e166b4e53d5c9d850c27d0dcc0b8954b36c594f7f1e616ab352376f594b
+size 214
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0009_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0009_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..9ca9d7e5873c5b50c9bc3fe2de15b875e7971882
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0009_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7daa688c667c490a0042a00b62fb1dd6f493c72ec3bc24804225e9aa5088f72
+size 228
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0016_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0016_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..891d09ab29c69bc00c5f15d7413126005532a0f4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0016_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3da05bcc553cc614b88fae23a2eadce9be362dbbcb7d89ec7da41981fdcacdb
+size 335
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0020_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0020_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4e9af5e9ec7cae0e3815ce7a4c98af9be5293d14
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0020_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7094c5a60f7585367e73f31b791af7117767c1e608bc56dc7c2eda5495bcf11e
+size 233
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0021_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0021_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..f4ac6b20b7b17dc02255c0434d5d88535ab71ce1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0021_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40240750b2af26485e496496fd0d2722778d1585b0248d0a215a18e1e7957b50
+size 161
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0023_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0023_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..0e77f8b936edf9c2a6f71ea7a16e42d0d87e3408
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0023_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7033e0124a369ccabb056951678ea9d3d951876a2398776a7ec866de1b83c18b
+size 460
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0039_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0039_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a64438bdfdece548584544143567d7ed9204140d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0039_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79a21ba91737c8cf115f7ff8c140445e928173ae9d6f36789753de0e409cbd49
+size 215
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0055_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0055_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..01a7a5a6f2e4d3e20fbe299f0f913020852b9f45
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0055_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:521ca7a963cb9c8a6bac4cccf44f215f69488f17cdebb9e873932b104cc4e336
+size 296
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0057_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0057_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..24a38b96126006be484435499bbb794abe95d785
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0057_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7179fce35aa1b22af326310eefa5099b040ad274ec950f467451a829335ccf13
+size 177
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0059_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0059_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..83d1c37435c9aed96362a511c1b2632c7c41a597
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0059_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4ee5422653969db802b0dddc3856f9575cf99ab1b9f6cc58a9d00bc5a632370
+size 468
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0062_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0062_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..f5a892b138d2d54bc2da839f0d0c7115a0c4ec24
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0062_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c38f15338e34dd3bcb4006bdbeacef42f4d8b1859dd849707b53ddf04956e11d
+size 423
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..73905329eb29fee6b007010f485f8630226e5f2d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7a8182fffb5084f31348925ca8281e7cf352487071a93661fb21847d7304ae7
+size 336
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ab7f01eeaf128f61df28b0fcc8a79807d329672c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0067_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:678c89ecd5a0b764c51503f1a6ee4d309d73a496dbb32517f4b3f2f46e13bd77
+size 303
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0073_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0073_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..80a23245f052df6cbc0a342eb66f8c57231d0b28
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0073_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b33fb081f69a4ae83661e0aa8f85bed28fe07b2cf07d3336500b20532bb3152
+size 276
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0075_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0075_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a506a7db4e2b80f830c7c0ad4a1ee79cda3da6e7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0075_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5e8fde7fa138dc26d60bc9562e37d319f33f4f8e6001c72668f9a49307b4e1b
+size 213
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0080_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0080_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..16735433dd8b5b03042e6259a191f7f08ff71ddb
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0080_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:621ed586f90eb529f53013a3d853b0b82c2909ff752e3a457eb5870acaac20e2
+size 383
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0095_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0095_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ce9d36e8cf4ee8d5b50b56c04d54b393dee79c3a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0095_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3585c48a8195946b5ed5d043423a1e6cbf9eb10e99d1c6571b1e615b592f2c53
+size 171
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0097_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0097_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7899df1e164ecda6ccbe6382e260dd1966f10168
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0097_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4286698b7109e392ef1658c86ee16d29ab19970f2e921aa68e52e6475c2666dd
+size 345
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9d09698b856136e30633f5dfe84544f8d131e58f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a6df3a7d6c7ae53109acee41e410de55f999e3a644c2f05c2a32acc8478f3f2
+size 230
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..a4308c43989a881bd34cb2f0669f1a285f916f20
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0099_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40039fe99633e5b62550cb8ebeaf8b952e2fc33b047b0ee625f00a7dfab6e402
+size 243
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0100_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0100_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..c58719e3dc849fe0121c1e2518133bb58dd28929
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0100_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c325fa11ea9c4211a689813fc60c6efc1af2e0ee4a082fa5ecea7ed80e7bb31b
+size 278
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0104_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0104_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9f548add17f48d38138ecb3c562f3214fc73ca3d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0104_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2280d965e5944072eadd152d9320ad369e4853298ec39d3a4b5b10bb831f7c15
+size 147
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0116_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0116_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..37f8df1049e53cefe34862a6e59425ae041013b2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0116_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d739365384f11c62fc3a280833102bdf1330fe33c69b94adadeedec1e21f8bcc
+size 309
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0119_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0119_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..98b828225bddfab279ec02d954a7f8efd5d9363e
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0119_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f49735e520c70c84e291d29c48a052e52dae93fd6f2352250b4772619713660
+size 433
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0121_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0121_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..3f422f018707a2b065b2d180f5ce3859288102ff
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0121_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b202afe6bcd63a997d23f8f6a9bf313324f4ecfa73c4348ab732fb74e4efee4
+size 214
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0129_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0129_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..4c39294aad291c16b555cbcb811f8fbfee0414dc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0129_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7be9a76f3aef11c0a5b41b91635add430f4a64082fbb4366bcc19df52b01541a
+size 514
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0130_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0130_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..80b3ab48e98da87900d9ac95f909bdbd3309e4a7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0130_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65d0f5b6a06c2159729cf0e4338c356b8d2511f5aa29840d6cb1c8cdfb8b3de7
+size 248
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0141_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0141_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9f0c00b0efccde251e8ab5aadb5034a1ca2480fd
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0141_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5b1ae64fd450e7c8500ec4e9d7d8f17c01576a27ec7ce22df4b4a68fc2d7cdb
+size 504
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0153_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0153_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..2958d943f80c8dce7026372bc4aa4100e407af05
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0153_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:954663a33e1555808d3c4c67e3624fc990f99dcd609f0ec078fa7b2f2cfe8b65
+size 503
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0154_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0154_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a3c4070ee4d3bf24c9879e3d2b8080c6910b63b1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0154_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79d3c6285f6772b3afafb6202fe5012163e6f644fae970cc921ee23fd971901d
+size 158
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0156_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0156_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..05134ca3fe3b4099ee584f976e7ea0600c870180
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0156_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d7c28dd89d38fd35dc12e2f566e5c39a284dabe6459ae05e5af9fec0bfa7cda4
+size 185
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0161_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0161_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..179989ef3a3822c2f150a5c6c70faba215b498ca
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0161_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9db80958c91fd09011c021652caddbf98167f7cb76b2d8dd8a6070a480ad0b1a
+size 407
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0163_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0163_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..3699a262dfdda6b7bfebb4758af8d0d43be242b6
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0163_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:645361c88a8f4eb8e0647c45136bcacf4dd79c0d6c2af598478a19e1700b8913
+size 249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0165_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0165_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..b588826eeb945379a801e86d4bc67fc74262873d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0165_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e182c33cdfe055b4623503b33ad64282df79f2acd13e49225abbededb764fd5
+size 476
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0184_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0184_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ee17e23ffe264b1f3af51ed3e1757e44ea2eb661
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0184_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d59b685f0986af6e73ab03f753da4d752ba41ee5494e6006b9d668f5537bc112
+size 265
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0189_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0189_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..d0ebac9dbb81b162d0626d36c3262d9430c896dc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0189_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9004e291abb3653fa791d863ddde81b8b21929390ffd29d16b39d3407af2571
+size 402
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0194_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0194_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7ef14f8750b4e8c6f50aa89fdd280602a34c487b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0194_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8350ab1830f487185096d33c403e70cf0e9df7dec907828060ab6ce89bcb487f
+size 306
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0198_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0198_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2653c2bbcd5a1adc87b3a0f3c3e56d1fcdf70b32
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0198_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:161b7da275e6ef579878507ede51ebf67574044c25892126df999c35f207690e
+size 283
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0202_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0202_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..69f6278df7256b691ed6b178809286404f06278a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0202_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2a751d6119fd4c7606bf897b6457b3cd34e5ee087f08d9d7831a2c25021a41e
+size 142
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0207_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0207_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..95b08763e33806fbcf401729966139aaa760e1e8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0207_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:42b852bd8801e104e9c84963686087525f3461cfe55a4ec39dbb65e7203ad5c5
+size 121
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0211_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0211_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..fec3f42d7fd411715ba1363598c5ee090828b9fb
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0211_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c87000113b36cc9ef755bd71a6da3a16fdc28f38b71923a19576e84d38886de8
+size 193
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0214_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0214_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..b3e90df75508d12858e06320bebca0f685c3ad9a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0214_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64e02321f08a953d2f8c6acf48247ecdea03ef99d4ccbcdb24451eca38dece09
+size 236
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0224_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0224_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ec20457b826645972eab3d9f9299e9029224500b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0224_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c62cd17c3acba68b02c17df4eab6015791d30de4260b18bceb4517ace4f94b8a
+size 402
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0229_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0229_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..f0e57e01a459df9f9016d01430446f806f5b1717
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0229_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:162ae723cefab8ef1c68b9246d5ea35b95d5669f203dbdb6c46c73182c6e7618
+size 228
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0251_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0251_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..1c90b48b6bf2a51b24945671254042b6504c1e08
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0251_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9571c67addbee649fcdc0b51b740be89919307185e7a767a2d7c7699ec3cb105
+size 188
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0270_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0270_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6b0050a5c41a0d7ae4ce671f4a0456281a4d8c2c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0270_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9b39596b8788f8bb9c8c9fab525c81ba2dadfdc452a1180f751c4e0ba3f18fcd
+size 182
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0285_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0285_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..90673d328ab916baedf9bd1834a4d73b36ec3d07
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0285_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:059a429da3cc44ddc2c0714931556b1c544ff324b0b3ec6520af57c7ca25e99e
+size 289
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0290_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0290_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..dbd80892b25d3d27b15b95b7ef2dd2795dcf01e6
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0290_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc70baa3dd264500d643a409437c1af8a035b7eb0e93f991f080725bd6b56360
+size 346
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0307_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0307_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9804ddf48a0098b52d19db065df55458f849238c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0307_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ed20d85b98d9a58b005ba772bff2fa7be809da45f026fd2d926a6b9d418a144
+size 198
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0311_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0311_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..59e7cc18667cf967a1d7927f83dd369edc1aee61
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0311_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:893e5d617cbff967f0a0fdc8eb60023c37f21bdc641e24a75f4c94ce5c814843
+size 245
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0316_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0316_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..739cb451c970e4b0c4bf058bd9ba95594fdba37a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0316_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1091ec419ea809c006eb5cd4e4cbb5e202a78b383323db134393990c28aa7db
+size 522
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0320_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0320_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..35d164a22db5c17e4b54220c12c8b4fff655d0c1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0320_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:097c204e564373cb89c1cb0a2232747ae52037414f9f0556d2badb38b0fc5d1d
+size 218
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0326_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0326_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..508d8e17590dd3abf7728c64b2951c6d28395d04
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0326_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be8faa3fd531867478b724cef947b26186f9c2f0daa13ac1d013e7ee01acce56
+size 322
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0340_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0340_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..032bb2ba55ecde0dffca5d508ac3a1071eb6dc65
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0340_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21b7636c58f6f0e2348814a6972db97c88c6e5b2a9eb5690098a50dfb489c89e
+size 426
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0354_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0354_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5a4f0fbfaad1e76c8203a4f113ddeb8637fdbdf0
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0354_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70ab28b34c5f0cccf24daa78d42c5b5007f66c09aae6eac83f0b095740dfa8f5
+size 321
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0357_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0357_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a3a649b71bf18d4ec1142d9eb6e44c082b236f82
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0357_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7e5f8a718d212cbae204a7bb97e23e291145cfbdd6378360db8cd28d3a7d715
+size 235
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0358_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0358_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..eca72aef20cb60474b379e3e0aedb1bea7821c4d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0358_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:017f460767b660be170a183602d31145f7dae02c17c9850d1d99800c2d83cfa3
+size 267
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0361_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0361_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9806598c34e5b61e53ee16a486e4e0a04fed81c4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0361_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08d28973bcb7213712455427161ccf1db3009b7893a951a077a6a6bc7ce7d3e6
+size 271
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0372_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0372_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e47f9f588e574e64fcb164388b9e94b9a8cf34c0
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0372_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:461228740b7ceda6af085e2450b8399095f2fd1d60e810297db4a22630aaeed9
+size 307
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0399_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0399_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0760c81dd13264b377d4bc030b5c0249d4f01ae4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0399_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7e363a8b9de8f0a163105d4daaf39af15815803ed6091bf1d2dda8369f9f427
+size 321
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0401_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0401_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0eb10a07b24615c9e998e47a45cb030d87afa7e8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0401_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7f47c547d1f4bd3cd4bded73a8d92ea642c7ca7e93128849d6b7e74ada5914b9
+size 340
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0411_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0411_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..5d05235152894bc19d78bc8974d33063e04e0930
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0411_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11a02189e1995b951b63239db9424a5f235ca3a71cb495c6f6e5b42c33b55120
+size 329
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0412_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0412_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..469bb4c6b2d79437330f4089ee2112987b7a95bd
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0412_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d93c2c178845dbb8649816f93168f3d0de65bb712df12f77f038719c8f04a1a7
+size 550
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0415_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0415_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..782664a43aa1da3fce843f384097879f53abad68
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions/BUSI_0415_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e383ff5cc4623f2d5072d11139fe35b78cb590e3ce298f2946a185b750a46c9c
+size 656
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..30d3d8f13374477999bd4da40cd654849e475164
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0003_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65813221ea95d48829faf64c58ca72f7c62c9be1e43cb626a637d8d66c99abc8
+size 235
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..95cf427d87fdcf63308b32c3b2983cf4189f4560
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0007_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3eb86744c85ee7ae9d200e12cd6bcc69a861a2c6d7e26a4d4440514860b5d498
+size 218
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..b33b40d1ea059fd5dadd05065406a53289cf239e
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0009_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:62b8be6fe7aea0b25e5fe48c0f7ce2d18e48e74a7193c387b40d40510b9637a7
+size 230
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..364b3e92ffa386f6b30503fc644bb6d3d7939470
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0016_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd440a8cc683983be73440908e66e79979f1c4bb4ef5f8c3fe0629a97e5d6ff8
+size 334
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..24205423211e16b3cdc4efc3dd201b958f96b28b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0020_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e9efc8c2ef98fe8d96aa6c8307ca74ac531566096c2b4797177c3029a2b8d14
+size 237
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..35b2aee0751e27acf7864cff13be45879f668fa9
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0021_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0627f72683ab47051fa2cfd8092b3a41e4ac612cfaff58a7c457de8ef4723d35
+size 161
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..8cdee47832b46a448f1b9d4df07146422b93c4f5
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0023_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6bb40608d8e50f7420bdb4d54bed038a607ae0565fbb1e183dd7798e4b05ec5a
+size 467
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..78cc676fb8cf94bd770e82fbf14ae09d03cee57d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0039_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d919f920f8cd93af3d8d17618e738ba3b35294e37f31178c4af15cb2c269615
+size 217
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..df381a67320ac4841da8e76d051491eaf963126d
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0055_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87ee3784c822b2b00bd5564a23228096e34a607e8adabb772a665afd49ff2c24
+size 296
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..43b676e1557819c7f7c37f31552f50ff80691d09
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0057_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:806c68a61334809e3a6369b19c9f87d0ea6747b0af3129dccdcf8c433af81ba5
+size 180
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..e7e80f611565b1e7f0eeec3ace90c820f287e9b9
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0059_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46a2e00c24e162392e6c8d6917392e777ecc124592360268b553346c9e44376a
+size 471
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..7931eafad494d97b64d78a3c818451e2ca61387b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0062_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02398e3b0cadcdd8a83641513ed10ec834c10c5e245d156b06427a55ca4c62ff
+size 429
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8db050e68c102818b90fe4116aa2d401c5227032
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a38135d25b7ee3401cc18452c4101f1ddc91c71b4e8c5b3c7ea7d729fa663d9b
+size 338
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..de448b4f8c431492bb6061c2615fd4081d59e0af
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0067_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95b5a356bf160c3b0afbdbf3e1321d6cd5933c91053a4d5fd83f3c73536b45b5
+size 308
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..e910cff8688a88f4b7e0e240470f3da5ab9a7475
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0073_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be868a5f5765cfbd58d15b91ab989c139cf828eab70a7157e805a2084f342286
+size 280
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..79ceb35f80543badcc3ee8270c3fa354a82be874
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0075_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb51f31ab002c16b1a89b3f34b75f18d801d5ea53c1023a78018d414d0be0af1
+size 214
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ab5c0e94e9cf86b13fc017a7ec5fad4a55341d71
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0080_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:499ac4db35192983ed854dfd855ffe648ed5b24ec4e7b251fbe49571af957634
+size 392
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..eed46dfa9c03882f09dd3dd4994c5a0ee587251a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0095_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e8da892ea30b77938c3a0bb9940ffac64139022e59bdd618598d48048715e11
+size 173
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..f93ea34ba7b68be56c98d9a5f1259a2408fb262b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0097_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73e2cdbd65f828201fcdb28ae9161379c7c15d74658fbe88287c0d2f3cdba80a
+size 343
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6709af10c508d8415ecbabfd1a8ad58442a7d2cf
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6bd071a5e726306a400aaa0d6ca5e54c196800cfc5bf8a5ab3e019fadd861745
+size 233
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..2b243dd88a431a0c880b44364b846f041efa1d5b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0099_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6581cd6ed694641b0b52778b6114a00b5f4bf58d60e2dbc9bef5ad06399d7de5
+size 244
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..836d1e8ed05e14ece99f4de8758f47f23a79bd45
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0100_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e19a36ce2649d0a8e10d748895b24333408374e7396ab4a9427cda9fb03b73b6
+size 279
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..a42cdf132630f52f352bc3c7cd4d58e61bf9ee83
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0104_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8988eac813b36d830a905025bb18a741d42b4ecd2643bf82d30ea1651fa4267
+size 148
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..893fbe6219011b8e3b646fc0e6fa107c6e1542ca
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0116_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c24f42791e6c6b97266cdd5273d5e8a3ef822c353361b23a491fdfd42de0f474
+size 305
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..66678a97e66e2429d8ffe849b46d94c57f2aa2df
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0119_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ac5d1e1b5832e6e2deb8d6a14b014d8c916127b2a69417a38bdc34d1cd595e2
+size 432
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8e8cb74f058872f02b96b042abf99a1d15de849e
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0121_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f22866650ef7c5075a657a14fb5771263c2408c3c5c57fa77b01583c80d356a9
+size 218
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..28e10a95b3a6ca26fd8b43ed5c486973afbced4c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0129_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a142b1d11302c8bdd1cb248b338edc52847d5f76aa2e64a62470f3f4c6502103
+size 516
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0b629c9c0b61ca28bce9670d955e322833450c95
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0130_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e24a555493392149485c9ecaafdc445c65962728894a1a941aa9cfd5afc0a06
+size 249
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..ce2d6da4ca270e9168d61142cd082e61c6d5a7ad
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0141_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a45afbb754ee1ad7b5b9ca77ec6adac5a00a59067b8cb28fbda90d6d54a80260
+size 499
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..189fb36982d9497766ed96a13e5f5c3ebafdd72c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0153_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f514ce077aab13948ce929f8bd78e6454d51a5086064be4bb43ced44d308628
+size 511
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2331d5fa60dc91c8d29bfe225b9f44f1f8c7d601
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0154_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57fb25b8c942e70462c7d17a92d3ab46117572fa43290f07d5e67d97b7af9690
+size 156
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..886c1adc7269d0afb58b970e8376ae53cc107eee
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0156_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5d6c650c9a30d317958ca3ce4e6b40cc2b5b682287b8daf002d0f870a2f19e0
+size 183
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..5d761a9f393d0aedb9e1e3771d46a1446b985bcb
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0161_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fde2e0d8bb3398fb1ecbec977618b65262cd0c54ae035bf694fb0e3e7299d1c7
+size 409
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7e4213a1a6fbd7c28dc165df5103ff2543f6acf8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0163_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be064429cc394c6c4aca580ddfe40dd6b848439ce64ec33eaf585785d0a4bd8a
+size 254
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ac28c3be23386fa06acc9554a9a53e3e859654f2
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0165_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e886b8cec4c664d7a8b816bcb17c0d3d54a04dfdec65170ae3b05a0e848bbbc
+size 476
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..981314690bede6e64c932870ce12c413fcb3e4cd
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0184_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90aa21392e3b76d20400a4218144ccb961af23fa7110a6e482f9ae8f64381256
+size 264
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..ca520df22c9de67efdcdf2c0e86662a5d5d78b8c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0189_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cac94d82c2dae530a63f8bc5f9a4c0488b5e1a317cc44aca05829e923240ec68
+size 395
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..736bf990be35757b2d224429d09581c39fd00d40
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0194_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dada2b041131cd0866e4f6e98446bdc3d553855f29429bde6724c59b815367a2
+size 302
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..aa5b82b749c8644eeb936ab58d49b9b270717b36
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0198_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0135e7e75435142d43dac4d2efc3ad95b2739a07b73a618a356ce1ece47915ad
+size 286
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..9a80837635f5b74bfb8a77138a5763fe3d2ad7a1
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0202_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07798fc048289545d04a03f5b869bd0c84f3413aa60a3c97a217e360111b76ba
+size 142
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png
new file mode 100644
index 0000000000000000000000000000000000000000..2ba520f98a7532a15e0f4dcd6383bd3846c68e4c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0207_M.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4ebda295a4eb0e88c118a89955e24eaec04838ca53d0a69d0331a37f8422c07
+size 123
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..570eea61a8ea625fbbc1648017da99492e90ede8
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0211_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c144bf9c8f98e6635547e6d48b1fe14a551eb615d0eccc894de9521b5f8a911e
+size 194
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..95c8cffe9f0afda96b0ac0ffd38b86fdab98245e
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0214_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2128bfde87319bf99e16cc328d55c771927c8948db4100439c220aa2e62d6872
+size 238
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c4175512d8a194855caa66dc72e549ca15c62146
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0224_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3f666ba078cbd8e6c63e46d5076daf20653487ed57b0cc8e307c41b21f9047a
+size 396
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..7b0a7e6924f2cc41152a6efe0a30fddfdd295522
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0229_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25300ee6999629f5f1bdb0f756e896092de0dfb054636e1e22254ac4a1d54e67
+size 229
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4585fcd5616e7f4bfa4d62d0191319ab321e00bc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0251_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aad4746bbfa1acc5fe9604c6d07f138c49286c90c0b3210590235a2c01b77a15
+size 188
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0d913f7ef30fc4050ad3a9f05e048fdda37cfe27
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0270_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5cde35c1bf568b0a107ae659d412100afbe00f1e445a9b34a1c0bbbaf3b07bf
+size 183
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e99321a93e71d5549ea56bda82cebd5679f21b58
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0285_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e98a25009d92122c5e06444e750f7ec3585449710007809fd340514b4897601c
+size 288
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..00ff0add73bd3c87959aab04bc719da0e816e37f
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0290_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cf14810c41b0baafae49c95cfd8da1bba652ca52a2e0ca01d4474a042912002
+size 346
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..e5854b4d03226b62433d9d6361b1c258ce7d8872
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0307_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79d87894c7b2b23c3e3afa64ab72d7c2bdb2b1b11fb0cf084fcc3ae8c5b88389
+size 198
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..bd314666dc730fe88746570a7844d6d6c0e22706
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0311_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9819fe74936c56e0ec6a28b0b376f095431d8276bf5514fab0514effc05d3f06
+size 242
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..c6ab22ccbd7f8bd5dcea1236ef924441638ee116
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0316_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2fe9a2132b2b721c9b80c3ef0fd42746f59fe573a8d4a6a23cce4a8121d6d71
+size 529
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..4e995578d848718b7f95a8956de7da2ede51cf99
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0320_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e45e6a6f66770b965a992cc84ecadfe6b4322f8246428f12b72cedaca0044163
+size 220
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..6cab4a079d5d1cfc5230980088f7207bb5651f77
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0326_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ebadc8844a4a81fde4637b72ca0070d4ce7d4588f330c44786574d3cc594671
+size 325
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..2ef529667710c989451d231170871cf2dfcbc975
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0340_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2887d0550f15f1823b311cdb9b32b5b11d66d49d7eef071172dcb7af10342a3
+size 419
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..dada5b770d72c69003b6e06a8c90c392dfd270a7
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0354_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a2f13bd36821c1bd15d76fc36d14608b12898085215f603ba916ff1d78e7db4
+size 329
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..bbff9eb803a9df2c1766e6dd0e45db7d59608d56
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0357_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f6b2e25b4154315c10f23947142e00f413e7de42fa00cfc3e6b42e5f2c0c8f3
+size 236
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..0d6a612bd21ef5f0d682c88818e7b31a602efe36
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0358_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5864f8c5853d597eb67c946437304445c3781822379b727ed80651de53acef23
+size 266
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8d217a005198059a04f2f2fc22b86e769bad708b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0361_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:23a5eb403ba9f086354182e3d72f3fd223571fdeb32e41101aefc8839ae9445b
+size 273
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..fcc4dfb5a3511005b5ad35dfe3e9ace3ea7d21c4
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0372_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9b8ff0b0d10340e0aa1a560b5e070323289c269bcbfa7bdfd2fadc9253f2ab7
+size 312
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..1c95e9490f7cfbf36cb934125f41e3f5b1951e69
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0399_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5de4eaa60bc21e1706f936cdcec905a34a5fee22e5c5391560dec640d8391d3
+size 327
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..94e6af7b8c9b860e0a1bac7f304589e87e23831a
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0401_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12cb49d5f3d01ed18dea4777fb44aae887e2c4bc5190e23a974edc167e57852e
+size 339
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..95ffa8b56d59f11465c0349ce3ac352a751f6f25
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0411_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7b516f133afc3beaf57a431a75df073b624316d38aa8eb8b353f62575c290c6
+size 323
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..65e70d46eed0fcedd5b326d8c8b3a3af0b773546
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0412_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec667004dc9e4d1e5c36cf193c0a90929bbde00db0d6edb32b1a4823b8b83112
+size 548
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png
new file mode 100644
index 0000000000000000000000000000000000000000..8a0bc27c9132807e9d6e05dc0179fb07df582f3c
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/final/predictions_255/BUSI_0415_B.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e94081b1f96a0a2dbd440d62a78e11acb50b9e2ec83917fad111a1accea0f9f
+size 646
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_panel.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_panel.png
new file mode 100644
index 0000000000000000000000000000000000000000..ea799de685855b357bdf5469c8554210beb62bdc
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_panel.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6fe994f1c8997c303b77b819ed85549b86e585c437fc934cd5b6252e24313ac7
+size 57852
diff --git a/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_prediction.png b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_prediction.png
new file mode 100644
index 0000000000000000000000000000000000000000..65181471c262c7392f5628ac908c968d18c9ae2b
--- /dev/null
+++ b/runs/EfficientNet_Strategy3_NewHyperParameters_RunPod_Proj=64_2actions/pct_50/strategy_3/smoke_test/smoke_prediction.png
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99ff0b3aac9b9f3f257ac04938f62d1817c6fdadd2c6a4040830dc3965598968
+size 95