diff --git a/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976 b/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976 new file mode 100644 index 0000000000000000000000000000000000000000..176dd7872b007e57e22de49689b89345d8737cb8 Binary files /dev/null and b/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976 differ diff --git a/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976.body b/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976.body new file mode 100644 index 0000000000000000000000000000000000000000..63a6dc1ee5b0403adb354280c37225b8f335dd5f Binary files /dev/null and b/.cache/pip/http-v2/0/3/b/7/e/03b7e31ef41a67b4d5d0212d71d27b02703ce649d01a9d3a622a0976.body differ diff --git a/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa b/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa new file mode 100644 index 0000000000000000000000000000000000000000..7c9898d124b6e31f50b935f492a0fb886c711bbd Binary files /dev/null and b/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa differ diff --git a/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa.body b/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa.body new file mode 100644 index 0000000000000000000000000000000000000000..c6943010343624a016ad876b704784caf52c7c73 Binary files /dev/null and b/.cache/pip/http-v2/0/4/4/7/c/0447ce2249fdae0f8cf52a43e01f215eacdf7c35d89a248bd4b4ebaa.body differ diff --git a/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d b/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d new file mode 100644 index 
0000000000000000000000000000000000000000..fab85998d6503552a6f38bc043d571df3aaa64a2 Binary files /dev/null and b/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d differ diff --git a/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d.body b/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d.body new file mode 100644 index 0000000000000000000000000000000000000000..763cab3c9eec91e6cdde63ebaf22ef0532366358 Binary files /dev/null and b/.cache/pip/http-v2/0/c/1/9/0/0c1902a50947e5344575b4ef11e0b41b63cc4e3e15eb945e6b0cd91d.body differ diff --git a/.cache/pip/http-v2/1/8/e/c/b/18ecb372789efe49bbe46617b1f5d32a2b0eac245d58cbf930116174 b/.cache/pip/http-v2/1/8/e/c/b/18ecb372789efe49bbe46617b1f5d32a2b0eac245d58cbf930116174 new file mode 100644 index 0000000000000000000000000000000000000000..520617e54f4645409fe5baba0b30460aec40f670 Binary files /dev/null and b/.cache/pip/http-v2/1/8/e/c/b/18ecb372789efe49bbe46617b1f5d32a2b0eac245d58cbf930116174 differ diff --git a/.cache/pip/http-v2/4/3/e/d/3/43ed3174ff4116cbf9838897907fc0c56f75caf2487539a636aa7716 b/.cache/pip/http-v2/4/3/e/d/3/43ed3174ff4116cbf9838897907fc0c56f75caf2487539a636aa7716 new file mode 100644 index 0000000000000000000000000000000000000000..bc36c5f4b244651c0f04214054e51d63ca4883a3 Binary files /dev/null and b/.cache/pip/http-v2/4/3/e/d/3/43ed3174ff4116cbf9838897907fc0c56f75caf2487539a636aa7716 differ diff --git a/.cache/pip/http-v2/4/a/5/b/6/4a5b6f0d76201de4cfd4884f5e95617a1848cf81c8357be4d12cbf6a.body b/.cache/pip/http-v2/4/a/5/b/6/4a5b6f0d76201de4cfd4884f5e95617a1848cf81c8357be4d12cbf6a.body new file mode 100644 index 0000000000000000000000000000000000000000..bac5c461eec53105b93996ac609f1a8b3025bcdb --- /dev/null +++ b/.cache/pip/http-v2/4/a/5/b/6/4a5b6f0d76201de4cfd4884f5e95617a1848cf81c8357be4d12cbf6a.body @@ -0,0 +1,86 @@ +Metadata-Version: 2.1 +Name: openpyxl +Version: 3.1.5 +Summary: A Python library to 
read/write Excel 2010 xlsx/xlsm files +Home-page: https://openpyxl.readthedocs.io +Author: See AUTHORS +Author-email: charlie.clark@clark-consulting.eu +License: MIT +Project-URL: Documentation, https://openpyxl.readthedocs.io/en/stable/ +Project-URL: Source, https://foss.heptapod.net/openpyxl/openpyxl +Project-URL: Tracker, https://foss.heptapod.net/openpyxl/openpyxl/-/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.8 +License-File: LICENCE.rst +Requires-Dist: et-xmlfile + +.. image:: https://coveralls.io/repos/bitbucket/openpyxl/openpyxl/badge.svg?branch=default + :target: https://coveralls.io/bitbucket/openpyxl/openpyxl?branch=default + :alt: coverage status + +Introduction +------------ + +openpyxl is a Python library to read/write Excel 2010 xlsx/xlsm/xltx/xltm files. + +It was born from lack of existing library to read/write natively from Python +the Office Open XML format. + +All kudos to the PHPExcel team as openpyxl was initially based on PHPExcel. + + +Security +-------- + +By default openpyxl does not guard against quadratic blowup or billion laughs +xml attacks. To guard against these attacks install defusedxml. 
+ +Mailing List +------------ + +The user list can be found on http://groups.google.com/group/openpyxl-users + + +Sample code:: + + from openpyxl import Workbook + wb = Workbook() + + # grab the active worksheet + ws = wb.active + + # Data can be assigned directly to cells + ws['A1'] = 42 + + # Rows can also be appended + ws.append([1, 2, 3]) + + # Python types will automatically be converted + import datetime + ws['A2'] = datetime.datetime.now() + + # Save the file + wb.save("sample.xlsx") + + +Documentation +------------- + +The documentation is at: https://openpyxl.readthedocs.io + +* installation methods +* code examples +* instructions for contributing + +Release notes: https://openpyxl.readthedocs.io/en/stable/changes.html diff --git a/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7 b/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7 new file mode 100644 index 0000000000000000000000000000000000000000..e6bedf871d39bf3ab81617d75f67590227a33a4c Binary files /dev/null and b/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7 differ diff --git a/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7.body b/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7.body new file mode 100644 index 0000000000000000000000000000000000000000..9f6bc96c6dffe66668d531c1ef45a023820fdc67 --- /dev/null +++ b/.cache/pip/http-v2/4/b/6/6/c/4b66ceb6c63be17cb2553c2d6a199ee95f971d93e3bb02611eaeeba7.body @@ -0,0 +1,480 @@ +Metadata-Version: 2.4 +Name: rich +Version: 14.3.3 +Summary: Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal +License: MIT +License-File: LICENSE +Author: Will McGugan +Author-email: willmcgugan@gmail.com +Requires-Python: >=3.8.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Framework :: 
IPython +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Typing :: Typed +Provides-Extra: jupyter +Requires-Dist: ipywidgets (>=7.5.1,<9) ; extra == "jupyter" +Requires-Dist: markdown-it-py (>=2.2.0) +Requires-Dist: pygments (>=2.13.0,<3.0.0) +Project-URL: Documentation, https://rich.readthedocs.io/en/latest/ +Project-URL: Homepage, https://github.com/Textualize/rich +Description-Content-Type: text/markdown + +[![Supported Python Versions](https://img.shields.io/pypi/pyversions/rich)](https://pypi.org/project/rich/) [![PyPI version](https://badge.fury.io/py/rich.svg)](https://badge.fury.io/py/rich) + +[![Downloads](https://pepy.tech/badge/rich/month)](https://pepy.tech/project/rich) +[![codecov](https://img.shields.io/codecov/c/github/Textualize/rich?label=codecov&logo=codecov)](https://codecov.io/gh/Textualize/rich) +[![Rich blog](https://img.shields.io/badge/blog-rich%20news-yellowgreen)](https://www.willmcgugan.com/tag/rich/) +[![Twitter Follow](https://img.shields.io/twitter/follow/willmcgugan.svg?style=social)](https://twitter.com/willmcgugan) + +![Logo](https://github.com/textualize/rich/raw/master/imgs/logo.svg) + +[English readme](https://github.com/textualize/rich/blob/master/README.md) + • [简体中文 readme](https://github.com/textualize/rich/blob/master/README.cn.md) + • [正體中文 readme](https://github.com/textualize/rich/blob/master/README.zh-tw.md) + • 
[Lengua española readme](https://github.com/textualize/rich/blob/master/README.es.md) + • [Deutsche readme](https://github.com/textualize/rich/blob/master/README.de.md) + • [Läs på svenska](https://github.com/textualize/rich/blob/master/README.sv.md) + • [日本語 readme](https://github.com/textualize/rich/blob/master/README.ja.md) + • [한국어 readme](https://github.com/textualize/rich/blob/master/README.kr.md) + • [Français readme](https://github.com/textualize/rich/blob/master/README.fr.md) + • [Schwizerdütsch readme](https://github.com/textualize/rich/blob/master/README.de-ch.md) + • [हिन्दी readme](https://github.com/textualize/rich/blob/master/README.hi.md) + • [Português brasileiro readme](https://github.com/textualize/rich/blob/master/README.pt-br.md) + • [Italian readme](https://github.com/textualize/rich/blob/master/README.it.md) + • [Русский readme](https://github.com/textualize/rich/blob/master/README.ru.md) + • [Indonesian readme](https://github.com/textualize/rich/blob/master/README.id.md) + • [فارسی readme](https://github.com/textualize/rich/blob/master/README.fa.md) + • [Türkçe readme](https://github.com/textualize/rich/blob/master/README.tr.md) + • [Polskie readme](https://github.com/textualize/rich/blob/master/README.pl.md) + + +Rich is a Python library for _rich_ text and beautiful formatting in the terminal. + +The [Rich API](https://rich.readthedocs.io/en/latest/) makes it easy to add color and style to terminal output. Rich can also render pretty tables, progress bars, markdown, syntax highlighted source code, tracebacks, and more — out of the box. + +![Features](https://github.com/textualize/rich/raw/master/imgs/features.png) + +For a video introduction to Rich see [calmcode.io](https://calmcode.io/rich/introduction.html) by [@fishnets88](https://twitter.com/fishnets88). + +See what [people are saying about Rich](https://www.willmcgugan.com/blog/pages/post/rich-tweets/). + +## Compatibility + +Rich works with Linux, macOS and Windows. 
True color / emoji works with new Windows Terminal, classic terminal is limited to 16 colors. Rich requires Python 3.8 or later. + +Rich works with [Jupyter notebooks](https://jupyter.org/) with no additional configuration required. + +## Installing + +Install with `pip` or your favorite PyPI package manager. + +```sh +python -m pip install rich +``` + +Run the following to test Rich output on your terminal: + +```sh +python -m rich +``` + +## Rich Print + +To effortlessly add rich output to your application, you can import the [rich print](https://rich.readthedocs.io/en/latest/introduction.html#quick-start) method, which has the same signature as the builtin Python function. Try this: + +```python +from rich import print + +print("Hello, [bold magenta]World[/bold magenta]!", ":vampire:", locals()) +``` + +![Hello World](https://github.com/textualize/rich/raw/master/imgs/print.png) + +## Rich REPL + +Rich can be installed in the Python REPL, so that any data structures will be pretty printed and highlighted. + +```python +>>> from rich import pretty +>>> pretty.install() +``` + +![REPL](https://github.com/textualize/rich/raw/master/imgs/repl.png) + +## Using the Console + +For more control over rich terminal content, import and construct a [Console](https://rich.readthedocs.io/en/latest/reference/console.html#rich.console.Console) object. + +```python +from rich.console import Console + +console = Console() +``` + +The Console object has a `print` method which has an intentionally similar interface to the builtin `print` function. Here's an example of use: + +```python +console.print("Hello", "World!") +``` + +As you might expect, this will print `"Hello World!"` to the terminal. Note that unlike the builtin `print` function, Rich will word-wrap your text to fit within the terminal width. + +There are a few ways of adding color and style to your output. You can set a style for the entire output by adding a `style` keyword argument. 
Here's an example: + +```python +console.print("Hello", "World!", style="bold red") +``` + +The output will be something like the following: + +![Hello World](https://github.com/textualize/rich/raw/master/imgs/hello_world.png) + +That's fine for styling a line of text at a time. For more finely grained styling, Rich renders a special markup which is similar in syntax to [bbcode](https://en.wikipedia.org/wiki/BBCode). Here's an example: + +```python +console.print("Where there is a [bold cyan]Will[/bold cyan] there [u]is[/u] a [i]way[/i].") +``` + +![Console Markup](https://github.com/textualize/rich/raw/master/imgs/where_there_is_a_will.png) + +You can use a Console object to generate sophisticated output with minimal effort. See the [Console API](https://rich.readthedocs.io/en/latest/console.html) docs for details. + +## Rich Inspect + +Rich has an [inspect](https://rich.readthedocs.io/en/latest/reference/init.html?highlight=inspect#rich.inspect) function which can produce a report on any Python object, such as class, instance, or builtin. + +```python +>>> my_list = ["foo", "bar"] +>>> from rich import inspect +>>> inspect(my_list, methods=True) +``` + +![Log](https://github.com/textualize/rich/raw/master/imgs/inspect.png) + +See the [inspect docs](https://rich.readthedocs.io/en/latest/reference/init.html#rich.inspect) for details. + +# Rich Library + +Rich contains a number of builtin _renderables_ you can use to create elegant output in your CLI and help you debug your code. + +Click the following headings for details: + +
+Log + +The Console object has a `log()` method which has a similar interface to `print()`, but also renders a column for the current time and the file and line which made the call. By default Rich will do syntax highlighting for Python structures and for repr strings. If you log a collection (i.e. a dict or a list) Rich will pretty print it so that it fits in the available space. Here's an example of some of these features. + +```python +from rich.console import Console +console = Console() + +test_data = [ + {"jsonrpc": "2.0", "method": "sum", "params": [None, 1, 2, 4, False, True], "id": "1",}, + {"jsonrpc": "2.0", "method": "notify_hello", "params": [7]}, + {"jsonrpc": "2.0", "method": "subtract", "params": [42, 23], "id": "2"}, +] + +def test_log(): + enabled = False + context = { + "foo": "bar", + } + movies = ["Deadpool", "Rise of the Skywalker"] + console.log("Hello from", console, "!") + console.log(test_data, log_locals=True) + + +test_log() +``` + +The above produces the following output: + +![Log](https://github.com/textualize/rich/raw/master/imgs/log.png) + +Note the `log_locals` argument, which outputs a table containing the local variables where the log method was called. + +The log method could be used for logging to the terminal for long running applications such as servers, but is also a very nice debugging aid. + +
+
+Logging Handler + +You can also use the builtin [Handler class](https://rich.readthedocs.io/en/latest/logging.html) to format and colorize output from Python's logging module. Here's an example of the output: + +![Logging](https://github.com/textualize/rich/raw/master/imgs/logging.png) + +
+ +
+Emoji + +To insert an emoji in to console output place the name between two colons. Here's an example: + +```python +>>> console.print(":smiley: :vampire: :pile_of_poo: :thumbs_up: :raccoon:") +😃 🧛 💩 👍 🦝 +``` + +Please use this feature wisely. + +
+ +
+Tables + +Rich can render flexible [tables](https://rich.readthedocs.io/en/latest/tables.html) with unicode box characters. There is a large variety of formatting options for borders, styles, cell alignment etc. + +![table movie](https://github.com/textualize/rich/raw/master/imgs/table_movie.gif) + +The animation above was generated with [table_movie.py](https://github.com/textualize/rich/blob/master/examples/table_movie.py) in the examples directory. + +Here's a simpler table example: + +```python +from rich.console import Console +from rich.table import Table + +console = Console() + +table = Table(show_header=True, header_style="bold magenta") +table.add_column("Date", style="dim", width=12) +table.add_column("Title") +table.add_column("Production Budget", justify="right") +table.add_column("Box Office", justify="right") +table.add_row( + "Dec 20, 2019", "Star Wars: The Rise of Skywalker", "$275,000,000", "$375,126,118" +) +table.add_row( + "May 25, 2018", + "[red]Solo[/red]: A Star Wars Story", + "$275,000,000", + "$393,151,347", +) +table.add_row( + "Dec 15, 2017", + "Star Wars Ep. VIII: The Last Jedi", + "$262,000,000", + "[bold]$1,332,539,889[/bold]", +) + +console.print(table) +``` + +This produces the following output: + +![table](https://github.com/textualize/rich/raw/master/imgs/table.png) + +Note that console markup is rendered in the same way as `print()` and `log()`. In fact, anything that is renderable by Rich may be included in the headers / rows (even other tables). + +The `Table` class is smart enough to resize columns to fit the available width of the terminal, wrapping text as required. Here's the same example, with the terminal made smaller than the table above: + +![table2](https://github.com/textualize/rich/raw/master/imgs/table2.png) + +
+ +
+Progress Bars + +Rich can render multiple flicker-free [progress](https://rich.readthedocs.io/en/latest/progress.html) bars to track long-running tasks. + +For basic usage, wrap any sequence in the `track` function and iterate over the result. Here's an example: + +```python +from rich.progress import track + +for step in track(range(100)): + do_step(step) +``` + +It's not much harder to add multiple progress bars. Here's an example taken from the docs: + +![progress](https://github.com/textualize/rich/raw/master/imgs/progress.gif) + +The columns may be configured to show any details you want. Built-in columns include percentage complete, file size, file speed, and time remaining. Here's another example showing a download in progress: + +![progress](https://github.com/textualize/rich/raw/master/imgs/downloader.gif) + +To try this out yourself, see [examples/downloader.py](https://github.com/textualize/rich/blob/master/examples/downloader.py) which can download multiple URLs simultaneously while displaying progress. + +
+ +
+Status + +For situations where it is hard to calculate progress, you can use the [status](https://rich.readthedocs.io/en/latest/reference/console.html#rich.console.Console.status) method which will display a 'spinner' animation and message. The animation won't prevent you from using the console as normal. Here's an example: + +```python +from time import sleep +from rich.console import Console + +console = Console() +tasks = [f"task {n}" for n in range(1, 11)] + +with console.status("[bold green]Working on tasks...") as status: + while tasks: + task = tasks.pop(0) + sleep(1) + console.log(f"{task} complete") +``` + +This generates the following output in the terminal. + +![status](https://github.com/textualize/rich/raw/master/imgs/status.gif) + +The spinner animations were borrowed from [cli-spinners](https://www.npmjs.com/package/cli-spinners). You can select a spinner by specifying the `spinner` parameter. Run the following command to see the available values: + +``` +python -m rich.spinner +``` + +The above command generates the following output in the terminal: + +![spinners](https://github.com/textualize/rich/raw/master/imgs/spinners.gif) + +
+ +
+Tree + +Rich can render a [tree](https://rich.readthedocs.io/en/latest/tree.html) with guide lines. A tree is ideal for displaying a file structure, or any other hierarchical data. + +The labels of the tree can be simple text or anything else Rich can render. Run the following for a demonstration: + +``` +python -m rich.tree +``` + +This generates the following output: + +![markdown](https://github.com/textualize/rich/raw/master/imgs/tree.png) + +See the [tree.py](https://github.com/textualize/rich/blob/master/examples/tree.py) example for a script that displays a tree view of any directory, similar to the linux `tree` command. + +
+ +
+Columns + +Rich can render content in neat [columns](https://rich.readthedocs.io/en/latest/columns.html) with equal or optimal width. Here's a very basic clone of the (MacOS / Linux) `ls` command which displays a directory listing in columns: + +```python +import os +import sys + +from rich import print +from rich.columns import Columns + +directory = os.listdir(sys.argv[1]) +print(Columns(directory)) +``` + +The following screenshot is the output from the [columns example](https://github.com/textualize/rich/blob/master/examples/columns.py) which displays data pulled from an API in columns: + +![columns](https://github.com/textualize/rich/raw/master/imgs/columns.png) + +
+ +
+Markdown + +Rich can render [markdown](https://rich.readthedocs.io/en/latest/markdown.html) and does a reasonable job of translating the formatting to the terminal. + +To render markdown import the `Markdown` class and construct it with a string containing markdown code. Then print it to the console. Here's an example: + +```python +from rich.console import Console +from rich.markdown import Markdown + +console = Console() +with open("README.md") as readme: + markdown = Markdown(readme.read()) +console.print(markdown) +``` + +This will produce output something like the following: + +![markdown](https://github.com/textualize/rich/raw/master/imgs/markdown.png) + +
+ +
+Syntax Highlighting + +Rich uses the [pygments](https://pygments.org/) library to implement [syntax highlighting](https://rich.readthedocs.io/en/latest/syntax.html). Usage is similar to rendering markdown; construct a `Syntax` object and print it to the console. Here's an example: + +```python +from rich.console import Console +from rich.syntax import Syntax + +my_code = ''' +def iter_first_last(values: Iterable[T]) -> Iterable[Tuple[bool, bool, T]]: + """Iterate and generate a tuple with a flag for first and last value.""" + iter_values = iter(values) + try: + previous_value = next(iter_values) + except StopIteration: + return + first = True + for value in iter_values: + yield first, False, previous_value + first = False + previous_value = value + yield first, True, previous_value +''' +syntax = Syntax(my_code, "python", theme="monokai", line_numbers=True) +console = Console() +console.print(syntax) +``` + +This will produce the following output: + +![syntax](https://github.com/textualize/rich/raw/master/imgs/syntax.png) + +
+ +
+Tracebacks + +Rich can render [beautiful tracebacks](https://rich.readthedocs.io/en/latest/traceback.html) which are easier to read and show more code than standard Python tracebacks. You can set Rich as the default traceback handler so all uncaught exceptions will be rendered by Rich. + +Here's what it looks like on OSX (similar on Linux): + +![traceback](https://github.com/textualize/rich/raw/master/imgs/traceback.png) + +
+ +All Rich renderables make use of the [Console Protocol](https://rich.readthedocs.io/en/latest/protocol.html), which you can also use to implement your own Rich content. + +# Rich CLI + + +See also [Rich CLI](https://github.com/textualize/rich-cli) for a command line application powered by Rich. Syntax highlight code, render markdown, display CSVs in tables, and more, directly from the command prompt. + + +![Rich CLI](https://raw.githubusercontent.com/Textualize/rich-cli/main/imgs/rich-cli-splash.jpg) + +# Textual + +See also Rich's sister project, [Textual](https://github.com/Textualize/textual), which you can use to build sophisticated User Interfaces in the terminal. + +![textual-splash](https://github.com/user-attachments/assets/4caeb77e-48c0-4cf7-b14d-c53ded855ffd) + +# Toad + +[Toad](https://github.com/batrachianai/toad) is a unified interface for agentic coding. Built with Rich and Textual. + +![toad](https://github.com/user-attachments/assets/6678b707-1aeb-420f-99ad-abfcd4356771) + diff --git a/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae b/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae new file mode 100644 index 0000000000000000000000000000000000000000..295cfc319694652a2d8e64a93b394b24b2ae06e2 Binary files /dev/null and b/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae differ diff --git a/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae.body b/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae.body new file mode 100644 index 0000000000000000000000000000000000000000..11dbfd65dd921cf7d30d1955e09f6c0db9c2d166 Binary files /dev/null and b/.cache/pip/http-v2/4/d/c/8/d/4dc8d567db4bc55c0684404c85a6b4572f87a99306be24cab65fbfae.body differ diff --git a/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b 
b/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b new file mode 100644 index 0000000000000000000000000000000000000000..9cafdcadc8f1a013c914b2fae3414af49489703d Binary files /dev/null and b/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b differ diff --git a/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b.body b/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b.body new file mode 100644 index 0000000000000000000000000000000000000000..52a7eecaf4c9067dd2c0053da9b2a5080a29d696 Binary files /dev/null and b/.cache/pip/http-v2/5/9/1/a/0/591a0a7ea47d81cffb332bf5e1460e560ce743822558c6f345314d4b.body differ diff --git a/.cache/pip/http-v2/5/d/0/b/4/5d0b459b3c4f2993a6258ed848f61436c9f7a018d2587028572840f2 b/.cache/pip/http-v2/5/d/0/b/4/5d0b459b3c4f2993a6258ed848f61436c9f7a018d2587028572840f2 new file mode 100644 index 0000000000000000000000000000000000000000..4d52f53b6de6d856ad5add6dc6efefd9923f554c Binary files /dev/null and b/.cache/pip/http-v2/5/d/0/b/4/5d0b459b3c4f2993a6258ed848f61436c9f7a018d2587028572840f2 differ diff --git a/.cache/pip/http-v2/6/1/6/5/a/6165a4393aa66a360946f71adc9147b3870cebf86b7878337a7535a9 b/.cache/pip/http-v2/6/1/6/5/a/6165a4393aa66a360946f71adc9147b3870cebf86b7878337a7535a9 new file mode 100644 index 0000000000000000000000000000000000000000..b9edfd02e0c15306012519e63cc7e64574c761f2 Binary files /dev/null and b/.cache/pip/http-v2/6/1/6/5/a/6165a4393aa66a360946f71adc9147b3870cebf86b7878337a7535a9 differ diff --git a/.cache/pip/http-v2/6/4/c/f/c/64cfc03e83f9fad4049b1d2a1d785c9273270a4ab9788b538f5054e3 b/.cache/pip/http-v2/6/4/c/f/c/64cfc03e83f9fad4049b1d2a1d785c9273270a4ab9788b538f5054e3 new file mode 100644 index 0000000000000000000000000000000000000000..634085574ed7a243b65662d5b9ce2b4f89e53bda Binary files /dev/null and 
b/.cache/pip/http-v2/6/4/c/f/c/64cfc03e83f9fad4049b1d2a1d785c9273270a4ab9788b538f5054e3 differ diff --git a/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4 b/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4 new file mode 100644 index 0000000000000000000000000000000000000000..ce391e30d546fe02415f2aaf3098f99f67bda33d Binary files /dev/null and b/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4 differ diff --git a/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4.body b/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4.body new file mode 100644 index 0000000000000000000000000000000000000000..e81ab4fa3c9649ef7bc6355d1042f0344c90d83b --- /dev/null +++ b/.cache/pip/http-v2/6/5/1/e/5/651e58859e8db8c99b9e7068d03984cfd4577518ff0e021c717afbf4.body @@ -0,0 +1,78 @@ +Metadata-Version: 2.1 +Name: cycler +Version: 0.12.1 +Summary: Composable style cycles +Author-email: Thomas A Caswell +License: Copyright (c) 2015, matplotlib project + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of the matplotlib project nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Project-URL: homepage, https://matplotlib.org/cycler/ +Project-URL: repository, https://github.com/matplotlib/cycler +Keywords: cycle kwargs +Classifier: License :: OSI Approved :: BSD License +Classifier: Development Status :: 4 - Beta +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: ipython ; extra == 'docs' +Requires-Dist: matplotlib ; extra == 'docs' +Requires-Dist: numpydoc ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest ; extra == 'tests' +Requires-Dist: pytest-cov ; extra == 'tests' +Requires-Dist: pytest-xdist ; extra == 'tests' + +|PyPi|_ |Conda|_ |Supported Python versions|_ |GitHub Actions|_ |Codecov|_ + +.. |PyPi| image:: https://img.shields.io/pypi/v/cycler.svg?style=flat +.. 
_PyPi: https://pypi.python.org/pypi/cycler + +.. |Conda| image:: https://img.shields.io/conda/v/conda-forge/cycler +.. _Conda: https://anaconda.org/conda-forge/cycler + +.. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/cycler.svg +.. _Supported Python versions: https://pypi.python.org/pypi/cycler + +.. |GitHub Actions| image:: https://github.com/matplotlib/cycler/actions/workflows/tests.yml/badge.svg +.. _GitHub Actions: https://github.com/matplotlib/cycler/actions + +.. |Codecov| image:: https://codecov.io/github/matplotlib/cycler/badge.svg?branch=main&service=github +.. _Codecov: https://codecov.io/github/matplotlib/cycler?branch=main + +cycler: composable cycles +========================= + +Docs: https://matplotlib.org/cycler/ diff --git a/.cache/pip/http-v2/6/6/e/c/7/66ec76a7b6ed4081044f5c7821af293b63c17bc2ac523ff93d5ca7d5 b/.cache/pip/http-v2/6/6/e/c/7/66ec76a7b6ed4081044f5c7821af293b63c17bc2ac523ff93d5ca7d5 new file mode 100644 index 0000000000000000000000000000000000000000..5885d6aabb77abaf2ed89bea928856fe82547561 Binary files /dev/null and b/.cache/pip/http-v2/6/6/e/c/7/66ec76a7b6ed4081044f5c7821af293b63c17bc2ac523ff93d5ca7d5 differ diff --git a/.cache/pip/http-v2/6/8/0/d/4/680d4dd80dc6a3d2df9b9478dfcc8e81e0e4f130e154a3268b98b877 b/.cache/pip/http-v2/6/8/0/d/4/680d4dd80dc6a3d2df9b9478dfcc8e81e0e4f130e154a3268b98b877 new file mode 100644 index 0000000000000000000000000000000000000000..e8c40e4ade005a1d494996bc2eb1f1d017a1700e Binary files /dev/null and b/.cache/pip/http-v2/6/8/0/d/4/680d4dd80dc6a3d2df9b9478dfcc8e81e0e4f130e154a3268b98b877 differ diff --git a/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb b/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb new file mode 100644 index 0000000000000000000000000000000000000000..8d00902ee18ff6fcfc1933924ec1b02eae2905ec Binary files /dev/null and 
b/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb differ diff --git a/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb.body b/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb.body new file mode 100644 index 0000000000000000000000000000000000000000..aecd455dde9e456c02e6cdcbb828fe6f96dc556d Binary files /dev/null and b/.cache/pip/http-v2/6/b/5/3/a/6b53a9dd0e4fce887cc28c1a921aa1befe8c1a82e6c213d2542d2acb.body differ diff --git a/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e b/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e new file mode 100644 index 0000000000000000000000000000000000000000..ffe8123f6a1e3bb7c7d8031dcd815a24892ed035 Binary files /dev/null and b/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e differ diff --git a/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e.body b/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e.body new file mode 100644 index 0000000000000000000000000000000000000000..42d2fbf63332b96c934214603322bcdaeac5639f --- /dev/null +++ b/.cache/pip/http-v2/6/b/8/1/e/6b81e7b491d69713c085c9f59d6c9162e9c07ca91d4f2bb5b3cd4b8e.body @@ -0,0 +1,386 @@ +Metadata-Version: 2.4 +Name: optuna +Version: 4.8.0 +Summary: A hyperparameter optimization framework +Author: Takuya Akiba +Project-URL: homepage, https://optuna.org/ +Project-URL: repository, https://github.com/optuna/optuna +Project-URL: documentation, https://optuna.readthedocs.io +Project-URL: bugtracker, https://github.com/optuna/optuna/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 
+Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE +License-File: LICENSE_THIRD_PARTY +Requires-Dist: alembic>=1.5.0 +Requires-Dist: colorlog +Requires-Dist: numpy +Requires-Dist: packaging>=20.0 +Requires-Dist: sqlalchemy>=1.4.2 +Requires-Dist: tqdm +Requires-Dist: PyYAML +Provides-Extra: checking +Requires-Dist: mypy; extra == "checking" +Requires-Dist: mypy_boto3_s3; extra == "checking" +Requires-Dist: ruff; extra == "checking" +Requires-Dist: scipy-stubs; python_version >= "3.10" and extra == "checking" +Requires-Dist: types-PyYAML; extra == "checking" +Requires-Dist: types-redis; extra == "checking" +Requires-Dist: types-setuptools; extra == "checking" +Requires-Dist: types-tqdm; extra == "checking" +Requires-Dist: typing_extensions>=3.10.0.0; extra == "checking" +Provides-Extra: document +Requires-Dist: ase; extra == "document" +Requires-Dist: cmaes>=0.12.0; extra == "document" +Requires-Dist: fvcore; extra == "document" +Requires-Dist: kaleido<0.4; extra == "document" +Requires-Dist: lightgbm; extra == "document" +Requires-Dist: matplotlib!=3.6.0; extra == "document" +Requires-Dist: pandas; extra == "document" +Requires-Dist: pillow; extra == "document" +Requires-Dist: plotly>=4.9.0; extra == "document" 
+Requires-Dist: scikit-learn; extra == "document" +Requires-Dist: sphinx; extra == "document" +Requires-Dist: sphinx-copybutton; extra == "document" +Requires-Dist: sphinx-gallery; extra == "document" +Requires-Dist: sphinx-notfound-page; extra == "document" +Requires-Dist: sphinx_rtd_theme>=1.2.0; extra == "document" +Requires-Dist: torch; extra == "document" +Requires-Dist: torchvision; extra == "document" +Provides-Extra: optional +Requires-Dist: boto3; extra == "optional" +Requires-Dist: cmaes>=0.12.0; extra == "optional" +Requires-Dist: google-cloud-storage; extra == "optional" +Requires-Dist: matplotlib!=3.6.0; extra == "optional" +Requires-Dist: pandas; extra == "optional" +Requires-Dist: plotly>=4.9.0; extra == "optional" +Requires-Dist: redis; extra == "optional" +Requires-Dist: scikit-learn>=0.24.2; extra == "optional" +Requires-Dist: scipy; extra == "optional" +Requires-Dist: torch; extra == "optional" +Requires-Dist: greenlet; extra == "optional" +Requires-Dist: grpcio; extra == "optional" +Requires-Dist: protobuf>=5.28.1; extra == "optional" +Provides-Extra: test +Requires-Dist: fakeredis[lua]; extra == "test" +Requires-Dist: kaleido<0.4; extra == "test" +Requires-Dist: moto; extra == "test" +Requires-Dist: pytest; extra == "test" +Requires-Dist: pytest-xdist; extra == "test" +Requires-Dist: scipy>=1.9.2; extra == "test" +Requires-Dist: torch; extra == "test" +Requires-Dist: greenlet; extra == "test" +Requires-Dist: grpcio; extra == "test" +Requires-Dist: protobuf>=5.28.1; extra == "test" +Dynamic: license-file + +
+ +# Optuna: A hyperparameter optimization framework + +[![Python](https://img.shields.io/badge/python-3.9%20%7C%203.10%20%7C%203.11%20%7C%203.12%20%7C%203.13%20%7C%203.14-blue)](https://www.python.org) +[![pypi](https://img.shields.io/pypi/v/optuna.svg)](https://pypi.python.org/pypi/optuna) +[![conda](https://img.shields.io/conda/vn/conda-forge/optuna.svg)](https://anaconda.org/conda-forge/optuna) +[![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/optuna/optuna) +[![Read the Docs](https://readthedocs.org/projects/optuna/badge/?version=stable)](https://optuna.readthedocs.io/en/stable/) + +:link: [**Website**](https://optuna.org/) +| :page_with_curl: [**Docs**](https://optuna.readthedocs.io/en/stable/) +| :gear: [**Install Guide**](https://optuna.readthedocs.io/en/stable/installation.html) +| :pencil: [**Tutorial**](https://optuna.readthedocs.io/en/stable/tutorial/index.html) +| :bulb: [**Examples**](https://github.com/optuna/optuna-examples) +| [**Twitter**](https://twitter.com/OptunaAutoML) +| [**LinkedIn**](https://www.linkedin.com/showcase/optuna/) +| [**Medium**](https://medium.com/optuna) + +*Optuna* is an automatic hyperparameter optimization software framework, particularly designed +for machine learning. It features an imperative, *define-by-run* style user API. Thanks to our +*define-by-run* API, the code written with Optuna enjoys high modularity, and the user of +Optuna can dynamically construct the search spaces for the hyperparameters. + +## :loudspeaker: News +Help us create the next version of Optuna! + +Optuna 5.0 Roadmap published for review. Please take a look at [the planned improvements to Optuna](https://medium.com/optuna/optuna-v5-roadmap-ac7d6935a878), and share your feedback in [the github issues](https://github.com/optuna/optuna/labels/v5). PR contributions also welcome! 
+ +Please take a few minutes to fill in [this survey](https://forms.gle/wVwLCQ9g6st6AXuq9), and let us know how you use Optuna now and what improvements you'd like.🤔 +All questions are optional. 🙇‍♂️ + + +* **Jan 19, 2026**: Optuna 4.7.0 is out! Check out [the release note](https://github.com/optuna/optuna/releases/tag/v4.7.0) for details. +* **Nov 10, 2025**: A new article [Announcing Optuna 4.6](https://medium.com/optuna/announcing-optuna-4-6-a9e82183ab07) has been published. +* **Oct 28, 2025**: A new article [AutoSampler: Full Support for Multi-Objective & Constrained Optimization](https://medium.com/optuna/autosampler-full-support-for-multi-objective-constrained-optimization-c1c4fc957ba2) has been published. +* **Sep 22, 2025**: A new article [[Optuna v4.5] Gaussian Process-Based Sampler (GPSampler) Can Now Perform Constrained Multi-Objective Optimization](https://medium.com/optuna/optuna-v4-5-81e78d8e077a) has been published. +* **Jun 16, 2025**: Optuna 4.4.0 has been released! Check out [the release blog](https://medium.com/optuna/announcing-optuna-4-4-ece661493126). +* **May 26, 2025**: Optuna 5.0 roadmap has been published! See [the blog](https://medium.com/optuna/optuna-v5-roadmap-ac7d6935a878) for more details. + +## :fire: Key Features + +Optuna has modern functionalities as follows: + +- [Lightweight, versatile, and platform agnostic architecture](https://optuna.readthedocs.io/en/stable/tutorial/10_key_features/001_first.html) + - Handle a wide variety of tasks with a simple installation that has few requirements. +- [Pythonic search spaces](https://optuna.readthedocs.io/en/stable/tutorial/10_key_features/002_configurations.html) + - Define search spaces using familiar Python syntax including conditionals and loops. 
+- [Efficient optimization algorithms](https://optuna.readthedocs.io/en/stable/tutorial/10_key_features/003_efficient_optimization_algorithms.html) + - Adopt state-of-the-art algorithms for sampling hyperparameters and efficiently pruning unpromising trials. +- [Easy parallelization](https://optuna.readthedocs.io/en/stable/tutorial/10_key_features/004_distributed.html) + - Scale studies to tens or hundreds of workers with little or no changes to the code. +- [Quick visualization](https://optuna.readthedocs.io/en/stable/tutorial/10_key_features/005_visualization.html) + - Inspect optimization histories from a variety of plotting functions. + + +## Basic Concepts + +We use the terms *study* and *trial* as follows: + +- Study: optimization based on an objective function +- Trial: a single execution of the objective function + +Please refer to the sample code below. The goal of a *study* is to find out the optimal set of +hyperparameter values (e.g., `regressor` and `svr_c`) through multiple *trials* (e.g., +`n_trials=100`). Optuna is a framework designed for automation and acceleration of +optimization *studies*. + +
+Sample code with scikit-learn + +[![Open in Colab](https://colab.research.google.com/assets/colab-badge.svg)](http://colab.research.google.com/github/optuna/optuna-examples/blob/main/quickstart.ipynb) + +```python +import optuna +import sklearn + + +# Define an objective function to be minimized. +def objective(trial): + + # Invoke suggest methods of a Trial object to generate hyperparameters. + regressor_name = trial.suggest_categorical("regressor", ["SVR", "RandomForest"]) + if regressor_name == "SVR": + svr_c = trial.suggest_float("svr_c", 1e-10, 1e10, log=True) + regressor_obj = sklearn.svm.SVR(C=svr_c) + else: + rf_max_depth = trial.suggest_int("rf_max_depth", 2, 32) + regressor_obj = sklearn.ensemble.RandomForestRegressor(max_depth=rf_max_depth) + + X, y = sklearn.datasets.fetch_california_housing(return_X_y=True) + X_train, X_val, y_train, y_val = sklearn.model_selection.train_test_split(X, y, random_state=0) + + regressor_obj.fit(X_train, y_train) + y_pred = regressor_obj.predict(X_val) + + error = sklearn.metrics.mean_squared_error(y_val, y_pred) + + return error # An objective value linked with the Trial object. + + +study = optuna.create_study() # Create a new study. +study.optimize(objective, n_trials=100) # Invoke optimization of the objective function. +``` +
+ +> [!NOTE] +> More examples can be found in [optuna/optuna-examples](https://github.com/optuna/optuna-examples). +> +> The examples cover diverse problem setups such as multi-objective optimization, constrained optimization, pruning, and distributed optimization. + +## Installation + +Optuna is available at [the Python Package Index](https://pypi.org/project/optuna/) and on [Anaconda Cloud](https://anaconda.org/conda-forge/optuna). + +```bash +# PyPI +$ pip install optuna +``` + +```bash +# Anaconda Cloud +$ conda install -c conda-forge optuna +``` + +> [!IMPORTANT] +> Optuna supports Python 3.9 or newer. +> +> Also, we provide Optuna docker images on [DockerHub](https://hub.docker.com/r/optuna/optuna). + +## Integrations + +Optuna has integration features with various third-party libraries. Integrations can be found in [optuna/optuna-integration](https://github.com/optuna/optuna-integration) and the document is available [here](https://optuna-integration.readthedocs.io/en/stable/index.html). + +
+Supported integration libraries + +* [Catboost](https://github.com/optuna/optuna-examples/tree/main/catboost/catboost_pruning.py) +* [Dask](https://github.com/optuna/optuna-examples/tree/main/dask/dask_simple.py) +* [fastai](https://github.com/optuna/optuna-examples/tree/main/fastai/fastai_simple.py) +* [Keras](https://github.com/optuna/optuna-examples/tree/main/keras/keras_integration.py) +* [LightGBM](https://github.com/optuna/optuna-examples/tree/main/lightgbm/lightgbm_integration.py) +* [MLflow](https://github.com/optuna/optuna-examples/tree/main/mlflow/keras_mlflow.py) +* [PyTorch](https://github.com/optuna/optuna-examples/tree/main/pytorch/pytorch_simple.py) +* [PyTorch Ignite](https://github.com/optuna/optuna-examples/tree/main/pytorch/pytorch_ignite_simple.py) +* [PyTorch Lightning](https://github.com/optuna/optuna-examples/tree/main/pytorch/pytorch_lightning_simple.py) +* [TensorBoard](https://github.com/optuna/optuna-examples/tree/main/tensorboard/tensorboard_simple.py) +* [TensorFlow](https://github.com/optuna/optuna-examples/tree/main/tensorflow/tensorflow_estimator_integration.py) +* [tf.keras](https://github.com/optuna/optuna-examples/tree/main/tfkeras/tfkeras_integration.py) +* [Weights & Biases](https://github.com/optuna/optuna-examples/tree/main/wandb/wandb_integration.py) +* [XGBoost](https://github.com/optuna/optuna-examples/tree/main/xgboost/xgboost_integration.py) +
+ +## Web Dashboard + +[Optuna Dashboard](https://github.com/optuna/optuna-dashboard) is a real-time web dashboard for Optuna. +You can check the optimization history, hyperparameter importance, etc. in graphs and tables. +You don't need to create a Python script to call [Optuna's visualization](https://optuna.readthedocs.io/en/stable/reference/visualization/index.html) functions. +Feature requests and bug reports are welcome! + +![optuna-dashboard](https://user-images.githubusercontent.com/5564044/204975098-95c2cb8c-0fb5-4388-abc4-da32f56cb4e5.gif) + +`optuna-dashboard` can be installed via pip: + +```shell +$ pip install optuna-dashboard +``` + +> [!TIP] +> Please check out the convenience of Optuna Dashboard using the sample code below. + +
+Sample code to launch Optuna Dashboard + +Save the following code as `optimize_toy.py`. + +```python +import optuna + + +def objective(trial): + x1 = trial.suggest_float("x1", -100, 100) + x2 = trial.suggest_float("x2", -100, 100) + return x1**2 + 0.01 * x2**2 + + +study = optuna.create_study(storage="sqlite:///db.sqlite3") # Create a new study with database. +study.optimize(objective, n_trials=100) +``` + +Then try the commands below: + +```shell +# Run the study specified above +$ python optimize_toy.py + +# Launch the dashboard based on the storage `sqlite:///db.sqlite3` +$ optuna-dashboard sqlite:///db.sqlite3 +... +Listening on http://localhost:8080/ +Hit Ctrl-C to quit. +``` + +
+ + +## OptunaHub + +[OptunaHub](https://hub.optuna.org/) is a feature-sharing platform for Optuna. +You can use the registered features and publish your packages. + +### Use registered features + +`optunahub` can be installed via pip: + +```shell +$ pip install optunahub +# Install AutoSampler dependencies (CPU only is sufficient for PyTorch) +$ pip install cmaes scipy torch --extra-index-url https://download.pytorch.org/whl/cpu +``` + +You can load registered module with `optunahub.load_module`. + +```python +import optuna +import optunahub + + +def objective(trial: optuna.Trial) -> float: + x = trial.suggest_float("x", -5, 5) + y = trial.suggest_float("y", -5, 5) + return x**2 + y**2 + + +module = optunahub.load_module(package="samplers/auto_sampler") +study = optuna.create_study(sampler=module.AutoSampler()) +study.optimize(objective, n_trials=10) + +print(study.best_trial.value, study.best_trial.params) +``` + +For more details, please refer to [the optunahub documentation](https://optuna.github.io/optunahub/). + +### Publish your packages + +You can publish your package via [optunahub-registry](https://github.com/optuna/optunahub-registry). +See the [Tutorials for Contributors](https://optuna.github.io/optunahub/tutorials_for_contributors.html) in OptunaHub. + + +## Communication + +- [GitHub Discussions] for questions. +- [GitHub Issues] for bug reports and feature requests. + +[GitHub Discussions]: https://github.com/optuna/optuna/discussions +[GitHub issues]: https://github.com/optuna/optuna/issues + + +## Contribution + +Any contributions to Optuna are more than welcome! + +If you are new to Optuna, please check the [good first issues](https://github.com/optuna/optuna/labels/good%20first%20issue). They are relatively simple, well-defined, and often good starting points for you to get familiar with the contribution workflow and other developers. 
+ +If you already have contributed to Optuna, we recommend the other [contribution-welcome issues](https://github.com/optuna/optuna/labels/contribution-welcome). + +For general guidelines on how to contribute to the project, take a look at [CONTRIBUTING.md](./CONTRIBUTING.md). + + +## Reference + +If you use Optuna in one of your research projects, please cite [our KDD paper](https://doi.org/10.1145/3292500.3330701) "Optuna: A Next-generation Hyperparameter Optimization Framework": + +
+BibTeX + +```bibtex +@inproceedings{akiba2019optuna, + title={{O}ptuna: A Next-Generation Hyperparameter Optimization Framework}, + author={Akiba, Takuya and Sano, Shotaro and Yanase, Toshihiko and Ohta, Takeru and Koyama, Masanori}, + booktitle={The 25th ACM SIGKDD International Conference on Knowledge Discovery \& Data Mining}, + pages={2623--2631}, + year={2019} +} +``` +
+ + +## License + +MIT License (see [LICENSE](./LICENSE)). + +Optuna uses the codes from SciPy and fdlibm projects (see [LICENSE_THIRD_PARTY](./LICENSE_THIRD_PARTY)). diff --git a/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d b/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d new file mode 100644 index 0000000000000000000000000000000000000000..954819eaf7822c70c68feac06a140f4b45f9f14c Binary files /dev/null and b/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d differ diff --git a/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d.body b/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d.body new file mode 100644 index 0000000000000000000000000000000000000000..6478c3f06b70e7486432ecf009461ab2365037b2 Binary files /dev/null and b/.cache/pip/http-v2/6/c/6/e/e/6c6eeaf6757edbde690577822daacaba826c2b12ce67b57b33e8021d.body differ diff --git a/.cache/pip/http-v2/7/2/8/a/2/728a2f33f382f4dacf08f6df77aad6f3d889f819ba4fa3efad5ec7e4 b/.cache/pip/http-v2/7/2/8/a/2/728a2f33f382f4dacf08f6df77aad6f3d889f819ba4fa3efad5ec7e4 new file mode 100644 index 0000000000000000000000000000000000000000..fb2d89725812e0dfc7e14d12aecb39b95a69efbe Binary files /dev/null and b/.cache/pip/http-v2/7/2/8/a/2/728a2f33f382f4dacf08f6df77aad6f3d889f819ba4fa3efad5ec7e4 differ diff --git a/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212 b/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212 new file mode 100644 index 0000000000000000000000000000000000000000..d67953d18eeaeb164af3930de14bfe53d32275c2 Binary files /dev/null and b/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212 differ diff --git a/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212.body 
b/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212.body new file mode 100644 index 0000000000000000000000000000000000000000..04d82dce2d6582e742a17cd5441787f483c2b9c6 --- /dev/null +++ b/.cache/pip/http-v2/7/7/3/7/4/77374f6555d766d1d452fe4918fb303c49f49a5a37a0986ea4f1b212.body @@ -0,0 +1,139 @@ +Metadata-Version: 2.4 +Name: alembic +Version: 1.18.4 +Summary: A database migration tool for SQLAlchemy. +Author-email: Mike Bayer +License-Expression: MIT +Project-URL: Homepage, https://alembic.sqlalchemy.org +Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/ +Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html +Project-URL: Source, https://github.com/sqlalchemy/alembic/ +Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Environment :: Console +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: SQLAlchemy>=1.4.23 +Requires-Dist: Mako +Requires-Dist: typing-extensions>=4.12 +Requires-Dist: tomli; python_version < "3.11" +Provides-Extra: tz +Requires-Dist: tzdata; extra == "tz" +Dynamic: license-file + +Alembic is a database migrations tool written by the author +of `SQLAlchemy `_. 
A migrations tool +offers the following functionality: + +* Can emit ALTER statements to a database in order to change + the structure of tables and other constructs +* Provides a system whereby "migration scripts" may be constructed; + each script indicates a particular series of steps that can "upgrade" a + target database to a new version, and optionally a series of steps that can + "downgrade" similarly, doing the same steps in reverse. +* Allows the scripts to execute in some sequential manner. + +The goals of Alembic are: + +* Very open ended and transparent configuration and operation. A new + Alembic environment is generated from a set of templates which is selected + among a set of options when setup first occurs. The templates then deposit a + series of scripts that define fully how database connectivity is established + and how migration scripts are invoked; the migration scripts themselves are + generated from a template within that series of scripts. The scripts can + then be further customized to define exactly how databases will be + interacted with and what structure new migration files should take. +* Full support for transactional DDL. The default scripts ensure that all + migrations occur within a transaction - for those databases which support + this (Postgresql, Microsoft SQL Server), migrations can be tested with no + need to manually undo changes upon failure. +* Minimalist script construction. Basic operations like renaming + tables/columns, adding/removing columns, changing column attributes can be + performed through one line commands like alter_column(), rename_table(), + add_constraint(). There is no need to recreate full SQLAlchemy Table + structures for simple operations like these - the functions themselves + generate minimalist schema structures behind the scenes to achieve the given + DDL sequence. +* "auto generation" of migrations. 
While real world migrations are far more + complex than what can be automatically determined, Alembic can still + eliminate the initial grunt work in generating new migration directives + from an altered schema. The ``--autogenerate`` feature will inspect the + current status of a database using SQLAlchemy's schema inspection + capabilities, compare it to the current state of the database model as + specified in Python, and generate a series of "candidate" migrations, + rendering them into a new migration script as Python directives. The + developer then edits the new file, adding additional directives and data + migrations as needed, to produce a finished migration. Table and column + level changes can be detected, with constraints and indexes to follow as + well. +* Full support for migrations generated as SQL scripts. Those of us who + work in corporate environments know that direct access to DDL commands on a + production database is a rare privilege, and DBAs want textual SQL scripts. + Alembic's usage model and commands are oriented towards being able to run a + series of migrations into a textual output file as easily as it runs them + directly to a database. Care must be taken in this mode to not invoke other + operations that rely upon in-memory SELECTs of rows - Alembic tries to + provide helper constructs like bulk_insert() to help with data-oriented + operations that are compatible with script-based DDL. +* Non-linear, dependency-graph versioning. Scripts are given UUID + identifiers similarly to a DVCS, and the linkage of one script to the next + is achieved via human-editable markers within the scripts themselves. + The structure of a set of migration files is considered as a + directed-acyclic graph, meaning any migration file can be dependent + on any other arbitrary set of migration files, or none at + all. Through this open-ended system, migration files can be organized + into branches, multiple roots, and mergepoints, without restriction. 
+ Commands are provided to produce new branches, roots, and merges of + branches automatically. +* Provide a library of ALTER constructs that can be used by any SQLAlchemy + application. The DDL constructs build upon SQLAlchemy's own DDLElement base + and can be used standalone by any application or script. +* At long last, bring SQLite and its inability to ALTER things into the fold, + but in such a way that SQLite's very special workflow needs are accommodated + in an explicit way that makes the most of a bad situation, through the + concept of a "batch" migration, where multiple changes to a table can + be batched together to form a series of instructions for a single, subsequent + "move-and-copy" workflow. You can even use "move-and-copy" workflow for + other databases, if you want to recreate a table in the background + on a busy system. + +Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ + +The SQLAlchemy Project +====================== + +Alembic is part of the `SQLAlchemy Project `_ and +adheres to the same standards and conventions as the core project. + +Development / Bug reporting / Pull requests +___________________________________________ + +Please refer to the +`SQLAlchemy Community Guide `_ for +guidelines on coding and participating in this project. + +Code of Conduct +_______________ + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct `_. + +License +======= + +Alembic is distributed under the `MIT license +`_. 
diff --git a/.cache/pip/http-v2/7/7/3/b/e/773be4e62f2a7f9be9d2b777b9be56e14e2b6c9666994e8793db52fd b/.cache/pip/http-v2/7/7/3/b/e/773be4e62f2a7f9be9d2b777b9be56e14e2b6c9666994e8793db52fd new file mode 100644 index 0000000000000000000000000000000000000000..5d792933062bc839b8734147cd2ab2432b53d790 Binary files /dev/null and b/.cache/pip/http-v2/7/7/3/b/e/773be4e62f2a7f9be9d2b777b9be56e14e2b6c9666994e8793db52fd differ diff --git a/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1 b/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1 new file mode 100644 index 0000000000000000000000000000000000000000..dad23ac46682005ee9d1da5373be43592364bd47 Binary files /dev/null and b/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1 differ diff --git a/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1.body b/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1.body new file mode 100644 index 0000000000000000000000000000000000000000..ec61577d0c5e5db8d0157b587bceaa884a41b5dc Binary files /dev/null and b/.cache/pip/http-v2/7/9/2/1/a/7921ac3318a5cdb592026cc26a94f7a2c1e1f7d3a1dc1e3857fd49f1.body differ diff --git a/.cache/pip/http-v2/7/b/3/0/7/7b3075adb708114992fb27c6511ef6dfacffdcb852b8e8d037a10c4b b/.cache/pip/http-v2/7/b/3/0/7/7b3075adb708114992fb27c6511ef6dfacffdcb852b8e8d037a10c4b new file mode 100644 index 0000000000000000000000000000000000000000..fe1f195a9b53b6559da0c7bb43f158b33989e5a9 Binary files /dev/null and b/.cache/pip/http-v2/7/b/3/0/7/7b3075adb708114992fb27c6511ef6dfacffdcb852b8e8d037a10c4b differ diff --git a/.cache/pip/http-v2/8/1/8/8/f/8188f9db6169e57bb653240cc02c629488d0d899420b0725db0f7e4a b/.cache/pip/http-v2/8/1/8/8/f/8188f9db6169e57bb653240cc02c629488d0d899420b0725db0f7e4a new file mode 100644 index 0000000000000000000000000000000000000000..5085efe912476cded11952da0bb452298b26d78d Binary 
files /dev/null and b/.cache/pip/http-v2/8/1/8/8/f/8188f9db6169e57bb653240cc02c629488d0d899420b0725db0f7e4a differ diff --git a/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10 b/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10 new file mode 100644 index 0000000000000000000000000000000000000000..779e80ebf1f793840682603716fb226512d2f3da Binary files /dev/null and b/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10 differ diff --git a/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10.body b/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10.body new file mode 100644 index 0000000000000000000000000000000000000000..44b7635cb1058f6b75bd483488ba3d664e1a5883 --- /dev/null +++ b/.cache/pip/http-v2/8/2/d/3/b/82d3b1270e716906b92643ecea0c7d53df8db0a043c71c5b2bb46c10.body @@ -0,0 +1,1383 @@ +Metadata-Version: 2.1 +Name: pandas +Version: 3.0.1 +Summary: Powerful data structures for data analysis, time series, and statistics +Author-Email: The Pandas Development Team +License: BSD 3-Clause License + + Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team + All rights reserved. + + Copyright (c) 2011-2026, Open source contributors. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Copyright (c) 2010-2019 Keith Goodman + Copyright (c) 2019 Bottleneck Developers + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE.Copyright 2017- Paul Ganssle + Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + The above license applies to all contributions after 2017-12-01, as well as + all contributions that have been re-licensed (see AUTHORS file for the list of + contributors who have re-licensed their code). + -------------------------------------------------------------------------------- + dateutil - Extensions to the standard Python datetime module. + + Copyright (c) 2003-2011 - Gustavo Niemeyer + Copyright (c) 2012-2014 - Tomi Pieviläinen + Copyright (c) 2014-2016 - Yaron de Leeuw + Copyright (c) 2015- - Paul Ganssle + Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + The above BSD License Applies to all code, even that also covered by Apache 2.0.# MIT License + + Copyright (c) 2019 Hadley Wickham; RStudio; and Evan Miller + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + Based on http://opensource.org/licenses/MIT + + This is a template. 
Complete and ship as file LICENSE the following 2 + lines (only) + + YEAR: + COPYRIGHT HOLDER: + + and specify as + + License: MIT + file LICENSE + + Copyright (c) , + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + The MIT License + + Copyright (c) 2008- Attractive Chaos + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE.musl as a whole is licensed under the following standard MIT license: + + ---------------------------------------------------------------------- + Copyright © 2005-2020 Rich Felker, et al. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + ---------------------------------------------------------------------- + + Authors/contributors include: + + A. Wilcox + Ada Worcester + Alex Dowad + Alex Suykov + Alexander Monakov + Andre McCurdy + Andrew Kelley + Anthony G. 
Basile + Aric Belsito + Arvid Picciani + Bartosz Brachaczek + Benjamin Peterson + Bobby Bingham + Boris Brezillon + Brent Cook + Chris Spiegel + Clément Vasseur + Daniel Micay + Daniel Sabogal + Daurnimator + David Carlier + David Edelsohn + Denys Vlasenko + Dmitry Ivanov + Dmitry V. Levin + Drew DeVault + Emil Renner Berthing + Fangrui Song + Felix Fietkau + Felix Janda + Gianluca Anzolin + Hauke Mehrtens + He X + Hiltjo Posthuma + Isaac Dunham + Jaydeep Patil + Jens Gustedt + Jeremy Huntwork + Jo-Philipp Wich + Joakim Sindholt + John Spencer + Julien Ramseier + Justin Cormack + Kaarle Ritvanen + Khem Raj + Kylie McClain + Leah Neukirchen + Luca Barbato + Luka Perkov + M Farkas-Dyck (Strake) + Mahesh Bodapati + Markus Wichmann + Masanori Ogino + Michael Clark + Michael Forney + Mikhail Kremnyov + Natanael Copa + Nicholas J. Kain + orc + Pascal Cuoq + Patrick Oppenlander + Petr Hosek + Petr Skocik + Pierre Carrier + Reini Urban + Rich Felker + Richard Pennington + Ryan Fairfax + Samuel Holland + Segev Finer + Shiz + sin + Solar Designer + Stefan Kristiansson + Stefan O'Rear + Szabolcs Nagy + Timo Teräs + Trutz Behn + Valentin Ochs + Will Dietz + William Haddon + William Pitcock + + Portions of this software are derived from third-party works licensed + under terms compatible with the above MIT license: + + The TRE regular expression implementation (src/regex/reg* and + src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed + under a 2-clause BSD license (license text in the source files). The + included version has been heavily modified by Rich Felker in 2012, in + the interests of size, simplicity, and namespace cleanliness. + + Much of the math library code (src/math/* and src/complex/*) is + Copyright © 1993,2004 Sun Microsystems or + Copyright © 2003-2011 David Schultz or + Copyright © 2003-2009 Steven G. Kargl or + Copyright © 2003-2009 Bruce D. Evans or + Copyright © 2008 Stephen L. 
Moshier or + Copyright © 2017-2018 Arm Limited + and labelled as such in comments in the individual source files. All + have been licensed under extremely permissive terms. + + The ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008 + The Android Open Source Project and is licensed under a two-clause BSD + license. It was taken from Bionic libc, used on Android. + + The AArch64 memcpy and memset code (src/string/aarch64/*) are + Copyright © 1999-2019, Arm Limited. + + The implementation of DES for crypt (src/crypt/crypt_des.c) is + Copyright © 1994 David Burren. It is licensed under a BSD license. + + The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was + originally written by Solar Designer and placed into the public + domain. The code also comes with a fallback permissive license for use + in jurisdictions that may not recognize the public domain. + + The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011 + Valentin Ochs and is licensed under an MIT-style license. + + The x86_64 port was written by Nicholas J. Kain and is licensed under + the standard MIT terms. + + The mips and microblaze ports were originally written by Richard + Pennington for use in the ellcc project. The original code was adapted + by Rich Felker for build system and code conventions during upstream + integration. It is licensed under the standard MIT terms. + + The mips64 port was contributed by Imagination Technologies and is + licensed under the standard MIT terms. + + The powerpc port was also originally written by Richard Pennington, + and later supplemented and integrated by John Spencer. It is licensed + under the standard MIT terms. + + All other files which have no copyright comments are original works + produced specifically for use as part of this library, written either + by Rich Felker, the main author of the library, or by one or more + contibutors listed above. 
Details on authorship of individual files + can be found in the git version control history of the project. The + omission of copyright and license comments in each file is in the + interest of source tree size. + + In addition, permission is hereby granted for all public header files + (include/* and arch/*/bits/*) and crt files intended to be linked into + applications (crt/*, ldso/dlstart.c, and arch/*/crt_arch.h) to omit + the copyright notice and permission notice otherwise required by the + license, and to use these files without any requirement of + attribution. These files include substantial contributions from: + + Bobby Bingham + John Spencer + Nicholas J. Kain + Rich Felker + Richard Pennington + Stefan Kristiansson + Szabolcs Nagy + + all of whom have explicitly granted such permission. + + This file previously contained text expressing a belief that most of + the files covered by the above exception were sufficiently trivial not + to be subject to copyright, resulting in confusion over whether it + negated the permissions granted in the license. In the spirit of + permissive licensing, and of not having licensing issues being an + obstacle to adoption, that text has been removed.Copyright (c) 2005-2023, NumPy Developers. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NumPy Developers nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + + Copyright (c) Donald Stufft and individual contributors. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.A. HISTORY OF THE SOFTWARE + ========================== + + Python was created in the early 1990s by Guido van Rossum at Stichting + Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands + as a successor of a language called ABC. Guido remains Python's + principal author, although it includes many contributions from others. + + In 1995, Guido continued his work on Python at the Corporation for + National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) + in Reston, Virginia where he released several versions of the + software. + + In May 2000, Guido and the Python core development team moved to + BeOpen.com to form the BeOpen PythonLabs team. In October of the same + year, the PythonLabs team moved to Digital Creations, which became + Zope Corporation. In 2001, the Python Software Foundation (PSF, see + https://www.python.org/psf/) was formed, a non-profit organization + created specifically to own Python-related Intellectual Property. + Zope Corporation was a sponsoring member of the PSF. + + All Python releases are Open Source (see https://opensource.org for + the Open Source Definition). 
Historically, most, but not all, Python + releases have also been GPL-compatible; the table below summarizes + the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + + Footnotes: + + (1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + + (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + + Thanks to the many outside volunteers who have worked under Guido's + direction to make these releases possible. + + + B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON + =============================================================== + + Python software and documentation are licensed under the + Python Software Foundation License Version 2. + + Starting with Python 3.8.6, examples, recipes, and other code in + the documentation are dual licensed under the PSF License Version 2 + and the Zero-Clause BSD license. + + Some software incorporated into Python is under different licenses. + The licenses are listed with code falling under that license. + + + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + -------------------------------------------- + + 1. 
This LICENSE AGREEMENT is between the Python Software Foundation + ("PSF"), and the Individual or Organization ("Licensee") accessing and + otherwise using this software ("Python") in source or binary form and + its associated documentation. + + 2. Subject to the terms and conditions of this License Agreement, PSF hereby + grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, + analyze, test, perform and/or display publicly, prepare derivative works, + distribute, and otherwise use Python alone or in any derivative version, + provided, however, that PSF's License Agreement and PSF's notice of copyright, + i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, + 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; + All Rights Reserved" are retained in Python alone or in any derivative version + prepared by Licensee. + + 3. In the event Licensee prepares a derivative work that is based on + or incorporates Python or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to Python. + + 4. PSF is making Python available to Licensee on an "AS IS" + basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS + A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, + OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. 
+ + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between PSF and + Licensee. This License Agreement does not grant permission to use PSF + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using Python, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. + + + BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 + ------------------------------------------- + + BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + + 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an + office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the + Individual or Organization ("Licensee") accessing and otherwise using + this software in source or binary form and its associated + documentation ("the Software"). + + 2. Subject to the terms and conditions of this BeOpen Python License + Agreement, BeOpen hereby grants Licensee a non-exclusive, + royalty-free, world-wide license to reproduce, analyze, test, perform + and/or display publicly, prepare derivative works, distribute, and + otherwise use the Software alone or in any derivative version, + provided, however, that the BeOpen Python License is retained in the + Software, alone or in any derivative version prepared by Licensee. + + 3. BeOpen is making the Software available to Licensee on an "AS IS" + basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE + SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS + AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY + DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 5. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 6. This License Agreement shall be governed by and interpreted in all + respects by the law of the State of California, excluding conflict of + law provisions. Nothing in this License Agreement shall be deemed to + create any relationship of agency, partnership, or joint venture + between BeOpen and Licensee. This License Agreement does not grant + permission to use BeOpen trademarks or trade names in a trademark + sense to endorse or promote products or services of Licensee, or any + third party. As an exception, the "BeOpen Python" logos available at + http://www.pythonlabs.com/logos.html may be used according to the + permissions granted on that web page. + + 7. By copying, installing or otherwise using the software, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. + + + CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 + --------------------------------------- + + 1. This LICENSE AGREEMENT is between the Corporation for National + Research Initiatives, having an office at 1895 Preston White Drive, + Reston, VA 20191 ("CNRI"), and the Individual or Organization + ("Licensee") accessing and otherwise using Python 1.6.1 software in + source or binary form and its associated documentation. + + 2. 
Subject to the terms and conditions of this License Agreement, CNRI + hereby grants Licensee a nonexclusive, royalty-free, world-wide + license to reproduce, analyze, test, perform and/or display publicly, + prepare derivative works, distribute, and otherwise use Python 1.6.1 + alone or in any derivative version, provided, however, that CNRI's + License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) + 1995-2001 Corporation for National Research Initiatives; All Rights + Reserved" are retained in Python 1.6.1 alone or in any derivative + version prepared by Licensee. Alternately, in lieu of CNRI's License + Agreement, Licensee may substitute the following text (omitting the + quotes): "Python 1.6.1 is made available subject to the terms and + conditions in CNRI's License Agreement. This Agreement together with + Python 1.6.1 may be located on the internet using the following + unique, persistent identifier (known as a handle): 1895.22/1013. This + Agreement may also be obtained from a proxy server on the internet + using the following URL: http://hdl.handle.net/1895.22/1013". + + 3. In the event Licensee prepares a derivative work that is based on + or incorporates Python 1.6.1 or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to Python 1.6.1. + + 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" + basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT + INFRINGE ANY THIRD PARTY RIGHTS. + + 5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS + A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, + OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. This License Agreement shall be governed by the federal + intellectual property law of the United States, including without + limitation the federal copyright law, and, to the extent such + U.S. federal law does not apply, by the law of the Commonwealth of + Virginia, excluding Virginia's conflict of law provisions. + Notwithstanding the foregoing, with regard to derivative works based + on Python 1.6.1 that incorporate non-separable material that was + previously distributed under the GNU General Public License (GPL), the + law of the Commonwealth of Virginia shall govern this License + Agreement only as to issues arising under or with respect to + Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this + License Agreement shall be deemed to create any relationship of + agency, partnership, or joint venture between CNRI and Licensee. This + License Agreement does not grant permission to use CNRI trademarks or + trade name in a trademark sense to endorse or promote products or + services of Licensee, or any third party. + + 8. By clicking on the "ACCEPT" button where indicated, or by copying, + installing or otherwise using Python 1.6.1, Licensee agrees to be + bound by the terms and conditions of this License Agreement. + + ACCEPT + + + CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 + -------------------------------------------------- + + Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, + The Netherlands. All rights reserved. 
+ + Permission to use, copy, modify, and distribute this software and its + documentation for any purpose and without fee is hereby granted, + provided that the above copyright notice appear in all copies and that + both that copyright notice and this permission notice appear in + supporting documentation, and that the name of Stichting Mathematisch + Centrum or CWI not be used in advertising or publicity pertaining to + distribution of the software without specific, written prior + permission. + + STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO + THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND + FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE + FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT + OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION + ---------------------------------------------------------------------- + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + Copyright (c) 2014, Al Sweigart + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.Copyright (c) 2017 Anthony Sottile + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this 
permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE.Copyright (c) 2015-2019 Jared Hobbs + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE.Developed by ESN, an Electronic Arts Inc. studio. + Copyright (c) 2014, Electronic Arts Inc. + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of ESN, Electronic Arts Inc. nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS INC. BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + ---- + + Portions of code from MODP_ASCII - Ascii transformations (upper/lower, etc) + https://github.com/client9/stringencoders + + Copyright 2005, 2006, 2007 + Nick Galbreath -- nickg [at] modp [dot] com + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + Neither the name of the modp.com nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + This is the standard "new" BSD license: + http://www.opensource.org/licenses/bsd-license.php + + https://github.com/client9/stringencoders/blob/cfd5c1507325ae497ea9bacdacba12c0ffd79d30/COPYING + + ---- + + Numeric decoder derived from from TCL library + https://opensource.apple.com/source/tcl/tcl-14/tcl/license.terms + * Copyright (c) 1988-1993 The Regents of the University of California. + * Copyright (c) 1994 Sun Microsystems, Inc. + + This software is copyrighted by the Regents of the University of + California, Sun Microsystems, Inc., Scriptics Corporation, ActiveState + Corporation and other parties. The following terms apply to all files + associated with the software unless explicitly disclaimed in + individual files. 
+ + The authors hereby grant permission to use, copy, modify, distribute, + and license this software and its documentation for any purpose, provided + that existing copyright notices are retained in all copies and that this + notice is included verbatim in any distributions. No written agreement, + license, or royalty fee is required for any of the authorized uses. + Modifications to this software may be copyrighted by their authors + and need not follow the licensing terms described here, provided that + the new terms are clearly indicated on the first page of each file where + they apply. + + IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY + FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES + ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY + DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES, + INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE + IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE + NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR + MODIFICATIONS. + + GOVERNMENT USE: If you are acquiring this software on behalf of the + U.S. government, the Government shall have only "Restricted Rights" + in the software and related documentation as defined in the Federal + Acquisition Regulations (FARs) in Clause 52.227.19 (c) (2). If you + are acquiring the software on behalf of the Department of Defense, the + software shall be classified as "Commercial Computer Software" and the + Government shall have only "Restricted Rights" as defined in Clause + 252.227-7013 (c) (1) of DFARs. Notwithstanding the foregoing, the + authors grant the U.S. 
Government and others acting in its behalf + permission to use and distribute the software in accordance with the + terms specified in this license. +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Scientific/Engineering +Project-URL: homepage, https://pandas.pydata.org +Project-URL: documentation, https://pandas.pydata.org/docs/ +Project-URL: repository, https://github.com/pandas-dev/pandas +Requires-Python: >=3.11 +Requires-Dist: numpy>=1.26.0; python_version < "3.14" +Requires-Dist: numpy>=2.3.3; python_version >= "3.14" +Requires-Dist: python-dateutil>=2.8.2 +Requires-Dist: tzdata; sys_platform == "win32" +Requires-Dist: tzdata; sys_platform == "emscripten" +Provides-Extra: test +Requires-Dist: hypothesis>=6.116.0; extra == "test" +Requires-Dist: pytest>=8.3.4; extra == "test" +Requires-Dist: pytest-xdist>=3.6.1; extra == "test" +Provides-Extra: pyarrow +Requires-Dist: pyarrow>=13.0.0; extra == "pyarrow" +Provides-Extra: performance +Requires-Dist: bottleneck>=1.4.2; extra == "performance" +Requires-Dist: numba>=0.60.0; extra == "performance" +Requires-Dist: numexpr>=2.10.2; extra == "performance" +Provides-Extra: computation +Requires-Dist: scipy>=1.14.1; extra == "computation" +Requires-Dist: xarray>=2024.10.0; extra == "computation" +Provides-Extra: fss +Requires-Dist: fsspec>=2024.10.0; extra == "fss" +Provides-Extra: aws +Requires-Dist: 
s3fs>=2024.10.0; extra == "aws" +Provides-Extra: gcp +Requires-Dist: gcsfs>=2024.10.0; extra == "gcp" +Provides-Extra: excel +Requires-Dist: odfpy>=1.4.1; extra == "excel" +Requires-Dist: openpyxl>=3.1.5; extra == "excel" +Requires-Dist: python-calamine>=0.3.0; extra == "excel" +Requires-Dist: pyxlsb>=1.0.10; extra == "excel" +Requires-Dist: xlrd>=2.0.1; extra == "excel" +Requires-Dist: xlsxwriter>=3.2.0; extra == "excel" +Provides-Extra: parquet +Requires-Dist: pyarrow>=13.0.0; extra == "parquet" +Provides-Extra: feather +Requires-Dist: pyarrow>=13.0.0; extra == "feather" +Provides-Extra: iceberg +Requires-Dist: pyiceberg>=0.8.1; extra == "iceberg" +Provides-Extra: hdf5 +Requires-Dist: tables>=3.10.1; extra == "hdf5" +Provides-Extra: spss +Requires-Dist: pyreadstat>=1.2.8; extra == "spss" +Provides-Extra: postgresql +Requires-Dist: SQLAlchemy>=2.0.36; extra == "postgresql" +Requires-Dist: psycopg2>=2.9.10; extra == "postgresql" +Requires-Dist: adbc-driver-postgresql>=1.2.0; extra == "postgresql" +Provides-Extra: mysql +Requires-Dist: SQLAlchemy>=2.0.36; extra == "mysql" +Requires-Dist: pymysql>=1.1.1; extra == "mysql" +Provides-Extra: sql-other +Requires-Dist: SQLAlchemy>=2.0.36; extra == "sql-other" +Requires-Dist: adbc-driver-postgresql>=1.2.0; extra == "sql-other" +Requires-Dist: adbc-driver-sqlite>=1.2.0; extra == "sql-other" +Provides-Extra: html +Requires-Dist: beautifulsoup4>=4.12.3; extra == "html" +Requires-Dist: html5lib>=1.1; extra == "html" +Requires-Dist: lxml>=5.3.0; extra == "html" +Provides-Extra: xml +Requires-Dist: lxml>=5.3.0; extra == "xml" +Provides-Extra: plot +Requires-Dist: matplotlib>=3.9.3; extra == "plot" +Provides-Extra: output-formatting +Requires-Dist: jinja2>=3.1.5; extra == "output-formatting" +Requires-Dist: tabulate>=0.9.0; extra == "output-formatting" +Provides-Extra: clipboard +Requires-Dist: PyQt5>=5.15.9; extra == "clipboard" +Requires-Dist: qtpy>=2.4.2; extra == "clipboard" +Provides-Extra: compression +Requires-Dist: 
zstandard>=0.23.0; extra == "compression" +Provides-Extra: timezone +Requires-Dist: pytz>=2024.2; extra == "timezone" +Provides-Extra: all +Requires-Dist: adbc-driver-postgresql>=1.2.0; extra == "all" +Requires-Dist: adbc-driver-sqlite>=1.2.0; extra == "all" +Requires-Dist: beautifulsoup4>=4.12.3; extra == "all" +Requires-Dist: bottleneck>=1.4.2; extra == "all" +Requires-Dist: fastparquet>=2024.11.0; extra == "all" +Requires-Dist: fsspec>=2024.10.0; extra == "all" +Requires-Dist: gcsfs>=2024.10.0; extra == "all" +Requires-Dist: html5lib>=1.1; extra == "all" +Requires-Dist: hypothesis>=6.116.0; extra == "all" +Requires-Dist: jinja2>=3.1.5; extra == "all" +Requires-Dist: lxml>=5.3.0; extra == "all" +Requires-Dist: matplotlib>=3.9.3; extra == "all" +Requires-Dist: numba>=0.60.0; extra == "all" +Requires-Dist: numexpr>=2.10.2; extra == "all" +Requires-Dist: odfpy>=1.4.1; extra == "all" +Requires-Dist: openpyxl>=3.1.5; extra == "all" +Requires-Dist: psycopg2>=2.9.10; extra == "all" +Requires-Dist: pyarrow>=13.0.0; extra == "all" +Requires-Dist: pyiceberg>=0.8.1; extra == "all" +Requires-Dist: pymysql>=1.1.1; extra == "all" +Requires-Dist: PyQt5>=5.15.9; extra == "all" +Requires-Dist: pyreadstat>=1.2.8; extra == "all" +Requires-Dist: pytest>=8.3.4; extra == "all" +Requires-Dist: pytest-xdist>=3.6.1; extra == "all" +Requires-Dist: python-calamine>=0.3.0; extra == "all" +Requires-Dist: pytz>=2024.2; extra == "all" +Requires-Dist: pyxlsb>=1.0.10; extra == "all" +Requires-Dist: qtpy>=2.4.2; extra == "all" +Requires-Dist: scipy>=1.14.1; extra == "all" +Requires-Dist: s3fs>=2024.10.0; extra == "all" +Requires-Dist: SQLAlchemy>=2.0.36; extra == "all" +Requires-Dist: tables>=3.10.1; extra == "all" +Requires-Dist: tabulate>=0.9.0; extra == "all" +Requires-Dist: xarray>=2024.10.0; extra == "all" +Requires-Dist: xlrd>=2.0.1; extra == "all" +Requires-Dist: xlsxwriter>=3.2.0; extra == "all" +Requires-Dist: zstandard>=0.23.0; extra == "all" +Description-Content-Type: text/markdown + + 
+ + Pandas Logo + + +----------------- + +# pandas: A Powerful Python Data Analysis Toolkit + +| | | +| --- | --- | +| Testing | [![CI - Test](https://github.com/pandas-dev/pandas/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/pandas-dev/pandas/actions/workflows/unit-tests.yml) [![Coverage](https://codecov.io/github/pandas-dev/pandas/coverage.svg?branch=main)](https://codecov.io/gh/pandas-dev/pandas) | +| Package | [![PyPI Latest Release](https://img.shields.io/pypi/v/pandas.svg)](https://pypi.org/project/pandas/) [![PyPI Downloads](https://img.shields.io/pypi/dm/pandas.svg?label=PyPI%20downloads)](https://pypi.org/project/pandas/) [![Conda Latest Release](https://anaconda.org/conda-forge/pandas/badges/version.svg)](https://anaconda.org/conda-forge/pandas) [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pandas.svg?label=Conda%20downloads)](https://anaconda.org/conda-forge/pandas) | +| Meta | [![Powered by NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3509134.svg)](https://doi.org/10.5281/zenodo.3509134) [![License - BSD 3-Clause](https://img.shields.io/pypi/l/pandas.svg)](https://github.com/pandas-dev/pandas/blob/main/LICENSE) [![Slack](https://img.shields.io/badge/join_Slack-information-brightgreen.svg?logo=slack)](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) [![LFX Health Score](https://insights.linuxfoundation.org/api/badge/health-score?project=pandas-dev-pandas)](https://insights.linuxfoundation.org/project/pandas-dev-pandas) | + + +## What is it? + +**pandas** is a Python package that provides fast, flexible, and expressive data +structures designed to make working with "relational" or "labeled" data both +easy and intuitive. 
It aims to be the fundamental high-level building block for +doing practical, **real-world** data analysis in Python. Additionally, it has +the broader goal of becoming **the most powerful and flexible open-source data +analysis/manipulation tool available in any language**. It is already well on +its way towards this goal. + +## Table of Contents + +- [Main Features](#main-features) +- [Where to get it](#where-to-get-it) +- [Dependencies](#dependencies) +- [Installation from sources](#installation-from-sources) +- [License](#license) +- [Documentation](#documentation) +- [Background](#background) +- [Getting Help](#getting-help) +- [Discussion and Development](#discussion-and-development) +- [Contributing to pandas](#contributing-to-pandas) + +## Main Features +Here are just a few of the things that pandas does well: + + - Easy handling of [**missing data**][missing-data] (represented as + `NaN`, `NA`, or `NaT`) in floating point as well as non-floating point data + - Size mutability: columns can be [**inserted and + deleted**][insertion-deletion] from DataFrame and higher dimensional + objects + - Automatic and explicit [**data alignment**][alignment]: objects can + be explicitly aligned to a set of labels, or the user can simply + ignore the labels and let `Series`, `DataFrame`, etc. 
automatically + align the data for you in computations + - Powerful, flexible [**group by**][groupby] functionality to perform + split-apply-combine operations on data sets, for both aggregating + and transforming data + - Make it [**easy to convert**][conversion] ragged, + differently-indexed data in other Python and NumPy data structures + into DataFrame objects + - Intelligent label-based [**slicing**][slicing], [**fancy + indexing**][fancy-indexing], and [**subsetting**][subsetting] of + large data sets + - Intuitive [**merging**][merging] and [**joining**][joining] data + sets + - Flexible [**reshaping**][reshape] and [**pivoting**][pivot-table] of + data sets + - [**Hierarchical**][mi] labeling of axes (possible to have multiple + labels per tick) + - Robust I/O tools for loading data from [**flat files**][flat-files] + (CSV and delimited), [**Excel files**][excel], [**databases**][db], + and saving/loading data from the ultrafast [**HDF5 format**][hdfstore] + - [**Time series**][timeseries]-specific functionality: date range + generation and frequency conversion, moving window statistics, + date shifting and lagging + + + [missing-data]: https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html + [insertion-deletion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#column-selection-addition-deletion + [alignment]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html?highlight=alignment#intro-to-data-structures + [groupby]: https://pandas.pydata.org/pandas-docs/stable/user_guide/groupby.html#group-by-split-apply-combine + [conversion]: https://pandas.pydata.org/pandas-docs/stable/user_guide/dsintro.html#dataframe + [slicing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#slicing-ranges + [fancy-indexing]: https://pandas.pydata.org/pandas-docs/stable/user_guide/advanced.html#advanced + [subsetting]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#boolean-indexing 
+ [merging]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#database-style-dataframe-or-named-series-joining-merging + [joining]: https://pandas.pydata.org/pandas-docs/stable/user_guide/merging.html#joining-on-index + [reshape]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html + [pivot-table]: https://pandas.pydata.org/pandas-docs/stable/user_guide/reshaping.html + [mi]: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#hierarchical-indexing-multiindex + [flat-files]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#csv-text-files + [excel]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#excel-files + [db]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#sql-queries + [hdfstore]: https://pandas.pydata.org/pandas-docs/stable/user_guide/io.html#hdf5-pytables + [timeseries]: https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#time-series-date-functionality + +## Where to get it +The source code is currently hosted on GitHub at: +https://github.com/pandas-dev/pandas + +Binary installers for the latest released version are available at the [Python +Package Index (PyPI)](https://pypi.org/project/pandas) and on [Conda](https://anaconda.org/conda-forge/pandas). + +```sh +# conda +conda install -c conda-forge pandas +``` + +```sh +# or PyPI +pip install pandas +``` + +The list of changes to pandas between each release can be found +[here](https://pandas.pydata.org/pandas-docs/stable/whatsnew/index.html). For full +details, see the commit logs at https://github.com/pandas-dev/pandas. 
+ +## Dependencies +- [NumPy - Adds support for large, multi-dimensional arrays, matrices and high-level mathematical functions to operate on these arrays](https://www.numpy.org) +- [python-dateutil - Provides powerful extensions to the standard datetime module](https://dateutil.readthedocs.io/en/stable/index.html) +- [tzdata - Provides an IANA time zone database](https://tzdata.readthedocs.io/en/latest/) (Only required on Windows/Emscripten) + +See the [full installation instructions](https://pandas.pydata.org/pandas-docs/stable/install.html#dependencies) for minimum supported versions of required, recommended and optional dependencies. + +## Installation from sources +To install pandas from source you need [Cython](https://cython.org/) in addition to the normal +dependencies above. Cython can be installed from PyPI: + +```sh +pip install cython +``` + +In the `pandas` directory (same one where you found this file after +cloning the git repo), execute: + +```sh +pip install . +``` + +or for installing in [development mode](https://pip.pypa.io/en/latest/cli/pip_install/#install-editable): + + +```sh +python -m pip install -ve . --no-build-isolation --config-settings editable-verbose=true +``` + +See the full instructions for [installing from source](https://pandas.pydata.org/docs/dev/development/contributing_environment.html). + +## License +[BSD 3](LICENSE) + +## Documentation +The official documentation is hosted on [PyData.org](https://pandas.pydata.org/pandas-docs/stable/). + +## Background +Work on ``pandas`` started at [AQR](https://www.aqr.com/) (a quantitative hedge fund) in 2008 and +has been under active development since then. + +## Getting Help + +For usage questions, the best place to go to is [Stack Overflow](https://stackoverflow.com/questions/tagged/pandas). +Further, general questions and discussions can also take place on the [pydata mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata). 
+ +## Discussion and Development +Most development discussions take place on GitHub in this repo, via the [GitHub issue tracker](https://github.com/pandas-dev/pandas/issues). + +Further, the [pandas-dev mailing list](https://mail.python.org/mailman/listinfo/pandas-dev) can also be used for specialized discussions or design issues, and a [Slack channel](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack) is available for quick development related questions. + +There are also frequent [community meetings](https://pandas.pydata.org/docs/dev/development/community.html#community-meeting) for project maintainers open to the community as well as monthly [new contributor meetings](https://pandas.pydata.org/docs/dev/development/community.html#new-contributor-meeting) to help support new contributors. + +Additional information on the communication channels can be found on the [contributor community](https://pandas.pydata.org/docs/development/community.html) page. + +## Contributing to pandas + +[![Open Source Helpers](https://www.codetriage.com/pandas-dev/pandas/badges/users.svg)](https://www.codetriage.com/pandas-dev/pandas) + +All contributions, bug reports, bug fixes, documentation improvements, enhancements, and ideas are welcome. + +A detailed overview on how to contribute can be found in the **[contributing guide](https://pandas.pydata.org/docs/dev/development/contributing.html)**. + +If you are simply looking to start working with the pandas codebase, navigate to the [GitHub "issues" tab](https://github.com/pandas-dev/pandas/issues) and start looking through interesting issues. There are a number of issues listed under [Docs](https://github.com/pandas-dev/pandas/issues?q=is%3Aissue%20state%3Aopen%20label%3ADocs%20sort%3Aupdated-desc) and [good first issue](https://github.com/pandas-dev/pandas/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22good%20first%20issue%22%20sort%3Aupdated-desc) where you could start out. 
+ +You can also triage issues which may include reproducing bug reports, or asking for vital information such as version numbers or reproduction instructions. If you would like to start triaging issues, one easy way to get started is to [subscribe to pandas on CodeTriage](https://www.codetriage.com/pandas-dev/pandas). + +Or maybe through using pandas you have an idea of your own or are looking for something in the documentation and thinking ‘this can be improved’... you can do something about it! + +Feel free to ask questions on the [mailing list](https://groups.google.com/forum/?fromgroups#!forum/pydata) or on [Slack](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack). + +As contributors and maintainers to this project, you are expected to abide by pandas' code of conduct. More information can be found at: [Contributor Code of Conduct](https://github.com/pandas-dev/.github/blob/master/CODE_OF_CONDUCT.md) + +
+ +[Go to Top](#table-of-contents) diff --git a/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2 b/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2 new file mode 100644 index 0000000000000000000000000000000000000000..6aab2f5bce13aa2326d360063d5e1970502fce65 Binary files /dev/null and b/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2 differ diff --git a/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2.body b/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2.body new file mode 100644 index 0000000000000000000000000000000000000000..11d571e789c6db0e317c52de8e297e0f4a07c035 --- /dev/null +++ b/.cache/pip/http-v2/8/4/3/f/2/843f29ad5a195cb2664dafac8345ae4f121177ea8b150533e02b27f2.body @@ -0,0 +1,98 @@ +Metadata-Version: 2.4 +Name: greenlet +Version: 3.3.2 +Summary: Lightweight in-process concurrent programming +Author-email: Alexey Borzenkov +Maintainer-email: Jason Madden +License-Expression: MIT AND PSF-2.0 +Project-URL: Homepage, https://greenlet.readthedocs.io +Project-URL: Documentation, https://greenlet.readthedocs.io +Project-URL: Repository, https://github.com/python-greenlet/greenlet +Project-URL: Issues, https://github.com/python-greenlet/greenlet/issues +Project-URL: Changelog, https://greenlet.readthedocs.io/en/latest/changes.html +Keywords: greenlet,coroutine,concurrency,threads,cooperative +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: Programming Language :: C +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: 
Programming Language :: Python :: 3.14 +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.PSF +Provides-Extra: docs +Requires-Dist: Sphinx; extra == "docs" +Requires-Dist: furo; extra == "docs" +Provides-Extra: test +Requires-Dist: objgraph; extra == "test" +Requires-Dist: psutil; extra == "test" +Requires-Dist: setuptools; extra == "test" +Dynamic: license-file + +.. This file is included into docs/history.rst + + +Greenlets are lightweight coroutines for in-process concurrent +programming. + +The "greenlet" package is a spin-off of `Stackless`_, a version of +CPython that supports micro-threads called "tasklets". Tasklets run +pseudo-concurrently (typically in a single or a few OS-level threads) +and are synchronized with data exchanges on "channels". + +A "greenlet", on the other hand, is a still more primitive notion of +micro-thread with no implicit scheduling; coroutines, in other words. +This is useful when you want to control exactly when your code runs. +You can build custom scheduled micro-threads on top of greenlet; +however, it seems that greenlets are useful on their own as a way to +make advanced control flow structures. For example, we can recreate +generators; the difference with Python's own generators is that our +generators can call nested functions and the nested functions can +yield values too. (Additionally, you don't need a "yield" keyword. See +the example in `test_generator.py +`_). + +Greenlets are provided as a C extension module for the regular unmodified +interpreter. + +.. _`Stackless`: http://www.stackless.com + + +Who is using Greenlet? +====================== + +There are several libraries that use Greenlet as a more flexible +alternative to Python's built in coroutine support: + + - `Concurrence`_ + - `Eventlet`_ + - `Gevent`_ + +.. 
_Concurrence: http://opensource.hyves.org/concurrence/ +.. _Eventlet: http://eventlet.net/ +.. _Gevent: http://www.gevent.org/ + +Getting Greenlet +================ + +The easiest way to get Greenlet is to install it with pip:: + + pip install greenlet + + +Source code archives and binary distributions are available on the +python package index at https://pypi.org/project/greenlet + +The source code repository is hosted on github: +https://github.com/python-greenlet/greenlet + +Documentation is available on readthedocs.org: +https://greenlet.readthedocs.io diff --git a/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f b/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f new file mode 100644 index 0000000000000000000000000000000000000000..0485206f8aa537eb4433e8390a320184c34cca38 Binary files /dev/null and b/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f differ diff --git a/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f.body b/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f.body new file mode 100644 index 0000000000000000000000000000000000000000..2b144bdc6fcb832122640aa0c9bf3fa5c788ea7e Binary files /dev/null and b/.cache/pip/http-v2/8/6/1/4/a/8614a6e8a4ad9aa5595a607eb8f7a0d8a8f7a7a34cff9540f443a21f.body differ diff --git a/.cache/pip/http-v2/8/6/e/d/b/86edbe0c05b42c1fe86aacc3cb9eaf8cb8a43c81ec1ba8b34d47241c b/.cache/pip/http-v2/8/6/e/d/b/86edbe0c05b42c1fe86aacc3cb9eaf8cb8a43c81ec1ba8b34d47241c new file mode 100644 index 0000000000000000000000000000000000000000..cdcffa182a1b8a2482f5c049fcdc9e0e7fa3a66f Binary files /dev/null and b/.cache/pip/http-v2/8/6/e/d/b/86edbe0c05b42c1fe86aacc3cb9eaf8cb8a43c81ec1ba8b34d47241c differ diff --git a/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b 
b/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b new file mode 100644 index 0000000000000000000000000000000000000000..7570cbb4392dad7aad2f752ea1918a0f3c00902b Binary files /dev/null and b/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b differ diff --git a/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b.body b/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b.body new file mode 100644 index 0000000000000000000000000000000000000000..2d2ffe8cde67d248ca8cbc27519f33616340982c Binary files /dev/null and b/.cache/pip/http-v2/8/a/c/4/d/8ac4d14dc45e27d21da49fb515570b6f875b78707de9b08ce1088d1b.body differ diff --git a/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc b/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc new file mode 100644 index 0000000000000000000000000000000000000000..f6fd054b26f3eedc23b786500cab1a0d97d35c25 Binary files /dev/null and b/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc differ diff --git a/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc.body b/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc.body new file mode 100644 index 0000000000000000000000000000000000000000..5d12ea2c9ef0baff5b5a56572bf63b247572561c --- /dev/null +++ b/.cache/pip/http-v2/8/d/8/7/b/8d87bf7a308bb8a91752d9c7e9e4d96db27c3fd3d01d3bb4e2dd8afc.body @@ -0,0 +1,2316 @@ +Metadata-Version: 2.4 +Name: fonttools +Version: 4.62.1 +Summary: Tools to manipulate font files +Home-page: http://github.com/fonttools/fonttools +Author: Just van Rossum +Author-email: just@letterror.com +Maintainer: Behdad Esfahbod +Maintainer-email: behdad@behdad.org +License: MIT +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: 
Console +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: End Users/Desktop +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Text Processing :: Fonts +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: LICENSE.external +Provides-Extra: ufo +Provides-Extra: lxml +Requires-Dist: lxml>=4.0; extra == "lxml" +Provides-Extra: woff +Requires-Dist: brotli>=1.0.1; platform_python_implementation == "CPython" and extra == "woff" +Requires-Dist: brotlicffi>=0.8.0; platform_python_implementation != "CPython" and extra == "woff" +Requires-Dist: zopfli>=0.1.4; extra == "woff" +Provides-Extra: unicode +Requires-Dist: unicodedata2>=17.0.0; python_version <= "3.14" and extra == "unicode" +Provides-Extra: graphite +Requires-Dist: lz4>=1.7.4.2; extra == "graphite" +Provides-Extra: interpolatable +Requires-Dist: scipy; platform_python_implementation != "PyPy" and extra == "interpolatable" +Requires-Dist: munkres; platform_python_implementation == "PyPy" and extra == "interpolatable" +Requires-Dist: pycairo; extra == "interpolatable" +Provides-Extra: plot +Requires-Dist: matplotlib; extra == "plot" +Provides-Extra: symfont +Requires-Dist: sympy; extra == "symfont" +Provides-Extra: type1 +Requires-Dist: xattr; sys_platform == "darwin" and extra == "type1" +Provides-Extra: pathops +Requires-Dist: skia-pathops>=0.5.0; extra == "pathops" 
+Provides-Extra: repacker +Requires-Dist: uharfbuzz>=0.45.0; extra == "repacker" +Provides-Extra: all +Requires-Dist: lxml>=4.0; extra == "all" +Requires-Dist: brotli>=1.0.1; platform_python_implementation == "CPython" and extra == "all" +Requires-Dist: brotlicffi>=0.8.0; platform_python_implementation != "CPython" and extra == "all" +Requires-Dist: zopfli>=0.1.4; extra == "all" +Requires-Dist: unicodedata2>=17.0.0; python_version <= "3.14" and extra == "all" +Requires-Dist: lz4>=1.7.4.2; extra == "all" +Requires-Dist: scipy; platform_python_implementation != "PyPy" and extra == "all" +Requires-Dist: munkres; platform_python_implementation == "PyPy" and extra == "all" +Requires-Dist: pycairo; extra == "all" +Requires-Dist: matplotlib; extra == "all" +Requires-Dist: sympy; extra == "all" +Requires-Dist: xattr; sys_platform == "darwin" and extra == "all" +Requires-Dist: skia-pathops>=0.5.0; extra == "all" +Requires-Dist: uharfbuzz>=0.45.0; extra == "all" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: platform +Dynamic: provides-extra +Dynamic: requires-python +Dynamic: summary + +|CI Build Status| |Coverage Status| |PyPI| |Gitter Chat| + +What is this? +~~~~~~~~~~~~~ + +| fontTools is a library for manipulating fonts, written in Python. The + project includes the TTX tool, that can convert TrueType and OpenType + fonts to and from an XML text format, which is also called TTX. It + supports TrueType, OpenType, AFM and to an extent Type 1 and some + Mac-specific formats. The project has an `MIT open-source + license `__. +| Among other things this means you can use it free of charge. + +`User documentation `_ and +`developer documentation `_ +are available at `Read the Docs `_. + +Installation +~~~~~~~~~~~~ + +FontTools requires `Python `__ 3.10 +or later. 
We try to follow the same schedule of minimum Python version support as +NumPy (see `NEP 29 `__). + +The package is listed in the Python Package Index (PyPI), so you can +install it with `pip `__: + +.. code:: sh + + pip install fonttools + +If you would like to contribute to its development, you can clone the +repository from GitHub, install the package in 'editable' mode and +modify the source code in place. We recommend creating a virtual +environment, using `virtualenv `__ or +Python 3 `venv `__ module. + +.. code:: sh + + # download the source code to 'fonttools' folder + git clone https://github.com/fonttools/fonttools.git + cd fonttools + + # create new virtual environment called e.g. 'fonttools-venv', or anything you like + python -m virtualenv fonttools-venv + + # source the `activate` shell script to enter the environment (Unix-like); to exit, just type `deactivate` + . fonttools-venv/bin/activate + + # to activate the virtual environment in Windows `cmd.exe`, do + fonttools-venv\Scripts\activate.bat + + # install in 'editable' mode + pip install -e . + +Optional Requirements +--------------------- + +The ``fontTools`` package currently has no (required) external dependencies +besides the modules included in the Python Standard Library. +However, a few extra dependencies are required by some of its modules, which +are needed to unlock optional features. +The ``fonttools`` PyPI distribution also supports so-called "extras", i.e. a +set of keywords that describe a group of additional dependencies, which can be +used when installing via pip, or when specifying a requirement. +For example: + +.. code:: sh + + pip install fonttools[ufo,lxml,woff,unicode] + +This command will install fonttools, as well as the optional dependencies that +are required to unlock the extra features named "ufo", etc. 
+ +- ``Lib/fontTools/misc/etree.py`` + + The module exports a ElementTree-like API for reading/writing XML files, and + allows to use as the backend either the built-in ``xml.etree`` module or + `lxml `__. The latter is preferred whenever present, + as it is generally faster and more secure. + + *Extra:* ``lxml`` + +- ``Lib/fontTools/ttLib/woff2.py`` + + Module to compress/decompress WOFF 2.0 web fonts; it requires: + + * `brotli `__: Python bindings of + the Brotli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/ttLib/sfnt.py`` + + To better compress WOFF 1.0 web fonts, the following module can be used + instead of the built-in ``zlib`` library: + + * `zopfli `__: Python bindings of + the Zopfli compression library. + + *Extra:* ``woff`` + +- ``Lib/fontTools/unicode.py`` + + To display the Unicode character names when dumping the ``cmap`` table + with ``ttx`` we use the ``unicodedata`` module in the Standard Library. + The version included in there varies between different Python versions. + To use the latest available data, you can install: + + * `unicodedata2 `__: + ``unicodedata`` backport for Python 3.x updated to the latest Unicode + version 17.0. + + *Extra:* ``unicode`` + +- ``Lib/fontTools/varLib/interpolatable.py`` + + Module for finding wrong contour/component order between different masters. + It requires one of the following packages in order to solve the so-called + "minimum weight perfect matching problem in bipartite graphs", or + the Assignment problem: + + * `scipy `__: the Scientific Library + for Python, which internally uses `NumPy `__ + arrays and hence is very fast; + * `munkres `__: a pure-Python + module that implements the Hungarian or Kuhn-Munkres algorithm. Slower than + SciPy, but useful for minimalistic systems where adding SciPy is undesirable. + + This ensures both performance (via SciPy) and minimal footprint (via Munkres) + are possible. 
+ + To plot the results to a PDF or HTML format, you also need to install: + + * `pycairo `__: Python bindings for the + Cairo graphics library. Note that wheels are currently only available for + Windows, for other platforms see pycairo's `installation instructions + `__. + + *Extra:* ``interpolatable`` + +- ``Lib/fontTools/varLib/plot.py`` + + Module for visualizing DesignSpaceDocument and resulting VariationModel. + + * `matplotlib `__: 2D plotting library. + + *Extra:* ``plot`` + +- ``Lib/fontTools/misc/symfont.py`` + + Advanced module for symbolic font statistics analysis; it requires: + + * `sympy `__: the Python library for + symbolic mathematics. + + *Extra:* ``symfont`` + +- ``Lib/fontTools/t1Lib.py`` + + To get the file creator and type of Macintosh PostScript Type 1 fonts + on Python 3 you need to install the following module, as the old ``MacOS`` + module is no longer included in Mac Python: + + * `xattr `__: Python wrapper for + extended filesystem attributes (macOS platform only). + + *Extra:* ``type1`` + +- ``Lib/fontTools/ttLib/removeOverlaps.py`` + + Simplify TrueType glyphs by merging overlapping contours and components. + + * `skia-pathops `__: Python + bindings for the Skia library's PathOps module, performing boolean + operations on paths (union, intersection, etc.). + + *Extra:* ``pathops`` + +- ``Lib/fontTools/ufoLib`` + + Package for reading and writing UFO source files; if available, it will use: + + * `fs `__: (aka ``pyfilesystem2``) filesystem abstraction layer + + for reading and writing UFOs to the local filesystem or zip files (.ufoz), instead of + the built-in ``fontTools.misc.filesystem`` package. + The reader and writer classes can in theory also accept any object compatible the + ``fs.base.FS`` interface, although not all have been tested. 
+ +- ``Lib/fontTools/pens/cocoaPen.py`` and ``Lib/fontTools/pens/quartzPen.py`` + + Pens for drawing glyphs with Cocoa ``NSBezierPath`` or ``CGPath`` require: + + * `PyObjC `__: the bridge between + Python and the Objective-C runtime (macOS platform only). + +- ``Lib/fontTools/pens/qtPen.py`` + + Pen for drawing glyphs with Qt's ``QPainterPath``, requires: + + * `PyQt5 `__: Python bindings for + the Qt cross platform UI and application toolkit. + +- ``Lib/fontTools/pens/reportLabPen.py`` + + Pen to drawing glyphs as PNG images, requires: + + * `reportlab `__: Python toolkit + for generating PDFs and graphics. + +- ``Lib/fontTools/pens/freetypePen.py`` + + Pen to drawing glyphs with FreeType as raster images, requires: + + * `freetype-py `__: Python binding + for the FreeType library. + +- ``Lib/fontTools/ttLib/tables/otBase.py`` + + Use the Harfbuzz library to serialize GPOS/GSUB using ``hb_repack`` method, requires: + + * `uharfbuzz `__: Streamlined Cython + bindings for the harfbuzz shaping engine + + *Extra:* ``repacker`` + +How to make a new release +~~~~~~~~~~~~~~~~~~~~~~~~~ + +1) Update ``NEWS.rst`` with all the changes since the last release. Write a + changelog entry for each PR, with one or two short sentences summarizing it, + as well as links to the PR and relevant issues addressed by the PR. Do not + put a new title, the next command will do it for you. +2) Use semantic versioning to decide whether the new release will be a 'major', + 'minor' or 'patch' release. It's usually one of the latter two, depending on + whether new backward compatible APIs were added, or simply some bugs were fixed. +3) From inside a venv, first do ``pip install -r dev-requirements.txt``, then run + the ``python setup.py release`` command from the tip of the ``main`` branch. + By default this bumps the third or 'patch' digit only, unless you pass ``--major`` + or ``--minor`` to bump respectively the first or second digit. 
+ This bumps the package version string, extracts the changes since the latest + version from ``NEWS.rst``, and uses that text to create an annotated git tag + (or a signed git tag if you pass the ``--sign`` option and your git and Github + account are configured for `signing commits `__ + using a GPG key). + It also commits an additional version bump which opens the main branch for + the subsequent developmental cycle +4) Push both the tag and commit to the upstream repository, by running the command + ``git push --follow-tags``. Note: it may push other local tags as well, be + careful. +5) Let the CI build the wheel and source distribution packages and verify both + get uploaded to the Python Package Index (PyPI). +6) [Optional] Go to fonttools `Github Releases `__ + page and create a new release, copy-pasting the content of the git tag + message. This way, the release notes are nicely formatted as markdown, and + users watching the repo will get an email notification. One day we shall + automate that too. + + +Acknowledgments +~~~~~~~~~~~~~~~~ + +In alphabetical order: + +aschmitz, Olivier Berten, Samyak Bhuta, Erik van Blokland, Petr van Blokland, +Jelle Bosma, Sascha Brawer, Tom Byrer, Antonio Cavedoni, Frédéric Coiffier, +Vincent Connare, David Corbett, Simon Cozens, Dave Crossland, Simon Daniels, +Peter Dekkers, Behdad Esfahbod, Behnam Esfahbod, Hannes Famira, Sam Fishman, +Matt Fontaine, Takaaki Fuji, Rob Hagemans, Yannis Haralambous, Greg Hitchcock, +Jeremie Hornus, Khaled Hosny, John Hudson, Denis Moyogo Jacquerye, Jack Jansen, +Tom Kacvinsky, Jens Kutilek, Antoine Leca, Werner Lemberg, Tal Leming, Liang Hai, Peter +Lofting, Cosimo Lupo, Olli Meier, Masaya Nakamura, Dave Opstad, Laurence Penney, +Roozbeh Pournader, Garret Rieger, Read Roberts, Colin Rofls, Guido van Rossum, +Just van Rossum, Andreas Seidel, Georg Seifert, Chris Simpkins, Miguel Sousa, +Adam Twardoch, Adrien Tétar, Vitaly Volkov, Paul Wise. 
+ +Copyrights +~~~~~~~~~~ + +| Copyright (c) 1999-2004 Just van Rossum, LettError + (just@letterror.com) +| See `LICENSE `__ for the full license. + +Copyright (c) 2000 BeOpen.com. All Rights Reserved. + +Copyright (c) 1995-2001 Corporation for National Research Initiatives. +All Rights Reserved. + +Copyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam. All +Rights Reserved. + +Have fun! + +.. |CI Build Status| image:: https://github.com/fonttools/fonttools/workflows/Test/badge.svg + :target: https://github.com/fonttools/fonttools/actions?query=workflow%3ATest +.. |Coverage Status| image:: https://codecov.io/gh/fonttools/fonttools/branch/main/graph/badge.svg + :target: https://codecov.io/gh/fonttools/fonttools +.. |PyPI| image:: https://img.shields.io/pypi/v/fonttools.svg + :target: https://pypi.org/project/FontTools +.. |Gitter Chat| image:: https://badges.gitter.im/fonttools-dev/Lobby.svg + :alt: Join the chat at https://gitter.im/fonttools-dev/Lobby + :target: https://gitter.im/fonttools-dev/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge + +Changelog +~~~~~~~~~ + +4.62.1 (released 2026-03-13) +---------------------------- + +- [feaLib] Extend contextual rule merging to all rule types: single subst, GSUB/GPOS + named lookups, ignore rules, and chained alternate subst (#4061). + +4.62.0 (released 2026-03-09) +---------------------------- + +- [diff] Add new ``fonttools diff`` command for comparing font files, imported from the + ``fdiff`` project and heavily reworked (#1190, #4007, #4009, #4011, #4013, #4019). +- [feaLib] Fix ``VariableScalar`` interpolation bug with non-linear avar mappings. Also + decouple ``VariableScalar`` from compiled fonts, allowing it to work with designspace data + before compilation (#3938, #4054). +- [feaLib] Fix ``VariableScalar`` axis ordering and iterative delta rounding to match fontc + behavior (#4053). 
+- [feaLib] Merge chained multi subst rules with same context into a single subtable instead of + emitting one subtable per glyph (#4016, #4058). +- [feaLib] Pass location to ``ConditionsetStatement`` to fix glyphsLib round-tripping + (fontra/fontra-glyphs#130, #4057). +- [feaLib] Write ``0xFFFF`` instead of ``0`` for missing nameIDs in ``cv`` feature params + (#4010, #4012). +- [cmap] Fix ``CmapSubtable.__lt__()`` ``TypeError`` on Python 3 when subtables share the + same encoding record, and add compile-time validation for unique encoding records (#4035, + #4055). +- [svgLib] Skip non-element XML nodes (comments, processing instructions) when drawing SVG + paths (#4042, #4043). +- [glifLib] Fix regression reading glyph outlines when ``glyphObject=None`` (#4030, #4031). +- [pointPen] Fix ``SegmentToPointPen`` edge case: only remove a duplicate final point on + ``closePath()`` if it is an on-curve point (#4014, #4015). +- [cffLib] **SECURITY** Replace ``eval()`` with ``safeEval()`` in ``parseBlendList()`` to + prevent arbitrary code execution from crafted TTX files (#4039, #4040). +- [ttLib] Remove defunct Adobe SING Glyphlet tables (``META``, ``SING``, ``GMAP``, ``GPKG``) + (#4044). +- [varLib.interpolatable] Various bugfixes: fix swapped nodeTypes assignment, duplicate + kink-detector condition, typos, CFF2 vsindex parsing, glyph existence check, and plot + helpers (#4046). +- [varLib.models] Fix ``getSubModel`` not forwarding ``extrapolate``/``axisRanges``; check + location uniqueness after stripping zeros (#4047). +- [varLib] Fix ``--variable-fonts`` filter in ``build_many``; remove dead code and fix + comments (#4048). +- [avar] Preserve existing name table in build; keep ``unbuild`` return types consistent; + validate ``map`` CLI coordinates (#4051). +- [cu2qu/qu2cu] Add input validation: reject non-positive tolerances, validate curve inputs + and list lengths (#4052). 
+- [colorLib] Raise a clear ``ColorLibError`` when base glyphs are missing from glyphMap, + instead of a confusing ``KeyError`` (#4041). +- [glyf] Remove unnecessary ``fvar`` table dependency (#4017). +- [fvar/trak] Remove unnecessary ``name`` table dependency (#4018). +- [ufoLib] Relax guideline validation to follow the updated spec (#3537, #3553). +- [ttFont] Fix ``saveXML`` regression with empty table lists, clarify docstring (#4025, #4026, + #4056). +- [setup.py] Link ``libm`` for Cython extensions using math functions (#4028, #4029). +- Add typing annotations for ``DSIG``, ``DefaultTable``, ``ttProgram`` (#4033). + +4.61.1 (released 2025-12-12) +---------------------------- + +- [otlLib] buildCoverage: return empty Coverage instead of None (#4003, #4004). +- [instancer] bug fix in ``avar2`` full instancing (#4002). +- [designspaceLib] Preserve empty conditionsets when serializing to XML (#4001). +- [fontBu ilder] Fix FontBuilder ``setupOS2()`` default params globally polluted (#3996, #3997). +- [ttFont] Add more typing annotations to ttFont, xmlWriter, sfnt, varLib.models and others (#3952, #3826). +- Explicitly test and declare support for Python 3.14, even though we were already shipping pre-built wheels for it (#3990). + +4.60.2 (released 2025-12-09) +---------------------------- + +- **Backport release** Same as 4.61.0 but without "Drop support for EOL Python 3.9" change to allow + downstream projects still on Python 3.9 to avail of the security fix for CVE-2025-66034 (#3994, #3999). + +4.61.0 (released 2025-11-28) +---------------------------- + +- [varLib.main]: **SECURITY** Only use basename(vf.filename) to prevent path traversal attacks when + running ``fonttools varLib`` command, or code which invokes ``fonttools.varLib.main()``. + Fixes CVE-2025-66034, see: + https://github.com/fonttools/fonttools/security/advisories/GHSA-768j-98cg-p3fv. +- [feaLib] Sort BaseLangSysRecords by tag (#3986). +- Drop support for EOL Python 3.9 (#3982). 
+- [instancer] Support --remove-overlaps for fonts with CFF2 table (#3975). +- [CFF2ToCFF] Add --remove-overlaps option (#3976). +- [feaLib] Raise an error for rsub with NULL target (#3979). +- [bezierTools] Fix logic bug in curveCurveIntersections (#3963). +- [feaLib] Error when condition sets have the same name (#3958). +- [cu2qu.ufo] skip processing empty glyphs to support sparse kerning masters (#3956). +- [unicodedata] Update to Unicode 17. Require ``unicodedata2 >= 17.0.0`` when installed with 'unicode' extra. + +4.60.1 (released 2025-09-29) +---------------------------- + +- [ufoLib] Reverted accidental method name change in ``UFOReader.getKerningGroupConversionRenameMaps`` + that broke compatibility with downstream projects like defcon (#3948, #3947, robotools/defcon#478). +- [ufoLib] Added test coverage for ``getKerningGroupConversionRenameMaps`` method (#3950). +- [subset] Don't try to subset BASE table; pass it through by default instead (#3949). +- [subset] Remove empty BaseRecord entries in MarkBasePos lookups (#3897, #3892). +- [subset] Add pruning for MarkLigPos and MarkMarkPos lookups (#3946). +- [subset] Remove duplicate features when subsetting (#3945). +- [Docs] Added documentation for the visitor module (#3944). + +4.60.0 (released 2025-09-17) +---------------------------- + +- [pointPen] Allow ``reverseFlipped`` parameter of ``DecomposingPointPen`` to take a ``ReverseFlipped`` + enum value to control whether/how to reverse contour direction of flipped components, in addition to + the existing True/False. This allows to set ``ReverseFlipped.ON_CURVE_FIRST`` to ensure that + the decomposed outline starts with an on-curve point before being reversed, for better consistency + with other segment-oriented contour transformations. The change is backward compatible, and the + default behavior hasn't changed (#3934). 
+- [filterPen] Added ``ContourFilterPointPen``, base pen for buffered contour operations, and + ``OnCurveStartPointPen`` filter to ensure contours start with an on-curve point (#3934). +- [cu2qu] Fixed difference in cython vs pure-python complex division by real number (#3930). +- [varLib.avar] Refactored and added some new sub-modules and scripts (#3926). + * ``varLib.avar.build`` module to build avar (and a missing fvar) binaries into a possibly empty TTFont, + * ``varLib.avar.unbuild`` module to print a .designspace snippet that would generate the same avar binary, + * ``varLib.avar.map`` module to take TTFont and do the mapping, in user/normalized space, + * ``varLib.avar.plan`` module moved from ``varLib.avarPlanner``. + The bare ``fonttools varLib.avar`` script is deprecated, in favour of ``fonttools varLib.avar.build`` (or ``unbuild``). +- [interpolatable] Clarify ``linear_sum_assignment`` backend options and minimal dependency + usage (#3927). +- [post] Speed up ``build_psNameMapping`` (#3923). +- [ufoLib] Added typing annotations to fontTools.ufoLib (#3875). + +4.59.2 (released 2025-08-27) +---------------------------- + +- [varLib] Clear ``USE_MY_METRICS`` component flags when inconsistent across masters (#3912). +- [varLib.instancer] Avoid negative advance width/height values when instatiating HVAR/VVAR, + (unlikely in well-behaved fonts) (#3918). +- [subset] Fix shaping behaviour when pruning empty mark sets (#3915, harfbuzz/harfbuzz#5499). +- [cu2qu] Fixed ``dot()`` product of perpendicular vectors not always returning exactly 0.0 + in all Python implementations (#3911) +- [varLib.instancer] Implemented fully-instantiating ``avar2`` fonts (#3909). +- [feaLib] Allow float values in ``VariableScalar``'s axis locations (#3906, #3907). +- [cu2qu] Handle special case in ``calc_intersect`` for degenerate cubic curves where 3 to 4 + control points are equal (#3904). 
+ +4.59.1 (released 2025-08-14) +---------------------------- + +- [featureVars] Update OS/2.usMaxContext if possible after addFeatureVariationsRaw (#3894). +- [vhmtx] raise TTLibError('not enough data...') when hmtx/vmtx are truncated (#3843, #3901). +- [feaLib] Combine duplicate features that have the same set of lookups regardless of the order in which those lookups are added to the feature (#3895). +- [varLib] Deprecate ``varLib.mutator`` in favor of ``varLib.instancer``. The latter + provides equivalent full (static font) instancing in addition to partial VF instancing. + CLI users should replace ``fonttools varLib.mutator`` with ``fonttools varLib.instancer``. + API users should migrate to ``fontTools.varLib.instancer.instantiateVariableFont`` (#2680). + + +4.59.0 (released 2025-07-16) +---------------------------- + +- Removed hard-dependency on pyfilesystem2 (``fs`` package) from ``fonttools[ufo]`` extra. + This is replaced by the `fontTools.misc.filesystem` package, a stdlib-only, drop-in + replacement for the subset of the pyfilesystem2's API used by ``fontTools.ufoLib``. + The latter should continue to work with the upstream ``fs`` (we even test with/without). + Clients who wish to continue using ``fs`` can do so by depending on it directly instead + of via the ``fonttools[ufo]`` extra (#3885, #3620). +- [xmlWriter] Replace illegal XML characters (e.g. control or non-characters) with "?" + when dumping to ttx (#3868, #71). +- [varLib.hvar] Fixed vertical metrics fields copy/pasta error (#3884). +- Micro optimizations in ttLib and sstruct modules (#3878, #3879). +- [unicodedata] Add Garay script to RTL_SCRIPTS (#3882). +- [roundingPen] Remove unreliable kwarg usage. Argument names aren’t consistent among + point pens’ ``.addComponent()`` implementations, in particular ``baseGlyphName`` + vs ``glyphName`` (#3880). + +4.58.5 (released 2025-07-03) +---------------------------- + +- [feaLib] Don't try to combine ligature & multisub rules (#3874). 
+- [feaLib/ast] Use weakref proxies to avoid cycles in visitor (#3873). +- [varLib.instancer] Fixed instancing CFF2 fonts where VarData contains more than 64k items (#3858). + +4.58.4 (released 2025-06-13) +---------------------------- + +- [feaLib] Allow for empty MarkFilter & MarkAttach sets (#3856). + +4.58.3 (released 2025-06-13) +---------------------------- + +- [feaLib] Fixed iterable check for Python 3.13.4 and newer (#3854, #3855). + +4.58.2 (released 2025-06-06) +---------------------------- + +- [ttLib.reorderGlyphs] Handle CFF2 when reordering glyphs (#3852) +- [subset] Copy name IDs in use before scrapping or scrambling them for webfonts (#3853) + +4.58.1 (released 2025-05-28) +---------------------------- + +- [varLib] Make sure that fvar named instances only reuse name ID 2 or 17 if they are at the default location across all axes, to match OT spec requirement (#3831). +- [feaLib] Improve single substitution promotion to multiple/ligature substitutions, fixing a few bugs as well (#3849). +- [loggingTools] Make ``Timer._time`` a static method that doesn't take self, makes it easier to override (#3836). +- [featureVars] Use ``None`` for empty ConditionSet, which translates to a null offset in the compiled table (#3850). +- [feaLib] Raise an error on conflicting ligature substitution rules instead of silently taking the last one (#3835). +- Add typing annotations to T2CharStringPen (#3837). +- [feaLib] Add single substitutions that were promoted to multiple or ligature substitutions to ``aalt`` feature (#3847). +- [featureVars] Create a default ``LangSys`` in a ``ScriptRecord`` if missing when adding feature variations to existing GSUB later in the build (#3838). +- [symfont] Added a ``main()``. +- [cffLib.specializer] Fix rmoveto merging when blends used (#3839, #3840). +- [pyftmerge] Add support for cmap format 14 in the merge tool (#3830). +- [varLib.instancer/cff2] Fix vsindex of Private dicts when instantiating (#3828, #3232). 
+- Update text file read to use UTF-8 with optional BOM so it works with e.g. Windows Notepad.exe (#3824). +- [varLib] Ensure that instances only reuse name ID 2 or 17 if they are at the default location across all axes (#3831). +- [varLib] Create a dflt LangSys in a ScriptRecord when adding variations later, to fix an avoidable crash in an edge case (#3838). + +4.58.0 (released 2025-05-10) +---------------------------- + +- Drop Python 3.8, require 3.9+ (#3819) +- [HVAR, VVAR] Prune unused regions when using a direct mapping (#3797) +- [Docs] Improvements to ufoLib documentation (#3721) +- [Docs] Improvements to varLib documentation (#3727) +- [Docs] Improvements to Pens and pen-module documentation (#3724) +- [Docs] Miscellany updates to docs (misc modules and smaller modules) (#3730) +- [subset] Close codepoints over BiDi mirror variants. (#3801) +- [feaLib] Fix serializing ChainContextPosStatement and + ChainContextSubstStatement in some rare cases (#3788) +- [designspaceLib] Clarify user expectations for getStatNames (#2892) +- [GVAR] Add support for new `GVAR` table (#3728) +- [TSI0, TSI5] Derive number of entries to decompile from data length (#2477) +- [ttLib] Fix `AttributeError` when reporting table overflow (#3808) +- [ttLib] Apply rounding more often in getCoordinates (#3798) +- [ttLib] Ignore component bounds if empty (#3799) +- [ttLib] Change the separator for duplicate glyph names from "#" to "." 
(#3809) +- [feaLib] Support subtable breaks in CursivePos, MarkBasePos, MarkToLigPos and + MarkToMarkPos lookups (#3800, #3807) +- [feaLib] If the same lookup has single substitutions and ligature + substitutions, upgrade single substitutions to ligature substitutions with + one input glyph (#3805) +- [feaLib] Correctly handle in single pos lookups (#3803) +- [feaLib] Remove duplicates from class pair pos classes instead of raising an + error (#3804) +- [feaLib] Support creating extension lookups using useExtension lookup flag + instead of silently ignoring it (#3811) +- [STAT] Add typing for the simpler STAT arguments (#3812) +- [otlLib.builder] Add future import for annotations (#3814) +- [cffLib] Fix reading supplement encoding (#3813) +- [voltLib] Add some missing functionality and fixes to voltLib and VoltToFea, + making the conversion to feature files more robust. Add also `fonttools + voltLib` command line tool to compile VOLT sources directly (doing an + intermediate fea conversion internally) (#3818) +- [pens] Add some PointPen annotations (#3820) + +4.57.0 (released 2025-04-03) +---------------------------- + +- [ttLib.__main__] Add `--no-recalc-timestamp` flag (#3771) +- [ttLib.__main__] Add `-b` (recalcBBoxes=False) flag (#3772) +- [cmap] Speed up glyphOrder loading from cmap (#3774) +- [ttLib.__main__] Improvements around the `-t` flag (#3776) +- [Debg] Fix parsing from XML; add roundtrip tests (#3781) +- [feaLib] Support \*Base.MinMax tables (#3783, #3786) +- [config] Add OPTIMIZE_FONT_SPEED (#3784) +- [varLib.hvar] New module to add HVAR table to the font (#3780) +- [otlLib.optimize] Fix crash when the provided TTF does not contain a `GPOS` (#3794) + +4.56.0 (released 2025-02-07) +---------------------------- + +- [varStore] Sort the input todo list with the same sorting key used for the optimizer's output (#3767). 
+- [otData] Fix DeviceTable's ``DeltaValue`` repeat value which caused a crash after importing from XML and then compiling a GPOS containing Device tables (#3758). +- [feaLib] Make ``FeatureLibError`` pickleable, so client can e.g. use feaLib to compile features in parallel with multiprocessing (#3762). +- [varLib/gvar] Removed workaround for old, long-fixed macOS bug about composite glyphs with all zero deltas (#1381, #1788). +- [Docs] Updated ttLib documentation, beefed up TTFont and TTGlyphSet explanations (#3720). + +4.55.8 (released 2025-01-29) +---------------------------- + +- [MetaTools] Fixed bug in buildUCD.py script whereby the first non-header line of some UCD text file was being skipped. This affected in particular the U+00B7 (MIDDLE DOT) entry of ScriptExtensions.txt (#3756). + +4.55.7 (released 2025-01-28) +---------------------------- + +- Shorten the changelog included in PyPI package description to accommodate maximum length limit imposed by Azure DevOps. No actual code changes since v4.55.6 (#3754). + +4.55.6 (released 2025-01-24) +---------------------------- + +- [glyf] Fixed regression introduced in 4.55.5 when computing bounds of nested composite glyphs with transformed components (#3752). + +4.55.5 (released 2025-01-23) +---------------------------- + +- [glyf] Fixed recalcBounds of transformed components with unrounded coordinates (#3750). +- [feaLib] Allow duplicate script/language statements (#3749). + +4.55.4 (released 2025-01-21) +---------------------------- + +- [bezierTools] Fixed ``splitCubicAtT`` sometimes not returning identical start/end points as result of numerical precision (#3742, #3743). +- [feaLib/ast] Fixed docstring of ``AlternateSubstStatement`` (#3735). +- [transform] Typing fixes (#3734). 
+ +4.55.3 (released 2024-12-10) +---------------------------- + +- [Docs] fill out ttLib table section [#3716] +- [feaLib] More efficient inline format 4 lookups [#3726] + +4.55.2 (released 2024-12-05) +---------------------------- + +- [Docs] update Sphinx config (#3712) +- [designspaceLib] Allow axisOrdering to be set to zero (#3715) +- [feaLib] Don’t modify variable anchors in place (#3717) + +4.55.1 (released 2024-12-02) +---------------------------- + +- [ttGlyphSet] Support VARC CFF2 fonts (#3683) +- [DecomposedTransform] Document and implement always skewY == 0 (#3697) +- [varLib] "Fix" cython iup issue? (#3704) +- Cython minor refactor (#3705) + + +4.55.0 (released 2024-11-14) +---------------------------- + +- [cffLib.specializer] Adjust stack use calculation (#3689) +- [varLib] Lets not add mac names if the rest of name doesn't have them (#3688) +- [ttLib.reorderGlyphs] Update CFF table charstrings and charset (#3682) +- [cffLib.specializer] Add cmdline to specialize a CFF2 font (#3675, #3679) +- [CFF2] Lift uint16 VariationStore.length limitation (#3674) +- [subset] consider variation selectors subsetting cmap14 (#3672) +- [varLib.interpolatable] Support CFF2 fonts (#3670) +- Set isfinal to true in XML parser for proper resource cleanup (#3669) +- [removeOverlaps] Fix CFF CharString width (#3659) +- [glyf] Add optimizeSize option (#3657) +- Python 3.13 support (#3656) +- [TupleVariation] Optimize for loading speed, not size (#3650, #3653) + + +4.54.1 (released 2024-09-24) +---------------------------- + +- [unicodedata] Update to Unicode 16 +- [subset] Escape ``\\`` in doc string + +4.54.0 (released 2024-09-23) +---------------------------- + +- [Docs] Small docs cleanups by @n8willis (#3611) +- [Docs] cleanup code blocks by @n8willis (#3627) +- [Docs] fix Sphinx builds by @n8willis (#3625) +- [merge] Minor fixes to documentation for merge by @drj11 (#3588) +- [subset] Small tweaks to pyftsubset documentation by @RoelN (#3633) +- [Tests] Do not require 
fonttools command to be available by @behdad (#3612) +- [Tests] subset_test: add failing test to reproduce issue #3616 by @anthrotype (#3622) +- [ttLib] NameRecordVisitor: include whole sequence of character variants' UI labels, not just the first by @anthrotype (#3617) +- [varLib.avar] Reconstruct mappings from binary by @behdad (#3598) +- [varLib.instancer] Fix visual artefacts with partial L2 instancing by @Hoolean (#3635) +- [varLib.interpolatable] Support discrete axes in .designspace by @behdad (#3599) +- [varLib.models] By default, assume OpenType-like normalized space by @behdad (#3601) + +4.53.1 (released 2024-07-05) +---------------------------- + +- [feaLib] Improve the sharing of inline chained lookups (#3559) +- [otlLib] Correct the calculation of OS/2.usMaxContext with reversed chaining contextual single substitutions (#3569) +- [misc.visitor] Visitors search the inheritance chain of objects they are visiting (#3581) + +4.53.0 (released 2024-05-31) +---------------------------- + +- [ttLib.removeOverlaps] Support CFF table to aid in downconverting CFF2 fonts (#3528) +- [avar] Fix crash when accessing not-yet-existing attribute (#3550) +- [docs] Add buildMathTable to otlLib.builder documentation (#3540) +- [feaLib] Allow UTF-8 with BOM when reading features (#3495) +- [SVGPathPen] Revert rounding coordinates to two decimal places by default (#3543) +- [varLib.instancer] Refix output filename decision-making (#3545, #3544, #3548) + +4.52.4 (released 2024-05-27) +---------------------------- + +- [varLib.cff] Restore and deprecate convertCFFtoCFF2 that was removed in 4.52.0 + release as it is used by downstream projects (#3535). + +4.52.3 (released 2024-05-27) +---------------------------- + +- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file + which caused the upload to PyPI to fail for 4.52.2. No other code changes. 
+ +4.52.2 (released 2024-05-27) +---------------------------- + +- [varLib.interpolatable] Ensure that scipy/numpy output is JSON-serializable + (#3522, #3526). +- [housekeeping] Regenerate table lists, to fix pyinstaller packaging of the new + ``VARC`` table (#3531, #3529). +- [cffLib] Make CFFToCFF2 and CFF2ToCFF more robust (#3521, #3525). + +4.52.1 (released 2024-05-24) +---------------------------- + +- Fixed a small syntax error in the reStructuredText-formatted NEWS.rst file + which caused the upload to PyPI to fail for 4.52.0. No other code changes. + +4.52.0 (released 2024-05-24) +---------------------------- + +- Added support for the new ``VARC`` (Variable Composite) table that is being + proposed to OpenType spec (#3395). For more info: + https://github.com/harfbuzz/boring-expansion-spec/blob/main/VARC.md +- [ttLib.__main__] Fixed decompiling all tables (90fed08). +- [feaLib] Don't reference the same lookup index multiple times within the same + feature record, it is only applied once anyway (#3520). +- [cffLib] Moved methods to desubroutinize, remove hints and unused subroutines + from subset module to cffLib (#3517). +- [varLib.instancer] Added support for partial-instancing CFF2 tables! Also, added + method to down-convert from CFF2 to CFF 1.0, and CLI entry points to convert + CFF<->CFF2 (#3506). +- [subset] Prune unused user name IDs even with --name-IDs='*' (#3410). +- [ttx] use GNU-style getopt to intermix options and positional arguments (#3509). +- [feaLib.variableScalar] Fixed ``value_at_location()`` method (#3491) +- [psCharStrings] Shorten output of ``encodeFloat`` (#3492). +- [bezierTools] Fix infinite-recursion in ``calcCubicArcLength`` (#3502). +- [avar2] Implement ``avar2`` support in ``TTFont.getGlyphSet()`` (#3473). + +4.51.0 (released 2024-04-05) +---------------------------- + +- [ttLib] Optimization on loading aux fields (#3464). +- [ttFont] Add reorderGlyphs (#3468). 
+ +4.50.0 (released 2024-03-15) +---------------------------- + +- [pens] Added decomposing filter pens that draw components as regular contours (#3460). +- [instancer] Drop explicit no-op axes from TupleVariations (#3457). +- [cu2qu/ufo] Return set of modified glyph names from fonts_to_quadratic (#3456). + +4.49.0 (released 2024-02-15) +---------------------------- + +- [otlLib] Add API for building ``MATH`` table (#3446) + +4.48.1 (released 2024-02-06) +---------------------------- + +- Fixed uploading wheels to PyPI, no code changes since v4.48.0. + +4.48.0 (released 2024-02-06) +---------------------------- + +- [varLib] Do not log when there are no OTL tables to be merged. +- [setup.py] Do not restrict lxml<5 any more, tests pass just fine with lxml>=5. +- [feaLib] Remove glyph and class names length restrictions in FEA (#3424). +- [roundingPens] Added ``transformRoundFunc`` parameter to the rounding pens to allow + for custom rounding of the components' transforms (#3426). +- [feaLib] Keep declaration order of ligature components within a ligature set, instead + of sorting by glyph name (#3429). +- [feaLib] Fixed ordering of alternates in ``aalt`` lookups, following the declaration + order of feature references within the ``aalt`` feature block (#3430). +- [varLib.instancer] Fixed a bug in the instancer's IUP optimization (#3432). +- [sbix] Support sbix glyphs with new graphicType "flip" (#3433). +- [svgPathPen] Added ``--glyphs`` option to dump the SVG paths for the named glyphs + in the font (0572f78). +- [designspaceLib] Added "description" attribute to ```` and ```` + elements, and allow multiple ```` elements to group ```` elements + that are logically related (#3435, #3437). +- [otlLib] Correctly choose the most compact GSUB contextual lookup format (#3439). + +4.47.2 (released 2024-01-11) +---------------------------- + +Minor release to fix uploading wheels to PyPI. 
+ +4.47.1 (released 2024-01-11) +---------------------------- + +- [merge] Improve help message and add standard command line options (#3408) +- [otlLib] Pass ``ttFont`` to ``name.addName`` in ``buildStatTable`` (#3406) +- [featureVars] Re-use ``FeatureVariationRecord``'s when possible (#3413) + +4.47.0 (released 2023-12-18) +---------------------------- + +- [varLib.models] New API for VariationModel: ``getMasterScalars`` and + ``interpolateFromValuesAndScalars``. +- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular, + add a Summary page in the front, and an Index and Table-of-Contents in the back. + Change the page size to Letter. +- [Docs/designspaceLib] Defined a new ``public.fontInfo`` lib key, not used anywhere yet (#3358). + +4.46.0 (released 2023-12-02) +---------------------------- + +- [featureVars] Allow to register the same set of substitution rules to multiple features. + The ``addFeatureVariations`` function can now take a list of featureTags; similarly, the + lib key 'com.github.fonttools.varLib.featureVarsFeatureTag' can now take a + comma-separated string of feature tags (e.g. "salt,ss01") instead of a single tag (#3360). +- [featureVars] Don't overwrite GSUB FeatureVariations, but append new records to it + for features which are not already there. But raise ``VarLibError`` if the feature tag + already has feature variations associated with it (#3363). +- [varLib] Added ``addGSUBFeatureVariations`` function to add GSUB Feature Variations + to an existing variable font from rules defined in a DesignSpace document (#3362). +- [varLib.interpolatable] Various bugfixes and rendering improvements. In particular, + a new test for "underweight" glyphs. The new test reports quite a few false-positives + though. Please send feedback. + +4.45.1 (released 2023-11-23) +---------------------------- + +- [varLib.interpolatable] Various bugfixes and improvements, better reporting, reduced + false positives. 
+- [ttGlyphSet] Added option to not recalculate glyf bounds (#3348). + +4.45.0 (released 2023-11-20) +---------------------------- + +- [varLib.interpolatable] Vastly improved algorithms. Also available now is ``--pdf`` + and ``--html`` options to generate a PDF or HTML report of the interpolation issues. + The PDF/HTML report showcases the problematic masters, the interpolated broken + glyph, as well as the proposed fixed version. + +4.44.3 (released 2023-11-15) +---------------------------- + +- [subset] Only prune codepage ranges for OS/2.version >= 1, ignore otherwise (#3334). +- [instancer] Ensure hhea vertical metrics stay in sync with OS/2 ones after instancing + MVAR table containing 'hasc', 'hdsc' or 'hlgp' tags (#3297). + +4.44.2 (released 2023-11-14) +---------------------------- + +- [glyf] Have ``Glyph.recalcBounds`` skip empty components (base glyph with no contours) + when computing the bounding box of composite glyphs. This simply restores the existing + behavior before some changes were introduced in fonttools 4.44.0 (#3333). + +4.44.1 (released 2023-11-14) +---------------------------- + +- [feaLib] Ensure variable mark anchors are deep-copied while building since they + get modified in-place and later reused (#3330). +- [OS/2|subset] Added method to ``recalcCodePageRanges`` to OS/2 table class; added + ``--prune-codepage-ranges`` to `fonttools subset` command (#3328, #2607). + +4.44.0 (released 2023-11-03) +---------------------------- + +- [instancer] Recalc OS/2 AvgCharWidth after instancing if default changes (#3317). +- [otlLib] Make ClassDefBuilder class order match varLib.merger's, i.e. large + classes first, then glyph lexicographic order (#3321, #3324). +- [instancer] Allow not specifying any of min:default:max values and let be filled + up with fvar's values (#3322, #3323). +- [instancer] When running --update-name-table ignore axes that have no STAT axis + values (#3318, #3319). 
+- [Debg] When dumping to ttx, write the embedded JSON as multi-line string with + indentation (92cbfee0d). +- [varStore] Handle > 65535 items per encoding by splitting VarData subtable (#3310). +- [subset] Handle null-offsets in MarkLigPos subtables. +- [subset] Keep East Asian spacing features vhal, halt, chws, vchw by default (#3305). +- [instancer.solver] Fixed case where axisDef < lower and upper < axisMax (#3304). +- [glyf] Speed up compilation, mostly around ``recalcBounds`` (#3301). +- [varLib.interpolatable] Speed it up when working on variable fonts, plus various + micro-optimizations (#3300). +- Require unicodedata2 >= 15.1.0 when installed with 'unicode' extra, contains UCD 15.1. + +4.43.1 (released 2023-10-06) +---------------------------- + +- [EBDT] Fixed TypeError exception in `_reverseBytes` method triggered when dumping + some bitmap fonts with `ttx -z bitwise` option (#3162). +- [v/hhea] Fixed UnboundLocalError exception in ``recalc`` method when no vmtx or hmtx + tables are present (#3290). +- [bezierTools] Fixed incorrectly typed cython local variable leading to TypeError when + calling ``calcQuadraticArcLength`` (#3288). +- [feaLib/otlLib] Better error message when building Coverage table with missing glyph (#3286). + +4.43.0 (released 2023-09-29) +---------------------------- + +- [subset] Set up lxml ``XMLParser(resolve_entities=False)`` when parsing OT-SVG documents + to prevent XML External Entity (XXE) attacks (9f61271dc): + https://codeql.github.com/codeql-query-help/python/py-xxe/ +- [varLib.iup] Added workaround for a Cython bug in ``iup_delta_optimize`` that was + leading to IUP tolerance being incorrectly initialised, resulting in sub-optimal deltas + (60126435d, cython/cython#5732). +- [varLib] Added new command-line entry point ``fonttools varLib.avar`` to add an + ``avar`` table to an existing VF from axes mappings in a .designspace file (0a3360e52). 
+- [instancer] Fixed bug whereby no longer used variation regions were not correctly pruned + after VarData optimization (#3268). +- Added support for Python 3.12 (#3283). + +4.42.1 (released 2023-08-20) +---------------------------- + +- [t1Lib] Fixed several Type 1 issues (#3238, #3240). +- [otBase/packer] Allow sharing tables reached by different offset sizes (#3241, #3236). +- [varLib/merger] Fix Cursive attachment merging error when all anchors are NULL (#3248, #3247). +- [ttLib] Fixed warning when calling ``addMultilingualName`` and ``ttFont`` parameter was not + passed on to ``findMultilingualName`` (#3253). + +4.42.0 (released 2023-08-02) +---------------------------- + +- [varLib] Use sentinel value 0xFFFF to mark a glyph advance in hmtx/vmtx as non + participating, allowing sparse masters to contain glyphs for variation purposes other + than {H,V}VAR (#3235). +- [varLib/cff] Treat empty glyphs in non-default masters as missing, thus not participating + in CFF2 delta computation, similarly to how varLib already treats them for gvar (#3234). +- Added varLib.avarPlanner script to deduce 'correct' avar v1 axis mappings based on + glyph average weights (#3223). + +4.41.1 (released 2023-07-21) +---------------------------- + +- [subset] Fixed perf regression in v4.41.0 by making ``NameRecordVisitor`` only visit + tables that do contain nameID references (#3213, #3214). +- [varLib.instancer] Support instancing fonts containing null ConditionSet offsets in + FeatureVariationRecords (#3211, #3212). +- [statisticsPen] Report font glyph-average weight/width and font-wide slant. +- [fontBuilder] Fixed head.created date incorrectly set to 0 instead of the current + timestamp, regression introduced in v4.40.0 (#3210). +- [varLib.merger] Support sparse ``CursivePos`` masters (#3209). 
+ +4.41.0 (released 2023-07-12) +---------------------------- + +- [fontBuilder] Fixed bug in setupOS2 with default panose attribute incorrectly being + set to a dict instead of a Panose object (#3201). +- [name] Added method to ``removeUnusedNameRecords`` in the user range (#3185). +- [varLib.instancer] Fixed issue with L4 instancing (moving default) (#3179). +- [cffLib] Use latin1 so we can roundtrip non-ASCII in {Full,Font,Family}Name (#3202). +- [designspaceLib] Mark as optional in docs (as it is in the code). +- [glyf-1] Fixed drawPoints() bug whereby last cubic segment becomes quadratic (#3189, #3190). +- [fontBuilder] Propagate the 'hidden' flag to the fvar Axis instance (#3184). +- [fontBuilder] Update setupAvar() to also support avar 2, fixing ``_add_avar()`` call + site (#3183). +- Added new ``voltLib.voltToFea`` submodule (originally Tiro Typeworks' "Volto") for + converting VOLT OpenType Layout sources to FEA format (#3164). + +4.40.0 (released 2023-06-12) +---------------------------- + +- Published native binary wheels to PyPI for all the python minor versions and platform + and architectures currently supported that would benefit from this. They will include + precompiled Cython-accelerated modules (e.g. cu2qu) without requiring to compile them + from source. The pure-python wheel and source distribution will continue to be + published as always (pip will automatically choose them when no binary wheel is + available for the given platform, e.g. pypy). Use ``pip install --no-binary=fonttools fonttools`` + to explicitly request pip to install from the pure-python source. +- [designspaceLib|varLib] Add initial support for specifying axis mappings and build + ``avar2`` table from those (#3123). +- [feaLib] Support variable ligature caret position (#3130). +- [varLib|glyf] Added option to --drop-implied-oncurves; test for impliable oncurve + points either before or after rounding (#3146, #3147, #3155, #3156). 
+- [TTGlyphPointPen] Don't error with empty contours, simply ignore them (#3145). +- [sfnt] Fixed str vs bytes remnant of py3 transition in code dealing with de/compiling + WOFF metadata (#3129). +- [instancer-solver] Fixed bug when moving default instance with sparse masters (#3139, #3140). +- [feaLib] Simplify variable scalars that don’t vary (#3132). +- [pens] Added filter pen that explicitly emits closing line when lastPt != movePt (#3100). +- [varStore] Improve optimize algorithm and better document the algorithm (#3124, #3127). + Added ``quantization`` option (#3126). +- Added CI workflow config file for building native binary wheels (#3121). +- [fontBuilder] Added glyphDataFormat=0 option; raise error when glyphs contain cubic + outlines but glyphDataFormat was not explicitly set to 1 (#3113, #3119). +- [subset] Prune emptied GDEF.MarkGlyphSetsDef and remap indices; ensure GDEF is + subsetted before GSUB and GPOS (#3114, #3118). +- [xmlReader] Fixed issue whereby DSIG table data was incorrectly parsed (#3115, #2614). +- [varLib/merger] Fixed merging of SinglePos with pos=0 (#3111, #3112). +- [feaLib] Demote "Feature has not been defined" error to a warning when building aalt + and referenced feature is empty (#3110). +- [feaLib] Dedupe multiple substitutions with classes (#3105). 
+ +4.39.4 (released 2023-05-10) +---------------------------- + +- [varLib.interpolatable] Allow for sparse masters (#3075) +- [merge] Handle differing default/nominalWidthX in CFF (#3070) +- [ttLib] Add missing main.py file to ttLib package (#3088) +- [ttx] Fix missing composite instructions in XML (#3092) +- [ttx] Fix split tables option to work on filenames containing '%' (#3096) +- [featureVars] Process lookups for features other than rvrn last (#3099) +- [feaLib] support multiple substitution with classes (#3103) + +4.39.3 (released 2023-03-28) +---------------------------- + +- [sbix] Fixed TypeError when compiling empty glyphs whose imageData is None, regression + was introduced in v4.39 (#3059). +- [ttFont] Fixed AttributeError on python <= 3.10 when opening a TTFont from a tempfile + SpooledTemporaryFile, seekable method only added on python 3.11 (#3052). + +4.39.2 (released 2023-03-16) +---------------------------- + +- [varLib] Fixed regression introduced in 4.39.1 whereby an incomplete 'STAT' table + would be built even though a DesignSpace v5 did contain 'STAT' definitions (#3045, #3046). + +4.39.1 (released 2023-03-16) +---------------------------- + +- [avar2] Added experimental support for reading/writing avar version 2 as specified in + this draft proposal: https://github.com/harfbuzz/boring-expansion-spec/blob/main/avar2.md +- [glifLib] Wrap underlying XML library exceptions with GlifLibError when parsing GLIFs, + and also print the name and path of the glyph that fails to be parsed (#3042). +- [feaLib] Consult avar for normalizing user-space values in ConditionSets and in + VariableScalars (#3042, #3043). +- [ttProgram] Handle string input to Program.fromAssembly() (#3038). +- [otlLib] Added a config option to emit GPOS 7 lookups, currently disabled by default + because of a macOS bug (#3034). +- [COLRv1] Added method to automatically compute ClipBoxes (#3027). 
+- [ttFont] Fixed getGlyphID to raise KeyError on missing glyphs instead of returning + None. The regression was introduced in v4.27.0 (#3032). +- [sbix] Fixed UnboundLocalError: cannot access local variable 'rawdata' (#3031). +- [varLib] When building VF, do not overwrite a pre-existing ``STAT`` table that was built + with feaLib from FEA feature file. Also, added support for building multiple VFs + defined in Designspace v5 from ``fonttools varLib`` script (#3024). +- [mtiLib] Only add ``Debg`` table with lookup names when ``FONTTOOLS_LOOKUP_DEBUGGING`` + env variable is set (#3023). + +4.39.0 (released 2023-03-06) +---------------------------- + +- [mtiLib] Optionally add `Debg` debug info for MTI feature builds (#3018). +- [ttx] Support reading input file from standard input using special `-` character, + similar to existing `-o -` option to write output to standard output (#3020). +- [cython] Prevent ``cython.compiled`` raise AttributeError if cython not installed + properly (#3017). +- [OS/2] Guard against ZeroDivisionError when calculating xAvgCharWidth in the unlikely + scenario no glyph has non-zero advance (#3015). +- [subset] Recompute xAvgCharWidth independently of --no-prune-unicode-ranges, + previously the two options were involuntarily bundled together (#3012). +- [fontBuilder] Add ``debug`` parameter to addOpenTypeFeatures method to add source + debugging information to the font in the ``Debg`` private table (#3008). +- [name] Make NameRecord `__lt__` comparison not fail on Unicode encoding errors (#3006). +- [featureVars] Fixed bug in ``overlayBox`` (#3003, #3005). +- [glyf] Added experimental support for cubic bezier curves in TrueType glyf table, as + outlined in glyf v1 proposal (#2988): + https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-cubicOutlines.md +- Added new qu2cu module and related qu2cuPen, the reverse of cu2qu for converting + TrueType quadratic splines to cubic bezier curves (#2993). 
+- [glyf] Added experimental support for reading and writing Variable Composites/Components + as defined in glyf v1 spec proposal (#2958): + https://github.com/harfbuzz/boring-expansion-spec/blob/main/glyf1-varComposites.md. +- [pens]: Added `addVarComponent` method to pen protocols' base classes, which pens can implement + to handle varcomponents (by default they get decomposed) (#2958). +- [misc.transform] Added DecomposedTransform class which implements an affine transformation + with separate translate, rotation, scale, skew, and transformation-center components (#2598) +- [sbix] Ensure Glyph.referenceGlyphName is set; fixes error after dumping and + re-compiling sbix table with 'dupe' glyphs (#2984). +- [feaLib] Be cleverer when merging chained single substitutions into same lookup + when they are specified using the inline notation (#2150, #2974). +- [instancer] Clamp user-inputted axis ranges to those of fvar (#2959). +- [otBase/subset] Define ``__getstate__`` for BaseTable so that a copied/pickled 'lazy' + object gets its own OTTableReader to read from; incidentally fixes a bug while + subsetting COLRv1 table containing ClipBoxes on python 3.11 (#2965, #2968). +- [sbix] Handle glyphs with "dupe" graphic type on compile correctly (#2963). +- [glyf] ``endPointsOfContours`` field should be unsigned! Kudos to behdad for + spotting one of the oldest bugs in FT. Probably nobody has ever dared to make + glyphs with more than 32767 points... (#2957). +- [feaLib] Fixed handling of ``ignore`` statements with unmarked glyphs to match + makeotf behavior, which assumes the first glyph is marked (#2950). +- Reformatted code with ``black`` and enforce new code style via CI check (#2925). +- [feaLib] Sort name table entries following OT spec prescribed order in the builder (#2927). 
+- [cu2quPen] Add Cu2QuMultiPen that converts multiple outlines at a time in + interpolation compatible way; its methods take a list of tuples arguments + that would normally be passed to individual segment pens, and at the end it + dispatches the converted outlines to each pen (#2912). +- [reverseContourPen/ttGlyphPen] Add outputImpliedClosingLine option (#2913, #2914, + #2921, #2922, #2995). +- [gvar] Avoid expanding all glyphs unnecessarily upon compile (#2918). +- [scaleUpem] Fixed bug whereby CFF2 vsindex was scaled; it should not (#2893, #2894). +- [designspaceLib] Add DS.getAxisByTag and refactor getAxis (#2891). +- [unicodedata] map Zmth<->math in ot_tag_{to,from}_script (#1737, #2889). +- [woff2] Support encoding/decoding OVERLAP_SIMPLE glyf flags (#2576, #2884). +- [instancer] Update OS/2 class and post.italicAngle when default moved (L4) +- Dropped support for Python 3.7 which reached EOL, fontTools requires 3.8+. +- [instancer] Fixed instantiateFeatureVariations logic when a rule range becomes + default-applicable (#2737, #2880). +- [ttLib] Add main to ttFont and ttCollection that just decompile and re-compile the + input font (#2869). +- [featureVars] Insert 'rvrn' lookup at the beginning of LookupList, to work around bug + in Apple implementation of 'rvrn' feature which the spec says it should be processed + early whereas on macOS 10.15 it follows lookup order (#2140, #2867). +- [instancer/mutator] Remove 'DSIG' table if present. +- [svgPathPen] Don't close path in endPath(), assume open unless closePath() (#2089, #2865). + +4.38.0 (released 2022-10-21) +---------------------------- + +- [varLib.instancer] Added support for L4 instancing, i.e. moving the default value of + an axis while keeping it variable. Thanks Behdad! (#2728, #2861). + It's now also possible to restrict an axis min/max values beyond the current default + value, e.g. 
a font wght has min=100, def=400, max=900 and you want a partial VF that + only varies between 500 and 700, you can now do that. + You can either specify two min/max values (wght=500:700), and the new default will be + set to either the minimum or maximum, depending on which one is closer to the current + default (e.g. 500 in this case). Or you can specify three values (e.g. wght=500:600:700) + to specify the new default value explicitly. +- [otlLib/featureVars] Set a few Count values so one doesn't need to compile the font + to update them (#2860). +- [varLib.models] Make extrapolation work for 2-master models as well where one master + is at the default location (#2843, #2846). + Add optional extrapolate=False to normalizeLocation() (#2847, #2849). +- [varLib.cff] Fixed sub-optimal packing of CFF2 deltas by no longer rounding them to + integer (#2838). +- [scaleUpem] Calculate numShorts in VarData after scale; handle CFF hintmasks (#2840). + +4.37.4 (released 2022-09-30) +---------------------------- + +- [subset] Keep nameIDs used by CPAL palette entry labels (#2837). +- [varLib] Avoid negative hmtx values when creating font from variable CFF2 font (#2827). +- [instancer] Don't prune stat.ElidedFallbackNameID (#2828). +- [unicodedata] Update Scripts/Blocks to Unicode 15.0 (#2833). + +4.37.3 (released 2022-09-20) +---------------------------- + +- Fix arguments in calls to (glyf) glyph.draw() and drawPoints(), whereby offset wasn't + correctly passed down; this fix also exposed a second bug, where lsb and tsb were not + set (#2824, #2825, adobe-type-tools/afdko#1560). + +4.37.2 (released 2022-09-15) +---------------------------- + +- [subset] Keep CPAL table and don't attempt to prune unused color indices if OT-SVG + table is present even if COLR table was subsetted away; OT-SVG may be referencing the + CPAL table; for now we assume that's the case (#2814, #2815). 
+- [varLib.instancer] Downgrade GPOS/GSUB version if there are no more FeatureVariations + after instancing (#2812). +- [subset] Added ``--no-lazy`` to optionally load fonts eagerly (mostly to ease + debugging of table lazy loading, no practical effects) (#2807). +- [varLib] Avoid building empty COLR.DeltaSetIndexMap with only identity mappings (#2803). +- [feaLib] Allow multiple value record types (by promoting to the most general format) + within the same PairPos subtable; e.g. this allows variable and non variable kerning + rules to share the same subtable. This also fixes a bug whereby some kerning pairs + would become unreachable while shapiong because of premature subtable splitting (#2772, #2776). +- [feaLib] Speed up ``VarScalar`` by caching models for recurring master locations (#2798). +- [feaLib] Optionally cythonize ``feaLib.lexer``, speeds up parsing FEA a bit (#2799). +- [designspaceLib] Avoid crash when handling unbounded rule conditions (#2797). +- [post] Don't crash if ``post`` legacy format 1 is malformed/improperly used (#2786) +- [gvar] Don't be "lazy" (load all glyph variations up front) when TTFont.lazy=False (#2771). +- [TTFont] Added ``normalizeLocation`` method to normalize a location dict from the + font's defined axes space (also known as "user space") into the normalized (-1..+1) + space. It applies ``avar`` mapping if the font contains an ``avar`` table (#2789). +- [TTVarGlyphSet] Support drawing glyph instances from CFF2 variable glyph set (#2784). +- [fontBuilder] Do not error when building cmap if there are zero code points (#2785). +- [varLib.plot] Added ability to plot a variation model and set of accompaning master + values corresponding to the model's master locations into a pyplot figure (#2767). +- [Snippets] Added ``statShape.py`` script to draw statistical shape of a glyph as an + ellips (requires pycairo) (baecd88). +- [TTVarGlyphSet] implement drawPoints natively, avoiding going through + SegmentToPointPen (#2778). 
+- [TTVarGlyphSet] Fixed bug whereby drawing a composite glyph multiple times, its + components would shif; needed an extra copy (#2774). + +4.37.1 (released 2022-08-24) +---------------------------- + +- [subset] Fixed regression introduced with v4.37.0 while subsetting the VarStore of + ``HVAR`` and ``VVAR`` tables, whereby an ``AttributeError: subset_varidxes`` was + thrown because an apparently unused import statement (with the side-effect of + dynamically binding that ``subset_varidxes`` method to the VarStore class) had been + accidentally deleted in an unrelated PR (#2679, #2773). +- [pens] Added ``cairoPen`` (#2678). +- [gvar] Read ``gvar`` more lazily by not parsing all of the ``glyf`` table (#2771). +- [ttGlyphSet] Make ``drawPoints(pointPen)`` method work for CFF fonts as well via + adapter pen (#2770). + +4.37.0 (released 2022-08-23) +---------------------------- + +- [varLib.models] Reverted PR #2717 which added support for "narrow tents" in v4.36.0, + as it introduced a regression (#2764, #2765). It will be restored in upcoming release + once we found a solution to the bug. +- [cff.specializer] Fixed issue in charstring generalizer with the ``blend`` operator + (#2750, #1975). +- [varLib.models] Added support for extrapolation (#2757). +- [ttGlyphSet] Ensure the newly added ``_TTVarGlyphSet`` inherits from ``_TTGlyphSet`` + to keep backward compatibility with existing API (#2762). +- [kern] Allow compiling legacy kern tables with more than 64k entries (d21cfdede). +- [visitor] Added new visitor API to traverse tree of objects and dispatch based + on the attribute type: cf. ``fontTools.misc.visitor`` and ``fontTools.ttLib.ttVisitor``. Added ``fontTools.ttLib.scaleUpem`` module that uses the latter to + change a font's units-per-em and scale all the related fields accordingly (#2718, + #2755). 
+ +4.36.0 (released 2022-08-17) +---------------------------- + +- [varLib.models] Use a simpler model that generates narrower "tents" (regions, master + supports) whenever possible: specifically when any two axes that actively "cooperate" + (have masters at non-zero positions for both axes) have a complete set of intermediates. + The simpler algorithm produces fewer overlapping regions and behaves better with + respect to rounding at the peak positions than the generic solver, always matching + intermediate masters exactly, instead of maximally 0.5 units off. This may be useful + when 100% metrics compatibility is desired (#2218, #2717). +- [feaLib] Remove warning when about ``GDEF`` not being built when explicitly not + requested; don't build one unconditonally even when not requested (#2744, also works + around #2747). +- [ttFont] ``TTFont.getGlyphSet`` method now supports selecting a location that + represents an instance of a variable font (supports both user-scale and normalized + axes coordinates via the ``normalized=False`` parameter). Currently this only works + for TrueType-flavored variable fonts (#2738). + +4.35.0 (released 2022-08-15) +---------------------------- + +- [otData/otConverters] Added support for 'biased' PaintSweepGradient start/end angles + to match latest COLRv1 spec (#2743). +- [varLib.instancer] Fixed bug in ``_instantiateFeatureVariations`` when at the same + time pinning one axis and restricting the range of a subsequent axis; the wrong axis + tag was being used in the latter step (as the records' axisIdx was updated in the + preceding step but looked up using the old axes order in the following step) (#2733, + #2734). +- [mtiLib] Pad script tags with space when less than 4 char long (#1727). +- [merge] Use ``'.'`` instead of ``'#'`` in duplicate glyph names (#2742). +- [gvar] Added support for lazily loading glyph variations (#2741). 
+- [varLib] In ``build_many``, we forgot to pass on ``colr_layer_reuse`` parameter to + the ``build`` method (#2730). +- [svgPathPen] Add a main that prints SVG for input text (6df779fd). +- [cffLib.width] Fixed off-by-one in optimized values; previous code didn't match the + code block above it (2963fa50). +- [varLib.interpolatable] Support reading .designspace and .glyphs files (via optional + ``glyphsLib``). +- Compile some modules with Cython when available and building/installing fonttools + from source: ``varLib.iup`` (35% faster), ``pens.momentsPen`` (makes + ``varLib.interpolatable`` 3x faster). +- [feaLib] Allow features to be built for VF without also building a GDEF table (e.g. + only build GSUB); warn when GDEF would be needed but isn't requested (#2705, 2694). +- [otBase] Fixed ``AttributeError`` when uharfbuzz < 0.23.0 and 'repack' method is + missing (32aa8eaf). Use new ``uharfbuzz.repack_with_tag`` when available (since + uharfbuzz>=0.30.0), enables table-specific optimizations to be performed during + repacking (#2724). +- [statisticsPen] By default report all glyphs (4139d891). Avoid division-by-zero + (52b28f90). +- [feaLib] Added missing required argument to FeatureLibError exception (#2693) +- [varLib.merge] Fixed error during error reporting (#2689). Fixed undefined + ``NotANone`` variable (#2714). + +4.34.4 (released 2022-07-07) +---------------------------- + +- Fixed typo in varLib/merger.py that causes NameError merging COLR glyphs + containing more than 255 layers (#2685). + +4.34.3 (released 2022-07-07) +---------------------------- + +- [designspaceLib] Don't make up bad PS names when no STAT data (#2684) + +4.34.2 (released 2022-07-06) +---------------------------- + +- [varStore/subset] fixed KeyError exception to do with NO_VARIATION_INDEX while + subsetting varidxes in GPOS/GDEF (a08140d). 
+ +4.34.1 (released 2022-07-06) +---------------------------- + +- [instancer] When optimizing HVAR/VVAR VarStore, use_NO_VARIATION_INDEX=False to avoid + including NO_VARIATION_INDEX in AdvWidthMap, RsbMap, LsbMap mappings, which would + push the VarIdx width to maximum (4bytes), which is not desirable. This also fixes + a hard crash when attempting to subset a varfont after it had been partially instanced + with use_NO_VARIATION_INDEX=True. + +4.34.0 (released 2022-07-06) +---------------------------- + +- [instancer] Set RIBBI bits in head and OS/2 table when cutting instances and the + subfamily nameID=2 contains strings like 'Italic' or 'Bold' (#2673). +- [otTraverse] Addded module containing methods for traversing trees of otData tables + (#2660). +- [otTables] Made DeltaSetIndexMap TTX dump less verbose by omitting no-op entries + (#2660). +- [colorLib.builder] Added option to disable PaintColrLayers's reuse of layers from + LayerList (#2660). +- [varLib] Added support for merging multiple master COLRv1 tables into a variable + COLR table (#2660, #2328). Base color glyphs of same name in different masters must have + identical paint graph structure (incl. number of layers, palette indices, number + of color line stops, corresponding paint formats at each level of the graph), + but can differ in the variable fields (e.g. PaintSolid.Alpha). PaintVar* tables + are produced when this happens and a VarStore/DeltaSetIndexMap is added to the + variable COLR table. It is possible for non-default masters to be 'sparse', i.e. + omit some of the color glyphs present in the default master. +- [feaLib] Let the Parser set nameIDs 1 through 6 that were previously reserved (#2675). +- [varLib.varStore] Support NO_VARIATION_INDEX in optimizer and instancer. +- [feaLib] Show all missing glyphs at once at end of parsing (#2665). +- [varLib.iup] Rewrite force-set conditions and limit DP loopback length (#2651). 
+ For Noto Sans, IUP time drops from 23s down to 9s, with only a slight size increase + in the final font. This basically turns the algorithm from O(n^3) into O(n). +- [featureVars] Report about missing glyphs in substitution rules (#2654). +- [mutator/instancer] Added CLI flag to --no-recalc-timestamp (#2649). +- [SVG] Allow individual SVG documents in SVG OT table to be compressed on uncompressed, + and remember that when roundtripping to/from ttx. The SVG.docList is now a list + of SVGDocument namedtuple-like dataclass containing an extra ``compressed`` field, + and no longer a bare 3-tuple (#2645). +- [designspaceLib] Check for descriptor types with hasattr() to allow custom classes + that don't inherit the default descriptors (#2634). +- [subset] Enable sharing across subtables of extension lookups for harfbuzz packing + (#2626). Updated how table packing falls back to fontTools from harfbuzz (#2668). +- [subset] Updated default feature tags following current Harfbuzz (#2637). +- [svgLib] Fixed regex for real number to support e.g. 1e-4 in addition to 1.0e-4. + Support parsing negative rx, ry on arc commands (#2596, #2611). +- [subset] Fixed subsetting SinglePosFormat2 when ValueFormat=0 (#2603). + +4.33.3 (released 2022-04-26) +---------------------------- + +- [designspaceLib] Fixed typo in ``deepcopyExceptFonts`` method, preventing font + references to be transferred (#2600). Fixed another typo in the name of ``Range`` + dataclass's ``__post_init__`` magic method (#2597). + +4.33.2 (released 2022-04-22) +---------------------------- + +- [otBase] Make logging less verbose when harfbuzz fails to serialize. Do not exit + at the first failure but continue attempting to fix offset overflow error using + the pure-python serializer even when the ``USE_HARFBUZZ_REPACKER`` option was + explicitly set to ``True``. This is normal with fonts with relatively large + tables, at least until hb.repack implements proper table splitting. 
+ +4.33.1 (released 2022-04-22) +---------------------------- + +- [otlLib] Put back the ``FONTTOOLS_GPOS_COMPACT_MODE`` environment variable to fix + regression in ufo2ft (and thus fontmake) introduced with v4.33.0 (#2592, #2593). + This is deprecated and will be removed one ufo2ft gets updated to use the new + config setup. + +4.33.0 (released 2022-04-21) +---------------------------- + +- [OS/2 / merge] Automatically recalculate ``OS/2.xAvgCharWidth`` after merging + fonts with ``fontTools.merge`` (#2591, #2538). +- [misc/config] Added ``fontTools.misc.configTools`` module, a generic configuration + system (#2416, #2439). + Added ``fontTools.config`` module, a fontTools-specific configuration + system using ``configTools`` above. + Attached a ``Config`` object to ``TTFont``. +- [otlLib] Replaced environment variable for GPOS compression level with an + equivalent option using the new config system. +- [designspaceLib] Incremented format version to 5.0 (#2436). + Added discrete axes, variable fonts, STAT information, either design- or + user-space location on instances. + Added ``fontTools.designspaceLib.split`` module to split a designspace + into sub-spaces that interpolate and that represent the variable fonts + listed in the document. + Made instance names optional and allow computing them from STAT data instead. + Added ``fontTools.designspaceLib.statNames`` module. + Allow instances to have the same location as a previously defined STAT label. + Deprecated some attributes: + ``SourceDescriptor``: ``copyLib``, ``copyInfo``, ``copyGroups``, ``copyFeatures``. + ``InstanceDescriptor``: ``kerning``, ``info``; ``glyphs``: use rules or sparse + sources. + For both, ``location``: use the more explicit designLocation. + Note: all are soft deprecations and existing code should keep working. + Updated documentation for Python methods and the XML format. +- [varLib] Added ``build_many`` to build several variable fonts from a single + designspace document (#2436). 
+ Added ``fontTools.varLib.stat`` module to build STAT tables from a designspace + document. +- [otBase] Try to use the Harfbuzz Repacker for packing GSUB/GPOS tables when + ``uharfbuzz`` python bindings are available (#2552). Disable it by setting the + "fontTools.ttLib.tables.otBase:USE_HARFBUZZ_REPACKER" config option to ``False``. + If the option is set explicitly to ``True`` but ``uharfbuzz`` can't be imported + or fails to serialize for any reasons, an error will be raised (ImportError or + uharfbuzz errors). +- [CFF/T2] Ensure that ``pen.closePath()`` gets called for CFF2 charstrings (#2577). + Handle implicit CFF2 closePath within ``T2OutlineExtractor`` (#2580). + +4.32.0 (released 2022-04-08) +---------------------------- + +- [otlLib] Disable GPOS7 optimization to work around bug in Apple CoreText. + Always force Chaining GPOS8 for now (#2540). +- [glifLib] Added ``outputImpliedClosingLine=False`` parameter to ``Glyph.draw()``, + to control behaviour of ``PointToSegmentPen`` (6b4e2e7). +- [varLib.interpolatable] Check for wrong contour starting point (#2571). +- [cffLib] Remove leftover ``GlobalState`` class and fix calls to ``TopDictIndex()`` + (#2569, #2570). +- [instancer] Clear ``AxisValueArray`` if it is empty after instantiating (#2563). + +4.31.2 (released 2022-03-22) +---------------------------- + +- [varLib] fix instantiation of GPOS SinglePos values (#2555). + +4.31.1 (released 2022-03-18) +---------------------------- + +- [subset] fix subsetting OT-SVG when glyph id attribute is on the root ```` + element (#2553). + +4.31.0 (released 2022-03-18) +---------------------------- + +- [ttCollection] Fixed 'ResourceWarning: unclosed file' warning (#2549). +- [varLib.merger] Handle merging SinglePos with valueformat=0 (#2550). +- [ttFont] Update glyf's glyphOrder when calling TTFont.setGlyphOrder() (#2544). +- [ttFont] Added ``ensureDecompiled`` method to load all tables irrespective + of the ``lazy`` attribute (#2551). 
+- [otBase] Added ``iterSubTable`` method to iterate over BaseTable's children of + type BaseTable; useful for traversing a tree of otTables (#2551). + +4.30.0 (released 2022-03-10) +---------------------------- + +- [varLib] Added debug logger showing the glyph name for which ``gvar`` is built (#2542). +- [varLib.errors] Fixed undefined names in ``FoundANone`` and ``UnsupportedFormat`` + exceptions (ac4d5611). +- [otlLib.builder] Added ``windowsNames`` and ``macNames`` (bool) parameters to the + ``buildStatTabe`` function, so that one can select whether to only add one or both + of the two sets (#2528). +- [t1Lib] Added the ability to recreate PostScript stream (#2504). +- [name] Added ``getFirstDebugName``, ``getBest{Family,SubFamily,Full}Name`` methods (#2526). + +4.29.1 (released 2022-02-01) +---------------------------- + +- [colorLib] Fixed rounding issue with radial gradient's start/end circles inside + one another (#2521). +- [freetypePen] Handle rotate/skew transform when auto-computing width/height of the + buffer; raise PenError wen missing moveTo (#2517) + +4.29.0 (released 2022-01-24) +---------------------------- + +- [ufoLib] Fixed illegal characters and expanded reserved filenames (#2506). +- [COLRv1] Don't emit useless PaintColrLayers of lenght=1 in LayerListBuilder (#2513). +- [ttx] Removed legacy ``waitForKeyPress`` method on Windows (#2509). +- [pens] Added FreeTypePen that uses ``freetype-py`` and the pen protocol for + rasterizating outline paths (#2494). +- [unicodedata] Updated the script direction list to Unicode 14.0 (#2484). + Bumped unicodedata2 dependency to 14.0 (#2499). +- [psLib] Fixed type of ``fontName`` in ``suckfont`` (#2496). + +4.28.5 (released 2021-12-19) +---------------------------- + +- [svgPathPen] Continuation of #2471: make sure all occurrences of ``str()`` are now + replaced with user-defined ``ntos`` callable. 
+- [merge] Refactored code into submodules, plus several bugfixes and improvements: + fixed duplicate-glyph-resolution GSUB-lookup generation code; use tolerance in glyph + comparison for empty glyph's width; ignore space of default ignorable glyphs; + downgrade duplicates-resolution missing-GSUB from assert to warn; added --drop-tables + option (#2473, #2475, #2476). + +4.28.4 (released 2021-12-15) +---------------------------- + +- [merge] Merge GDEF marksets in Lookups properly (#2474). +- [feaLib] Have ``fontTools feaLib`` script exit with error code when build fails (#2459) +- [svgPathPen] Added ``ntos`` option to customize number formatting (e.g. rounding) (#2471). +- [subset] Speed up subsetting of large CFF fonts (#2467). +- [otTables] Speculatively promote lookups to extension to speed up compilation. If the + offset to lookup N is too big to fit in a ushort, the offset to lookup N+1 is going to + be too big as well, so we promote to extension all lookups from lookup N onwards (#2465). + +4.28.3 (released 2021-12-03) +---------------------------- + +- [subset] Fixed bug while subsetting ``COLR`` table, whereby incomplete layer records + pointing to missing glyphs were being retained leading to ``struct.error`` upon + compiling. Make it so that ``glyf`` glyph closure, which follows the ``COLR`` glyph + closure, does not influence the ``COLR`` table subsetting (#2461, #2462). +- [docs] Fully document the ``cmap`` and ``glyf`` tables (#2454, #2457). +- [colorLib.unbuilder] Fixed CLI by deleting no longer existing parameter (180bb1867). + +4.28.2 (released 2021-11-22) +---------------------------- + +- [otlLib] Remove duplicates when building coverage (#2433). +- [docs] Add interrogate configuration (#2443). +- [docs] Remove comment about missing “start” optional argument to ``calcChecksum`` (#2448). +- [cu2qu/cli] Adapt to the latest ufoLib2. +- [subset] Support subsetting SVG table and remove it from the list of drop by default tables (#534). 
+- [subset] add ``--pretty-svg`` option to pretty print SVG table contents (#2452). +- [merge] Support merging ``CFF`` tables (CID-keyed ``CFF`` is still not supported) (#2447). +- [merge] Support ``--output-file`` (#2447). +- [docs] Split table docs into individual pages (#2444). +- [feaLib] Forbid empty classes (#2446). +- [docs] Improve documentation for ``fontTools.ttLib.ttFont`` (#2442). + +4.28.1 (released 2021-11-08) +---------------------------- + +- [subset] Fixed AttributeError while traversing a color glyph's Paint graph when there is no + LayerList, which is optional (#2441). + +4.28.0 (released 2021-11-05) +---------------------------- + +- Dropped support for EOL Python 3.6, require Python 3.7 (#2417). +- [ufoLib/glifLib] Make filename-clash checks faster by using a set instead of a list (#2422). +- [subset] Don't crash if optional ClipList and LayerList are ``None`` (empty) (#2424, 2439). +- [OT-SVG] Removed support for old deprecated version 1 and embedded color palettes, + which were never officially part of the OpenType SVG spec. Upon compile, reuse offsets + to SVG documents that are identical (#2430). +- [feaLib] Added support for Variable Feature File syntax. This is experimental and subject + to change until it is finalized in the Adobe FEA spec (#2432). +- [unicodedata] Update Scripts/ScriptExtensions/Blocks to UnicodeData 14.0 (#2437). + +4.27.1 (released 2021-09-23) +---------------------------- + +- [otlLib] Fixed error when chained contextual lookup builder overflows (#2404, #2411). +- [bezierTools] Fixed two floating-point bugs: one when computing `t` for a point + lying on an almost horizontal/vertical line; another when computing the intersection + point between a curve and a line (#2413). 
+ +4.27.0 (released 2021-09-14) +---------------------------- + +- [ttLib/otTables] Cleaned up virtual GID handling: allow virtual GIDs in ``Coverage`` + and ``ClassDef`` readers; removed unused ``allowVID`` argument from ``TTFont`` + constructor, and ``requireReal`` argument in ``TTFont.getGlyphID`` method. + Make ``TTFont.setGlyphOrder`` clear reverse glyphOrder map, and assume ``glyphOrder`` + internal attribute is never modified outside setGlyphOrder; added ``TTFont.getGlyphNameMany`` + and ``getGlyphIDMany`` (#1536, #1654, #2334, #2398). +- [py23] Dropped internal use of ``fontTools.py23`` module to fix deprecation warnings + in client code that imports from fontTools (#2234, #2399, #2400). +- [subset] Fix subsetting COLRv1 clip boxes when font is loaded lazily (#2408). + +4.26.2 (released 2021-08-09) +---------------------------- + +- [otTables] Added missing ``CompositeMode.PLUS`` operator (#2390). + +4.26.1 (released 2021-08-03) +---------------------------- + +- [transform] Added ``transformVector`` and ``transformVectors`` methods to the + ``Transform`` class. Similar to ``transformPoint`` but ignore the translation + part (#2386). + +4.26.0 (released 2021-08-03) +---------------------------- + +- [xmlWriter] Default to ``"\n"`` for ``newlinestr`` instead of platform-specific + ``os.linesep`` (#2384). +- [otData] Define COLRv1 ClipList and ClipBox (#2379). +- [removeOverlaps/instancer] Added --ignore-overlap-errors option to work around + Skia PathOps.Simplify bug (#2382, #2363, google/fonts#3365). +- NOTE: This will be the last version to support Python 3.6. FontTools will require + Python 3.7 or above from the next release (#2350) + +4.25.2 (released 2021-07-26) +---------------------------- + +- [COLRv1] Various changes to sync with the latest CORLv1 draft spec. 
In particular: + define COLR.VarIndexMap, remove/inline ColorIndex struct, add VarIndexBase to ``PaintVar*`` tables (#2372); + add reduced-precicion specialized transform Paints; + define Angle as fraction of half circle encoded as F2Dot14; + use FWORD (int16) for all Paint center coordinates; + change PaintTransform to have an offset to Affine2x3; +- [ttLib] when importing XML, only set sfntVersion if the font has no reader and is empty (#2376) + +4.25.1 (released 2021-07-16) +---------------------------- + +- [ttGlyphPen] Fixed bug in ``TTGlyphPointPen``, whereby open contours (i.e. starting + with segmentType "move") would throw ``NotImplementedError``. They are now treated + as if they are closed, like with the ``TTGlyphPen`` (#2364, #2366). + +4.25.0 (released 2021-07-05) +---------------------------- + +- [tfmLib] Added new library for parsing TeX Font Metric (TFM) files (#2354). +- [TupleVariation] Make shared tuples order deterministic on python < 3.7 where + Counter (subclass of dict) doesn't remember insertion order (#2351, #2353). +- [otData] Renamed COLRv1 structs to remove 'v1' suffix and match the updated draft + spec: 'LayerV1List' -> 'LayerList', 'BaseGlyphV1List' -> 'BaseGlyphList', + 'BaseGlyphV1Record' -> 'BaseGlyphPaintRecord' (#2346). + Added 8 new ``PaintScale*`` tables: with/without centers, uniform vs non-uniform. + Added ``*AroundCenter`` variants to ``PaintRotate`` and ``PaintSkew``: the default + versions no longer have centerX/Y, but default to origin. + ``PaintRotate``, ``PaintSkew`` and ``PaintComposite`` formats were re-numbered. + NOTE: these are breaking changes; clients using the experimental COLRv1 API will + have to be updated (#2348). +- [pointPens] Allow ``GuessSmoothPointPen`` to accept a tolerance. Fixed call to + ``math.atan2`` with x/y parameters inverted. Sync the code with fontPens (#2344). +- [post] Fixed parsing ``post`` table format 2.0 when it contains extra garbage + at the end of the stringData array (#2314). 
+- [subset] drop empty features unless 'size' with FeatureParams table (#2324). +- [otlLib] Added ``otlLib.optimize`` module; added GPOS compaction algorithm. + The compaction can be run on existing fonts with ``fonttools otlLib.optimize`` + or using the snippet ``compact_gpos.py``. There's experimental support for + compacting fonts at compilation time using an environment variable, but that + might be removed later (#2326). + +4.24.4 (released 2021-05-25) +---------------------------- + +- [subset/instancer] Fixed ``AttributeError`` when instantiating a VF that + contains GPOS ValueRecords with ``Device`` tables but without the respective + non-Device values (e.g. ``XAdvDevice`` without ``XAdvance``). When not + explicitly set, the latter are assumed to be 0 (#2323). + +4.24.3 (released 2021-05-20) +---------------------------- + +- [otTables] Fixed ``AttributeError`` in methods that split LigatureSubst, + MultipleSubst and AlternateSubst subtables when an offset overflow occurs. + The ``Format`` attribute was removed in v4.22.0 (#2319). + +4.24.2 (released 2021-05-20) +---------------------------- + +- [ttGlyphPen] Fixed typing annotation of TTGlyphPen glyphSet parameter (#2315). +- Fixed two instances of DeprecationWarning: invalid escape sequence (#2311). + +4.24.1 (released 2021-05-20) +---------------------------- + +- [subset] Fixed AttributeError when SinglePos subtable has None Value (ValueFormat 0) + (#2312, #2313). + +4.24.0 (released 2021-05-17) +---------------------------- + +- [pens] Add ``ttGlyphPen.TTGlyphPointPen`` similar to ``TTGlyphPen`` (#2205). + +4.23.1 (released 2021-05-14) +---------------------------- + +- [subset] Fix ``KeyError`` after subsetting ``COLR`` table that initially contains + both v0 and v1 color glyphs when the subset only requested v1 glyphs; we were + not pruning the v0 portion of the table (#2308). +- [colorLib] Set ``LayerV1List`` attribute to ``None`` when empty, it's optional + in CORLv1 (#2308). 
+ +4.23.0 (released 2021-05-13) +---------------------------- + +- [designspaceLib] Allow to use ``\\UNC`` absolute paths on Windows (#2299, #2306). +- [varLib.merger] Fixed bug where ``VarLibMergeError`` was raised with incorrect + parameters (#2300). +- [feaLib] Allow substituting a glyph class with ``NULL`` to delete multiple glyphs + (#2303). +- [glyf] Fixed ``NameError`` exception in ``getPhantomPoints`` (#2295, #2305). +- [removeOverlaps] Retry pathops.simplify after rounding path coordinates to integers + if it fails the first time using floats, to work around a rare and hard to debug + Skia bug (#2288). +- [varLib] Added support for building, reading, writing and optimizing 32-bit + ``ItemVariationStore`` as used in COLRv1 table (#2285). +- [otBase/otConverters] Add array readers/writers for int types (#2285). +- [feaLib] Allow more than one lookahead glyph/class in contextual positioning with + "value at end" (#2293, #2294). +- [COLRv1] Default varIdx should be 0xFFFFFFFF (#2297, #2298). +- [pens] Make RecordingPointPen actually pass on identifiers; replace asserts with + explicit ``PenError`` exception (#2284). +- [mutator] Round lsb for CF2 fonts as well (#2286). + +4.22.1 (released 2021-04-26) +---------------------------- + +- [feaLib] Skip references to named lookups if the lookup block definition + is empty, similarly to makeotf. This also fixes an ``AttributeError`` while + generating ``aalt`` feature (#2276, #2277). +- [subset] Fixed bug with ``--no-hinting`` implementation for Device tables (#2272, + #2275). The previous code was alwyas dropping Device tables if no-hinting was + requested, but some Device tables (DeltaFormat=0x8000) are also used to encode + variation indices and need to be retained. +- [otBase] Fixed bug in getting the ValueRecordSize when decompiling ``MVAR`` + table with ``lazy=True`` (#2273, #2274). 
+- [varLib/glyf/gvar] Optimized and simplified ``GlyphCoordinates`` and + ``TupleVariation`` classes, use ``bytearray`` where possible, refactored + phantom-points calculations. We measured about 30% speedup in total time + of loading master ttfs, building gvar, and saving (#2261, #2266). +- [subset] Fixed ``AssertionError`` while pruning unused CPAL palettes when + ``0xFFFF`` is present (#2257, #2259). + +4.22.0 (released 2021-04-01) +---------------------------- + +- [ttLib] Remove .Format from Coverage, ClassDef, SingleSubst, LigatureSubst, + AlternateSubst, MultipleSubst (#2238). + ATTENTION: This will change your TTX dumps! +- [misc.arrayTools] move Vector to its own submodule, and rewrite as a tuple + subclass (#2201). +- [docs] Added a terminology section for varLib (#2209). +- [varLib] Move rounding to VariationModel, to avoid error accumulation from + multiple deltas (#2214) +- [varLib] Explain merge errors in more human-friendly terms (#2223, #2226) +- [otlLib] Correct some documentation (#2225) +- [varLib/otlLib] Allow merging into VariationFont without first saving GPOS + PairPos2 (#2229) +- [subset] Improve PairPosFormat2 subsetting (#2221) +- [ttLib] TTFont.save: create file on disk as late as possible (#2253) +- [cffLib] Add missing CFF2 dict operators LanguageGroup and ExpansionFactor + (#2249) + ATTENTION: This will change your TTX dumps! + +4.21.1 (released 2021-02-26) +---------------------------- + +- [pens] Reverted breaking change that turned ``AbstractPen`` and ``AbstractPointPen`` + into abstract base classes (#2164, #2198). + +4.21.0 (released 2021-02-26) +---------------------------- + +- [feaLib] Indent anchor statements in ``asFea()`` to make them more legible and + diff-able (#2193). +- [pens] Turn ``AbstractPen`` and ``AbstractPointPen`` into abstract base classes + (#2164). +- [feaLib] Added support for parsing and building ``STAT`` table from AFDKO feature + files (#2039). 
+- [instancer] Added option to update name table of generated instance using ``STAT`` + table's axis values (#2189). +- [bezierTools] Added functions to compute bezier point-at-time, as well as line-line, + curve-line and curve-curve intersections (#2192). + +4.20.0 (released 2021-02-15) +---------------------------- + +- [COLRv1] Added ``unbuildColrV1`` to deconstruct COLRv1 otTables to raw json-able + data structure; it does the reverse of ``buildColrV1`` (#2171). +- [feaLib] Allow ``sub X by NULL`` sequence to delete a glyph (#2170). +- [arrayTools] Fixed ``Vector`` division (#2173). +- [COLRv1] Define new ``PaintSweepGradient`` (#2172). +- [otTables] Moved ``Paint.Format`` enum class outside of ``Paint`` class definition, + now named ``PaintFormat``. It was clashing with paint instance ``Format`` attribute + and thus was breaking lazy load of COLR table which relies on magic ``__getattr__`` + (#2175). +- [COLRv1] Replace hand-coded builder functions with otData-driven dynamic + implementation (#2181). +- [COLRv1] Define additional static (non-variable) Paint formats (#2181). +- [subset] Added support for subsetting COLR v1 and CPAL tables (#2174, #2177). +- [fontBuilder] Allow ``setupFvar`` to optionally take ``designspaceLib.AxisDescriptor`` + objects. Added new ``setupAvar`` method. Support localised names for axes and + named instances (#2185). + +4.19.1 (released 2021-01-28) +---------------------------- + +- [woff2] An initial off-curve point with an overlap flag now stays an off-curve + point after compression. + +4.19.0 (released 2021-01-25) +---------------------------- + +- [codecs] Handle ``errors`` parameter different from 'strict' for the custom + extended mac encodings (#2137, #2132). +- [featureVars] Raise better error message when a script is missing the required + default language system (#2154). 
+- [COLRv1] Avoid abrupt change caused by rounding ``PaintRadialGradient.c0`` when + the start circle almost touches the end circle's perimeter (#2148). +- [COLRv1] Support building unlimited lists of paints as 255-ary trees of + ``PaintColrLayers`` tables (#2153). +- [subset] Prune redundant format-12 cmap subtables when all non-BMP characters + are dropped (#2146). +- [basePen] Raise ``MissingComponentError`` instead of bare ``KeyError`` when a + referenced component is missing (#2145). + +4.18.2 (released 2020-12-16) +---------------------------- + +- [COLRv1] Implemented ``PaintTranslate`` paint format (#2129). +- [varLib.cff] Fixed unbound local variable error (#1787). +- [otlLib] Don't crash when creating OpenType class definitions if some glyphs + occur more than once (#2125). + +4.18.1 (released 2020-12-09) +---------------------------- + +- [colorLib] Speed optimization for ``LayerV1ListBuilder`` (#2119). +- [mutator] Fixed missing tab in ``interpolate_cff2_metrics`` (0957dc7a). + +4.18.0 (released 2020-12-04) +---------------------------- + +- [COLRv1] Update to latest draft: added ``PaintRotate`` and ``PaintSkew`` (#2118). +- [woff2] Support new ``brotlicffi`` bindings for PyPy (#2117). +- [glifLib] Added ``expectContentsFile`` parameter to ``GlyphSet``, for use when + reading existing UFOs, to comply with the specification stating that a + ``contents.plist`` file must exist in a glyph set (#2114). +- [subset] Allow ``LangSys`` tags in ``--layout-scripts`` option (#2112). For example: + ``--layout-scripts=arab.dflt,arab.URD,latn``; this will keep ``DefaultLangSys`` + and ``URD`` language for ``arab`` script, and all languages for ``latn`` script. +- [varLib.interpolatable] Allow UFOs to be checked; report open paths, non existant + glyphs; add a ``--json`` option to produce a machine-readable list of + incompatibilities +- [pens] Added ``QuartzPen`` to create ``CGPath`` from glyph outlines on macOS. + Requires pyobjc (#2107). 
+- [feaLib] You can export ``FONTTOOLS_LOOKUP_DEBUGGING=1`` to enable feature file + debugging info stored in ``Debg`` table (#2106). +- [otlLib] Build more efficient format 1 and format 2 contextual lookups whenever + possible (#2101). + +4.17.1 (released 2020-11-16) +---------------------------- + +- [colorLib] Fixed regression in 4.17.0 when building COLR v0 table; when color + layers are stored in UFO lib plist, we can't distinguish tuples from lists so + we need to accept either types (e5439eb9, googlefonts/ufo2ft/issues#426). + +4.17.0 (released 2020-11-12) +---------------------------- + +- [colorLib/otData] Updated to latest draft ``COLR`` v1 spec (#2092). +- [svgLib] Fixed parsing error when arc commands' boolean flags are not separated + by space or comma (#2094). +- [varLib] Interpret empty non-default glyphs as 'missing', if the default glyph is + not empty (#2082). +- [feaLib.builder] Only stash lookup location for ``Debg`` if ``Builder.buildLookups_`` + has cooperated (#2065, #2067). +- [varLib] Fixed bug in VarStore optimizer (#2073, #2083). +- [varLib] Add designspace lib key for custom feavar feature tag (#2080). +- Add HashPointPen adapted from psautohint. With this pen, a hash value of a glyph + can be computed, which can later be used to detect glyph changes (#2005). + +4.16.1 (released 2020-10-05) +---------------------------- + +- [varLib.instancer] Fixed ``TypeError`` exception when instantiating a VF with + a GSUB table 1.1 in which ``FeatureVariations`` attribute is present but set to + ``None`` -- indicating that optional ``FeatureVariations`` is missing (#2077). +- [glifLib] Make ``x`` and ``y`` attributes of the ``point`` element required + even when validation is turned off, and raise a meaningful ``GlifLibError`` + message when that happens (#2075). 
+ +4.16.0 (released 2020-09-30) +---------------------------- + +- [removeOverlaps] Added new module and ``removeOverlaps`` function that merges + overlapping contours and components in TrueType glyphs. It requires the + `skia-pathops `__ module. + Note that removing overlaps invalidates the TrueType hinting (#2068). +- [varLib.instancer] Added ``--remove-overlaps`` command-line option. + The ``overlap`` option in ``instantiateVariableFont`` now takes an ``OverlapMode`` + enum: 0: KEEP_AND_DONT_SET_FLAGS, 1: KEEP_AND_SET_FLAGS (default), and 2: REMOVE. + The latter is equivalent to calling ``removeOverlaps`` on the generated static + instance. The option continues to accept ``bool`` value for backward compatibility. + + +4.15.0 (released 2020-09-21) +---------------------------- + +- [plistlib] Added typing annotations to plistlib module. Set up mypy static + typechecker to run automatically on CI (#2061). +- [ttLib] Implement private ``Debg`` table, a reverse-DNS namespaced JSON dict. +- [feaLib] Optionally add an entry into the ``Debg`` table with the original + lookup name (if any), feature name / script / language combination (if any), + and original source filename and line location. Annotate the ttx output for + a lookup with the information from the Debg table (#2052). +- [sfnt] Disabled checksum checking by default in ``SFNTReader`` (#2058). +- [Docs] Document ``mtiLib`` module (#2027). +- [varLib.interpolatable] Added checks for contour node count and operation type + of each node (#2054). +- [ttLib] Added API to register custom table packer/unpacker classes (#2055). + +4.14.0 (released 2020-08-19) +---------------------------- + +- [feaLib] Allow anonymous classes in LookupFlags definitions (#2037). +- [Docs] Better document DesignSpace rules processing order (#2041). +- [ttLib] Fixed 21-year old bug in ``maxp.maxComponentDepth`` calculation (#2044, + #2045). +- [varLib.models] Fixed misspelled argument name in CLI entry point (81d0042a). 
+- [subset] When subsetting GSUB v1.1, fixed TypeError by checking whether the + optional FeatureVariations table is present (e63ecc5b). +- [Snippets] Added snippet to show how to decompose glyphs in a TTF (#2030). +- [otlLib] Generate GSUB type 5 and GPOS type 7 contextual lookups where appropriate + (#2016). + +4.13.0 (released 2020-07-10) +---------------------------- + +- [feaLib/otlLib] Moved lookup subtable builders from feaLib to otlLib; refactored + some common code (#2004, #2007). +- [docs] Document otlLib module (#2009). +- [glifLib] Fixed bug with some UFO .glif filenames clashing on case-insensitive + filesystems (#2001, #2002). +- [colorLib] Updated COLRv1 implementation following changes in the draft spec: + (#2008, googlefonts/colr-gradients-spec#24). + +4.12.1 (released 2020-06-16) +---------------------------- + +- [_n_a_m_e] Fixed error in ``addMultilingualName`` with one-character names. + Only attempt to recovered malformed UTF-16 data from a ``bytes`` string, + not from unicode ``str`` (#1997, #1998). + +4.12.0 (released 2020-06-09) +---------------------------- + +- [otlLib/varLib] Ensure that the ``AxisNameID`` in the ``STAT`` and ``fvar`` + tables is grater than 255 as per OpenType spec (#1985, #1986). +- [docs] Document more modules in ``fontTools.misc`` package: ``filenames``, + ``fixedTools``, ``intTools``, ``loggingTools``, ``macCreatorType``, ``macRes``, + ``plistlib`` (#1981). +- [OS/2] Don't calculate whole sets of unicode codepoints, use faster and more memory + efficient ranges and bisect lookups (#1984). +- [voltLib] Support writing back abstract syntax tree as VOLT data (#1983). +- [voltLib] Accept DO_NOT_TOUCH_CMAP keyword (#1987). +- [subset/merge] Fixed a namespace clash involving a private helper class (#1955). 
+ +4.11.0 (released 2020-05-28) +---------------------------- + +- [feaLib] Introduced ``includeDir`` parameter on Parser and IncludingLexer to + explicitly specify the directory to search when ``include()`` statements are + encountered (#1973). +- [ufoLib] Silently delete duplicate glyphs within the same kerning group when reading + groups (#1970). +- [ttLib] Set version of COLR table when decompiling COLRv1 (commit 9d8a7e2). + +4.10.2 (released 2020-05-20) +---------------------------- + +- [sfnt] Fixed ``NameError: SimpleNamespace`` while reading TTC header. The regression + was introduced with 4.10.1 after removing ``py23`` star import. + +4.10.1 (released 2020-05-19) +---------------------------- + +- [sfnt] Make ``SFNTReader`` pickleable even when TTFont is loaded with lazy=True + option and thus keeps a reference to an external file (#1962, #1967). +- [feaLib.ast] Restore backward compatibility (broken in 4.10 with #1905) for + ``ChainContextPosStatement`` and ``ChainContextSubstStatement`` classes. + Make them accept either list of lookups or list of lists of lookups (#1961). +- [docs] Document some modules in ``fontTools.misc`` package: ``arrayTools``, + ``bezierTools`` ``cliTools`` and ``eexec`` (#1956). +- [ttLib._n_a_m_e] Fixed ``findMultilingualName()`` when name record's ``string`` is + encoded as bytes sequence (#1963). + +4.10.0 (released 2020-05-15) +---------------------------- + +- [varLib] Allow feature variations to be active across the entire space (#1957). +- [ufoLib] Added support for ``formatVersionMinor`` in UFO's ``fontinfo.plist`` and for + ``formatMinor`` attribute in GLIF file as discussed in unified-font-object/ufo-spec#78. + No changes in reading or writing UFOs until an upcoming (non-0) minor update of the + UFO specification is published (#1786). +- [merge] Fixed merging fonts with different versions of ``OS/2`` table (#1865, #1952). 
+- [subset] Fixed ``AttributeError`` while subsetting ``ContextSubst`` and ``ContextPos`` + Format 3 subtable (#1879, #1944). +- [ttLib.table._m_e_t_a] if data happens to be ascii, emit comment in TTX (#1938). +- [feaLib] Support multiple lookups per glyph position (#1905). +- [psCharStrings] Use inheritance to avoid repeated code in initializer (#1932). +- [Doc] Improved documentation for the following modules: ``afmLib`` (#1933), ``agl`` + (#1934), ``cffLib`` (#1935), ``cu2qu`` (#1937), ``encodings`` (#1940), ``feaLib`` + (#1941), ``merge`` (#1949). +- [Doc] Split off developer-centric info to new page, making front page of docs more + user-focused. List all utilities and sub-modules with brief descriptions. + Make README more concise and focused (#1914). +- [otlLib] Add function to build STAT table from high-level description (#1926). +- [ttLib._n_a_m_e] Add ``findMultilingualName()`` method (#1921). +- [unicodedata] Update ``RTL_SCRIPTS`` for Unicode 13.0 (#1925). +- [gvar] Sort ``gvar`` XML output by glyph name, not glyph order (#1907, #1908). +- [Doc] Added help options to ``fonttools`` command line tool (#1913, #1920). + Ensure all fonttools CLI tools have help documentation (#1948). +- [ufoLib] Only write fontinfo.plist when there actually is content (#1911). + +4.9.0 (released 2020-04-29) +--------------------------- + +- [subset] Fixed subsetting of FeatureVariations table. The subsetter no longer drops + FeatureVariationRecords that have empty substitutions as that will keep the search + going and thus change the logic. It will only drop empty records that occur at the + end of the FeatureVariationRecords array (#1881). +- [subset] Remove FeatureVariations table and downgrade GSUB/GPOS to version 0x10000 + when FeatureVariations contain no FeatureVariationRecords after subsetting (#1903). +- [agl] Add support for legacy Adobe Glyph List of glyph names in ``fontTools.agl`` + (#1895). +- [feaLib] Ignore superfluous script statements (#1883). 
+- [feaLib] Hide traceback by default on ``fonttools feaLib`` command line. + Use ``--traceback`` option to show (#1898). +- [feaLib] Check lookup index in chaining sub/pos lookups and print better error + message (#1896, #1897). +- [feaLib] Fix building chained alt substitutions (#1902). +- [Doc] Included all fontTools modules in the sphinx-generated documentation, and + published it to ReadTheDocs for continuous documentation of the fontTools project + (#1333). Check it out at https://fonttools.readthedocs.io/. Thanks to Chris Simpkins! +- [transform] The ``Transform`` class is now subclass of ``typing.NamedTuple``. No + change in functionality (#1904). + + +4.8.1 (released 2020-04-17) +--------------------------- + +- [feaLib] Fixed ``AttributeError: 'NoneType' has no attribute 'getAlternateGlyphs'`` + when ``aalt`` feature references a chain contextual substitution lookup + (googlefonts/fontmake#648, #1878). + +4.8.0 (released 2020-04-16) +--------------------------- + +- [feaLib] If Parser is initialized without a ``glyphNames`` parameter, it cannot + distinguish between a glyph name containing an hyphen, or a range of glyph names; + instead of raising an error, it now interprets them as literal glyph names, while + also outputting a logging warning to alert user about the ambiguity (#1768, #1870). +- [feaLib] When serializing AST to string, emit spaces around hyphens that denote + ranges. Also, fixed an issue with CID ranges when round-tripping AST->string->AST + (#1872). +- [Snippets/otf2ttf] In otf2ttf.py script update LSB in hmtx to match xMin (#1873). +- [colorLib] Added experimental support for building ``COLR`` v1 tables as per + the `colr-gradients-spec `__ + draft proposal. **NOTE**: both the API and the XML dump of ``COLR`` v1 are + susceptible to change while the proposal is being discussed and formalized (#1822). 
+ +4.7.0 (released 2020-04-03) +--------------------------- + +- [cu2qu] Added ``fontTools.cu2qu`` package, imported from the original + `cu2qu `__ project. The ``cu2qu.pens`` module + was moved to ``fontTools.pens.cu2quPen``. The optional cu2qu extension module + can be compiled by installing `Cython `__ before installing + fonttools from source (i.e. git repo or sdist tarball). The wheel package that + is published on PyPI (i.e. the one ``pip`` downloads, unless ``--no-binary`` + option is used), will continue to be pure-Python for now (#1868). + +4.6.0 (released 2020-03-24) +--------------------------- + +- [varLib] Added support for building variable ``BASE`` table version 1.1 (#1858). +- [CPAL] Added ``fromRGBA`` method to ``Color`` class (#1861). + + +4.5.0 (released 2020-03-20) +--------------------------- + +- [designspaceLib] Added ``add{Axis,Source,Instance,Rule}Descriptor`` methods to + ``DesignSpaceDocument`` class, to initialize new descriptor objects using keyword + arguments, and at the same time append them to the current document (#1860). +- [unicodedata] Update to Unicode 13.0 (#1859). + +4.4.3 (released 2020-03-13) +--------------------------- + +- [varLib] Always build ``gvar`` table for TrueType-flavored Variable Fonts, + even if it contains no variation data. The table is required according to + the OpenType spec (#1855, #1857). + +4.4.2 (released 2020-03-12) +--------------------------- + +- [ttx] Annotate ``LookupFlag`` in XML dump with comment explaining what bits + are set and what they mean (#1850). +- [feaLib] Added more descriptive message to ``IncludedFeaNotFound`` error (#1842). + +4.4.1 (released 2020-02-26) +--------------------------- + +- [woff2] Skip normalizing ``glyf`` and ``loca`` tables if these are missing from + a font (e.g. in NotoColorEmoji using ``CBDT/CBLC`` tables). +- [timeTools] Use non-localized date parsing in ``timestampFromString``, to fix + error when non-English ``LC_TIME`` locale is set (#1838, #1839). 
+- [fontBuilder] Make sure the CFF table generated by fontBuilder can be used by varLib + without having to compile and decompile the table first. This was breaking in + converting the CFF table to CFF2 due to some unset attributes (#1836). + +4.4.0 (released 2020-02-18) +--------------------------- + +- [colorLib] Added ``fontTools.colorLib.builder`` module, initially with ``buildCOLR`` + and ``buildCPAL`` public functions. More color font formats will follow (#1827). +- [fontBuilder] Added ``setupCOLR`` and ``setupCPAL`` methods (#1826). +- [ttGlyphPen] Quantize ``GlyphComponent.transform`` floats to ``F2Dot14`` to fix + round-trip issue when computing bounding boxes of transformed components (#1830). +- [glyf] If a component uses reference points (``firstPt`` and ``secondPt``) for + alignment (instead of X and Y offsets), compute the effective translation offset + *after* having applied any transform (#1831). +- [glyf] When all glyphs have zero contours, compile ``glyf`` table data as a single + null byte in order to pass validation by OTS and Windows (#1829). +- [feaLib] Parsing feature code now ensures that referenced glyph names are part of + the known glyph set, unless a glyph set was not provided. +- [varLib] When filling in the default axis value for a missing location of a source or + instance, correctly map the value forward. +- [varLib] The avar table can now contain mapping output values that are greater than + OR EQUAL to the preceeding value, as the avar specification allows this. +- [varLib] The errors of the module are now ordered hierarchically below VarLibError. + See #1821. + +4.3.0 (released 2020-02-03) +--------------------------- + +- [EBLC/CBLC] Fixed incorrect padding length calculation for Format 3 IndexSubTable + (#1817, #1818). +- [varLib] Fixed error when merging OTL tables and TTFonts were loaded as ``lazy=True`` + (#1808, #1809). +- [varLib] Allow to use master fonts containing ``CFF2`` table when building VF (#1816). 
+- [ttLib] Make ``recalcBBoxes`` option work also with ``CFF2`` table (#1816). +- [feaLib] Don't reset ``lookupflag`` in lookups defined inside feature blocks. + They will now inherit the current ``lookupflag`` of the feature. This is what + Adobe ``makeotf`` also does in this case (#1815). +- [feaLib] Fixed bug with mixed single/multiple substitutions. If a single substitution + involved a glyph class, we were incorrectly using only the first glyph in the class + (#1814). + +4.2.5 (released 2020-01-29) +--------------------------- + +- [feaLib] Do not fail on duplicate multiple substitutions, only warn (#1811). +- [subset] Optimize SinglePos subtables to Format 1 if all ValueRecords are the same + (#1802). + +4.2.4 (released 2020-01-09) +--------------------------- + +- [unicodedata] Update RTL_SCRIPTS for Unicode 11 and 12. + +4.2.3 (released 2020-01-07) +--------------------------- + +- [otTables] Fixed bug when splitting `MarkBasePos` subtables as offsets overflow. + The mark class values in the split subtable were not being updated, leading to + invalid mark-base attachments (#1797, googlefonts/noto-source#145). +- [feaLib] Only log a warning instead of error when features contain duplicate + substitutions (#1767). +- [glifLib] Strip XML comments when parsing with lxml (#1784, #1785). + +4.2.2 (released 2019-12-12) +--------------------------- + +- [subset] Fixed issue with subsetting FeatureVariations table when the index + of features changes as features get dropped. The feature index need to be + remapped to point to index of the remaining features (#1777, #1782). +- [fontBuilder] Added `addFeatureVariations` method to `FontBuilder` class. This + is a shorthand for calling `featureVars.addFeatureVariations` on the builder's + TTFont object (#1781). +- [glyf] Fixed the flags bug in glyph.drawPoints() like we did for glyph.draw() + (#1771, #1774). 
+ +4.2.1 (released 2019-12-06) +--------------------------- + +- [glyf] Use the ``flagOnCurve`` bit mask in ``glyph.draw()``, so that we ignore + the ``overlap`` flag that may be set when instantiating variable fonts (#1771). + +4.2.0 (released 2019-11-28) +--------------------------- + +- [pens] Added the following pens: + + * ``roundingPen.RoundingPen``: filter pen that rounds coordinates and components' + offsets to integer; + * ``roundingPen.RoundingPointPen``: like the above, but using PointPen protocol. + * ``filterPen.FilterPointPen``: base class for filter point pens; + * ``transformPen.TransformPointPen``: filter point pen to apply affine transform; + * ``recordingPen.RecordingPointPen``: records and replays point-pen commands. + +- [ttGlyphPen] Always round float coordinates and component offsets to integers + (#1763). +- [ufoLib] When converting kerning groups from UFO2 to UFO3, avoid confusing + groups with the same name as one of the glyphs (#1761, #1762, + unified-font-object/ufo-spec#98). + +4.1.0 (released 2019-11-18) +--------------------------- + +- [instancer] Implemented restricting axis ranges (level 3 partial instancing). + You can now pass ``{axis_tag: (min, max)}`` tuples as input to the + ``instantiateVariableFont`` function. Note that changing the default axis + position is not supported yet. The command-line script also accepts axis ranges + in the form of colon-separated float values, e.g. ``wght=400:700`` (#1753, #1537). +- [instancer] Never drop STAT ``DesignAxis`` records, but only prune out-of-range + ``AxisValue`` records. +- [otBase/otTables] Enforce that VarStore.RegionAxisCount == fvar.axisCount, even + when regions list is empty to appease OTS < v8.0 (#1752). +- [designspaceLib] Defined new ``processing`` attribute for ```` element, + with values "first" or "last", plus other editorial changes to DesignSpace + specification. Bumped format version to 4.1 (#1750). 
+- [varLib] Improved error message when masters' glyph orders do not match (#1758, + #1759). +- [featureVars] Allow to specify custom feature tag in ``addFeatureVariations``; + allow said feature to already exist, in which case we append new lookup indices + to existing features. Implemented ```` attribute ``processing`` according to + DesignSpace specification update in #1750. Depending on this flag, we generate + either an 'rvrn' (always processed first) or a 'rclt' feature (follows lookup order, + therefore last) (#1747, #1625, #1371). +- [ttCollection] Added support for context manager auto-closing via ``with`` statement + like with ``TTFont`` (#1751). +- [unicodedata] Require unicodedata2 >= 12.1.0. +- [py2.py3] Removed yet more PY2 vestiges (#1743). +- [_n_a_m_e] Fixed issue when comparing NameRecords with different string types (#1742). +- [fixedTools] Changed ``fixedToFloat`` to not do any rounding but simply return + ``value / (1 << precisionBits)``. Added ``floatToFixedToStr`` and + ``strToFixedToFloat`` functions to be used when loading from or dumping to XML. + Fixed values (e.g. fvar axes and instance coordinates, avar mappings, etc.) are + are now stored as un-rounded decimal floats upon decompiling (#1740, #737). +- [feaLib] Fixed handling of multiple ``LigatureCaret`` statements for the same glyph. + Only the first rule per glyph is used, additional ones are ignored (#1733). + +4.0.2 (released 2019-09-26) +--------------------------- + +- [voltLib] Added support for ``ALL`` and ``NONE`` in ``PROCESS_MARKS`` (#1732). +- [Silf] Fixed issue in ``Silf`` table compilation and decompilation regarding str vs + bytes in python3 (#1728). +- [merge] Handle duplicate glyph names better: instead of appending font index to + all glyph names, use similar code like we use in ``post`` and ``CFF`` tables (#1729). 
+ +4.0.1 (released 2019-09-11) +--------------------------- + +- [otTables] Support fixing offset overflows in ``MultipleSubst`` lookup subtables + (#1706). +- [subset] Prune empty strikes in ``EBDT`` and ``CBDT`` table data (#1698, #1633). +- [pens] Fixed issue in ``PointToSegmentPen`` when last point of closed contour has + same coordinates as the starting point and was incorrectly dropped (#1720). +- [Graphite] Fixed ``Sill`` table output to pass OTS (#1705). +- [name] Added ``removeNames`` method to ``table__n_a_m_e`` class (#1719). +- [ttLib] Added aliases for renamed entries ``ascender`` and ``descender`` in + ``hhea`` table (#1715). + +4.0.0 (released 2019-08-22) +--------------------------- + +- NOTE: The v4.x version series only supports Python 3.6 or greater. You can keep + using fonttools 3.x if you need support for Python 2. +- [py23] Removed all the python2-only code since it is no longer reachable, thus + unused; only the Python3 symbols were kept, but these are no-op. The module is now + DEPRECATED and will removed in the future. +- [ttLib] Fixed UnboundLocalError for empty loca/glyph tables (#1680). Also, allow + the glyf table to be incomplete when dumping to XML (#1681). +- [varLib.models] Fixed KeyError while sorting masters and there are no on-axis for + a given axis (38a8eb0e). +- [cffLib] Make sure glyph names are unique (#1699). +- [feaLib] Fix feature parser to correctly handle octal numbers (#1700). + +\... 
see `here `__ for earlier changes diff --git a/.cache/pip/http-v2/8/f/0/a/d/8f0ad583652318494c12da41e4d30ff6225ec73d448a0754a0a7e898 b/.cache/pip/http-v2/8/f/0/a/d/8f0ad583652318494c12da41e4d30ff6225ec73d448a0754a0a7e898 new file mode 100644 index 0000000000000000000000000000000000000000..fcf38640e52aee4849824bd3b94ed926694c3614 Binary files /dev/null and b/.cache/pip/http-v2/8/f/0/a/d/8f0ad583652318494c12da41e4d30ff6225ec73d448a0754a0a7e898 differ diff --git a/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec b/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec new file mode 100644 index 0000000000000000000000000000000000000000..05d69d936eaef114bb382406a412161802656427 Binary files /dev/null and b/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec differ diff --git a/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec.body b/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec.body new file mode 100644 index 0000000000000000000000000000000000000000..b4670e86b6dc207c944c55c5d3b84911fb41157a --- /dev/null +++ b/.cache/pip/http-v2/b/0/c/3/0/b0c304a2d0d2dd17cd2e818948220da223f52dfe26c6e170a35508ec.body @@ -0,0 +1,32 @@ +Metadata-Version: 2.1 +Name: mdurl +Version: 0.1.2 +Summary: Markdown URL utilities +Keywords: markdown,commonmark +Author-email: Taneli Hukkinen +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX :: Linux +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming 
Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Typing :: Typed +Project-URL: Homepage, https://github.com/executablebooks/mdurl + +# mdurl + +[![Build Status](https://github.com/executablebooks/mdurl/workflows/Tests/badge.svg?branch=master)](https://github.com/executablebooks/mdurl/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) +[![codecov.io](https://codecov.io/gh/executablebooks/mdurl/branch/master/graph/badge.svg)](https://codecov.io/gh/executablebooks/mdurl) +[![PyPI version](https://img.shields.io/pypi/v/mdurl)](https://pypi.org/project/mdurl) + +This is a Python port of the JavaScript [mdurl](https://www.npmjs.com/package/mdurl) package. +See the [upstream README.md file](https://github.com/markdown-it/mdurl/blob/master/README.md) for API documentation. + diff --git a/.cache/pip/http-v2/b/1/b/c/4/b1bc490c5e0cca2a2d96076c15d68fbd776750f65fed2e31e85eb654 b/.cache/pip/http-v2/b/1/b/c/4/b1bc490c5e0cca2a2d96076c15d68fbd776750f65fed2e31e85eb654 new file mode 100644 index 0000000000000000000000000000000000000000..d20b6541cf80b1f9a03c8bd839434b2a4ae19f47 Binary files /dev/null and b/.cache/pip/http-v2/b/1/b/c/4/b1bc490c5e0cca2a2d96076c15d68fbd776750f65fed2e31e85eb654 differ diff --git a/.cache/pip/http-v2/b/1/f/5/d/b1f5dc01896eee598e8915be35e61304ad2b16363d00711f8576642c b/.cache/pip/http-v2/b/1/f/5/d/b1f5dc01896eee598e8915be35e61304ad2b16363d00711f8576642c new file mode 100644 index 0000000000000000000000000000000000000000..70c86e29654eeb0f2ee290fc92fb9446a7f9235d Binary files /dev/null and b/.cache/pip/http-v2/b/1/f/5/d/b1f5dc01896eee598e8915be35e61304ad2b16363d00711f8576642c differ diff --git a/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6 b/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6 new file mode 100644 index 
0000000000000000000000000000000000000000..c31f032937cf6b53c58e7005ccb78ec2ae4d6bc3 Binary files /dev/null and b/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6 differ diff --git a/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6.body b/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6.body new file mode 100644 index 0000000000000000000000000000000000000000..67b4d5723c2ad1377810f7dc8c12dbdcae9b3e6d --- /dev/null +++ b/.cache/pip/http-v2/b/5/7/a/9/b57a93fe5f8c75cd00cfe703d012d39466ff7ca30bf44d13df4112f6.body @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458 +size 35235890 diff --git a/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398 b/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398 new file mode 100644 index 0000000000000000000000000000000000000000..18ecd335bd3e7742bd183f7f876e98f7f04c14fb Binary files /dev/null and b/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398 differ diff --git a/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398.body b/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398.body new file mode 100644 index 0000000000000000000000000000000000000000..776e9421839d3f5e4d2e7d7dc08ff857be818cae Binary files /dev/null and b/.cache/pip/http-v2/b/b/a/d/c/bbadc07fc383ff650474f2a8246a59ba64776ab6b35809d02400a398.body differ diff --git a/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95 b/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95 new file mode 100644 index 0000000000000000000000000000000000000000..78e9953b5be11fe1f927d928ec8f38e7956665e4 Binary files /dev/null and 
b/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95 differ diff --git a/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95.body b/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95.body new file mode 100644 index 0000000000000000000000000000000000000000..3faee0eca1e21fae28d6c331ff611176403e21e8 Binary files /dev/null and b/.cache/pip/http-v2/b/e/5/9/9/be599e435b04e9191df28c1690b91394df11dcc43b78b8097f2cac95.body differ diff --git a/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759 b/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759 new file mode 100644 index 0000000000000000000000000000000000000000..234d47faa92fdbeb814b4eb9c5a7ea3242dd7383 Binary files /dev/null and b/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759 differ diff --git a/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759.body b/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759.body new file mode 100644 index 0000000000000000000000000000000000000000..6b8b6aba83b1b8f8a9f33007ed178b42cdefd850 Binary files /dev/null and b/.cache/pip/http-v2/b/e/d/4/e/bed4e7c2b407105e5c80f6f3c9c9ce06074cd1bc90409700e4489759.body differ diff --git a/.cache/pip/http-v2/c/d/5/d/e/cd5de44b899cbe1869765c21b55cffc53c0c0ffa9d3c6f1fd40a42a2.body b/.cache/pip/http-v2/c/d/5/d/e/cd5de44b899cbe1869765c21b55cffc53c0c0ffa9d3c6f1fd40a42a2.body new file mode 100644 index 0000000000000000000000000000000000000000..0a105c4f206471fcbf55424e72b95b5fa92ded19 --- /dev/null +++ b/.cache/pip/http-v2/c/d/5/d/e/cd5de44b899cbe1869765c21b55cffc53c0c0ffa9d3c6f1fd40a42a2.body @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce8110a1fab9fa20de934002c6cdd93e02af0250708d7d82cd7aeb4afbfb998a +size 263976 diff --git 
a/.cache/pip/http-v2/d/0/1/5/f/d015fbc6aca5c05d98c5a5fd1b5a5da789d8a3e8323acf92db497bce b/.cache/pip/http-v2/d/0/1/5/f/d015fbc6aca5c05d98c5a5fd1b5a5da789d8a3e8323acf92db497bce new file mode 100644 index 0000000000000000000000000000000000000000..606719f9eacd6bb912d46d24e4bed1912879332d Binary files /dev/null and b/.cache/pip/http-v2/d/0/1/5/f/d015fbc6aca5c05d98c5a5fd1b5a5da789d8a3e8323acf92db497bce differ diff --git a/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17 b/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17 new file mode 100644 index 0000000000000000000000000000000000000000..6fcef4cac853078b11c6c1f4448e3c7951a1a9cc Binary files /dev/null and b/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17 differ diff --git a/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17.body b/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17.body new file mode 100644 index 0000000000000000000000000000000000000000..91785467dfa8c2370be5bb23e9806393857ad058 Binary files /dev/null and b/.cache/pip/http-v2/d/0/1/b/7/d01b75bf0e331a9e22160ebda15b330e26789777201136ee08c89b17.body differ diff --git a/.cache/pip/http-v2/d/3/2/9/d/d329d269473dcb26bc2fcf82e354619e8463ba8855d5a3b77637c124 b/.cache/pip/http-v2/d/3/2/9/d/d329d269473dcb26bc2fcf82e354619e8463ba8855d5a3b77637c124 new file mode 100644 index 0000000000000000000000000000000000000000..cbfb68b722b4f54398fa51462897b46b2b949f51 Binary files /dev/null and b/.cache/pip/http-v2/d/3/2/9/d/d329d269473dcb26bc2fcf82e354619e8463ba8855d5a3b77637c124 differ diff --git a/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1 b/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1 new file mode 100644 index 0000000000000000000000000000000000000000..236993a450e000a556c57355372f22a4f6169fb1 Binary files /dev/null 
and b/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1 differ diff --git a/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1.body b/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1.body new file mode 100644 index 0000000000000000000000000000000000000000..4b69591b7f27a26918d89f660ac783b4a7077ffe --- /dev/null +++ b/.cache/pip/http-v2/d/b/c/9/6/dbc96dffe61b94bcd3688bd8959baaf90ecc4c26bd760252ca8c1de1.body @@ -0,0 +1,87 @@ +Metadata-Version: 2.4 +Name: hf-xet +Version: 1.4.2 +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Rust +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Free Threading +Classifier: Programming Language :: Python :: Free Threading :: 2 - Beta +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Dist: pytest ; extra == 'tests' +Provides-Extra: tests +License-File: LICENSE +Summary: Fast transfer of large files with the Hugging Face Hub. 
+Maintainer-email: Rajat Arya , Jared Sulzdorf , Di Xiao , Assaf Vayner , Hoyt Koepke +License-Expression: Apache-2.0 +Requires-Python: >=3.8 +Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM +Project-URL: Documentation, https://huggingface.co/docs/hub/xet/index +Project-URL: Homepage, https://github.com/huggingface/xet-core +Project-URL: Issues, https://github.com/huggingface/xet-core/issues +Project-URL: Repository, https://github.com/huggingface/xet-core.git + + +

+ License + GitHub release + Contributor Covenant +

+ +

+

🤗 hf-xet - xet client tech, used in huggingface_hub

+

+ +## Welcome + +`hf-xet` enables `huggingface_hub` to utilize xet storage for uploading and downloading to HF Hub. Xet storage provides chunk-based deduplication, efficient storage/retrieval with local disk caching, and backwards compatibility with Git LFS. This library is not meant to be used directly, and is instead intended to be used from [huggingface_hub](https://pypi.org/project/huggingface-hub). + +## Key features + +♻ **chunk-based deduplication implementation**: avoid transferring and storing chunks that are shared across binary files (models, datasets, etc). + +🤗 **Python bindings**: bindings for [huggingface_hub](https://github.com/huggingface/huggingface_hub/) package. + +↔ **network communications**: concurrent communication to HF Hub Xet backend services (CAS). + +🔖 **local disk caching**: chunk-based cache that sits alongside the existing [huggingface_hub disk cache](https://huggingface.co/docs/huggingface_hub/guides/manage-cache). + +## Installation + +Install the `hf_xet` package with [pip](https://pypi.org/project/hf-xet/): + +```bash +pip install hf_xet +``` + +## Quick Start + +`hf_xet` is not intended to be run independently as it is expected to be used from `huggingface_hub`, so to get started with `huggingface_hub` check out the documentation [here]("https://hf.co/docs/huggingface_hub"). + +## Contributions (feature requests, bugs, etc.) are encouraged & appreciated 💙💚💛💜🧡❤️ + +Please join us in making hf-xet better. We value everyone's contributions. Code is not the only way to help. Answering questions, helping each other, improving documentation, filing issues all help immensely. If you are interested in contributing (please do!), check out the [contribution guide](https://github.com/huggingface/xet-core/blob/main/CONTRIBUTING.md) for this repository. 
diff --git a/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b b/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b new file mode 100644 index 0000000000000000000000000000000000000000..2c7c900b9d21ada7bd8c844aa024ad5a90e3e3a4 Binary files /dev/null and b/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b differ diff --git a/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b.body b/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b.body new file mode 100644 index 0000000000000000000000000000000000000000..3eee724166b8ba6132c01f452f7df3c1975a5b2c --- /dev/null +++ b/.cache/pip/http-v2/d/d/2/0/6/dd206ee8d449d4bec512242e8f8ebadb5a808c682766018f3921f62b.body @@ -0,0 +1,51 @@ +Metadata-Version: 2.1 +Name: et_xmlfile +Version: 2.0.0 +Summary: An implementation of lxml.xmlfile for the standard library +Home-page: https://foss.heptapod.net/openpyxl/et_xmlfile +Author: See AUTHORS.txt +Author-email: charlie.clark@clark-consulting.eu +License: MIT +Project-URL: Documentation, https://openpyxl.pages.heptapod.net/et_xmlfile/ +Project-URL: Source, https://foss.heptapod.net/openpyxl/et_xmlfile +Project-URL: Tracker, https://foss.heptapod.net/openpyxl/et_xmfile/-/issues +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Requires-Python: >=3.8 +License-File: 
LICENCE.python +License-File: LICENCE.rst +License-File: AUTHORS.txt + +.. image:: https://foss.heptapod.net/openpyxl/et_xmlfile/badges/branch/default/coverage.svg + :target: https://coveralls.io/bitbucket/openpyxl/et_xmlfile?branch=default + :alt: coverage status + +et_xmfile +========= + +XML can use lots of memory, and et_xmlfile is a low memory library for creating large XML files +And, although the standard library already includes an incremental parser, `iterparse` it has no equivalent when writing XML. Once an element has been added to the tree, it is written to +the file or stream and the memory is then cleared. + +This module is based upon the `xmlfile module from lxml `_ with the aim of allowing code to be developed that will work with both libraries. +It was developed initially for the openpyxl project, but is now a standalone module. + +The code was written by Elias Rabel as part of the `Python Düsseldorf `_ openpyxl sprint in September 2014. + +Proper support for incremental writing was provided by Daniel Hillier in 2024 + +Note on performance +------------------- + +The code was not developed with performance in mind, but turned out to be faster than the existing SAX-based implementation but is generally slower than lxml's xmlfile. +There is one area where an optimisation for lxml may negatively affect the performance of et_xmfile and that is when using the `.element()` method on the xmlfile context manager. It is, therefore, recommended simply to create Elements write these directly, as in the sample code. 
diff --git a/.cache/pip/http-v2/e/5/7/b/5/e57b55fae68b16ab8d58c168c03725b4e894bffb9cdfa0c5252b04f9 b/.cache/pip/http-v2/e/5/7/b/5/e57b55fae68b16ab8d58c168c03725b4e894bffb9cdfa0c5252b04f9 new file mode 100644 index 0000000000000000000000000000000000000000..9f9c8b5db785ba79e023f23d1e0b3e467596dc1b Binary files /dev/null and b/.cache/pip/http-v2/e/5/7/b/5/e57b55fae68b16ab8d58c168c03725b4e894bffb9cdfa0c5252b04f9 differ diff --git a/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470 b/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470 new file mode 100644 index 0000000000000000000000000000000000000000..fd68567cfc66126e9dd0e00411296572bea1dce2 Binary files /dev/null and b/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470 differ diff --git a/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470.body b/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470.body new file mode 100644 index 0000000000000000000000000000000000000000..52118f1e5c83bd7ef39196a749651fc87d176812 --- /dev/null +++ b/.cache/pip/http-v2/e/7/5/e/3/e75e3a72ec5ffce867a4e738b4b84c7a8477fb04f819b70bfe837470.body @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: shellingham +Version: 1.5.4 +Summary: Tool to Detect Surrounding Shell +Home-page: https://github.com/sarugaku/shellingham +Author: Tzu-ping Chung +Author-email: uranusjr@gmail.com +License: ISC License +Keywords: shell +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: ISC License (ISCL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming 
Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE + +============================================= +Shellingham: Tool to Detect Surrounding Shell +============================================= + +.. image:: https://img.shields.io/pypi/v/shellingham.svg + :target: https://pypi.org/project/shellingham/ + +Shellingham detects what shell the current Python executable is running in. + + +Usage +===== + +.. code-block:: python + + >>> import shellingham + >>> shellingham.detect_shell() + ('bash', '/bin/bash') + +``detect_shell`` pokes around the process's running environment to determine +what shell it is run in. It returns a 2-tuple: + +* The shell name, always lowercased. +* The command used to run the shell. + +``ShellDetectionFailure`` is raised if ``detect_shell`` fails to detect the +surrounding shell. + + +Notes +===== + +* The shell name is always lowercased. +* On Windows, the shell name is the name of the executable, minus the file + extension. + + +Notes for Application Developers +================================ + +Remember, your application's user is not necessarily using a shell. +Shellingham raises ``ShellDetectionFailure`` if there is no shell to detect, +but *your application should almost never do this to your user*. + +A practical approach to this is to wrap ``detect_shell`` in a try block, and +provide a sane default on failure + +.. code-block:: python + + try: + shell = shellingham.detect_shell() + except shellingham.ShellDetectionFailure: + shell = provide_default() + + +There are a few choices for you to choose from. + +* The POSIX standard mandates the environment variable ``SHELL`` to refer to + "the user's preferred command language interpreter". 
This is always available + (even if the user is not in an interactive session), and likely the correct + choice to launch an interactive sub-shell with. +* A command ``sh`` is almost guaranteed to exist, likely at ``/bin/sh``, since + several POSIX tools rely on it. This should be suitable if you want to run a + (possibly non-interactive) script. +* All versions of DOS and Windows have an environment variable ``COMSPEC``. + This can always be used to launch a usable command prompt (e.g. `cmd.exe` on + Windows). + +Here's a simple implementation to provide a default shell + +.. code-block:: python + + import os + + def provide_default(): + if os.name == 'posix': + return os.environ['SHELL'] + elif os.name == 'nt': + return os.environ['COMSPEC'] + raise NotImplementedError(f'OS {os.name!r} support not available') diff --git a/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a b/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a new file mode 100644 index 0000000000000000000000000000000000000000..0291af6bbd7efcfbffe2281add017d1bb6c9c519 Binary files /dev/null and b/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a differ diff --git a/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a.body b/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a.body new file mode 100644 index 0000000000000000000000000000000000000000..e60029b59c57e802208670707df281db53c1c6fd Binary files /dev/null and b/.cache/pip/http-v2/e/a/2/2/a/ea22a79fcbb7a5195dc8846d5b7ddabfe6981d47f448fe52cbe95a8a.body differ diff --git a/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a b/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a new file mode 100644 index 0000000000000000000000000000000000000000..799a24b2f703c1e82c6794f3f39a718e0fd0bc85 Binary files /dev/null and 
b/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a differ diff --git a/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a.body b/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a.body new file mode 100644 index 0000000000000000000000000000000000000000..2f85cd7b576bf7998b55999cfe2f5519e3b75be3 Binary files /dev/null and b/.cache/pip/http-v2/e/d/c/8/1/edc81a082c4f78d9a61d64310085b4ef8cb4cf6233ba48e26127800a.body differ diff --git a/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c b/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c new file mode 100644 index 0000000000000000000000000000000000000000..6435d38c26a12bfeea7dfa1441eb372f92887cc5 Binary files /dev/null and b/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c differ diff --git a/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c.body b/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c.body new file mode 100644 index 0000000000000000000000000000000000000000..68529fbd400e902db54175b9286d9f24eb6976e1 --- /dev/null +++ b/.cache/pip/http-v2/e/d/e/d/1/eded1062df3a835b1d654db11016327d0de7276866caf329f6f27b1c.body @@ -0,0 +1,94 @@ +Metadata-Version: 2.1 +Name: contourpy +Version: 1.3.3 +Summary: Python library for calculating contours of 2D quadrilateral grids +Author-Email: Ian Thomas +License: BSD 3-Clause License + + Copyright (c) 2021-2025, ContourPy Developers. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + 2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: C++ +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Scientific/Engineering :: Information Analysis +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: Topic :: Scientific/Engineering :: Visualization +Project-URL: Homepage, https://github.com/contourpy/contourpy +Project-URL: Changelog, https://contourpy.readthedocs.io/en/latest/changelog.html +Project-URL: Documentation, https://contourpy.readthedocs.io +Project-URL: Repository, https://github.com/contourpy/contourpy +Requires-Python: >=3.11 +Requires-Dist: numpy>=1.25 +Provides-Extra: docs +Requires-Dist: furo; extra == "docs" +Requires-Dist: sphinx>=7.2; extra == "docs" +Requires-Dist: sphinx-copybutton; extra == "docs" +Provides-Extra: bokeh +Requires-Dist: bokeh; extra == "bokeh" +Requires-Dist: selenium; extra == "bokeh" +Provides-Extra: mypy +Requires-Dist: contourpy[bokeh,docs]; extra == "mypy" +Requires-Dist: bokeh; extra == "mypy" +Requires-Dist: docutils-stubs; extra == "mypy" +Requires-Dist: mypy==1.17.0; extra == "mypy" +Requires-Dist: types-Pillow; extra == "mypy" +Provides-Extra: test +Requires-Dist: contourpy[test-no-images]; extra == "test" +Requires-Dist: matplotlib; extra == "test" +Requires-Dist: Pillow; extra == "test" +Provides-Extra: test-no-images +Requires-Dist: pytest; extra == "test-no-images" +Requires-Dist: pytest-cov; extra == "test-no-images" +Requires-Dist: pytest-rerunfailures; extra == "test-no-images" +Requires-Dist: pytest-xdist; extra == "test-no-images" +Requires-Dist: wurlitzer; extra == 
"test-no-images" +Description-Content-Type: text/markdown + +ContourPy + +ContourPy is a Python library for calculating contours of 2D quadrilateral grids. It is written in C++11 and wrapped using pybind11. + +It contains the 2005 and 2014 algorithms used in Matplotlib as well as a newer algorithm that includes more features and is available in both serial and multithreaded versions. It provides an easy way for Python libraries to use contouring algorithms without having to include Matplotlib as a dependency. + + * **Documentation**: https://contourpy.readthedocs.io + * **Source code**: https://github.com/contourpy/contourpy + +| | | +| --- | --- | +| Latest release | [![PyPI version](https://img.shields.io/pypi/v/contourpy.svg?label=pypi&color=fdae61)](https://pypi.python.org/pypi/contourpy) [![conda-forge version](https://img.shields.io/conda/v/conda-forge/contourpy.svg?label=conda-forge&color=a6d96a)](https://anaconda.org/conda-forge/contourpy) | +| Downloads | [![PyPi downloads](https://img.shields.io/pypi/dm/contourpy?label=pypi&style=flat&color=fdae61)](https://pepy.tech/project/contourpy) | +| Python version | [![Platforms](https://img.shields.io/pypi/pyversions/contourpy?color=fdae61)](https://pypi.org/project/contourpy/) | +| Coverage | [![Codecov](https://img.shields.io/codecov/c/gh/contourpy/contourpy?color=fdae61&label=codecov)](https://app.codecov.io/gh/contourpy/contourpy) | diff --git a/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468 b/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468 new file mode 100644 index 0000000000000000000000000000000000000000..0e0ffec3edb06db50378a70351018445950744d6 Binary files /dev/null and b/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468 differ diff --git a/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468.body 
b/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468.body new file mode 100644 index 0000000000000000000000000000000000000000..067e8fa64f7f45553a3fddf950eaaa58b6b333ec --- /dev/null +++ b/.cache/pip/http-v2/f/0/0/1/6/f0016405b12ca3de2d9a4b45f0fd6c81001c3eb4ace95d2f6bb0a468.body @@ -0,0 +1,88 @@ +Metadata-Version: 2.4 +Name: Mako +Version: 1.3.10 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: https://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.makotemplates.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: MarkupSafe>=0.9.2 +Provides-Extra: testing +Requires-Dist: pytest; extra == "testing" +Provides-Extra: babel +Requires-Dist: Babel; extra == "babel" +Provides-Extra: lingua +Requires-Dist: lingua; extra == "lingua" +Dynamic: license-file + +========================= +Mako Templates for Python +========================= + +Mako is a template library written in Python. 
It provides a familiar, non-XML +syntax which compiles into Python modules for maximum performance. Mako's +syntax and API borrows from the best ideas of many others, including Django +templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded +Python (i.e. Python Server Page) language, which refines the familiar ideas +of componentized layout and inheritance to produce one of the most +straightforward and flexible models available, while also maintaining close +ties to Python calling and scoping semantics. + +Nutshell +======== + +:: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + +Philosophy +=========== + +Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + +Documentation +============== + +See documentation for Mako at https://docs.makotemplates.org/en/latest/ + +License +======== + +Mako is licensed under an MIT-style license (see LICENSE). +Other incorporated projects may be licensed under different licenses. +All licenses allow for non-commercial and commercial use. diff --git a/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73 b/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73 new file mode 100644 index 0000000000000000000000000000000000000000..18c3d8589fd5eddef7bb976a80da482318a57381 Binary files /dev/null and b/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73 differ diff --git a/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73.body b/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73.body new file mode 100644 index 0000000000000000000000000000000000000000..d63401ebb248612ae0276512e071bdd868157e9d --- /dev/null +++ b/.cache/pip/http-v2/f/0/2/8/8/f02886ef0c4db9ba4f7adb776b44f35b03cd06cb61489cc10b4ded73.body @@ -0,0 +1,243 @@ +Metadata-Version: 2.4 +Name: SQLAlchemy +Version: 2.0.48 +Summary: Database Abstraction Library +Home-page: https://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.sqlalchemy.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: 
Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: importlib-metadata; python_version < "3.8" +Requires-Dist: greenlet>=1; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))) +Requires-Dist: typing-extensions>=4.6.0 +Provides-Extra: asyncio +Requires-Dist: greenlet>=1; extra == "asyncio" +Provides-Extra: mypy +Requires-Dist: mypy>=0.910; extra == "mypy" +Provides-Extra: mssql +Requires-Dist: pyodbc; extra == "mssql" +Provides-Extra: mssql-pymssql +Requires-Dist: pymssql; extra == "mssql-pymssql" +Provides-Extra: mssql-pyodbc +Requires-Dist: pyodbc; extra == "mssql-pyodbc" +Provides-Extra: mysql +Requires-Dist: mysqlclient>=1.4.0; extra == "mysql" +Provides-Extra: mysql-connector +Requires-Dist: mysql-connector-python; extra == "mysql-connector" +Provides-Extra: mariadb-connector +Requires-Dist: mariadb!=1.1.10,!=1.1.2,!=1.1.5,>=1.0.1; extra == "mariadb-connector" +Provides-Extra: oracle +Requires-Dist: cx_oracle>=8; extra == "oracle" +Provides-Extra: oracle-oracledb +Requires-Dist: oracledb>=1.0.1; extra == "oracle-oracledb" +Provides-Extra: postgresql +Requires-Dist: psycopg2>=2.7; extra == "postgresql" +Provides-Extra: postgresql-pg8000 +Requires-Dist: pg8000>=1.29.1; extra == "postgresql-pg8000" +Provides-Extra: postgresql-asyncpg 
+Requires-Dist: greenlet>=1; extra == "postgresql-asyncpg" +Requires-Dist: asyncpg; extra == "postgresql-asyncpg" +Provides-Extra: postgresql-psycopg2binary +Requires-Dist: psycopg2-binary; extra == "postgresql-psycopg2binary" +Provides-Extra: postgresql-psycopg2cffi +Requires-Dist: psycopg2cffi; extra == "postgresql-psycopg2cffi" +Provides-Extra: postgresql-psycopg +Requires-Dist: psycopg>=3.0.7; extra == "postgresql-psycopg" +Provides-Extra: postgresql-psycopgbinary +Requires-Dist: psycopg[binary]>=3.0.7; extra == "postgresql-psycopgbinary" +Provides-Extra: pymysql +Requires-Dist: pymysql; extra == "pymysql" +Provides-Extra: aiomysql +Requires-Dist: greenlet>=1; extra == "aiomysql" +Requires-Dist: aiomysql>=0.2.0; extra == "aiomysql" +Provides-Extra: aioodbc +Requires-Dist: greenlet>=1; extra == "aioodbc" +Requires-Dist: aioodbc; extra == "aioodbc" +Provides-Extra: asyncmy +Requires-Dist: greenlet>=1; extra == "asyncmy" +Requires-Dist: asyncmy!=0.2.4,!=0.2.6,>=0.2.3; extra == "asyncmy" +Provides-Extra: aiosqlite +Requires-Dist: greenlet>=1; extra == "aiosqlite" +Requires-Dist: aiosqlite; extra == "aiosqlite" +Requires-Dist: typing_extensions!=3.10.0.1; extra == "aiosqlite" +Provides-Extra: sqlcipher +Requires-Dist: sqlcipher3_binary; extra == "sqlcipher" +Dynamic: license-file + +SQLAlchemy +========== + +|PyPI| |Python| |Downloads| + +.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI + +.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy + :target: https://pypi.org/project/sqlalchemy + :alt: PyPI - Python Version + +.. 
|Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month + :target: https://pepy.tech/project/sqlalchemy + :alt: PyPI - Downloads + + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. 
+* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer-initiated + decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. 
The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +https://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +Code of Conduct +--------------- + +Above all, SQLAlchemy places great emphasis on polite, thoughtful, and +constructive communication between users and developers. +Please see our current Code of Conduct at +`Code of Conduct `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. 
+ diff --git a/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb b/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb new file mode 100644 index 0000000000000000000000000000000000000000..9db3e8d6c3244e65c567ba3eff481f8fae8710f8 Binary files /dev/null and b/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb differ diff --git a/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb.body b/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb.body new file mode 100644 index 0000000000000000000000000000000000000000..43db7560aef991d58f37f654c54cb68010e607bf Binary files /dev/null and b/.cache/pip/http-v2/f/1/9/c/b/f19cb9f2548ebec3162f98dc0753cc5fe07d32364a02b1720f376edb.body differ diff --git a/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045 b/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045 new file mode 100644 index 0000000000000000000000000000000000000000..b11572b6d1702fd78ab355ade6bfadff35335d28 Binary files /dev/null and b/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045 differ diff --git a/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045.body b/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045.body new file mode 100644 index 0000000000000000000000000000000000000000..502e1e68ef0283477468639308d430809b05f082 --- /dev/null +++ b/.cache/pip/http-v2/f/7/7/1/5/f7715d9735eb5aed5a2c8ef4bc2b1922cbb4a230b04287a26e106045.body @@ -0,0 +1,907 @@ +Metadata-Version: 2.1 +Name: matplotlib +Version: 3.10.8 +Summary: Python plotting package +Author: John D. 
Hunter, Michael Droettboom +Author-Email: Unknown +License: License agreement for matplotlib versions 1.3.0 and later + ========================================================= + + 1. This LICENSE AGREEMENT is between the Matplotlib Development Team + ("MDT"), and the Individual or Organization ("Licensee") accessing and + otherwise using matplotlib software in source or binary form and its + associated documentation. + + 2. Subject to the terms and conditions of this License Agreement, MDT + hereby grants Licensee a nonexclusive, royalty-free, world-wide license + to reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use matplotlib + alone or in any derivative version, provided, however, that MDT's + License Agreement and MDT's notice of copyright, i.e., "Copyright (c) + 2012- Matplotlib Development Team; All Rights Reserved" are retained in + matplotlib alone or in any derivative version prepared by + Licensee. + + 3. In the event Licensee prepares a derivative work that is based on or + incorporates matplotlib or any part thereof, and wants to + make the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to matplotlib . + + 4. MDT is making matplotlib available to Licensee on an "AS + IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB + WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR + LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING + MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF + THE POSSIBILITY THEREOF. + + 6. 
This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between MDT and + Licensee. This License Agreement does not grant permission to use MDT + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using matplotlib , + Licensee agrees to be bound by the terms and conditions of this License + Agreement. + + License agreement for matplotlib versions prior to 1.3.0 + ======================================================== + + 1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the + Individual or Organization ("Licensee") accessing and otherwise using + matplotlib software in source or binary form and its associated + documentation. + + 2. Subject to the terms and conditions of this License Agreement, JDH + hereby grants Licensee a nonexclusive, royalty-free, world-wide license + to reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use matplotlib + alone or in any derivative version, provided, however, that JDH's + License Agreement and JDH's notice of copyright, i.e., "Copyright (c) + 2002-2011 John D. Hunter; All Rights Reserved" are retained in + matplotlib alone or in any derivative version prepared by + Licensee. + + 3. In the event Licensee prepares a derivative work that is based on or + incorporates matplotlib or any part thereof, and wants to + make the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary of + the changes made to matplotlib. + + 4. JDH is making matplotlib available to Licensee on an "AS + IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR + IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND + DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB + WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR + LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING + MATPLOTLIB , OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF + THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between JDH and + Licensee. This License Agreement does not grant permission to use JDH + trademarks or trade name in a trademark sense to endorse or promote + products or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using matplotlib, + Licensee agrees to be bound by the terms and conditions of this License + Agreement. + ---- + + This binary distrubution of Matplotlib can also bundle the following software + (depending on the build): + + Name: AMS Fonts + Files: matplotlib/tests/cmr10.pfb + Description: Type-1 version of one of Knuth's Computer Modern fonts + License: OFL-1.1 + The cmr10.pfb file is a Type-1 version of one of Knuth's Computer Modern fonts. + It is included here as test data only, but the following license applies. + + Copyright (c) 1997, 2009, American Mathematical Society (http://www.ams.org). + All Rights Reserved. + + "cmb10" is a Reserved Font Name for this Font Software. + "cmbsy10" is a Reserved Font Name for this Font Software. + "cmbsy5" is a Reserved Font Name for this Font Software. + "cmbsy6" is a Reserved Font Name for this Font Software. + "cmbsy7" is a Reserved Font Name for this Font Software. 
+ "cmbsy8" is a Reserved Font Name for this Font Software. + "cmbsy9" is a Reserved Font Name for this Font Software. + "cmbx10" is a Reserved Font Name for this Font Software. + "cmbx12" is a Reserved Font Name for this Font Software. + "cmbx5" is a Reserved Font Name for this Font Software. + "cmbx6" is a Reserved Font Name for this Font Software. + "cmbx7" is a Reserved Font Name for this Font Software. + "cmbx8" is a Reserved Font Name for this Font Software. + "cmbx9" is a Reserved Font Name for this Font Software. + "cmbxsl10" is a Reserved Font Name for this Font Software. + "cmbxti10" is a Reserved Font Name for this Font Software. + "cmcsc10" is a Reserved Font Name for this Font Software. + "cmcsc8" is a Reserved Font Name for this Font Software. + "cmcsc9" is a Reserved Font Name for this Font Software. + "cmdunh10" is a Reserved Font Name for this Font Software. + "cmex10" is a Reserved Font Name for this Font Software. + "cmex7" is a Reserved Font Name for this Font Software. + "cmex8" is a Reserved Font Name for this Font Software. + "cmex9" is a Reserved Font Name for this Font Software. + "cmff10" is a Reserved Font Name for this Font Software. + "cmfi10" is a Reserved Font Name for this Font Software. + "cmfib8" is a Reserved Font Name for this Font Software. + "cminch" is a Reserved Font Name for this Font Software. + "cmitt10" is a Reserved Font Name for this Font Software. + "cmmi10" is a Reserved Font Name for this Font Software. + "cmmi12" is a Reserved Font Name for this Font Software. + "cmmi5" is a Reserved Font Name for this Font Software. + "cmmi6" is a Reserved Font Name for this Font Software. + "cmmi7" is a Reserved Font Name for this Font Software. + "cmmi8" is a Reserved Font Name for this Font Software. + "cmmi9" is a Reserved Font Name for this Font Software. + "cmmib10" is a Reserved Font Name for this Font Software. + "cmmib5" is a Reserved Font Name for this Font Software. 
+ "cmmib6" is a Reserved Font Name for this Font Software. + "cmmib7" is a Reserved Font Name for this Font Software. + "cmmib8" is a Reserved Font Name for this Font Software. + "cmmib9" is a Reserved Font Name for this Font Software. + "cmr10" is a Reserved Font Name for this Font Software. + "cmr12" is a Reserved Font Name for this Font Software. + "cmr17" is a Reserved Font Name for this Font Software. + "cmr5" is a Reserved Font Name for this Font Software. + "cmr6" is a Reserved Font Name for this Font Software. + "cmr7" is a Reserved Font Name for this Font Software. + "cmr8" is a Reserved Font Name for this Font Software. + "cmr9" is a Reserved Font Name for this Font Software. + "cmsl10" is a Reserved Font Name for this Font Software. + "cmsl12" is a Reserved Font Name for this Font Software. + "cmsl8" is a Reserved Font Name for this Font Software. + "cmsl9" is a Reserved Font Name for this Font Software. + "cmsltt10" is a Reserved Font Name for this Font Software. + "cmss10" is a Reserved Font Name for this Font Software. + "cmss12" is a Reserved Font Name for this Font Software. + "cmss17" is a Reserved Font Name for this Font Software. + "cmss8" is a Reserved Font Name for this Font Software. + "cmss9" is a Reserved Font Name for this Font Software. + "cmssbx10" is a Reserved Font Name for this Font Software. + "cmssdc10" is a Reserved Font Name for this Font Software. + "cmssi10" is a Reserved Font Name for this Font Software. + "cmssi12" is a Reserved Font Name for this Font Software. + "cmssi17" is a Reserved Font Name for this Font Software. + "cmssi8" is a Reserved Font Name for this Font Software. + "cmssi9" is a Reserved Font Name for this Font Software. + "cmssq8" is a Reserved Font Name for this Font Software. + "cmssqi8" is a Reserved Font Name for this Font Software. + "cmsy10" is a Reserved Font Name for this Font Software. + "cmsy5" is a Reserved Font Name for this Font Software. + "cmsy6" is a Reserved Font Name for this Font Software. 
+ "cmsy7" is a Reserved Font Name for this Font Software. + "cmsy8" is a Reserved Font Name for this Font Software. + "cmsy9" is a Reserved Font Name for this Font Software. + "cmtcsc10" is a Reserved Font Name for this Font Software. + "cmtex10" is a Reserved Font Name for this Font Software. + "cmtex8" is a Reserved Font Name for this Font Software. + "cmtex9" is a Reserved Font Name for this Font Software. + "cmti10" is a Reserved Font Name for this Font Software. + "cmti12" is a Reserved Font Name for this Font Software. + "cmti7" is a Reserved Font Name for this Font Software. + "cmti8" is a Reserved Font Name for this Font Software. + "cmti9" is a Reserved Font Name for this Font Software. + "cmtt10" is a Reserved Font Name for this Font Software. + "cmtt12" is a Reserved Font Name for this Font Software. + "cmtt8" is a Reserved Font Name for this Font Software. + "cmtt9" is a Reserved Font Name for this Font Software. + "cmu10" is a Reserved Font Name for this Font Software. + "cmvtt10" is a Reserved Font Name for this Font Software. + "euex10" is a Reserved Font Name for this Font Software. + "euex7" is a Reserved Font Name for this Font Software. + "euex8" is a Reserved Font Name for this Font Software. + "euex9" is a Reserved Font Name for this Font Software. + "eufb10" is a Reserved Font Name for this Font Software. + "eufb5" is a Reserved Font Name for this Font Software. + "eufb7" is a Reserved Font Name for this Font Software. + "eufm10" is a Reserved Font Name for this Font Software. + "eufm5" is a Reserved Font Name for this Font Software. + "eufm7" is a Reserved Font Name for this Font Software. + "eurb10" is a Reserved Font Name for this Font Software. + "eurb5" is a Reserved Font Name for this Font Software. + "eurb7" is a Reserved Font Name for this Font Software. + "eurm10" is a Reserved Font Name for this Font Software. + "eurm5" is a Reserved Font Name for this Font Software. + "eurm7" is a Reserved Font Name for this Font Software. 
+ "eusb10" is a Reserved Font Name for this Font Software. + "eusb5" is a Reserved Font Name for this Font Software. + "eusb7" is a Reserved Font Name for this Font Software. + "eusm10" is a Reserved Font Name for this Font Software. + "eusm5" is a Reserved Font Name for this Font Software. + "eusm7" is a Reserved Font Name for this Font Software. + "lasy10" is a Reserved Font Name for this Font Software. + "lasy5" is a Reserved Font Name for this Font Software. + "lasy6" is a Reserved Font Name for this Font Software. + "lasy7" is a Reserved Font Name for this Font Software. + "lasy8" is a Reserved Font Name for this Font Software. + "lasy9" is a Reserved Font Name for this Font Software. + "lasyb10" is a Reserved Font Name for this Font Software. + "lcircle1" is a Reserved Font Name for this Font Software. + "lcirclew" is a Reserved Font Name for this Font Software. + "lcmss8" is a Reserved Font Name for this Font Software. + "lcmssb8" is a Reserved Font Name for this Font Software. + "lcmssi8" is a Reserved Font Name for this Font Software. + "line10" is a Reserved Font Name for this Font Software. + "linew10" is a Reserved Font Name for this Font Software. + "msam10" is a Reserved Font Name for this Font Software. + "msam5" is a Reserved Font Name for this Font Software. + "msam6" is a Reserved Font Name for this Font Software. + "msam7" is a Reserved Font Name for this Font Software. + "msam8" is a Reserved Font Name for this Font Software. + "msam9" is a Reserved Font Name for this Font Software. + "msbm10" is a Reserved Font Name for this Font Software. + "msbm5" is a Reserved Font Name for this Font Software. + "msbm6" is a Reserved Font Name for this Font Software. + "msbm7" is a Reserved Font Name for this Font Software. + "msbm8" is a Reserved Font Name for this Font Software. + "msbm9" is a Reserved Font Name for this Font Software. + "wncyb10" is a Reserved Font Name for this Font Software. + "wncyi10" is a Reserved Font Name for this Font Software. 
+ "wncyr10" is a Reserved Font Name for this Font Software. + "wncysc10" is a Reserved Font Name for this Font Software. + "wncyss10" is a Reserved Font Name for this Font Software. + + This Font Software is licensed under the SIL Open Font License, Version 1.1. + This license is copied below, and is also available with a FAQ at: + http://scripts.sil.org/OFL + + ----------------------------------------------------------- + SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 + ----------------------------------------------------------- + + PREAMBLE + The goals of the Open Font License (OFL) are to stimulate worldwide + development of collaborative font projects, to support the font creation + efforts of academic and linguistic communities, and to provide a free and + open framework in which fonts may be shared and improved in partnership + with others. + + The OFL allows the licensed fonts to be used, studied, modified and + redistributed freely as long as they are not sold by themselves. The + fonts, including any derivative works, can be bundled, embedded, + redistributed and/or sold with any software provided that any reserved + names are not used by derivative works. The fonts and derivatives, + however, cannot be released under any other type of license. The + requirement for fonts to remain under this license does not apply + to any document created using the fonts or their derivatives. + + DEFINITIONS + "Font Software" refers to the set of files released by the Copyright + Holder(s) under this license and clearly marked as such. This may + include source files, build scripts and documentation. + + "Reserved Font Name" refers to any names specified as such after the + copyright statement(s). + + "Original Version" refers to the collection of Font Software components as + distributed by the Copyright Holder(s). 
+ + "Modified Version" refers to any derivative made by adding to, deleting, + or substituting -- in part or in whole -- any of the components of the + Original Version, by changing formats or by porting the Font Software to a + new environment. + + "Author" refers to any designer, engineer, programmer, technical + writer or other person who contributed to the Font Software. + + PERMISSION & CONDITIONS + Permission is hereby granted, free of charge, to any person obtaining + a copy of the Font Software, to use, study, copy, merge, embed, modify, + redistribute, and sell modified and unmodified copies of the Font + Software, subject to the following conditions: + + 1) Neither the Font Software nor any of its individual components, + in Original or Modified Versions, may be sold by itself. + + 2) Original or Modified Versions of the Font Software may be bundled, + redistributed and/or sold with any software, provided that each copy + contains the above copyright notice and this license. These can be + included either as stand-alone text files, human-readable headers or + in the appropriate machine-readable metadata fields within text or + binary files as long as those fields can be easily viewed by the user. + + 3) No Modified Version of the Font Software may use the Reserved Font + Name(s) unless explicit written permission is granted by the corresponding + Copyright Holder. This restriction only applies to the primary font name as + presented to the users. + + 4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font + Software shall not be used to promote, endorse or advertise any + Modified Version, except to acknowledge the contribution(s) of the + Copyright Holder(s) and the Author(s) or with their explicit written + permission. + + 5) The Font Software, modified or unmodified, in part or in whole, + must be distributed entirely under this license, and must not be + distributed under any other license. 
The requirement for fonts to + remain under this license does not apply to any document created + using the Font Software. + + TERMINATION + This license becomes null and void if any of the above conditions are + not met. + + DISCLAIMER + THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT + OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE + COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL + DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM + OTHER DEALINGS IN THE FONT SOFTWARE. + + + + Name: BaKoMa Fonts + Files: matplotlib/mpl-data/fonts/ttf/cm*.ttf matplotlib/mpl-data/fonts/afm/cm*.afm + Description: Computer Modern Fonts in PostScript Type 1 and TrueType font formats. + License: BaKoMa Fonts Licence + BaKoMa Fonts Licence + -------------------- + + This licence covers two font packs (known as BaKoMa Fonts Collection, + which is available at `CTAN:fonts/cm/ps-type1/bakoma/'): + + 1) BaKoMa-CM (1.1/12-Nov-94) + Computer Modern Fonts in PostScript Type 1 and TrueType font formats. + + 2) BaKoMa-AMS (1.2/19-Jan-95) + AMS TeX fonts in PostScript Type 1 and TrueType font formats. + + Copyright (C) 1994, 1995, Basil K. Malyshev. All Rights Reserved. + + Permission to copy and distribute these fonts for any purpose is + hereby granted without fee, provided that the above copyright notice, + author statement and this permission notice appear in all copies of + these fonts and related documentation. 
+ + Permission to modify and distribute modified fonts for any purpose is + hereby granted without fee, provided that the copyright notice, + author statement, this permission notice and location of original + fonts (http://www.ctan.org/tex-archive/fonts/cm/ps-type1/bakoma) + appear in all copies of modified fonts and related documentation. + + Permission to use these fonts (embedding into PostScript, PDF, SVG + and printing by using any software) is hereby granted without fee. + It is not required to provide any notices about using these fonts. + + Basil K. Malyshev + INSTITUTE FOR HIGH ENERGY PHYSICS + IHEP, OMVT + Moscow Region + 142281 PROTVINO + RUSSIA + + E-Mail: bakoma@mail.ru + or malyshev@mail.ihep.ru + + + + + Name: ColorBrewer Color Schemes + Files: lib/matplotlib/_cm.py + Description: Color schemes from ColorBrewer + License: Apache-2.0 + Apache-Style Software License for ColorBrewer software and ColorBrewer Color Schemes + + Copyright (c) 2002 Cynthia Brewer, Mark Harrower, and The Pennsylvania State University. + + Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software distributed + under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR + CONDITIONS OF ANY KIND, either express or implied. See the License for the + specific language governing permissions and limitations under the License. + + + Name: Courier 10 + Files: matplotlib/tests/Courier10PitchBT-Bold.pfb + Description: Courier 10 font, used in tests. + License: Bitstream-Charter + The Courier10PitchBT-Bold.pfb file is a Type-1 version of + Courier 10 Pitch BT Bold by Bitstream, obtained from + . It is included + here as test data only, but the following license applies. + + + (c) Copyright 1989-1992, Bitstream Inc., Cambridge, MA. 
+ + You are hereby granted permission under all Bitstream propriety rights + to use, copy, modify, sublicense, sell, and redistribute the 4 Bitstream + Charter (r) Type 1 outline fonts and the 4 Courier Type 1 outline fonts + for any purpose and without restriction; provided, that this notice is + left intact on all copies of such fonts and that Bitstream's trademark + is acknowledged as shown below on all unmodified copies of the 4 Charter + Type 1 fonts. + + BITSTREAM CHARTER is a registered trademark of Bitstream Inc. + + + + Name: JSXTools resize observer + Files: + Description: Minimal polyfill for the ResizeObserver API + License: CC0-1.0 + # CC0 1.0 Universal + + ## Statement of Purpose + + The laws of most jurisdictions throughout the world automatically confer + exclusive Copyright and Related Rights (defined below) upon the creator and + subsequent owner(s) (each and all, an “owner”) of an original work of + authorship and/or a database (each, a “Work”). + + Certain owners wish to permanently relinquish those rights to a Work for the + purpose of contributing to a commons of creative, cultural and scientific works + (“Commons”) that the public can reliably and without fear of later claims of + infringement build upon, modify, incorporate in other works, reuse and + redistribute as freely as possible in any form whatsoever and for any purposes, + including without limitation commercial purposes. These owners may contribute + to the Commons to promote the ideal of a free culture and the further + production of creative, cultural and scientific works, or to gain reputation or + greater distribution for their Work in part through the use and efforts of + others. 
+ + For these and/or other purposes and motivations, and without any expectation of + additional consideration or compensation, the person associating CC0 with a + Work (the “Affirmer”), to the extent that he or she is an owner of Copyright + and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and + publicly distribute the Work under its terms, with knowledge of his or her + Copyright and Related Rights in the Work and the meaning and intended legal + effect of CC0 on those rights. + + 1. Copyright and Related Rights. A Work made available under CC0 may be + protected by copyright and related or neighboring rights (“Copyright and + Related Rights”). Copyright and Related Rights include, but are not limited + to, the following: + 1. the right to reproduce, adapt, distribute, perform, display, communicate, + and translate a Work; + 2. moral rights retained by the original author(s) and/or performer(s); + 3. publicity and privacy rights pertaining to a person’s image or likeness + depicted in a Work; + 4. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(i), below; + 5. rights protecting the extraction, dissemination, use and reuse of data in + a Work; + 6. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation thereof, + including any amended or successor version of such directive); and + 7. other similar, equivalent or corresponding rights throughout the world + based on applicable law or treaty, and any national implementations + thereof. + + 2. Waiver. 
To the greatest extent permitted by, but not in contravention of, + applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and + unconditionally waives, abandons, and surrenders all of Affirmer’s Copyright + and Related Rights and associated claims and causes of action, whether now + known or unknown (including existing as well as future claims and causes of + action), in the Work (i) in all territories worldwide, (ii) for the maximum + duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the “Waiver”). Affirmer + makes the Waiver for the benefit of each member of the public at large and + to the detriment of Affirmer’s heirs and successors, fully intending that + such Waiver shall not be subject to revocation, rescission, cancellation, + termination, or any other legal or equitable action to disrupt the quiet + enjoyment of the Work by the public as contemplated by Affirmer’s express + Statement of Purpose. + + 3. Public License Fallback. Should any part of the Waiver for any reason be + judged legally invalid or ineffective under applicable law, then the Waiver + shall be preserved to the maximum extent permitted taking into account + Affirmer’s express Statement of Purpose. 
In addition, to the extent the + Waiver is so judged Affirmer hereby grants to each affected person a + royalty-free, non transferable, non sublicensable, non exclusive, + irrevocable and unconditional license to exercise Affirmer’s Copyright and + Related Rights in the Work (i) in all territories worldwide, (ii) for the + maximum duration provided by applicable law or treaty (including future time + extensions), (iii) in any current or future medium and for any number of + copies, and (iv) for any purpose whatsoever, including without limitation + commercial, advertising or promotional purposes (the “License”). The License + shall be deemed effective as of the date CC0 was applied by Affirmer to the + Work. Should any part of the License for any reason be judged legally + invalid or ineffective under applicable law, such partial invalidity or + ineffectiveness shall not invalidate the remainder of the License, and in + such case Affirmer hereby affirms that he or she will not (i) exercise any + of his or her remaining Copyright and Related Rights in the Work or (ii) + assert any associated claims and causes of action with respect to the Work, + in either case contrary to Affirmer’s express Statement of Purpose. + + 4. Limitations and Disclaimers. + 1. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + 2. Affirmer offers the Work as-is and makes no representations or warranties + of any kind concerning the Work, express, implied, statutory or + otherwise, including without limitation warranties of title, + merchantability, fitness for a particular purpose, non infringement, or + the absence of latent or other defects, accuracy, or the present or + absence of errors, whether or not discoverable, all to the greatest + extent permissible under applicable law. + 3. 
Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person’s Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the Work. + 4. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to this + CC0 or use of the Work. + + For more information, please see + http://creativecommons.org/publicdomain/zero/1.0/. + + + Name: QHull + Files: matplotlib/_qhull.*.so + Description: Convex hull, Delaunay triangulation, Voronoi diagrams, Halfspace intersection + License: Qhull + Qhull, Copyright (c) 1993-2020 + + C.B. Barber + Arlington, MA + + and + + The National Science and Technology Research Center for + Computation and Visualization of Geometric Structures + (The Geometry Center) + University of Minnesota + + email: qhull@qhull.org + + This software includes Qhull from C.B. Barber and The Geometry Center. + Files derived from Qhull 1.0 are copyrighted by the Geometry Center. The + remaining files are copyrighted by C.B. Barber. Qhull is free software + and may be obtained via http from www.qhull.org. It may be freely copied, + modified, and redistributed under the following conditions: + + 1. All copyright notices must remain intact in all files. + + 2. A copy of this text file must be distributed along with any copies + of Qhull that you redistribute; this includes copies that you have + modified, or copies of programs or other software products that + include Qhull. + + 3. If you modify Qhull, you must include a notice giving the + name of the person performing the modification, the date of + modification, and the reason for such modification. + + 4. 
When distributing modified versions of Qhull, or other software + products that include Qhull, you must provide notice that the original + source code may be obtained as noted above. + + 5. There is no warranty or other guarantee of fitness for Qhull, it is + provided solely "as is". Bug reports or fixes may be sent to + qhull_bug@qhull.org; the authors may or may not act on them as + they desire. + + + Name: Qt4 Editor + Files: matplotlib/backends/qt_editor + Description: Module creating PyQt4 form dialogs/layouts to edit various type of parameters + License: MIT + Module creating PyQt4 form dialogs/layouts to edit various type of parameters + + + formlayout License Agreement (MIT License) + ------------------------------------------ + + Copyright (c) 2009 Pierre Raybaut + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. 
+ """ + + + Name: Solarized + Files: matplotlib/mpl-data/stylelib/Solarize_Light2.mplstyle + Description: Solarized color scheme/style + License: MIT + https://github.com/altercation/solarized/blob/master/LICENSE + Copyright (c) 2011 Ethan Schoonover + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + + Name: Stix fonts + Files: matplotlib/mpl-data/fonts/ttf/STIX*.ttf + Description: STIX fonts + License: + TERMS AND CONDITIONS + + 1. Permission is hereby granted, free of charge, to any person + obtaining a copy of the STIX Fonts-TM set accompanying this license + (collectively, the "Fonts") and the associated documentation files + (collectively with the Fonts, the "Font Software"), to reproduce and + distribute the Font Software, including the rights to use, copy, merge + and publish copies of the Font Software, and to permit persons to whom + the Font Software is furnished to do so same, subject to the following + terms and conditions (the "License"). + + 2. 
The following copyright and trademark notice and these Terms and + Conditions shall be included in all copies of one or more of the Font + typefaces and any derivative work created as permitted under this + License: + + Copyright (c) 2001-2005 by the STI Pub Companies, consisting of + the American Institute of Physics, the American Chemical Society, the + American Mathematical Society, the American Physical Society, Elsevier, + Inc., and The Institute of Electrical and Electronic Engineers, Inc. + Portions copyright (c) 1998-2003 by MicroPress, Inc. Portions copyright + (c) 1990 by Elsevier, Inc. All rights reserved. STIX Fonts-TM is a + trademark of The Institute of Electrical and Electronics Engineers, Inc. + + 3. You may (a) convert the Fonts from one format to another (e.g., + from TrueType to PostScript), in which case the normal and reasonable + distortion that occurs during such conversion shall be permitted and (b) + embed or include a subset of the Fonts in a document for the purposes of + allowing users to read text in the document that utilizes the Fonts. In + each case, you may use the STIX Fonts-TM mark to designate the resulting + Fonts or subset of the Fonts. + + 4. You may also (a) add glyphs or characters to the Fonts, or modify + the shape of existing glyphs, so long as the base set of glyphs is not + removed and (b) delete glyphs or characters from the Fonts, provided + that the resulting font set is distributed with the following + disclaimer: "This [name] font does not include all the Unicode points + covered in the STIX Fonts-TM set but may include others." In each case, + the name used to denote the resulting font set shall not include the + term "STIX" or any similar term. + + 5. You may charge a fee in connection with the distribution of the + Font Software, provided that no copy of one or more of the individual + Font typefaces that form the STIX Fonts-TM set may be sold by itself. + + 6. 
THE FONT SOFTWARE IS PROVIDED "AS IS," WITHOUT WARRANTY OF ANY + KIND, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT + OF COPYRIGHT, PATENT, TRADEMARK OR OTHER RIGHT. IN NO EVENT SHALL + MICROPRESS OR ANY OF THE STI PUB COMPANIES BE LIABLE FOR ANY CLAIM, + DAMAGES OR OTHER LIABILITY, INCLUDING, BUT NOT LIMITED TO, ANY GENERAL, + SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM OR OUT OF THE USE OR + INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT + SOFTWARE. + + 7. Except as contained in the notice set forth in Section 2, the + names MicroPress Inc. and STI Pub Companies, as well as the names of the + companies/organizations that compose the STI Pub Companies, shall not be + used in advertising or otherwise to promote the sale, use or other + dealings in the Font Software without the prior written consent of the + respective company or organization. + + 8. This License shall become null and void in the event of any + material breach of the Terms and Conditions herein by licensee. + + 9. A substantial portion of the STIX Fonts set was developed by + MicroPress Inc. for the STI Pub Companies. To obtain additional + mathematical fonts, please contact MicroPress, Inc., 68-30 Harrow + Street, Forest Hills, NY 11375, USA - Phone: (718) 575-1816. + + + Name: Yorick Colormaps + Files: lib/matplotlib/_cm.py + Description: Gist/Yorick colormaps + License: + BSD-style license for gist/yorick colormaps. + + Copyright: + + Copyright (c) 1996. The Regents of the University of California. + All rights reserved. 
+ + Permission to use, copy, modify, and distribute this software for any + purpose without fee is hereby granted, provided that this entire + notice is included in all copies of any software which is or includes + a copy or modification of this software and in all copies of the + supporting documentation for such software. + + This work was produced at the University of California, Lawrence + Livermore National Laboratory under contract no. W-7405-ENG-48 between + the U.S. Department of Energy and The Regents of the University of + California for the operation of UC LLNL. + + + DISCLAIMER + + This software was prepared as an account of work sponsored by an + agency of the United States Government. Neither the United States + Government nor the University of California nor any of their + employees, makes any warranty, express or implied, or assumes any + liability or responsibility for the accuracy, completeness, or + usefulness of any information, apparatus, product, or process + disclosed, or represents that its use would not infringe + privately-owned rights. Reference herein to any specific commercial + products, process, or service by trade name, trademark, manufacturer, + or otherwise, does not necessarily constitute or imply its + endorsement, recommendation, or favoring by the United States + Government or the University of California. The views and opinions of + authors expressed herein do not necessarily state or reflect those of + the United States Government or the University of California, and + shall not be used for advertising or product endorsement purposes. + + + AUTHOR + + David H. Munro wrote Yorick and Gist. Berkeley Yacc (byacc) generated + the Yorick parser. The routines in Math are from LAPACK and FFTPACK; + MathC contains C translations by David H. Munro. The algorithms for + Yorick's random number generator and several special functions in + Yorick/include were taken from Numerical Recipes by Press, et. 
al., + although the Yorick implementations are unrelated to those in + Numerical Recipes. A small amount of code in Gist was adapted from + the X11R4 release, copyright M.I.T. -- the complete copyright notice + may be found in the (unused) file Gist/host.c. + +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: Matplotlib +Classifier: Intended Audience :: Science/Research +Classifier: Intended Audience :: Education +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Scientific/Engineering :: Visualization +Project-URL: Homepage, https://matplotlib.org +Project-URL: Download, https://matplotlib.org/stable/install/index.html +Project-URL: Documentation, https://matplotlib.org +Project-URL: Source Code, https://github.com/matplotlib/matplotlib +Project-URL: Bug Tracker, https://github.com/matplotlib/matplotlib/issues +Project-URL: Forum, https://discourse.matplotlib.org/ +Project-URL: Donate, https://numfocus.org/donate-to-matplotlib +Requires-Python: >=3.10 +Requires-Dist: contourpy>=1.0.1 +Requires-Dist: cycler>=0.10 +Requires-Dist: fonttools>=4.22.0 +Requires-Dist: kiwisolver>=1.3.1 +Requires-Dist: numpy>=1.23 +Requires-Dist: packaging>=20.0 +Requires-Dist: pillow>=8 +Requires-Dist: pyparsing>=3 +Requires-Dist: python-dateutil>=2.7 +Provides-Extra: dev +Requires-Dist: meson-python<0.17.0,>=0.13.1; extra == "dev" +Requires-Dist: pybind11!=2.13.3,>=2.13.2; extra == "dev" +Requires-Dist: setuptools_scm>=7; extra == "dev" +Requires-Dist: setuptools>=64; extra == "dev" +Description-Content-Type: text/markdown + 
+[![PyPi](https://img.shields.io/pypi/v/matplotlib)](https://pypi.org/project/matplotlib/) +[![Conda](https://img.shields.io/conda/vn/conda-forge/matplotlib)](https://anaconda.org/conda-forge/matplotlib) +[![Downloads](https://img.shields.io/pypi/dm/matplotlib)](https://pypi.org/project/matplotlib) +[![NUMFocus](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org) + +[![Discourse help forum](https://img.shields.io/badge/help_forum-discourse-blue.svg)](https://discourse.matplotlib.org) +[![Gitter](https://badges.gitter.im/matplotlib/matplotlib.svg)](https://gitter.im/matplotlib/matplotlib) +[![GitHub issues](https://img.shields.io/badge/issue_tracking-github-blue.svg)](https://github.com/matplotlib/matplotlib/issues) +[![Contributing](https://img.shields.io/badge/PR-Welcome-%23FF8300.svg?)](https://matplotlib.org/stable/devel/index.html) + +[![GitHub actions status](https://github.com/matplotlib/matplotlib/workflows/Tests/badge.svg)](https://github.com/matplotlib/matplotlib/actions?query=workflow%3ATests) +[![Azure pipelines status](https://dev.azure.com/matplotlib/matplotlib/_apis/build/status/matplotlib.matplotlib?branchName=main)](https://dev.azure.com/matplotlib/matplotlib/_build/latest?definitionId=1&branchName=main) +[![AppVeyor status](https://ci.appveyor.com/api/projects/status/github/matplotlib/matplotlib?branch=main&svg=true)](https://ci.appveyor.com/project/matplotlib/matplotlib) +[![Codecov status](https://codecov.io/github/matplotlib/matplotlib/badge.svg?branch=main&service=github)](https://app.codecov.io/gh/matplotlib/matplotlib) +[![EffVer Versioning](https://img.shields.io/badge/version_scheme-EffVer-0097a7)](https://jacobtomlinson.dev/effver) + +![Matplotlib logotype](https://matplotlib.org/_static/logo2.svg) + +Matplotlib is a comprehensive library for creating static, animated, and +interactive visualizations in Python. 
+ +Check out our [home page](https://matplotlib.org/) for more information. + +![image](https://matplotlib.org/_static/readme_preview.png) + +Matplotlib produces publication-quality figures in a variety of hardcopy +formats and interactive environments across platforms. Matplotlib can be +used in Python scripts, Python/IPython shells, web application servers, +and various graphical user interface toolkits. + +## Install + +See the [install +documentation](https://matplotlib.org/stable/users/installing/index.html), +which is generated from `/doc/install/index.rst` + +## Contribute + +You've discovered a bug or something else you want to change — excellent! + +You've worked out a way to fix it — even better! + +You want to tell us about it — best of all! + +Start at the [contributing +guide](https://matplotlib.org/devdocs/devel/contribute.html)! + +## Contact + +[Discourse](https://discourse.matplotlib.org/) is the discussion forum +for general questions and discussions and our recommended starting +point. + +Our active mailing lists (which are mirrored on Discourse) are: + +- [Users](https://mail.python.org/mailman/listinfo/matplotlib-users) + mailing list: +- [Announcement](https://mail.python.org/mailman/listinfo/matplotlib-announce) + mailing list: +- [Development](https://mail.python.org/mailman/listinfo/matplotlib-devel) + mailing list: + +[Gitter](https://gitter.im/matplotlib/matplotlib) is for coordinating +development and asking questions directly related to contributing to +matplotlib. + +## Citing Matplotlib + +If Matplotlib contributes to a project that leads to publication, please +acknowledge this by citing Matplotlib. + +[A ready-made citation +entry](https://matplotlib.org/stable/users/project/citing.html) is +available. 
diff --git a/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95 b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95 new file mode 100644 index 0000000000000000000000000000000000000000..32209ccc02e4b5566b7cede8f9ad60a4d94aaf6c Binary files /dev/null and b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95 differ diff --git a/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body new file mode 100644 index 0000000000000000000000000000000000000000..3a23e1f569284a59cfd86527da892497a666437b Binary files /dev/null and b/.cache/pip/http-v2/f/9/5/d/2/f95d294c2281f9705ab34177ee4732e524880c56ef894e620cf4cb95.body differ diff --git a/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25 b/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25 new file mode 100644 index 0000000000000000000000000000000000000000..694c56984ae963ecc1ad6515294ea2d9f8cded44 Binary files /dev/null and b/.cache/pip/http-v2/f/c/6/8/7/fc687abc6120c7bc7cfd867fe3788604e9f8e9dcf17e51396d803e25 differ diff --git a/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75 b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75 new file mode 100644 index 0000000000000000000000000000000000000000..d3fe4cf49d6400c053b203185eefced1bc32dbe5 Binary files /dev/null and b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75 differ diff --git a/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body new file mode 100644 index 0000000000000000000000000000000000000000..834b0ff2b68818889ac9d085eccfcc61a7da2eca 
--- /dev/null +++ b/.cache/pip/http-v2/f/d/4/0/6/fd406c904ba0d9ecbc45615ea0129f7f3a26756195fc93c0f69dce75.body @@ -0,0 +1,322 @@ +Metadata-Version: 2.4 +Name: huggingface_hub +Version: 1.7.2 +Summary: Client library to download and publish models, datasets and other repos on the huggingface.co hub +Home-page: https://github.com/huggingface/huggingface_hub +Author: Hugging Face, Inc. +Author-email: julien@huggingface.co +License: Apache-2.0 +Keywords: model-hub machine-learning models natural-language-processing deep-learning pytorch pretrained-models +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Requires-Python: >=3.9.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: filelock>=3.10.0 +Requires-Dist: fsspec>=2023.5.0 +Requires-Dist: hf-xet<2.0.0,>=1.4.2; platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "arm64" or platform_machine == "aarch64" +Requires-Dist: httpx<1,>=0.23.0 +Requires-Dist: packaging>=20.9 +Requires-Dist: pyyaml>=5.1 +Requires-Dist: tqdm>=4.42.1 +Requires-Dist: typer +Requires-Dist: typing-extensions>=4.1.0 +Provides-Extra: oauth +Requires-Dist: authlib>=1.3.2; extra == "oauth" +Requires-Dist: fastapi; extra == "oauth" +Requires-Dist: httpx; extra == "oauth" 
+Requires-Dist: itsdangerous; extra == "oauth" +Provides-Extra: torch +Requires-Dist: torch; extra == "torch" +Requires-Dist: safetensors[torch]; extra == "torch" +Provides-Extra: fastai +Requires-Dist: toml; extra == "fastai" +Requires-Dist: fastai>=2.4; extra == "fastai" +Requires-Dist: fastcore>=1.3.27; extra == "fastai" +Provides-Extra: hf-xet +Requires-Dist: hf-xet<2.0.0,>=1.4.2; extra == "hf-xet" +Provides-Extra: mcp +Requires-Dist: mcp>=1.8.0; extra == "mcp" +Provides-Extra: testing +Requires-Dist: authlib>=1.3.2; extra == "testing" +Requires-Dist: fastapi; extra == "testing" +Requires-Dist: httpx; extra == "testing" +Requires-Dist: itsdangerous; extra == "testing" +Requires-Dist: jedi; extra == "testing" +Requires-Dist: Jinja2; extra == "testing" +Requires-Dist: pytest>=8.4.2; extra == "testing" +Requires-Dist: pytest-cov; extra == "testing" +Requires-Dist: pytest-env; extra == "testing" +Requires-Dist: pytest-xdist; extra == "testing" +Requires-Dist: pytest-vcr; extra == "testing" +Requires-Dist: pytest-asyncio; extra == "testing" +Requires-Dist: pytest-rerunfailures<16.0; extra == "testing" +Requires-Dist: pytest-mock; extra == "testing" +Requires-Dist: urllib3<2.0; extra == "testing" +Requires-Dist: soundfile; extra == "testing" +Requires-Dist: Pillow; extra == "testing" +Requires-Dist: numpy; extra == "testing" +Requires-Dist: duckdb; extra == "testing" +Requires-Dist: fastapi; extra == "testing" +Provides-Extra: typing +Requires-Dist: typing-extensions>=4.8.0; extra == "typing" +Requires-Dist: types-PyYAML; extra == "typing" +Requires-Dist: types-simplejson; extra == "typing" +Requires-Dist: types-toml; extra == "typing" +Requires-Dist: types-tqdm; extra == "typing" +Requires-Dist: types-urllib3; extra == "typing" +Provides-Extra: quality +Requires-Dist: ruff>=0.9.0; extra == "quality" +Requires-Dist: mypy==1.15.0; extra == "quality" +Requires-Dist: libcst>=1.4.0; extra == "quality" +Requires-Dist: ty; extra == "quality" +Provides-Extra: all 
+Requires-Dist: authlib>=1.3.2; extra == "all" +Requires-Dist: fastapi; extra == "all" +Requires-Dist: httpx; extra == "all" +Requires-Dist: itsdangerous; extra == "all" +Requires-Dist: jedi; extra == "all" +Requires-Dist: Jinja2; extra == "all" +Requires-Dist: pytest>=8.4.2; extra == "all" +Requires-Dist: pytest-cov; extra == "all" +Requires-Dist: pytest-env; extra == "all" +Requires-Dist: pytest-xdist; extra == "all" +Requires-Dist: pytest-vcr; extra == "all" +Requires-Dist: pytest-asyncio; extra == "all" +Requires-Dist: pytest-rerunfailures<16.0; extra == "all" +Requires-Dist: pytest-mock; extra == "all" +Requires-Dist: urllib3<2.0; extra == "all" +Requires-Dist: soundfile; extra == "all" +Requires-Dist: Pillow; extra == "all" +Requires-Dist: numpy; extra == "all" +Requires-Dist: duckdb; extra == "all" +Requires-Dist: fastapi; extra == "all" +Requires-Dist: ruff>=0.9.0; extra == "all" +Requires-Dist: mypy==1.15.0; extra == "all" +Requires-Dist: libcst>=1.4.0; extra == "all" +Requires-Dist: ty; extra == "all" +Requires-Dist: typing-extensions>=4.8.0; extra == "all" +Requires-Dist: types-PyYAML; extra == "all" +Requires-Dist: types-simplejson; extra == "all" +Requires-Dist: types-toml; extra == "all" +Requires-Dist: types-tqdm; extra == "all" +Requires-Dist: types-urllib3; extra == "all" +Provides-Extra: dev +Requires-Dist: authlib>=1.3.2; extra == "dev" +Requires-Dist: fastapi; extra == "dev" +Requires-Dist: httpx; extra == "dev" +Requires-Dist: itsdangerous; extra == "dev" +Requires-Dist: jedi; extra == "dev" +Requires-Dist: Jinja2; extra == "dev" +Requires-Dist: pytest>=8.4.2; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-env; extra == "dev" +Requires-Dist: pytest-xdist; extra == "dev" +Requires-Dist: pytest-vcr; extra == "dev" +Requires-Dist: pytest-asyncio; extra == "dev" +Requires-Dist: pytest-rerunfailures<16.0; extra == "dev" +Requires-Dist: pytest-mock; extra == "dev" +Requires-Dist: urllib3<2.0; extra == "dev" 
+Requires-Dist: soundfile; extra == "dev" +Requires-Dist: Pillow; extra == "dev" +Requires-Dist: numpy; extra == "dev" +Requires-Dist: duckdb; extra == "dev" +Requires-Dist: fastapi; extra == "dev" +Requires-Dist: ruff>=0.9.0; extra == "dev" +Requires-Dist: mypy==1.15.0; extra == "dev" +Requires-Dist: libcst>=1.4.0; extra == "dev" +Requires-Dist: ty; extra == "dev" +Requires-Dist: typing-extensions>=4.8.0; extra == "dev" +Requires-Dist: types-PyYAML; extra == "dev" +Requires-Dist: types-simplejson; extra == "dev" +Requires-Dist: types-toml; extra == "dev" +Requires-Dist: types-tqdm; extra == "dev" +Requires-Dist: types-urllib3; extra == "dev" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: provides-extra +Dynamic: requires-dist +Dynamic: requires-python +Dynamic: summary + +

+ + + + huggingface_hub library logo + +
+
+

+ +

+ The official Python client for the Huggingface Hub. +

+ +

+ Documentation + GitHub release + PyPi version + PyPI - Downloads + Code coverage +

+ +

+

+ English | + Deutsch | + Français | + हिंदी | + 한국어 | + 中文 (简体) +

+

+ +--- + +**Documentation**: https://hf.co/docs/huggingface_hub + +**Source Code**: https://github.com/huggingface/huggingface_hub + +--- + +## Welcome to the huggingface_hub library + +The `huggingface_hub` library allows you to interact with the [Hugging Face Hub](https://huggingface.co/), a platform democratizing open-source Machine Learning for creators and collaborators. Discover pre-trained models and datasets for your projects or play with the thousands of machine learning apps hosted on the Hub. You can also create and share your own models, datasets and demos with the community. The `huggingface_hub` library provides a simple way to do all these things with Python. + +## Key features + +- [Download files](https://huggingface.co/docs/huggingface_hub/en/guides/download) from the Hub. +- [Upload files](https://huggingface.co/docs/huggingface_hub/en/guides/upload) to the Hub. +- [Manage your repositories](https://huggingface.co/docs/huggingface_hub/en/guides/repository). +- [Run Inference](https://huggingface.co/docs/huggingface_hub/en/guides/inference) on deployed models. +- [Search](https://huggingface.co/docs/huggingface_hub/en/guides/search) for models, datasets and Spaces. +- [Share Model Cards](https://huggingface.co/docs/huggingface_hub/en/guides/model-cards) to document your models. +- [Engage with the community](https://huggingface.co/docs/huggingface_hub/en/guides/community) through PRs and comments. + +## Installation + +Install the `huggingface_hub` package with [pip](https://pypi.org/project/huggingface-hub/): + +```bash +pip install huggingface_hub +``` + +If you prefer, you can also install it with [conda](https://huggingface.co/docs/huggingface_hub/en/installation#install-with-conda). + +In order to keep the package minimal by default, `huggingface_hub` comes with optional dependencies useful for some use cases. 
For example, if you want to use the MCP module, run: + +```bash +pip install "huggingface_hub[mcp]" +``` + +To learn more installation and optional dependencies, check out the [installation guide](https://huggingface.co/docs/huggingface_hub/en/installation). + +## Quick start + +### Download files + +Download a single file + +```py +from huggingface_hub import hf_hub_download + +hf_hub_download(repo_id="tiiuae/falcon-7b-instruct", filename="config.json") +``` + +Or an entire repository + +```py +from huggingface_hub import snapshot_download + +snapshot_download("stabilityai/stable-diffusion-2-1") +``` + +Files will be downloaded in a local cache folder. More details in [this guide](https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache). + +### Login + +The Hugging Face Hub uses tokens to authenticate applications (see [docs](https://huggingface.co/docs/hub/security-tokens)). To log in your machine, run the following CLI: + +```bash +hf auth login +# or using an environment variable +hf auth login --token $HUGGINGFACE_TOKEN +``` + +### Create a repository + +```py +from huggingface_hub import create_repo + +create_repo(repo_id="super-cool-model") +``` + +### Upload files + +Upload a single file + +```py +from huggingface_hub import upload_file + +upload_file( + path_or_fileobj="/home/lysandre/dummy-test/README.md", + path_in_repo="README.md", + repo_id="lysandre/test-model", +) +``` + +Or an entire folder + +```py +from huggingface_hub import upload_folder + +upload_folder( + folder_path="/path/to/local/space", + repo_id="username/my-cool-space", + repo_type="space", +) +``` + +For details in the [upload guide](https://huggingface.co/docs/huggingface_hub/en/guides/upload). + +## Integrating to the Hub. + +We're partnering with cool open source ML libraries to provide free model hosting and versioning. You can find the existing integrations [here](https://huggingface.co/docs/hub/libraries). 
+ +The advantages are: + +- Free model or dataset hosting for libraries and their users. +- Built-in file versioning, even with very large files, thanks to a git-based approach. +- In-browser widgets to play with the uploaded models. +- Anyone can upload a new model for your library, they just need to add the corresponding tag for the model to be discoverable. +- Fast downloads! We use Cloudfront (a CDN) to geo-replicate downloads so they're blazing fast from anywhere on the globe. +- Usage stats and more features to come. + +If you would like to integrate your library, feel free to open an issue to begin the discussion. We wrote a [step-by-step guide](https://huggingface.co/docs/hub/adding-a-library) with ❤️ showing how to do this integration. + +## Contributions (feature requests, bugs, etc.) are super welcome 💙💚💛💜🧡❤️ + +Everyone is welcome to contribute, and we value everybody's contribution. Code is not the only way to help the community. +Answering questions, helping others, reaching out and improving the documentations are immensely valuable to the community. +We wrote a [contribution guide](https://github.com/huggingface/huggingface_hub/blob/main/CONTRIBUTING.md) to summarize +how to get started to contribute to this repository. 
diff --git a/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0 b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0 new file mode 100644 index 0000000000000000000000000000000000000000..784ed0976f533b6cecb142363265e469228fb463 Binary files /dev/null and b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0 differ diff --git a/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body new file mode 100644 index 0000000000000000000000000000000000000000..0f2b466a638a34e304c69fb7976ab05a736d9ab8 --- /dev/null +++ b/.cache/pip/http-v2/f/f/6/3/f/ff63f8aee48f79bb6d2cdeff8f7863b5820a3c507a2623f007846bb0.body @@ -0,0 +1,219 @@ +Metadata-Version: 2.4 +Name: markdown-it-py +Version: 4.0.0 +Summary: Python port of markdown-it. Markdown parsing, done right! +Keywords: markdown,lexer,parser,commonmark,markdown-it +Author-email: Chris Sewell +Requires-Python: >=3.10 +Description-Content-Type: text/markdown +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +License-File: LICENSE +License-File: LICENSE.markdown-it +Requires-Dist: mdurl~=0.1 +Requires-Dist: psutil ; extra == "benchmarking" +Requires-Dist: pytest ; extra == "benchmarking" 
+Requires-Dist: pytest-benchmark ; extra == "benchmarking" +Requires-Dist: commonmark~=0.9 ; extra == "compare" +Requires-Dist: markdown~=3.4 ; extra == "compare" +Requires-Dist: mistletoe~=1.0 ; extra == "compare" +Requires-Dist: mistune~=3.0 ; extra == "compare" +Requires-Dist: panflute~=2.3 ; extra == "compare" +Requires-Dist: markdown-it-pyrs ; extra == "compare" +Requires-Dist: linkify-it-py>=1,<3 ; extra == "linkify" +Requires-Dist: mdit-py-plugins>=0.5.0 ; extra == "plugins" +Requires-Dist: gprof2dot ; extra == "profiling" +Requires-Dist: mdit-py-plugins>=0.5.0 ; extra == "rtd" +Requires-Dist: myst-parser ; extra == "rtd" +Requires-Dist: pyyaml ; extra == "rtd" +Requires-Dist: sphinx ; extra == "rtd" +Requires-Dist: sphinx-copybutton ; extra == "rtd" +Requires-Dist: sphinx-design ; extra == "rtd" +Requires-Dist: sphinx-book-theme~=1.0 ; extra == "rtd" +Requires-Dist: jupyter_sphinx ; extra == "rtd" +Requires-Dist: ipykernel ; extra == "rtd" +Requires-Dist: coverage ; extra == "testing" +Requires-Dist: pytest ; extra == "testing" +Requires-Dist: pytest-cov ; extra == "testing" +Requires-Dist: pytest-regressions ; extra == "testing" +Requires-Dist: requests ; extra == "testing" +Project-URL: Documentation, https://markdown-it-py.readthedocs.io +Project-URL: Homepage, https://github.com/executablebooks/markdown-it-py +Provides-Extra: benchmarking +Provides-Extra: compare +Provides-Extra: linkify +Provides-Extra: plugins +Provides-Extra: profiling +Provides-Extra: rtd +Provides-Extra: testing + +# markdown-it-py + +[![Github-CI][github-ci]][github-link] +[![Coverage Status][codecov-badge]][codecov-link] +[![PyPI][pypi-badge]][pypi-link] +[![Conda][conda-badge]][conda-link] +[![PyPI - Downloads][install-badge]][install-link] + +

+ markdown-it-py icon +

+ +> Markdown parser done right. + +- Follows the __[CommonMark spec](http://spec.commonmark.org/)__ for baseline parsing +- Configurable syntax: you can add new rules and even replace existing ones. +- Pluggable: Adds syntax extensions to extend the parser (see the [plugin list][md-plugins]). +- High speed (see our [benchmarking tests][md-performance]) +- Easy to configure for [security][md-security] +- Member of [Google's Assured Open Source Software](https://cloud.google.com/assured-open-source-software/docs/supported-packages) + +This is a Python port of [markdown-it], and some of its associated plugins. +For more details see: . + +For details on [markdown-it] itself, see: + +- The __[Live demo](https://markdown-it.github.io)__ +- [The markdown-it README][markdown-it-readme] + +**See also:** [markdown-it-pyrs](https://github.com/chrisjsewell/markdown-it-pyrs) for an experimental Rust binding, +for even more speed! + +## Installation + +### PIP + +```bash +pip install markdown-it-py[plugins] +``` + +or with extras + +```bash +pip install markdown-it-py[linkify,plugins] +``` + +### Conda + +```bash +conda install -c conda-forge markdown-it-py +``` + +or with extras + +```bash +conda install -c conda-forge markdown-it-py linkify-it-py mdit-py-plugins +``` + +## Usage + +### Python API Usage + +Render markdown to HTML with markdown-it-py and a custom configuration +with and without plugins and features: + +```python +from markdown_it import MarkdownIt +from mdit_py_plugins.front_matter import front_matter_plugin +from mdit_py_plugins.footnote import footnote_plugin + +md = ( + MarkdownIt('commonmark', {'breaks':True,'html':True}) + .use(front_matter_plugin) + .use(footnote_plugin) + .enable('table') +) +text = (""" +--- +a: 1 +--- + +a | b +- | - +1 | 2 + +A footnote [^1] + +[^1]: some details +""") +tokens = md.parse(text) +html_text = md.render(text) + +## To export the html to a file, uncomment the lines below: +# from pathlib import Path +# 
Path("output.html").write_text(html_text) +``` + +### Command-line Usage + +Render markdown to HTML with markdown-it-py from the +command-line: + +```console +usage: markdown-it [-h] [-v] [filenames [filenames ...]] + +Parse one or more markdown files, convert each to HTML, and print to stdout + +positional arguments: + filenames specify an optional list of files to convert + +optional arguments: + -h, --help show this help message and exit + -v, --version show program's version number and exit + +Interactive: + + $ markdown-it + markdown-it-py [version 0.0.0] (interactive) + Type Ctrl-D to complete input, or Ctrl-C to exit. + >>> # Example + ... > markdown *input* + ... +

Example

+
+

markdown input

+
+ +Batch: + + $ markdown-it README.md README.footer.md > index.html + +``` + +## References / Thanks + +Big thanks to the authors of [markdown-it]: + +- Alex Kocharin [github/rlidwka](https://github.com/rlidwka) +- Vitaly Puzrin [github/puzrin](https://github.com/puzrin) + +Also [John MacFarlane](https://github.com/jgm) for his work on the CommonMark spec and reference implementations. + +[github-ci]: https://github.com/executablebooks/markdown-it-py/actions/workflows/tests.yml/badge.svg?branch=master +[github-link]: https://github.com/executablebooks/markdown-it-py +[pypi-badge]: https://img.shields.io/pypi/v/markdown-it-py.svg +[pypi-link]: https://pypi.org/project/markdown-it-py +[conda-badge]: https://anaconda.org/conda-forge/markdown-it-py/badges/version.svg +[conda-link]: https://anaconda.org/conda-forge/markdown-it-py +[codecov-badge]: https://codecov.io/gh/executablebooks/markdown-it-py/branch/master/graph/badge.svg +[codecov-link]: https://codecov.io/gh/executablebooks/markdown-it-py +[install-badge]: https://img.shields.io/pypi/dw/markdown-it-py?label=pypi%20installs +[install-link]: https://pypistats.org/packages/markdown-it-py + +[CommonMark spec]: http://spec.commonmark.org/ +[markdown-it]: https://github.com/markdown-it/markdown-it +[markdown-it-readme]: https://github.com/markdown-it/markdown-it/blob/master/README.md +[md-security]: https://markdown-it-py.readthedocs.io/en/latest/security.html +[md-performance]: https://markdown-it-py.readthedocs.io/en/latest/performance.html +[md-plugins]: https://markdown-it-py.readthedocs.io/en/latest/plugins.html + diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..8a2f639061cc4a203f7109d8335d28076442c61d --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/METADATA @@ -0,0 +1,202 @@ +Metadata-Version: 2.4 
+Name: h11 +Version: 0.16.0 +Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 +Home-page: https://github.com/python-hyper/h11 +Author: Nathaniel J. Smith +Author-email: njs@pobox.com +License: MIT +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: System :: Networking +Requires-Python: >=3.8 +License-File: LICENSE.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary + +h11 +=== + +.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master + :target: https://travis-ci.org/python-hyper/h11 + :alt: Automated test status + +.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-hyper/h11 + :alt: Test coverage + +.. image:: https://readthedocs.org/projects/h11/badge/?version=latest + :target: http://h11.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +This is a little HTTP/1.1 library written from scratch in Python, +heavily inspired by `hyper-h2 `_. + +It's a "bring-your-own-I/O" library; h11 contains no IO code +whatsoever. 
This means you can hook h11 up to your favorite network +API, and that could be anything you want: synchronous, threaded, +asynchronous, or your own implementation of `RFC 6214 +`_ -- h11 won't judge you. +(Compare this to the current state of the art, where every time a `new +network API `_ comes along then someone +gets to start over reimplementing the entire HTTP protocol from +scratch.) Cory Benfield made an `excellent blog post describing the +benefits of this approach +`_, or if you like video +then here's his `PyCon 2016 talk on the same theme +`_. + +This also means that h11 is not immediately useful out of the box: +it's a toolkit for building programs that speak HTTP, not something +that could directly replace ``requests`` or ``twisted.web`` or +whatever. But h11 makes it much easier to implement something like +``requests`` or ``twisted.web``. + +At a high level, working with h11 goes like this: + +1) First, create an ``h11.Connection`` object to track the state of a + single HTTP/1.1 connection. + +2) When you read data off the network, pass it to + ``conn.receive_data(...)``; you'll get back a list of objects + representing high-level HTTP "events". + +3) When you want to send a high-level HTTP event, create the + corresponding "event" object and pass it to ``conn.send(...)``; + this will give you back some bytes that you can then push out + through the network. + +For example, a client might instantiate and then send a +``h11.Request`` object, then zero or more ``h11.Data`` objects for the +request body (e.g., if this is a POST), and then a +``h11.EndOfMessage`` to indicate the end of the message. Then the +server would then send back a ``h11.Response``, some ``h11.Data``, and +its own ``h11.EndOfMessage``. If either side violates the protocol, +you'll get a ``h11.ProtocolError`` exception. 
+ +h11 is suitable for implementing both servers and clients, and has a +pleasantly symmetric API: the events you send as a client are exactly +the ones that you receive as a server and vice-versa. + +`Here's an example of a tiny HTTP client +`_ + +It also has `a fine manual `_. + +FAQ +--- + +*Whyyyyy?* + +I wanted to play with HTTP in `Curio +`__ and `Trio +`__, which at the time didn't have any +HTTP libraries. So I thought, no big deal, Python has, like, a dozen +different implementations of HTTP, surely I can find one that's +reusable. I didn't find one, but I did find Cory's call-to-arms +blog-post. So I figured, well, fine, if I have to implement HTTP from +scratch, at least I can make sure no-one *else* has to ever again. + +*Should I use it?* + +Maybe. You should be aware that it's a very young project. But, it's +feature complete and has an exhaustive test-suite and complete docs, +so the next step is for people to try using it and see how it goes +:-). If you do then please let us know -- if nothing else we'll want +to talk to you before making any incompatible changes! + +*What are the features/limitations?* + +Roughly speaking, it's trying to be a robust, complete, and non-hacky +implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: +HTTP/1.1 Message Syntax and Routing +`_. That is, it mostly focuses on +implementing HTTP at the level of taking bytes on and off the wire, +and the headers related to that, and tries to be anal about spec +conformance. It doesn't know about higher-level concerns like URL +routing, conditional GETs, cross-origin cookie policies, or content +negotiation. But it does know how to take care of framing, +cross-version differences in keep-alive handling, and the "obsolete +line folding" rule, so you can focus your energies on the hard / +interesting parts for your application, and it tries to support the +full specification in the sense that any useful HTTP/1.1 conformant +application should be able to use h11. 
+ +It's pure Python, and has no dependencies outside of the standard +library. + +It has a test suite with 100.0% coverage for both statements and +branches. + +Currently it supports Python 3 (testing on 3.8-3.12) and PyPy 3. +The last Python 2-compatible version was h11 0.11.x. +(Originally it had a Cython wrapper for `http-parser +`_ and a beautiful nested state +machine implemented with ``yield from`` to postprocess the output. But +I had to take these out -- the new *parser* needs fewer lines-of-code +than the old *parser wrapper*, is written in pure Python, uses no +exotic language syntax, and has more features. It's sad, really; that +old state machine was really slick. I just need a few sentences here +to mourn that.) + +I don't know how fast it is. I haven't benchmarked or profiled it yet, +so it's probably got a few pointless hot spots, and I've been trying +to err on the side of simplicity and robustness instead of +micro-optimization. But at the architectural level I tried hard to +avoid fundamentally bad decisions, e.g., I believe that all the +parsing algorithms remain linear-time even in the face of pathological +input like slowloris, and there are no byte-by-byte loops. (I also +believe that it maintains bounded memory usage in the face of +arbitrary/pathological input.) + +The whole library is ~800 lines-of-code. You can read and understand +the whole thing in less than an hour. Most of the energy invested in +this so far has been spent on trying to keep things simple by +minimizing special-cases and ad hoc state manipulation; even though it +is now quite small and simple, I'm still annoyed that I haven't +figured out how to make it even smaller and simpler. (Unfortunately, +HTTP does not lend itself to simplicity.) + +The API is ~feature complete and I don't expect the general outlines +to change much, but you can't judge an API's ergonomics until you +actually document and use it, so I'd expect some changes in the +details. 
+ +*How do I try it?* + +.. code-block:: sh + + $ pip install h11 + $ git clone git@github.com:python-hyper/h11 + $ cd h11/examples + $ python basic-client.py + +and go from there. + +*License?* + +MIT + +*Code of conduct?* + +Contributors are requested to follow our `code of conduct +`_ in +all project spaces. diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..0ec9e3ef9debc2fafb8dee3b566802de58a6df83 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/RECORD @@ -0,0 +1,17 @@ +h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 +h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 +h11/_connection.py,sha256=k9YRVf6koZqbttBW36xSWaJpWdZwa-xQVU9AHEo9DuI,26863 +h11/_events.py,sha256=I97aXoal1Wu7dkL548BANBUCkOIbe-x5CioYA9IBY14,11792 +h11/_headers.py,sha256=P7D-lBNxHwdLZPLimmYwrPG-9ZkjElvvJZJdZAgSP-4,10412 +h11/_readers.py,sha256=a4RypORUCC3d0q_kxPuBIM7jTD8iLt5X91TH0FsduN4,8590 +h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 +h11/_state.py,sha256=_5LG_BGR8FCcFQeBPH-TMHgm_-B-EUcWCnQof_9XjFE,13231 +h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 +h11/_version.py,sha256=GVSsbPSPDcOuF6ptfIiXnVJoaEm3ygXbMnqlr_Giahw,686 +h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 +h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 +h11-0.16.0.dist-info/licenses/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 +h11-0.16.0.dist-info/METADATA,sha256=KPMmCYrAn8unm48YD5YIfIQf4kViFct7hyqcfVzRnWQ,8348 +h11-0.16.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91 +h11-0.16.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 +h11-0.16.0.dist-info/RECORD,, diff --git 
a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1eb3c49d99559863120cfb8433fc8738fba43ba9 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (78.1.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f080eae848f759c9173bfc0c79506357ebe5090 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..0d24def711344ec6f4da2108f7d5c9261eb35f8b --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11-0.16.0.dist-info/top_level.txt @@ -0,0 +1 @@ +h11 diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..989e92c3458681a6f0be72ae4105ea742750d328 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/__init__.py @@ -0,0 +1,62 @@ +# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), +# containing no networking code at all, loosely modelled on hyper-h2's generic +# implementation of HTTP/2 (and in particular the h2.connection.H2Connection +# class). There's still a bunch of subtle details you need to get right if you +# want to make this actually useful, because it doesn't implement all the +# semantics to check that what you're asking to write to the wire is sensible, +# but at least it gets you out of dealing with the wire itself. 
+ +from h11._connection import Connection, NEED_DATA, PAUSED +from h11._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from h11._state import ( + CLIENT, + CLOSED, + DONE, + ERROR, + IDLE, + MIGHT_SWITCH_PROTOCOL, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, + SWITCHED_PROTOCOL, +) +from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError +from h11._version import __version__ + +PRODUCT_ID = "python-h11/" + __version__ + + +__all__ = ( + "Connection", + "NEED_DATA", + "PAUSED", + "ConnectionClosed", + "Data", + "EndOfMessage", + "Event", + "InformationalResponse", + "Request", + "Response", + "CLIENT", + "CLOSED", + "DONE", + "ERROR", + "IDLE", + "MUST_CLOSE", + "SEND_BODY", + "SEND_RESPONSE", + "SERVER", + "SWITCHED_PROTOCOL", + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", +) diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py new file mode 100644 index 0000000000000000000000000000000000000000..933587fba22290d7eb7df4c88e12f1e61702b8ce --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_abnf.py @@ -0,0 +1,132 @@ +# We use native strings for all the re patterns, to take advantage of string +# formatting, and then convert to bytestrings when compiling the final re +# objects. + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace +# OWS = *( SP / HTAB ) +# ; optional whitespace +OWS = r"[ \t]*" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." 
/ "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields +# field-name = token +field_name = token + +# The standard says: +# +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 +# +# https://tools.ietf.org/html/rfc5234#appendix-B.1 +# +# VCHAR = %x21-7E +# ; visible (printing) characters +# +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string +# obs-text = %x80-FF +# +# However, the standard definition of field-content is WRONG! It disallows +# fields containing a single visible character surrounded by whitespace, +# e.g. "foo a bar". +# +# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 +# +# So our definition of field_content attempts to fix it up... +# +# Also, we allow lots of control characters, because apparently people assume +# that they're legal in practice (e.g., google analytics makes cookies with +# \x01 in them!): +# https://github.com/python-hyper/h11/issues/57 +# We still don't allow NUL or whitespace, because those are often treated as +# meta-characters and letting them through can lead to nasty issues like SSRF. +vchar = r"[\x21-\x7e]" +vchar_or_obs_text = r"[^\x00\s]" +field_vchar = vchar_or_obs_text +field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) + +# We handle obs-fold at a different level, and our fixed-up field_content +# already grows to swallow the whole value, so ? 
instead of * +field_value = r"({field_content})?".format(**globals()) + +# header-field = field-name ":" OWS field-value OWS +header_field = ( + r"(?P{field_name})" + r":" + r"{OWS}" + r"(?P{field_value})" + r"{OWS}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line +# +# request-line = method SP request-target SP HTTP-version CRLF +# method = token +# HTTP-version = HTTP-name "/" DIGIT "." DIGIT +# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive +# +# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full +# URL, host+port (for connect), or even "*", but in any case we are guaranteed +# that it contists of the visible printing characters. +method = token +request_target = r"{vchar}+".format(**globals()) +http_version = r"HTTP/(?P[0-9]\.[0-9])" +request_line = ( + r"(?P{method})" + r" " + r"(?P{request_target})" + r" " + r"{http_version}".format(**globals()) +) + +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line +# +# status-line = HTTP-version SP status-code SP reason-phrase CRLF +# status-code = 3DIGIT +# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) +status_code = r"[0-9]{3}" +reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) +status_line = ( + r"{http_version}" + r" " + r"(?P{status_code})" + # However, there are apparently a few too many servers out there that just + # leave out the reason phrase: + # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 + # https://github.com/seanmonstar/httparse/issues/29 + # so make it optional. ?: is a non-capturing group. + r"(?: (?P{reason_phrase}))?".format(**globals()) +) + +HEXDIG = r"[0-9A-Fa-f]" +# Actually +# +# chunk-size = 1*HEXDIG +# +# but we impose an upper-limit to avoid ridiculosity. 
len(str(2**64)) == 20 +chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) +# Actually +# +# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) +# +# but we aren't parsing the things so we don't really care. +chunk_ext = r";.*" +chunk_header = ( + r"(?P{chunk_size})" + r"(?P{chunk_ext})?" + r"{OWS}\r\n".format( + **globals() + ) # Even though the specification does not allow for extra whitespaces, + # we are lenient with trailing whitespaces because some servers on the wild use it. +) diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py new file mode 100644 index 0000000000000000000000000000000000000000..e37d82a82a882c072cb938a90eb4486b51cdad99 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_connection.py @@ -0,0 +1,659 @@ +# This contains the main Connection class. Everything in h11 revolves around +# this. +from typing import ( + Any, + Callable, + cast, + Dict, + List, + Optional, + overload, + Tuple, + Type, + Union, +) + +from ._events import ( + ConnectionClosed, + Data, + EndOfMessage, + Event, + InformationalResponse, + Request, + Response, +) +from ._headers import get_comma_header, has_expect_100_continue, set_comma_header +from ._readers import READERS, ReadersType +from ._receivebuffer import ReceiveBuffer +from ._state import ( + _SWITCH_CONNECT, + _SWITCH_UPGRADE, + CLIENT, + ConnectionState, + DONE, + ERROR, + MIGHT_SWITCH_PROTOCOL, + SEND_BODY, + SERVER, + SWITCHED_PROTOCOL, +) +from ._util import ( # Import the internal things we need + LocalProtocolError, + RemoteProtocolError, + Sentinel, +) +from ._writers import WRITERS, WritersType + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = ["Connection", "NEED_DATA", "PAUSED"] + + +class NEED_DATA(Sentinel, metaclass=Sentinel): + pass + + +class PAUSED(Sentinel, metaclass=Sentinel): + pass + + +# If we ever have this much buffered without it making a complete parseable +# event, we error out. The only time we really buffer is when reading the +# request/response line + headers together, so this is effectively the limit on +# the size of that. +# +# Some precedents for defaults: +# - node.js: 80 * 1024 +# - tomcat: 8 * 1024 +# - IIS: 16 * 1024 +# - Apache: <8 KiB per line> +DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 + + +# RFC 7230's rules for connection lifecycles: +# - If either side says they want to close the connection, then the connection +# must close. +# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close +# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive +# (and even this is a mess -- e.g. if you're implementing a proxy then +# sending Connection: keep-alive is forbidden). +# +# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So +# our rule is: +# - If someone says Connection: close, we will close +# - If someone uses HTTP/1.0, we will close. +def _keep_alive(event: Union[Request, Response]) -> bool: + connection = get_comma_header(event.headers, b"connection") + if b"close" in connection: + return False + if getattr(event, "http_version", b"1.1") < b"1.1": + return False + return True + + +def _body_framing( + request_method: bytes, event: Union[Request, Response] +) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: + # Called when we enter SEND_BODY to figure out framing information for + # this body. + # + # These are the only two events that can trigger a SEND_BODY state: + assert type(event) in (Request, Response) + # Returns one of: + # + # ("content-length", count) + # ("chunked", ()) + # ("http/1.0", ()) + # + # which are (lookup key, *args) for constructing body reader/writer + # objects. 
+ # + # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 + # + # Step 1: some responses always have an empty body, regardless of what the + # headers say. + if type(event) is Response: + if ( + event.status_code in (204, 304) + or request_method == b"HEAD" + or (request_method == b"CONNECT" and 200 <= event.status_code < 300) + ): + return ("content-length", (0,)) + # Section 3.3.3 also lists another case -- responses with status_code + # < 200. For us these are InformationalResponses, not Responses, so + # they can't get into this function in the first place. + assert event.status_code >= 200 + + # Step 2: check for Transfer-Encoding (T-E beats C-L): + transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") + if transfer_encodings: + assert transfer_encodings == [b"chunked"] + return ("chunked", ()) + + # Step 3: check for Content-Length + content_lengths = get_comma_header(event.headers, b"content-length") + if content_lengths: + return ("content-length", (int(content_lengths[0]),)) + + # Step 4: no applicable headers; fallback/default depends on type + if type(event) is Request: + return ("content-length", (0,)) + else: + return ("http/1.0", ()) + + +################################################################ +# +# The main Connection class +# +################################################################ + + +class Connection: + """An object encapsulating the state of an HTTP connection. + + Args: + our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If + you're implementing a server, pass :data:`h11.SERVER`. + + max_incomplete_event_size (int): + The maximum number of bytes we're willing to buffer of an + incomplete event. In practice this mostly sets a limit on the + maximum size of the request/response line + headers. If this is + exceeded, then :meth:`next_event` will raise + :exc:`RemoteProtocolError`. 
+ + """ + + def __init__( + self, + our_role: Type[Sentinel], + max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, + ) -> None: + self._max_incomplete_event_size = max_incomplete_event_size + # State and role tracking + if our_role not in (CLIENT, SERVER): + raise ValueError(f"expected CLIENT or SERVER, not {our_role!r}") + self.our_role = our_role + self.their_role: Type[Sentinel] + if our_role is CLIENT: + self.their_role = SERVER + else: + self.their_role = CLIENT + self._cstate = ConnectionState() + + # Callables for converting data->events or vice-versa given the + # current state + self._writer = self._get_io_object(self.our_role, None, WRITERS) + self._reader = self._get_io_object(self.their_role, None, READERS) + + # Holds any unprocessed received data + self._receive_buffer = ReceiveBuffer() + # If this is true, then it indicates that the incoming connection was + # closed *after* the end of whatever's in self._receive_buffer: + self._receive_buffer_closed = False + + # Extra bits of state that don't fit into the state machine. + # + # These two are only used to interpret framing headers for figuring + # out how to read/write response bodies. their_http_version is also + # made available as a convenient public API. + self.their_http_version: Optional[bytes] = None + self._request_method: Optional[bytes] = None + # This is pure flow-control and doesn't at all affect the set of legal + # transitions, so no need to bother ConnectionState with it: + self.client_is_waiting_for_100_continue = False + + @property + def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: + """A dictionary like:: + + {CLIENT: , SERVER: } + + See :ref:`state-machine` for details. + + """ + return dict(self._cstate.states) + + @property + def our_state(self) -> Type[Sentinel]: + """The current state of whichever role we are playing. See + :ref:`state-machine` for details. 
+ """ + return self._cstate.states[self.our_role] + + @property + def their_state(self) -> Type[Sentinel]: + """The current state of whichever role we are NOT playing. See + :ref:`state-machine` for details. + """ + return self._cstate.states[self.their_role] + + @property + def they_are_waiting_for_100_continue(self) -> bool: + return self.their_role is CLIENT and self.client_is_waiting_for_100_continue + + def start_next_cycle(self) -> None: + """Attempt to reset our connection state for a new request/response + cycle. + + If both client and server are in :data:`DONE` state, then resets them + both to :data:`IDLE` state in preparation for a new request/response + cycle on this same connection. Otherwise, raises a + :exc:`LocalProtocolError`. + + See :ref:`keepalive-and-pipelining`. + + """ + old_states = dict(self._cstate.states) + self._cstate.start_next_cycle() + self._request_method = None + # self.their_http_version gets left alone, since it presumably lasts + # beyond a single request/response cycle + assert not self.client_is_waiting_for_100_continue + self._respond_to_state_changes(old_states) + + def _process_error(self, role: Type[Sentinel]) -> None: + old_states = dict(self._cstate.states) + self._cstate.process_error(role) + self._respond_to_state_changes(old_states) + + def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: + if type(event) is InformationalResponse and event.status_code == 101: + return _SWITCH_UPGRADE + if type(event) is Response: + if ( + _SWITCH_CONNECT in self._cstate.pending_switch_proposals + and 200 <= event.status_code < 300 + ): + return _SWITCH_CONNECT + return None + + # All events go through here + def _process_event(self, role: Type[Sentinel], event: Event) -> None: + # First, pass the event through the state machine to make sure it + # succeeds. 
+ old_states = dict(self._cstate.states) + if role is CLIENT and type(event) is Request: + if event.method == b"CONNECT": + self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) + if get_comma_header(event.headers, b"upgrade"): + self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) + server_switch_event = None + if role is SERVER: + server_switch_event = self._server_switch_event(event) + self._cstate.process_event(role, type(event), server_switch_event) + + # Then perform the updates triggered by it. + + if type(event) is Request: + self._request_method = event.method + + if role is self.their_role and type(event) in ( + Request, + Response, + InformationalResponse, + ): + event = cast(Union[Request, Response, InformationalResponse], event) + self.their_http_version = event.http_version + + # Keep alive handling + # + # RFC 7230 doesn't really say what one should do if Connection: close + # shows up on a 1xx InformationalResponse. I think the idea is that + # this is not supposed to happen. In any case, if it does happen, we + # ignore it. 
+ if type(event) in (Request, Response) and not _keep_alive( + cast(Union[Request, Response], event) + ): + self._cstate.process_keep_alive_disabled() + + # 100-continue + if type(event) is Request and has_expect_100_continue(event): + self.client_is_waiting_for_100_continue = True + if type(event) in (InformationalResponse, Response): + self.client_is_waiting_for_100_continue = False + if role is CLIENT and type(event) in (Data, EndOfMessage): + self.client_is_waiting_for_100_continue = False + + self._respond_to_state_changes(old_states, event) + + def _get_io_object( + self, + role: Type[Sentinel], + event: Optional[Event], + io_dict: Union[ReadersType, WritersType], + ) -> Optional[Callable[..., Any]]: + # event may be None; it's only used when entering SEND_BODY + state = self._cstate.states[role] + if state is SEND_BODY: + # Special case: the io_dict has a dict of reader/writer factories + # that depend on the request/response framing. + framing_type, args = _body_framing( + cast(bytes, self._request_method), cast(Union[Request, Response], event) + ) + return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] + else: + # General case: the io_dict just has the appropriate reader/writer + # for this state + return io_dict.get((role, state)) # type: ignore[return-value] + + # This must be called after any action that might have caused + # self._cstate.states to change. 
+ def _respond_to_state_changes( + self, + old_states: Dict[Type[Sentinel], Type[Sentinel]], + event: Optional[Event] = None, + ) -> None: + # Update reader/writer + if self.our_state != old_states[self.our_role]: + self._writer = self._get_io_object(self.our_role, event, WRITERS) + if self.their_state != old_states[self.their_role]: + self._reader = self._get_io_object(self.their_role, event, READERS) + + @property + def trailing_data(self) -> Tuple[bytes, bool]: + """Data that has been received, but not yet processed, represented as + a tuple with two elements, where the first is a byte-string containing + the unprocessed data itself, and the second is a bool that is True if + the receive connection was closed. + + See :ref:`switching-protocols` for discussion of why you'd want this. + """ + return (bytes(self._receive_buffer), self._receive_buffer_closed) + + def receive_data(self, data: bytes) -> None: + """Add data to our internal receive buffer. + + This does not actually do any processing on the data, just stores + it. To trigger processing, you have to call :meth:`next_event`. + + Args: + data (:term:`bytes-like object`): + The new data that was just received. + + Special case: If *data* is an empty byte-string like ``b""``, + then this indicates that the remote side has closed the + connection (end of file). Normally this is convenient, because + standard Python APIs like :meth:`file.read` or + :meth:`socket.recv` use ``b""`` to indicate end-of-file, while + other failures to read are indicated using other mechanisms + like raising :exc:`TimeoutError`. When using such an API you + can just blindly pass through whatever you get from ``read`` + to :meth:`receive_data`, and everything will work. + + But, if you have an API where reading an empty string is a + valid non-EOF condition, then you need to be aware of this and + make sure to check for such strings and avoid passing them to + :meth:`receive_data`. 
+ + Returns: + Nothing, but after calling this you should call :meth:`next_event` + to parse the newly received data. + + Raises: + RuntimeError: + Raised if you pass an empty *data*, indicating EOF, and then + pass a non-empty *data*, indicating more data that somehow + arrived after the EOF. + + (Calling ``receive_data(b"")`` multiple times is fine, + and equivalent to calling it once.) + + """ + if data: + if self._receive_buffer_closed: + raise RuntimeError("received close, then received more data?") + self._receive_buffer += data + else: + self._receive_buffer_closed = True + + def _extract_next_receive_event( + self, + ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + state = self.their_state + # We don't pause immediately when they enter DONE, because even in + # DONE state we can still process a ConnectionClosed() event. But + # if we have data in our buffer, then we definitely aren't getting + # a ConnectionClosed() immediately and we need to pause. + if state is DONE and self._receive_buffer: + return PAUSED + if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: + return PAUSED + assert self._reader is not None + event = self._reader(self._receive_buffer) + if event is None: + if not self._receive_buffer and self._receive_buffer_closed: + # In some unusual cases (basically just HTTP/1.0 bodies), EOF + # triggers an actual protocol event; in that case, we want to + # return that event, and then the state will change and we'll + # get called again to generate the actual ConnectionClosed(). + if hasattr(self._reader, "read_eof"): + event = self._reader.read_eof() + else: + event = ConnectionClosed() + if event is None: + event = NEED_DATA + return event # type: ignore[no-any-return] + + def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: + """Parse the next event out of our receive buffer, update our internal + state, and return it. + + This is a mutating operation -- think of it like calling :func:`next` + on an iterator. 
+ + Returns: + : One of three things: + + 1) An event object -- see :ref:`events`. + + 2) The special constant :data:`NEED_DATA`, which indicates that + you need to read more data from your socket and pass it to + :meth:`receive_data` before this method will be able to return + any more events. + + 3) The special constant :data:`PAUSED`, which indicates that we + are not in a state where we can process incoming data (usually + because the peer has finished their part of the current + request/response cycle, and you have not yet called + :meth:`start_next_cycle`). See :ref:`flow-control` for details. + + Raises: + RemoteProtocolError: + The peer has misbehaved. You should close the connection + (possibly after sending some kind of 4xx response). + + Once this method returns :class:`ConnectionClosed` once, then all + subsequent calls will also return :class:`ConnectionClosed`. + + If this method raises any exception besides :exc:`RemoteProtocolError` + then that's a bug -- if it happens please file a bug report! + + If this method raises any exception then it also sets + :attr:`Connection.their_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + + if self.their_state is ERROR: + raise RemoteProtocolError("Can't receive data when peer state is ERROR") + try: + event = self._extract_next_receive_event() + if event not in [NEED_DATA, PAUSED]: + self._process_event(self.their_role, cast(Event, event)) + if event is NEED_DATA: + if len(self._receive_buffer) > self._max_incomplete_event_size: + # 431 is "Request header fields too large" which is pretty + # much the only situation where we can get here + raise RemoteProtocolError( + "Receive buffer too long", error_status_hint=431 + ) + if self._receive_buffer_closed: + # We're still trying to complete some event, but that's + # never going to happen because no more data is coming + raise RemoteProtocolError("peer unexpectedly closed connection") + return event + except BaseException as exc: + self._process_error(self.their_role) + if isinstance(exc, LocalProtocolError): + exc._reraise_as_remote_protocol_error() + else: + raise + + @overload + def send(self, event: ConnectionClosed) -> None: + ... + + @overload + def send( + self, event: Union[Request, InformationalResponse, Response, Data, EndOfMessage] + ) -> bytes: + ... + + @overload + def send(self, event: Event) -> Optional[bytes]: + ... + + def send(self, event: Event) -> Optional[bytes]: + """Convert a high-level event into bytes that can be sent to the peer, + while updating our internal state machine. + + Args: + event: The :ref:`event ` to send. + + Returns: + If ``type(event) is ConnectionClosed``, then returns + ``None``. Otherwise, returns a :term:`bytes-like object`. + + Raises: + LocalProtocolError: + Sending this event at this time would violate our + understanding of the HTTP/1.1 protocol. + + If this method raises any exception then it also sets + :attr:`Connection.our_state` to :data:`ERROR` -- see + :ref:`error-handling` for discussion. 
+ + """ + data_list = self.send_with_data_passthrough(event) + if data_list is None: + return None + else: + return b"".join(data_list) + + def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: + """Identical to :meth:`send`, except that in situations where + :meth:`send` returns a single :term:`bytes-like object`, this instead + returns a list of them -- and when sending a :class:`Data` event, this + list is guaranteed to contain the exact object you passed in as + :attr:`Data.data`. See :ref:`sendfile` for discussion. + + """ + if self.our_state is ERROR: + raise LocalProtocolError("Can't send data when our state is ERROR") + try: + if type(event) is Response: + event = self._clean_up_response_headers_for_sending(event) + # We want to call _process_event before calling the writer, + # because if someone tries to do something invalid then this will + # give a sensible error message, while our writers all just assume + # they will only receive valid events. But, _process_event might + # change self._writer. So we have to do a little dance: + writer = self._writer + self._process_event(self.our_role, event) + if type(event) is ConnectionClosed: + return None + else: + # In any situation where writer is None, process_event should + # have raised ProtocolError + assert writer is not None + data_list: List[bytes] = [] + writer(event, data_list.append) + return data_list + except: + self._process_error(self.our_role) + raise + + def send_failed(self) -> None: + """Notify the state machine that we failed to send the data it gave + us. + + This causes :attr:`Connection.our_state` to immediately become + :data:`ERROR` -- see :ref:`error-handling` for discussion. + + """ + self._process_error(self.our_role) + + # When sending a Response, we take responsibility for a few things: + # + # - Sometimes you MUST set Connection: close. We take care of those + # times. 
(You can also set it yourself if you want, and if you do then + # we'll respect that and close the connection at the right time. But you + # don't have to worry about that unless you want to.) + # + # - The user has to set Content-Length if they want it. Otherwise, for + # responses that have bodies (e.g. not HEAD), then we will automatically + # select the right mechanism for streaming a body of unknown length, + # which depends on depending on the peer's HTTP version. + # + # This function's *only* responsibility is making sure headers are set up + # right -- everything downstream just looks at the headers. There are no + # side channels. + def _clean_up_response_headers_for_sending(self, response: Response) -> Response: + assert type(response) is Response + + headers = response.headers + need_close = False + + # HEAD requests need some special handling: they always act like they + # have Content-Length: 0, and that's how _body_framing treats + # them. But their headers are supposed to match what we would send if + # the request was a GET. (Technically there is one deviation allowed: + # we're allowed to leave out the framing headers -- see + # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as + # easy to get them right.) + method_for_choosing_headers = cast(bytes, self._request_method) + if method_for_choosing_headers == b"HEAD": + method_for_choosing_headers = b"GET" + framing_type, _ = _body_framing(method_for_choosing_headers, response) + if framing_type in ("chunked", "http/1.0"): + # This response has a body of unknown length. + # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked + # If our peer is HTTP/1.0, we use no framing headers, and close the + # connection afterwards. 
+ # + # Make sure to clear Content-Length (in principle user could have + # set both and then we ignored Content-Length b/c + # Transfer-Encoding overwrote it -- this would be naughty of them, + # but the HTTP spec says that if our peer does this then we have + # to fix it instead of erroring out, so we'll accord the user the + # same respect). + headers = set_comma_header(headers, b"content-length", []) + if self.their_http_version is None or self.their_http_version < b"1.1": + # Either we never got a valid request and are sending back an + # error (their_http_version is None), so we assume the worst; + # or else we did get a valid HTTP/1.0 request, so we know that + # they don't understand chunked encoding. + headers = set_comma_header(headers, b"transfer-encoding", []) + # This is actually redundant ATM, since currently we + # unconditionally disable keep-alive when talking to HTTP/1.0 + # peers. But let's be defensive just in case we add + # Connection: keep-alive support later: + if self._request_method != b"HEAD": + need_close = True + else: + headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) + + if not self._cstate.keep_alive or need_close: + # Make sure Connection: close is set + connection = set(get_comma_header(headers, b"connection")) + connection.discard(b"keep-alive") + connection.add(b"close") + headers = set_comma_header(headers, b"connection", sorted(connection)) + + return Response( + headers=headers, + status_code=response.status_code, + http_version=response.http_version, + reason=response.reason, + ) diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py new file mode 100644 index 0000000000000000000000000000000000000000..ca1c3adbde2c4e7710482a18e3471f91f1da610e --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_events.py @@ -0,0 +1,369 @@ +# High level events that make up HTTP/1.1 conversations. 
Loosely inspired by +# the corresponding events in hyper-h2: +# +# http://python-hyper.org/h2/en/stable/api.html#events +# +# Don't subclass these. Stuff will break. + +import re +from abc import ABC +from dataclasses import dataclass +from typing import List, Tuple, Union + +from ._abnf import method, request_target +from ._headers import Headers, normalize_and_validate +from ._util import bytesify, LocalProtocolError, validate + +# Everything in __all__ gets re-exported as part of the h11 public API. +__all__ = [ + "Event", + "Request", + "InformationalResponse", + "Response", + "Data", + "EndOfMessage", + "ConnectionClosed", +] + +method_re = re.compile(method.encode("ascii")) +request_target_re = re.compile(request_target.encode("ascii")) + + +class Event(ABC): + """ + Base class for h11 events. + """ + + __slots__ = () + + +@dataclass(init=False, frozen=True) +class Request(Event): + """The beginning of an HTTP request. + + Fields: + + .. attribute:: method + + An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: target + + The target of an HTTP request, e.g. ``b"/index.html"``, or one of the + more exotic formats described in `RFC 7320, section 5.3 + `_. Always a byte + string. :term:`Bytes-like objects ` and native + strings containing only ascii characters will be automatically + converted to byte strings. + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. 
+ + """ + + __slots__ = ("method", "headers", "target", "http_version") + + method: bytes + headers: Headers + target: bytes + http_version: bytes + + def __init__( + self, + *, + method: Union[bytes, str], + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + target: Union[bytes, str], + http_version: Union[bytes, str] = b"1.1", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "method", bytesify(method)) + object.__setattr__(self, "target", bytesify(target)) + object.__setattr__(self, "http_version", bytesify(http_version)) + else: + object.__setattr__(self, "method", method) + object.__setattr__(self, "target", target) + object.__setattr__(self, "http_version", http_version) + + # "A server MUST respond with a 400 (Bad Request) status code to any + # HTTP/1.1 request message that lacks a Host header field and to any + # request message that contains more than one Host header field or a + # Host header field with an invalid field-value." + # -- https://tools.ietf.org/html/rfc7230#section-5.4 + host_count = 0 + for name, value in self.headers: + if name == b"host": + host_count += 1 + if self.http_version == b"1.1" and host_count == 0: + raise LocalProtocolError("Missing mandatory Host: header") + if host_count > 1: + raise LocalProtocolError("Found multiple Host: headers") + + validate(method_re, self.method, "Illegal method characters") + validate(request_target_re, self.target, "Illegal target characters") + + # This is an unhashable type. 
+ __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class _ResponseBase(Event): + __slots__ = ("headers", "http_version", "reason", "status_code") + + headers: Headers + http_version: bytes + reason: bytes + status_code: int + + def __init__( + self, + *, + headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], + status_code: int, + http_version: Union[bytes, str] = b"1.1", + reason: Union[bytes, str] = b"", + _parsed: bool = False, + ) -> None: + super().__init__() + if isinstance(headers, Headers): + object.__setattr__(self, "headers", headers) + else: + object.__setattr__( + self, "headers", normalize_and_validate(headers, _parsed=_parsed) + ) + if not _parsed: + object.__setattr__(self, "reason", bytesify(reason)) + object.__setattr__(self, "http_version", bytesify(http_version)) + if not isinstance(status_code, int): + raise LocalProtocolError("status code must be integer") + # Because IntEnum objects are instances of int, but aren't + # duck-compatible (sigh), see gh-72. + object.__setattr__(self, "status_code", int(status_code)) + else: + object.__setattr__(self, "reason", reason) + object.__setattr__(self, "http_version", http_version) + object.__setattr__(self, "status_code", status_code) + + self.__post_init__() + + def __post_init__(self) -> None: + pass + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class InformationalResponse(_ResponseBase): + """An HTTP informational response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`InformationalResponse`, this is always in the range [100, + 200). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for + details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. 
See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (100 <= self.status_code < 200): + raise LocalProtocolError( + "InformationalResponse status_code should be in range " + "[100, 200), not {}".format(self.status_code) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Response(_ResponseBase): + """The beginning of an HTTP response. + + Fields: + + .. attribute:: status_code + + The status code of this response, as an integer. For an + :class:`Response`, this is always in the range [200, + 1000). + + .. attribute:: headers + + Request headers, represented as a list of (name, value) pairs. See + :ref:`the header normalization rules ` for details. + + .. attribute:: http_version + + The HTTP protocol version, represented as a byte string like + ``b"1.1"``. See :ref:`the HTTP version normalization rules + ` for details. + + .. attribute:: reason + + The reason phrase of this response, as a byte string. For example: + ``b"OK"``, or ``b"Not Found"``. + + """ + + def __post_init__(self) -> None: + if not (200 <= self.status_code < 1000): + raise LocalProtocolError( + "Response status_code should be in range [200, 1000), not {}".format( + self.status_code + ) + ) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(init=False, frozen=True) +class Data(Event): + """Part of an HTTP message body. + + Fields: + + .. attribute:: data + + A :term:`bytes-like object` containing part of a message body. Or, if + using the ``combine=False`` argument to :meth:`Connection.send`, then + any object that your socket writing code knows what to do with, and for + which calling :func:`len` returns the number of bytes that will be + written -- see :ref:`sendfile` for details. + + .. 
attribute:: chunk_start + + A marker that indicates whether this data object is from the start of a + chunked transfer encoding chunk. This field is ignored when when a Data + event is provided to :meth:`Connection.send`: it is only valid on + events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + .. attribute:: chunk_end + + A marker that indicates whether this data object is the last for a + given chunked transfer encoding chunk. This field is ignored when when + a Data event is provided to :meth:`Connection.send`: it is only valid + on events emitted from :meth:`Connection.next_event`. You probably + shouldn't use this attribute at all; see + :ref:`chunk-delimiters-are-bad` for details. + + """ + + __slots__ = ("data", "chunk_start", "chunk_end") + + data: bytes + chunk_start: bool + chunk_end: bool + + def __init__( + self, data: bytes, chunk_start: bool = False, chunk_end: bool = False + ) -> None: + object.__setattr__(self, "data", data) + object.__setattr__(self, "chunk_start", chunk_start) + object.__setattr__(self, "chunk_end", chunk_end) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that +# are forbidden to be sent in a trailer, since processing them as if they were +# present in the header section might bypass external security filters." +# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part +# Unfortunately, the list of forbidden fields is long and vague :-/ +@dataclass(init=False, frozen=True) +class EndOfMessage(Event): + """The end of an HTTP message. + + Fields: + + .. attribute:: headers + + Default value: ``[]`` + + Any trailing headers attached to this message, represented as a list of + (name, value) pairs. See :ref:`the header normalization rules + ` for details. 
+ + Must be empty unless ``Transfer-Encoding: chunked`` is in use. + + """ + + __slots__ = ("headers",) + + headers: Headers + + def __init__( + self, + *, + headers: Union[ + Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None + ] = None, + _parsed: bool = False, + ) -> None: + super().__init__() + if headers is None: + headers = Headers([]) + elif not isinstance(headers, Headers): + headers = normalize_and_validate(headers, _parsed=_parsed) + + object.__setattr__(self, "headers", headers) + + # This is an unhashable type. + __hash__ = None # type: ignore + + +@dataclass(frozen=True) +class ConnectionClosed(Event): + """This event indicates that the sender has closed their outgoing + connection. + + Note that this does not necessarily mean that they can't *receive* further + data, because TCP connections are composed to two one-way channels which + can be closed independently. See :ref:`closing` for details. + + No fields. + """ + + pass diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py new file mode 100644 index 0000000000000000000000000000000000000000..31da3e2b23b55a624b36f105e62a6902e63286aa --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_headers.py @@ -0,0 +1,282 @@ +import re +from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union + +from ._abnf import field_name, field_value +from ._util import bytesify, LocalProtocolError, validate + +if TYPE_CHECKING: + from ._events import Request + +try: + from typing import Literal +except ImportError: + from typing_extensions import Literal # type: ignore + +CONTENT_LENGTH_MAX_DIGITS = 20 # allow up to 1 billion TB - 1 + + +# Facts +# ----- +# +# Headers are: +# keys: case-insensitive ascii +# values: mixture of ascii and raw bytes +# +# "Historically, HTTP has allowed field content with text in the ISO-8859-1 +# charset [ISO-8859-1], supporting other charsets only through 
use of +# [RFC2047] encoding. In practice, most HTTP header field values use only a +# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD +# limit their field values to US-ASCII octets. A recipient SHOULD treat other +# octets in field content (obs-text) as opaque data." +# And it deprecates all non-ascii values +# +# Leading/trailing whitespace in header names is forbidden +# +# Values get leading/trailing whitespace stripped +# +# Content-Disposition actually needs to contain unicode semantically; to +# accomplish this it has a terrifically weird way of encoding the filename +# itself as ascii (and even this still has lots of cross-browser +# incompatibilities) +# +# Order is important: +# "a proxy MUST NOT change the order of these field values when forwarding a +# message" +# (and there are several headers where the order indicates a preference) +# +# Multiple occurences of the same header: +# "A sender MUST NOT generate multiple header fields with the same field name +# in a message unless either the entire field value for that header field is +# defined as a comma-separated list [or the header is Set-Cookie which gets a +# special exception]" - RFC 7230. (cookies are in RFC 6265) +# +# So every header aside from Set-Cookie can be merged by b", ".join if it +# occurs repeatedly. But, of course, they can't necessarily be split by +# .split(b","), because quoting. +# +# Given all this mess (case insensitive, duplicates allowed, order is +# important, ...), there doesn't appear to be any standard way to handle +# headers in Python -- they're almost like dicts, but... actually just +# aren't. For now we punt and just use a super simple representation: headers +# are a list of pairs +# +# [(name1, value1), (name2, value2), ...] +# +# where all entries are bytestrings, names are lowercase and have no +# leading/trailing whitespace, and values are bytestrings with no +# leading/trailing whitespace. 
Searching and updating are done via naive O(n) +# methods. +# +# Maybe a dict-of-lists would be better? + +_content_length_re = re.compile(rb"[0-9]+") +_field_name_re = re.compile(field_name.encode("ascii")) +_field_value_re = re.compile(field_value.encode("ascii")) + + +class Headers(Sequence[Tuple[bytes, bytes]]): + """ + A list-like interface that allows iterating over headers as byte-pairs + of (lowercased-name, value). + + Internally we actually store the representation as three-tuples, + including both the raw original casing, in order to preserve casing + over-the-wire, and the lowercased name, for case-insensitive comparisions. + + r = Request( + method="GET", + target="/", + headers=[("Host", "example.org"), ("Connection", "keep-alive")], + http_version="1.1", + ) + assert r.headers == [ + (b"host", b"example.org"), + (b"connection", b"keep-alive") + ] + assert r.headers.raw_items() == [ + (b"Host", b"example.org"), + (b"Connection", b"keep-alive") + ] + """ + + __slots__ = "_full_items" + + def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: + self._full_items = full_items + + def __bool__(self) -> bool: + return bool(self._full_items) + + def __eq__(self, other: object) -> bool: + return list(self) == list(other) # type: ignore + + def __len__(self) -> int: + return len(self._full_items) + + def __repr__(self) -> str: + return "" % repr(list(self)) + + def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] + _, name, value = self._full_items[idx] + return (name, value) + + def raw_items(self) -> List[Tuple[bytes, bytes]]: + return [(raw_name, value) for raw_name, _, value in self._full_items] + + +HeaderTypes = Union[ + List[Tuple[bytes, bytes]], + List[Tuple[bytes, str]], + List[Tuple[str, bytes]], + List[Tuple[str, str]], +] + + +@overload +def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: + ... 
+ + +@overload +def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: + ... + + +@overload +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + ... + + +def normalize_and_validate( + headers: Union[Headers, HeaderTypes], _parsed: bool = False +) -> Headers: + new_headers = [] + seen_content_length = None + saw_transfer_encoding = False + for name, value in headers: + # For headers coming out of the parser, we can safely skip some steps, + # because it always returns bytes and has already run these regexes + # over the data: + if not _parsed: + name = bytesify(name) + value = bytesify(value) + validate(_field_name_re, name, "Illegal header name {!r}", name) + validate(_field_value_re, value, "Illegal header value {!r}", value) + assert isinstance(name, bytes) + assert isinstance(value, bytes) + + raw_name = name + name = name.lower() + if name == b"content-length": + lengths = {length.strip() for length in value.split(b",")} + if len(lengths) != 1: + raise LocalProtocolError("conflicting Content-Length headers") + value = lengths.pop() + validate(_content_length_re, value, "bad Content-Length") + if len(value) > CONTENT_LENGTH_MAX_DIGITS: + raise LocalProtocolError("bad Content-Length") + if seen_content_length is None: + seen_content_length = value + new_headers.append((raw_name, name, value)) + elif seen_content_length != value: + raise LocalProtocolError("conflicting Content-Length headers") + elif name == b"transfer-encoding": + # "A server that receives a request message with a transfer coding + # it does not understand SHOULD respond with 501 (Not + # Implemented)." 
+ # https://tools.ietf.org/html/rfc7230#section-3.3.1 + if saw_transfer_encoding: + raise LocalProtocolError( + "multiple Transfer-Encoding headers", error_status_hint=501 + ) + # "All transfer-coding names are case-insensitive" + # -- https://tools.ietf.org/html/rfc7230#section-4 + value = value.lower() + if value != b"chunked": + raise LocalProtocolError( + "Only Transfer-Encoding: chunked is supported", + error_status_hint=501, + ) + saw_transfer_encoding = True + new_headers.append((raw_name, name, value)) + else: + new_headers.append((raw_name, name, value)) + return Headers(new_headers) + + +def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: + # Should only be used for headers whose value is a list of + # comma-separated, case-insensitive values. + # + # The header name `name` is expected to be lower-case bytes. + # + # Connection: meets these criteria (including cast insensitivity). + # + # Content-Length: technically is just a single value (1*DIGIT), but the + # standard makes reference to implementations that do multiple values, and + # using this doesn't hurt. Ditto, case insensitivity doesn't things either + # way. + # + # Transfer-Encoding: is more complex (allows for quoted strings), so + # splitting on , is actually wrong. For example, this is legal: + # + # Transfer-Encoding: foo; options="1,2", chunked + # + # and should be parsed as + # + # foo; options="1,2" + # chunked + # + # but this naive function will parse it as + # + # foo; options="1 + # 2" + # chunked + # + # However, this is okay because the only thing we are going to do with + # any Transfer-Encoding is reject ones that aren't just "chunked", so + # both of these will be treated the same anyway. + # + # Expect: the only legal value is the literal string + # "100-continue". Splitting on commas is harmless. Case insensitive. 
+ # + out: List[bytes] = [] + for _, found_name, found_raw_value in headers._full_items: + if found_name == name: + found_raw_value = found_raw_value.lower() + for found_split_value in found_raw_value.split(b","): + found_split_value = found_split_value.strip() + if found_split_value: + out.append(found_split_value) + return out + + +def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: + # The header name `name` is expected to be lower-case bytes. + # + # Note that when we store the header we use title casing for the header + # names, in order to match the conventional HTTP header style. + # + # Simply calling `.title()` is a blunt approach, but it's correct + # here given the cases where we're using `set_comma_header`... + # + # Connection, Content-Length, Transfer-Encoding. + new_headers: List[Tuple[bytes, bytes]] = [] + for found_raw_name, found_name, found_raw_value in headers._full_items: + if found_name != name: + new_headers.append((found_raw_name, found_raw_value)) + for new_value in new_values: + new_headers.append((name.title(), new_value)) + return normalize_and_validate(new_headers) + + +def has_expect_100_continue(request: "Request") -> bool: + # https://tools.ietf.org/html/rfc7231#section-5.1.1 + # "A server that receives a 100-continue expectation in an HTTP/1.0 request + # MUST ignore that expectation." 
+ if request.http_version < b"1.1": + return False + expect = get_comma_header(request.headers, b"expect") + return b"100-continue" in expect diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py new file mode 100644 index 0000000000000000000000000000000000000000..576804cc282032526e0a932c9853d586a094bad0 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_readers.py @@ -0,0 +1,250 @@ +# Code to read HTTP data +# +# Strategy: each reader is a callable which takes a ReceiveBuffer object, and +# either: +# 1) consumes some of it and returns an Event +# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() +# and it might raise a LocalProtocolError, so simpler just to always use +# this) +# 3) returns None, meaning "I need more data" +# +# If they have a .read_eof attribute, then this will be called if an EOF is +# received -- but this is optional. Either way, the actual ConnectionClosed +# event will be generated afterwards. +# +# READERS is a dict describing how to pick a reader. 
It maps states to either: +# - a reader +# - or, for body readers, a dict of per-framing reader factories + +import re +from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union + +from ._abnf import chunk_header, header_field, request_line, status_line +from ._events import Data, EndOfMessage, InformationalResponse, Request, Response +from ._receivebuffer import ReceiveBuffer +from ._state import ( + CLIENT, + CLOSED, + DONE, + IDLE, + MUST_CLOSE, + SEND_BODY, + SEND_RESPONSE, + SERVER, +) +from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate + +__all__ = ["READERS"] + +header_field_re = re.compile(header_field.encode("ascii")) +obs_fold_re = re.compile(rb"[ \t]+") + + +def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: + it = iter(lines) + last: Optional[bytes] = None + for line in it: + match = obs_fold_re.match(line) + if match: + if last is None: + raise LocalProtocolError("continuation line at start of headers") + if not isinstance(last, bytearray): + # Cast to a mutable type, avoiding copy on append to ensure O(n) time + last = bytearray(last) + last += b" " + last += line[match.end() :] + else: + if last is not None: + yield last + last = line + if last is not None: + yield last + + +def _decode_header_lines( + lines: Iterable[bytes], +) -> Iterable[Tuple[bytes, bytes]]: + for line in _obsolete_line_fold(lines): + matches = validate(header_field_re, line, "illegal header line: {!r}", line) + yield (matches["field_name"], matches["field_value"]) + + +request_line_re = re.compile(request_line.encode("ascii")) + + +def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no request line received") + matches = validate( + request_line_re, lines[0], 
"illegal request line: {!r}", lines[0] + ) + return Request( + headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches + ) + + +status_line_re = re.compile(status_line.encode("ascii")) + + +def maybe_read_from_SEND_RESPONSE_server( + buf: ReceiveBuffer, +) -> Union[InformationalResponse, Response, None]: + lines = buf.maybe_extract_lines() + if lines is None: + if buf.is_next_line_obviously_invalid_request_line(): + raise LocalProtocolError("illegal request line") + return None + if not lines: + raise LocalProtocolError("no response line received") + matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) + http_version = ( + b"1.1" if matches["http_version"] is None else matches["http_version"] + ) + reason = b"" if matches["reason"] is None else matches["reason"] + status_code = int(matches["status_code"]) + class_: Union[Type[InformationalResponse], Type[Response]] = ( + InformationalResponse if status_code < 200 else Response + ) + return class_( + headers=list(_decode_header_lines(lines[1:])), + _parsed=True, + status_code=status_code, + reason=reason, + http_version=http_version, + ) + + +class ContentLengthReader: + def __init__(self, length: int) -> None: + self._length = length + self._remaining = length + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._remaining == 0: + return EndOfMessage() + data = buf.maybe_extract_at_most(self._remaining) + if data is None: + return None + self._remaining -= len(data) + return Data(data=data) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(received {} bytes, expected {})".format( + self._length - self._remaining, self._length + ) + ) + + +chunk_header_re = re.compile(chunk_header.encode("ascii")) + + +class ChunkedReader: + def __init__(self) -> None: + self._bytes_in_chunk = 0 + # After reading a chunk, we have to throw away the trailing \r\n. 
+ # This tracks the bytes that we need to match and throw away. + self._bytes_to_discard = b"" + self._reading_trailer = False + + def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: + if self._reading_trailer: + lines = buf.maybe_extract_lines() + if lines is None: + return None + return EndOfMessage(headers=list(_decode_header_lines(lines))) + if self._bytes_to_discard: + data = buf.maybe_extract_at_most(len(self._bytes_to_discard)) + if data is None: + return None + if data != self._bytes_to_discard[: len(data)]: + raise LocalProtocolError( + f"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})" + ) + self._bytes_to_discard = self._bytes_to_discard[len(data) :] + if self._bytes_to_discard: + return None + # else, fall through and read some more + assert self._bytes_to_discard == b"" + if self._bytes_in_chunk == 0: + # We need to refill our chunk count + chunk_header = buf.maybe_extract_next_line() + if chunk_header is None: + return None + matches = validate( + chunk_header_re, + chunk_header, + "illegal chunk header: {!r}", + chunk_header, + ) + # XX FIXME: we discard chunk extensions. Does anyone care? 
+ self._bytes_in_chunk = int(matches["chunk_size"], base=16) + if self._bytes_in_chunk == 0: + self._reading_trailer = True + return self(buf) + chunk_start = True + else: + chunk_start = False + assert self._bytes_in_chunk > 0 + data = buf.maybe_extract_at_most(self._bytes_in_chunk) + if data is None: + return None + self._bytes_in_chunk -= len(data) + if self._bytes_in_chunk == 0: + self._bytes_to_discard = b"\r\n" + chunk_end = True + else: + chunk_end = False + return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) + + def read_eof(self) -> NoReturn: + raise RemoteProtocolError( + "peer closed connection without sending complete message body " + "(incomplete chunked read)" + ) + + +class Http10Reader: + def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: + data = buf.maybe_extract_at_most(999999999) + if data is None: + return None + return Data(data=data) + + def read_eof(self) -> EndOfMessage: + return EndOfMessage() + + +def expect_nothing(buf: ReceiveBuffer) -> None: + if buf: + raise LocalProtocolError("Got data when expecting EOF") + return None + + +ReadersType = Dict[ + Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], + Union[Callable[..., Any], Dict[str, Callable[..., Any]]], +] + +READERS: ReadersType = { + (CLIENT, IDLE): maybe_read_from_IDLE_client, + (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, + (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, + (CLIENT, DONE): expect_nothing, + (CLIENT, MUST_CLOSE): expect_nothing, + (CLIENT, CLOSED): expect_nothing, + (SERVER, DONE): expect_nothing, + (SERVER, MUST_CLOSE): expect_nothing, + (SERVER, CLOSED): expect_nothing, + SEND_BODY: { + "chunked": ChunkedReader, + "content-length": ContentLengthReader, + "http/1.0": Http10Reader, + }, +} diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py new file mode 100644 index 
0000000000000000000000000000000000000000..e5c4e08a56f5081e87103f38b4add6ce1b730204 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_receivebuffer.py @@ -0,0 +1,153 @@ +import re +import sys +from typing import List, Optional, Union + +__all__ = ["ReceiveBuffer"] + + +# Operations we want to support: +# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), +# or wait until there is one +# - read at-most-N bytes +# Goals: +# - on average, do this fast +# - worst case, do this in O(n) where n is the number of bytes processed +# Plan: +# - store bytearray, offset, how far we've searched for a separator token +# - use the how-far-we've-searched data to avoid rescanning +# - while doing a stream of uninterrupted processing, advance offset instead +# of constantly copying +# WARNING: +# - I haven't benchmarked or profiled any of this yet. +# +# Note that starting in Python 3.4, deleting the initial n bytes from a +# bytearray is amortized O(n), thanks to some excellent work by Antoine +# Martin: +# +# https://bugs.python.org/issue19087 +# +# This means that if we only supported 3.4+, we could get rid of the code here +# involving self._start and self.compress, because it's doing exactly the same +# thing that bytearray now does internally. +# +# BUT unfortunately, we still support 2.7, and reading short segments out of a +# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually +# delete this code. Yet: +# +# https://pythonclock.org/ +# +# (Two things to double-check first though: make sure PyPy also has the +# optimization, and benchmark to make sure it's a win, since we do have a +# slightly clever thing where we delay calling compress() until we've +# processed a whole event, which could in theory be slightly more efficient +# than the internal bytearray support.) 
+blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) + + +class ReceiveBuffer: + def __init__(self) -> None: + self._data = bytearray() + self._next_line_search = 0 + self._multiple_lines_search = 0 + + def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": + self._data += byteslike + return self + + def __bool__(self) -> bool: + return bool(len(self)) + + def __len__(self) -> int: + return len(self._data) + + # for @property unprocessed_data + def __bytes__(self) -> bytes: + return bytes(self._data) + + def _extract(self, count: int) -> bytearray: + # extracting an initial slice of the data buffer and return it + out = self._data[:count] + del self._data[:count] + + self._next_line_search = 0 + self._multiple_lines_search = 0 + + return out + + def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: + """ + Extract a fixed number of bytes from the buffer. + """ + out = self._data[:count] + if not out: + return None + + return self._extract(count) + + def maybe_extract_next_line(self) -> Optional[bytearray]: + """ + Extract the first line, if it is completed in the buffer. + """ + # Only search in buffer space that we've not already looked at. + search_start_index = max(0, self._next_line_search - 1) + partial_idx = self._data.find(b"\r\n", search_start_index) + + if partial_idx == -1: + self._next_line_search = len(self._data) + return None + + # + 2 is to compensate len(b"\r\n") + idx = partial_idx + 2 + + return self._extract(idx) + + def maybe_extract_lines(self) -> Optional[List[bytearray]]: + """ + Extract everything up to the first blank line, and return a list of lines. + """ + # Handle the case where we have an immediate empty line. + if self._data[:1] == b"\n": + self._extract(1) + return [] + + if self._data[:2] == b"\r\n": + self._extract(2) + return [] + + # Only search in buffer space that we've not already looked at. 
+ match = blank_line_regex.search(self._data, self._multiple_lines_search) + if match is None: + self._multiple_lines_search = max(0, len(self._data) - 2) + return None + + # Truncate the buffer and return it. + idx = match.span(0)[-1] + out = self._extract(idx) + lines = out.split(b"\n") + + for line in lines: + if line.endswith(b"\r"): + del line[-1] + + assert lines[-2] == lines[-1] == b"" + + del lines[-2:] + + return lines + + # In theory we should wait until `\r\n` before starting to validate + # incoming data. However it's interesting to detect (very) invalid data + # early given they might not even contain `\r\n` at all (hence only + # timeout will get rid of them). + # This is not a 100% effective detection but more of a cheap sanity check + # allowing for early abort in some useful cases. + # This is especially interesting when peer is messing up with HTTPS and + # sent us a TLS stream where we were expecting plain HTTP given all + # versions of TLS so far start handshake with a 0x16 message type code. + def is_next_line_obviously_invalid_request_line(self) -> bool: + try: + # HTTP header line must not contain non-printable characters + # and should not start with a space + return self._data[0] < 0x21 + except IndexError: + return False diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py new file mode 100644 index 0000000000000000000000000000000000000000..3ad444b043e3f3d6c05c2d9d84d5119312bfaa34 --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_state.py @@ -0,0 +1,365 @@ +################################################################ +# The core state machine +################################################################ +# +# Rule 1: everything that affects the state machine and state transitions must +# live here in this file. 
As much as possible goes into the table-based +# representation, but for the bits that don't quite fit, the actual code and +# state must nonetheless live here. +# +# Rule 2: this file does not know about what role we're playing; it only knows +# about HTTP request/response cycles in the abstract. This ensures that we +# don't cheat and apply different rules to local and remote parties. +# +# +# Theory of operation +# =================== +# +# Possibly the simplest way to think about this is that we actually have 5 +# different state machines here. Yes, 5. These are: +# +# 1) The client state, with its complicated automaton (see the docs) +# 2) The server state, with its complicated automaton (see the docs) +# 3) The keep-alive state, with possible states {True, False} +# 4) The SWITCH_CONNECT state, with possible states {False, True} +# 5) The SWITCH_UPGRADE state, with possible states {False, True} +# +# For (3)-(5), the first state listed is the initial state. +# +# (1)-(3) are stored explicitly in member variables. The last +# two are stored implicitly in the pending_switch_proposals set as: +# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) +# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) +# +# And each of these machines has two different kinds of transitions: +# +# a) Event-triggered +# b) State-triggered +# +# Event triggered is the obvious thing that you'd think it is: some event +# happens, and if it's the right event at the right time then a transition +# happens. But there are somewhat complicated rules for which machines can +# "see" which events. (As a rule of thumb, if a machine "sees" an event, this +# means two things: the event can affect the machine, and if the machine is +# not in a state where it expects that event then it's an error.) These rules +# are: +# +# 1) The client machine sees all h11.events objects emitted by the client. +# +# 2) The server machine sees all h11.events objects emitted by the server. 
+# +# It also sees the client's Request event. +# +# And sometimes, server events are annotated with a _SWITCH_* event. For +# example, we can have a (Response, _SWITCH_CONNECT) event, which is +# different from a regular Response event. +# +# 3) The keep-alive machine sees the process_keep_alive_disabled() event +# (which is derived from Request/Response events), and this event +# transitions it from True -> False, or from False -> False. There's no way +# to transition back. +# +# 4&5) The _SWITCH_* machines transition from False->True when we get a +# Request that proposes the relevant type of switch (via +# process_client_switch_proposals), and they go from True->False when we +# get a Response that has no _SWITCH_* annotation. +# +# So that's event-triggered transitions. +# +# State-triggered transitions are less standard. What they do here is couple +# the machines together. The way this works is, when certain *joint* +# configurations of states are achieved, then we automatically transition to a +# new *joint* state. So, for example, if we're ever in a joint state with +# +# client: DONE +# keep-alive: False +# +# then the client state immediately transitions to: +# +# client: MUST_CLOSE +# +# This is fundamentally different from an event-based transition, because it +# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state +# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive +# transitioned True -> False. Either way, once this precondition is satisfied, +# this transition is immediately triggered. +# +# What if two conflicting state-based transitions get enabled at the same +# time? In practice there's only one case where this arises (client DONE -> +# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by +# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. 
+# +# Implementation +# -------------- +# +# The event-triggered transitions for the server and client machines are all +# stored explicitly in a table. Ditto for the state-triggered transitions that +# involve just the server and client state. +# +# The transitions for the other machines, and the state-triggered transitions +# that involve the other machines, are written out as explicit Python code. +# +# It'd be nice if there were some cleaner way to do all this. This isn't +# *too* terrible, but I feel like it could probably be better. +# +# WARNING +# ------- +# +# The script that generates the state machine diagrams for the docs knows how +# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS +# tables. But it can't automatically read the transitions that are written +# directly in Python code. So if you touch those, you need to also update the +# script to keep it in sync! +from typing import cast, Dict, Optional, Set, Tuple, Type, Union + +from ._events import * +from ._util import LocalProtocolError, Sentinel + +# Everything in __all__ gets re-exported as part of the h11 public API. 
+__all__ = [ + "CLIENT", + "SERVER", + "IDLE", + "SEND_RESPONSE", + "SEND_BODY", + "DONE", + "MUST_CLOSE", + "CLOSED", + "MIGHT_SWITCH_PROTOCOL", + "SWITCHED_PROTOCOL", + "ERROR", +] + + +class CLIENT(Sentinel, metaclass=Sentinel): + pass + + +class SERVER(Sentinel, metaclass=Sentinel): + pass + + +# States +class IDLE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_RESPONSE(Sentinel, metaclass=Sentinel): + pass + + +class SEND_BODY(Sentinel, metaclass=Sentinel): + pass + + +class DONE(Sentinel, metaclass=Sentinel): + pass + + +class MUST_CLOSE(Sentinel, metaclass=Sentinel): + pass + + +class CLOSED(Sentinel, metaclass=Sentinel): + pass + + +class ERROR(Sentinel, metaclass=Sentinel): + pass + + +# Switch types +class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): + pass + + +class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): + pass + + +EventTransitionType = Dict[ + Type[Sentinel], + Dict[ + Type[Sentinel], + Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], + ], +] + +EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { + CLIENT: { + IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + MIGHT_SWITCH_PROTOCOL: {}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, + SERVER: { + IDLE: { + ConnectionClosed: CLOSED, + Response: SEND_BODY, + # Special case: server sees client Request events, in this form + (Request, CLIENT): SEND_RESPONSE, + }, + SEND_RESPONSE: { + InformationalResponse: SEND_RESPONSE, + Response: SEND_BODY, + (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, + (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, + }, + SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, + DONE: {ConnectionClosed: CLOSED}, + 
MUST_CLOSE: {ConnectionClosed: CLOSED}, + CLOSED: {ConnectionClosed: CLOSED}, + SWITCHED_PROTOCOL: {}, + ERROR: {}, + }, +} + +StateTransitionType = Dict[ + Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] +] + +# NB: there are also some special-case state-triggered transitions hard-coded +# into _fire_state_triggered_transitions below. +STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { + # (Client state, Server state) -> new states + # Protocol negotiation + (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, + # Socket shutdown + (CLOSED, DONE): {SERVER: MUST_CLOSE}, + (CLOSED, IDLE): {SERVER: MUST_CLOSE}, + (ERROR, DONE): {SERVER: MUST_CLOSE}, + (DONE, CLOSED): {CLIENT: MUST_CLOSE}, + (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, + (DONE, ERROR): {CLIENT: MUST_CLOSE}, +} + + +class ConnectionState: + def __init__(self) -> None: + # Extra bits of state that don't quite fit into the state model. + + # If this is False then it enables the automatic DONE -> MUST_CLOSE + # transition. Don't set this directly; call .keep_alive_disabled() + self.keep_alive = True + + # This is a subset of {UPGRADE, CONNECT}, containing the proposals + # made by the client for switching protocols. 
    def process_event(
        self,
        role: Type[Sentinel],
        event_type: Type[Event],
        server_switch_event: Optional[Type[Sentinel]] = None,
    ) -> None:
        """Update the state machine for *role* emitting *event_type*.

        ``server_switch_event``, when given, must accompany a SERVER event
        and name one of the client's pending protocol-switch proposals; the
        pair ``(event_type, server_switch_event)`` is then used as the key
        into the event-triggered transition table instead of the bare event.

        Raises LocalProtocolError if the switch event has no matching
        pending proposal, or (via _fire_event_triggered_transitions) if the
        event is not legal in the current state.
        """
        _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type
        if server_switch_event is not None:
            assert role is SERVER
            if server_switch_event not in self.pending_switch_proposals:
                raise LocalProtocolError(
                    "Received server _SWITCH_UPGRADE event without a pending proposal"
                )
            _event_type = (event_type, server_switch_event)
        # A plain Response (no switch event) settles the question: the
        # server answered without switching, so drop any pending proposals.
        if server_switch_event is None and _event_type is Response:
            self.pending_switch_proposals = set()
        self._fire_event_triggered_transitions(role, _event_type)
        # Special case: the server state does get to see Request
        # events.
        if _event_type is Request:
            assert role is CLIENT
            self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
        self._fire_state_triggered_transitions()
+ if self.pending_switch_proposals: + if self.states[CLIENT] is DONE: + self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL + + if not self.pending_switch_proposals: + if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: + self.states[CLIENT] = DONE + + if not self.keep_alive: + for role in (CLIENT, SERVER): + if self.states[role] is DONE: + self.states[role] = MUST_CLOSE + + # Tabular state-triggered transitions + joint_state = (self.states[CLIENT], self.states[SERVER]) + changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) + self.states.update(changes) + + if self.states == start_states: + # Fixed point reached + return + + def start_next_cycle(self) -> None: + if self.states != {CLIENT: DONE, SERVER: DONE}: + raise LocalProtocolError( + f"not in a reusable state. self.states={self.states}" + ) + # Can't reach DONE/DONE with any of these active, but still, let's be + # sure. + assert self.keep_alive + assert not self.pending_switch_proposals + self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py new file mode 100644 index 0000000000000000000000000000000000000000..6718445290770e028ea2f1f662026c9a0b0991db --- /dev/null +++ b/.cache/uv/archive-v0/7f1Wn_yGu4XwJ63DdvZ9K/h11/_util.py @@ -0,0 +1,135 @@ +from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union + +__all__ = [ + "ProtocolError", + "LocalProtocolError", + "RemoteProtocolError", + "validate", + "bytesify", +] + + +class ProtocolError(Exception): + """Exception indicating a violation of the HTTP/1.1 protocol. + + This as an abstract base class, with two concrete base classes: + :exc:`LocalProtocolError`, which indicates that you tried to do something + that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which + indicates that the remote peer tried to do something that HTTP/1.1 says is + illegal. See :ref:`error-handling` for details. 
+ + In addition to the normal :exc:`Exception` features, it has one attribute: + + .. attribute:: error_status_hint + + This gives a suggestion as to what status code a server might use if + this error occurred as part of a request. + + For a :exc:`RemoteProtocolError`, this is useful as a suggestion for + how you might want to respond to a misbehaving peer, if you're + implementing a server. + + For a :exc:`LocalProtocolError`, this can be taken as a suggestion for + how your peer might have responded to *you* if h11 had allowed you to + continue. + + The default is 400 Bad Request, a generic catch-all for protocol + violations. + + """ + + def __init__(self, msg: str, error_status_hint: int = 400) -> None: + if type(self) is ProtocolError: + raise TypeError("tried to directly instantiate ProtocolError") + Exception.__init__(self, msg) + self.error_status_hint = error_status_hint + + +# Strategy: there are a number of public APIs where a LocalProtocolError can +# be raised (send(), all the different event constructors, ...), and only one +# public API where RemoteProtocolError can be raised +# (receive_data()). Therefore we always raise LocalProtocolError internally, +# and then receive_data will translate this into a RemoteProtocolError. +# +# Internally: +# LocalProtocolError is the generic "ProtocolError". +# Externally: +# LocalProtocolError is for local errors and RemoteProtocolError is for +# remote errors. +class LocalProtocolError(ProtocolError): + def _reraise_as_remote_protocol_error(self) -> NoReturn: + # After catching a LocalProtocolError, use this method to re-raise it + # as a RemoteProtocolError. This method must be called from inside an + # except: block. + # + # An easy way to get an equivalent RemoteProtocolError is just to + # modify 'self' in place. 
def validate(
    regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
    """Full-match *data* against *regex* and return the named groups.

    On a failed match, raises LocalProtocolError carrying *msg* (first
    ``.format()``-ed with *format_args*, if any were given).
    """
    match = regex.fullmatch(data)
    if match is None:
        if format_args:
            msg = msg.format(*format_args)
        raise LocalProtocolError(msg)
    return match.groupdict()
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
    """Return *s* as :class:`bytes`.

    Accepts ascii-only strs and any bytes-like object. Ints are rejected
    explicitly (``bytes(3)`` would otherwise silently build b"\\x00\\x00\\x00").
    """
    # Fast path: already exactly the type we want.
    if type(s) is bytes:
        return s
    if isinstance(s, int):
        raise TypeError("expected bytes-like object, not int")
    if isinstance(s, str):
        s = s.encode("ascii")
    return bytes(s)
def write_headers(headers: Headers, write: Writer) -> None:
    """Serialize *headers*, emitting any Host field first, then a blank line.

    "Since the Host field-value is critical information for handling a
    request, a user agent SHOULD generate Host as the first header field
    following the request-line." - RFC 7230
    """
    items = headers._full_items
    # Two passes over the same items: the first emits only Host entries,
    # the second emits everything else, preserving relative order.
    for host_pass in (True, False):
        for raw_name, name, value in items:
            if (name == b"host") is host_pass:
                write(b"%s: %s\r\n" % (raw_name, value))
    write(b"\r\n")
class ContentLengthWriter(BodyWriter):
    """Body writer for Content-Length framing.

    Tracks how many body bytes are still owed; complains if the caller
    sends too much data, finishes early, or attempts to send trailers
    (which Content-Length framing has no place for).
    """

    def __init__(self, length: int) -> None:
        # Bytes remaining before the declared Content-Length is exhausted.
        self._length = length

    def send_data(self, data: bytes, write: Writer) -> None:
        # Note: decrement happens before the check, matching the original
        # bookkeeping even on the error path.
        self._length -= len(data)
        if self._length < 0:
            raise LocalProtocolError("Too much data for declared Content-Length")
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        if self._length != 0:
            raise LocalProtocolError("Too little data for declared Content-Length")
        if headers:
            raise LocalProtocolError("Content-Length and trailers don't mix")
class Http10Writer(BodyWriter):
    """Body writer for HTTP/1.0-style framing (body ends at connection close).

    Data passes straight through untouched; end-of-message emits nothing,
    because closing the socket is handled by the Connection: close
    machinery elsewhere.
    """

    def send_data(self, data: bytes, write: Writer) -> None:
        write(data)

    def send_eom(self, headers: Headers, write: Writer) -> None:
        # HTTP/1.0 framing has nowhere to put trailing headers.
        if headers:
            raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
        # no need to close the socket ourselves, that will be taken care of
        # by Connection: close machinery
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cfdc030a751b089fc7e38fc88093b791605d501d --- /dev/null +++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/__init__.py @@ -0,0 +1,45 @@ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain +from .package_data import __version__ + +__all__ = [ + "__version__", + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py new file mode 100644 index 0000000000000000000000000000000000000000..cbc2e4ff4ec3e2318d47615bab44ea0ca3dba978 --- /dev/null +++ b/.cache/uv/archive-v0/9CF5plAJqnTTxoeIF7B2i/idna/codec.py @@ -0,0 +1,122 @@ +import codecs +import re +from typing import Any, Optional, Tuple + +from .core import IDNAError, alabel, decode, encode, ulabel + +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class Codec(codecs.Codec): + def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: + if errors != "strict": + raise IDNAError('Unsupported error handling "{}"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = "strict") -> 
    def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]:
        """Encode as many complete labels of *data* as possible.

        Returns (encoded bytes, number of input characters consumed).
        Unless *final* is true, a trailing label not yet terminated by a
        dot is held back — reported as unconsumed — so it can be completed
        on a later call.
        """
        if errors != "strict":
            raise IDNAError('Unsupported error handling "{}"'.format(errors))

        if not data:
            return b"", 0

        labels = _unicode_dots_re.split(data)
        trailing_dot = b""
        if labels:
            if not labels[-1]:
                # Input ended on a dot: every split label is complete.
                trailing_dot = b"."
                del labels[-1]
            elif not final:
                # Keep potentially unfinished label until the next call
                del labels[-1]
                if labels:
                    trailing_dot = b"."

        result = []
        # size counts input characters consumed: each label's length plus
        # one per separator dot between labels.
        size = 0
        for label in labels:
            result.append(alabel(label))
            if size:
                size += 1
            size += len(label)

        # Join with U+002E
        result_bytes = b".".join(result) + trailing_dot
        size += len(trailing_dot)
        return result_bytes, size
def search_function(name: str) -> Optional[codecs.CodecInfo]:
    """Codec registry hook: serve the "idna2008" codec, decline anything else."""
    if name == "idna2008":
        return codecs.CodecInfo(
            name=name,
            encode=Codec().encode,
            decode=Codec().decode,  # type: ignore
            incrementalencoder=IncrementalEncoder,
            incrementaldecoder=IncrementalDecoder,
            streamwriter=StreamWriter,
            streamreader=StreamReader,
        )
    return None
import idnadata +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b"xn--" +_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") + + +class IDNAError(UnicodeError): + """Base exception for all IDNA-encoding related problems""" + + pass + + +class IDNABidiError(IDNAError): + """Exception when bidirectional requirements are not satisfied""" + + pass + + +class InvalidCodepoint(IDNAError): + """Exception when a disallowed or unallocated codepoint is used""" + + pass + + +class InvalidCodepointContext(IDNAError): + """Exception when the codepoint is not valid in the context it is used""" + + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError("Unknown character in unicodedata") + return v + + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + + +def _punycode(s: str) -> bytes: + return s.encode("punycode") + + +def _unot(s: int) -> str: + return "U+{:04X}".format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == "": + # String likely comes from a newer version of Unicode + raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) + if direction in ["R", "AL", "AN"]: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ["R", "AL"]: + rtl = 
True + elif direction == "L": + rtl = False + else: + raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) + + valid_ending = False + number_type: Optional[str] = None + for idx, cp in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if direction not in [ + "R", + "AL", + "AN", + "EN", + "ES", + "CS", + "ET", + "ON", + "BN", + "NSM", + ]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) + # Bidi rule 3 + if direction in ["R", "AL", "EN", "AN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + # Bidi rule 4 + if direction in ["AN", "EN"]: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError("Can not mix numeral types in a right-to-left label") + else: + # Bidi rule 5 + if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: + raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) + # Bidi rule 6 + if direction in ["L", "EN"]: + valid_ending = True + elif direction != "NSM": + valid_ending = False + + if not valid_ending: + raise IDNABidiError("Label ends with illegal codepoint directionality") + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == "M": + raise IDNAError("Label begins with an illegal combining character") + return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == "--": + raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") + if label[0] == "-" or label[-1] == "-": + raise IDNAError("Label must not start or end with a hyphen") + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize("NFC", label) != label: + raise IDNAError("Label must be in Normalization Form C") + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = 
ord(label[pos]) + + if cp_value == 0x200C: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos - 1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("L"), ord("D")]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos + 1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord("T"): + continue + elif joining_type in [ord("R"), ord("D")]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200D: + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00B7: + if 0 < pos < len(label) - 1: + if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label) - 1 and len(label) > 1: + return _is_script(label[pos + 1], "Greek") + return False + + elif cp_value == 0x05F3 or cp_value == 0x05F4: + if pos > 0: + return _is_script(label[pos - 1], "Hebrew") + return False + + elif cp_value == 0x30FB: + for cp in label: + if cp == "\u30fb": + continue + if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6F0 <= ord(cp) <= 0x06F9: + return False + return True + + elif 0x6F0 <= cp_value <= 0x6F9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode("utf-8") + if len(label) == 0: + raise IDNAError("Empty Label") + + 
check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for pos, cp in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): + try: + if not valid_contextj(label, pos): + raise InvalidCodepointContext( + "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + except ValueError: + raise IDNAError( + "Unknown codepoint adjacent to joiner {} at position {} in {}".format( + _unot(cp_value), pos + 1, repr(label) + ) + ) + elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): + if not valid_contexto(label, pos): + raise InvalidCodepointContext( + "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) + ) + else: + raise InvalidCodepoint( + "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) + ) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode("ascii") + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + return label_bytes + except UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError("Label too long") + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode("ascii") + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = bytes(label) + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix) :] + if not label_bytes: + raise IDNAError("Malformed A-label, no Punycode eligible content found") + if label_bytes.decode("ascii")[-1] == "-": + raise 
IDNAError("A-label must not end with a hyphen") + else: + check_label(label_bytes) + return label_bytes.decode("ascii") + + try: + label = label_bytes.decode("punycode") + except UnicodeError: + raise IDNAError("Invalid A-label") + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + + output = "" + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement: Optional[str] = None + if len(uts46row) == 3: + replacement = uts46row[2] + if ( + status == "V" + or (status == "D" and not transitional) + or (status == "3" and not std3_rules and replacement is None) + ): + output += char + elif replacement is not None and ( + status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) + ): + output += replacement + elif status != "I": + raise IndexError() + except IndexError: + raise InvalidCodepoint( + "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) + ) + + return unicodedata.normalize("NFC", output) + + +def encode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, + transitional: bool = False, +) -> bytes: + if not isinstance(s, str): + try: + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("should pass a unicode string to the function rather than a byte string.") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split(".") + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if labels[-1] == "": + del labels[-1] + trailing_dot = True + for 
label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append(b"") + s = b".".join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError("Domain too long") + return s + + +def decode( + s: Union[str, bytes, bytearray], + strict: bool = False, + uts46: bool = False, + std3_rules: bool = False, +) -> str: + try: + if not isinstance(s, str): + s = str(s, "ascii") + except UnicodeDecodeError: + raise IDNAError("Invalid ASCII in A-label") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(".") + if not labels or labels == [""]: + raise IDNAError("Empty domain") + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError("Empty label") + if trailing_dot: + result.append("") + return ".".join(result) diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b9077766e9b9bdcae49ea5c8fced750ed13ec8f7 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..52118f1e5c83bd7ef39196a749651fc87d176812 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: shellingham +Version: 1.5.4 +Summary: Tool to Detect Surrounding Shell +Home-page: https://github.com/sarugaku/shellingham +Author: Tzu-ping Chung +Author-email: uranusjr@gmail.com +License: ISC License +Keywords: shell +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: ISC License (ISCL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE + +============================================= +Shellingham: Tool to Detect Surrounding Shell +============================================= + +.. 
image:: https://img.shields.io/pypi/v/shellingham.svg + :target: https://pypi.org/project/shellingham/ + +Shellingham detects what shell the current Python executable is running in. + + +Usage +===== + +.. code-block:: python + + >>> import shellingham + >>> shellingham.detect_shell() + ('bash', '/bin/bash') + +``detect_shell`` pokes around the process's running environment to determine +what shell it is run in. It returns a 2-tuple: + +* The shell name, always lowercased. +* The command used to run the shell. + +``ShellDetectionFailure`` is raised if ``detect_shell`` fails to detect the +surrounding shell. + + +Notes +===== + +* The shell name is always lowercased. +* On Windows, the shell name is the name of the executable, minus the file + extension. + + +Notes for Application Developers +================================ + +Remember, your application's user is not necessarily using a shell. +Shellingham raises ``ShellDetectionFailure`` if there is no shell to detect, +but *your application should almost never do this to your user*. + +A practical approach to this is to wrap ``detect_shell`` in a try block, and +provide a sane default on failure + +.. code-block:: python + + try: + shell = shellingham.detect_shell() + except shellingham.ShellDetectionFailure: + shell = provide_default() + + +There are a few choices for you to choose from. + +* The POSIX standard mandates the environment variable ``SHELL`` to refer to + "the user's preferred command language interpreter". This is always available + (even if the user is not in an interactive session), and likely the correct + choice to launch an interactive sub-shell with. +* A command ``sh`` is almost guaranteed to exist, likely at ``/bin/sh``, since + several POSIX tools rely on it. This should be suitable if you want to run a + (possibly non-interactive) script. +* All versions of DOS and Windows have an environment variable ``COMSPEC``. + This can always be used to launch a usable command prompt (e.g. 
`cmd.exe` on + Windows). + +Here's a simple implementation to provide a default shell + +.. code-block:: python + + import os + + def provide_default(): + if os.name == 'posix': + return os.environ['SHELL'] + elif os.name == 'nt': + return os.environ['COMSPEC'] + raise NotImplementedError(f'OS {os.name!r} support not available') diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..b7d5ac479bf032cd37b21d0856f881d97296c2fa --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/RECORD @@ -0,0 +1,13 @@ +shellingham/__init__.py,sha256=pAKXUPKUdwyErC0ZjS-5w-fRdSbmdcfvnpt_x1yWqtA,635 +shellingham/_core.py,sha256=v-CTr_7F7cJAtNnzpa1N_Hl8afkY5yiDA4joGmsUBu0,300 +shellingham/nt.py,sha256=m6J6SuwyqVVlxXT9Bc-9F_1x-T5u0gCFFrRAF2LIkeg,4516 +shellingham/posix/__init__.py,sha256=pB69qtvZJ_yIf48nl4-ZfS3wLwwuXuknXOZhBnC2T1o,3129 +shellingham/posix/_core.py,sha256=_v18UaXbzr4muNhr3-mH1FdSdjZ_dOXQrtUyomIbKYQ,81 +shellingham/posix/proc.py,sha256=nSUxIuQSotvaDW76i0oTQAM9aZ9PXBLFAEktWljSKCo,2659 +shellingham/posix/ps.py,sha256=NGmDKCukhNp0lahwYCaMXphBYaVbhbiR9BtE0OkT8qU,1770 +shellingham-1.5.4.dist-info/LICENSE,sha256=84j9OMrRMRLB3A9mm76A5_hFQe26-3LzAw0sp2QsPJ0,751 +shellingham-1.5.4.dist-info/METADATA,sha256=GD2AIgo3STJieVc53TV8xbs_Sb05DMkZjVGA5UUaB_o,3461 +shellingham-1.5.4.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110 +shellingham-1.5.4.dist-info/top_level.txt,sha256=uKMQL5AKxPi4O9_Rbd838QeEs4ImpGQKNbEDZYqgBgk,12 +shellingham-1.5.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +shellingham-1.5.4.dist-info/RECORD,, diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL new file mode 100644 index 
0000000000000000000000000000000000000000..c34f1162ef9a50c355df1261ef6194ffc1b39975 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4e44ce0299bb38463f8491ec8850910235c2709 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/top_level.txt @@ -0,0 +1 @@ +shellingham diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham-1.5.4.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..15f7a90cbd02e5c2cc933cf6aa0374cca68035f1 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/__init__.py @@ -0,0 +1,23 @@ +import importlib +import os + +from ._core import ShellDetectionFailure + +__version__ = "1.5.4" + + +def detect_shell(pid=None, max_depth=10): + name = os.name + try: + impl = importlib.import_module(".{}".format(name), __name__) + except ImportError: + message = "Shell detection not implemented for {0!r}".format(name) + raise RuntimeError(message) + try: + get_shell = impl.get_shell + except AttributeError: + raise RuntimeError("get_shell not implemented for {0!r}".format(name)) + shell = 
get_shell(pid, max_depth=max_depth) + if shell: + return shell + raise ShellDetectionFailure() diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py new file mode 100644 index 0000000000000000000000000000000000000000..13b65417c733b54e48b120e37f573c2baa6ef72b --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/_core.py @@ -0,0 +1,11 @@ +SHELL_NAMES = ( + {"sh", "bash", "dash", "ash"} # Bourne. + | {"csh", "tcsh"} # C. + | {"ksh", "zsh", "fish"} # Common alternatives. + | {"cmd", "powershell", "pwsh"} # Microsoft. + | {"elvish", "xonsh", "nu"} # More exotic. +) + + +class ShellDetectionFailure(EnvironmentError): + pass diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py new file mode 100644 index 0000000000000000000000000000000000000000..389551b223a761fa2f97e929b60bf3ca5baed94c --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/nt.py @@ -0,0 +1,163 @@ +import contextlib +import ctypes +import os + +from ctypes.wintypes import ( + BOOL, + CHAR, + DWORD, + HANDLE, + LONG, + LPWSTR, + MAX_PATH, + PDWORD, + ULONG, +) + +from shellingham._core import SHELL_NAMES + + +INVALID_HANDLE_VALUE = HANDLE(-1).value +ERROR_NO_MORE_FILES = 18 +ERROR_INSUFFICIENT_BUFFER = 122 +TH32CS_SNAPPROCESS = 2 +PROCESS_QUERY_LIMITED_INFORMATION = 0x1000 + + +kernel32 = ctypes.windll.kernel32 + + +def _check_handle(error_val=0): + def check(ret, func, args): + if ret == error_val: + raise ctypes.WinError() + return ret + + return check + + +def _check_expected(expected): + def check(ret, func, args): + if ret: + return True + code = ctypes.GetLastError() + if code == expected: + return False + raise ctypes.WinError(code) + + return check + + +class ProcessEntry32(ctypes.Structure): + _fields_ = ( + ("dwSize", DWORD), + ("cntUsage", DWORD), + ("th32ProcessID", DWORD), + 
("th32DefaultHeapID", ctypes.POINTER(ULONG)), + ("th32ModuleID", DWORD), + ("cntThreads", DWORD), + ("th32ParentProcessID", DWORD), + ("pcPriClassBase", LONG), + ("dwFlags", DWORD), + ("szExeFile", CHAR * MAX_PATH), + ) + + +kernel32.CloseHandle.argtypes = [HANDLE] +kernel32.CloseHandle.restype = BOOL + +kernel32.CreateToolhelp32Snapshot.argtypes = [DWORD, DWORD] +kernel32.CreateToolhelp32Snapshot.restype = HANDLE +kernel32.CreateToolhelp32Snapshot.errcheck = _check_handle( # type: ignore + INVALID_HANDLE_VALUE, +) + +kernel32.Process32First.argtypes = [HANDLE, ctypes.POINTER(ProcessEntry32)] +kernel32.Process32First.restype = BOOL +kernel32.Process32First.errcheck = _check_expected( # type: ignore + ERROR_NO_MORE_FILES, +) + +kernel32.Process32Next.argtypes = [HANDLE, ctypes.POINTER(ProcessEntry32)] +kernel32.Process32Next.restype = BOOL +kernel32.Process32Next.errcheck = _check_expected( # type: ignore + ERROR_NO_MORE_FILES, +) + +kernel32.GetCurrentProcessId.argtypes = [] +kernel32.GetCurrentProcessId.restype = DWORD + +kernel32.OpenProcess.argtypes = [DWORD, BOOL, DWORD] +kernel32.OpenProcess.restype = HANDLE +kernel32.OpenProcess.errcheck = _check_handle( # type: ignore + INVALID_HANDLE_VALUE, +) + +kernel32.QueryFullProcessImageNameW.argtypes = [HANDLE, DWORD, LPWSTR, PDWORD] +kernel32.QueryFullProcessImageNameW.restype = BOOL +kernel32.QueryFullProcessImageNameW.errcheck = _check_expected( # type: ignore + ERROR_INSUFFICIENT_BUFFER, +) + + +@contextlib.contextmanager +def _handle(f, *args, **kwargs): + handle = f(*args, **kwargs) + try: + yield handle + finally: + kernel32.CloseHandle(handle) + + +def _iter_processes(): + f = kernel32.CreateToolhelp32Snapshot + with _handle(f, TH32CS_SNAPPROCESS, 0) as snap: + entry = ProcessEntry32() + entry.dwSize = ctypes.sizeof(entry) + ret = kernel32.Process32First(snap, entry) + while ret: + yield entry + ret = kernel32.Process32Next(snap, entry) + + +def _get_full_path(proch): + size = DWORD(MAX_PATH) + while True: + 
path_buff = ctypes.create_unicode_buffer("", size.value) + if kernel32.QueryFullProcessImageNameW(proch, 0, path_buff, size): + return path_buff.value + size.value *= 2 + + +def get_shell(pid=None, max_depth=10): + proc_map = { + proc.th32ProcessID: (proc.th32ParentProcessID, proc.szExeFile) + for proc in _iter_processes() + } + pid = pid or os.getpid() + + for _ in range(0, max_depth + 1): + try: + ppid, executable = proc_map[pid] + except KeyError: # No such process? Give up. + break + + # The executable name would be encoded with the current code page if + # we're in ANSI mode (usually). Try to decode it into str/unicode, + # replacing invalid characters to be safe (not thoeratically necessary, + # I think). Note that we need to use 'mbcs' instead of encoding + # settings from sys because this is from the Windows API, not Python + # internals (which those settings reflect). (pypa/pipenv#3382) + if isinstance(executable, bytes): + executable = executable.decode("mbcs", "replace") + + name = executable.rpartition(".")[0].lower() + if name not in SHELL_NAMES: + pid = ppid + continue + + key = PROCESS_QUERY_LIMITED_INFORMATION + with _handle(kernel32.OpenProcess, key, 0, pid) as proch: + return (name, _get_full_path(proch)) + + return None diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5bd2070db27189e62a1867e4de49f16f8c8841ff --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/__init__.py @@ -0,0 +1,112 @@ +import os +import re + +from .._core import SHELL_NAMES, ShellDetectionFailure +from . 
import proc, ps + +# Based on QEMU docs: https://www.qemu.org/docs/master/user/main.html +QEMU_BIN_REGEX = re.compile( + r"""qemu- + (alpha + |armeb + |arm + |m68k + |cris + |i386 + |x86_64 + |microblaze + |mips + |mipsel + |mips64 + |mips64el + |mipsn32 + |mipsn32el + |nios2 + |ppc64 + |ppc + |sh4eb + |sh4 + |sparc + |sparc32plus + |sparc64 + )""", + re.VERBOSE, +) + + +def _iter_process_parents(pid, max_depth=10): + """Select a way to obtain process information from the system. + + * `/proc` is used if supported. + * The system `ps` utility is used as a fallback option. + """ + for impl in (proc, ps): + try: + iterator = impl.iter_process_parents(pid, max_depth) + except EnvironmentError: + continue + return iterator + raise ShellDetectionFailure("compatible proc fs or ps utility is required") + + +def _get_login_shell(proc_cmd): + """Form shell information from SHELL environ if possible.""" + login_shell = os.environ.get("SHELL", "") + if login_shell: + proc_cmd = login_shell + else: + proc_cmd = proc_cmd[1:] + return (os.path.basename(proc_cmd).lower(), proc_cmd) + + +_INTERPRETER_SHELL_NAMES = [ + (re.compile(r"^python(\d+(\.\d+)?)?$"), {"xonsh"}), +] + + +def _get_interpreter_shell(proc_name, proc_args): + """Get shell invoked via an interpreter. + + Some shells are implemented on, and invoked with an interpreter, e.g. xonsh + is commonly executed with an executable Python script. This detects what + script the interpreter is actually running, and check whether that looks + like a shell. + + See sarugaku/shellingham#26 for rational. + """ + for pattern, shell_names in _INTERPRETER_SHELL_NAMES: + if not pattern.match(proc_name): + continue + for arg in proc_args: + name = os.path.basename(arg).lower() + if os.path.isfile(arg) and name in shell_names: + return (name, arg) + return None + + +def _get_shell(cmd, *args): + if cmd.startswith("-"): # Login shell! Let's use this. 
+ return _get_login_shell(cmd) + name = os.path.basename(cmd).lower() + if name == "rosetta" or QEMU_BIN_REGEX.fullmatch(name): + # If the current process is Rosetta or QEMU, this likely is a + # containerized process. Parse out the actual command instead. + cmd = args[0] + args = args[1:] + name = os.path.basename(cmd).lower() + if name in SHELL_NAMES: # Command looks like a shell. + return (name, cmd) + shell = _get_interpreter_shell(name, args) + if shell: + return shell + return None + + +def get_shell(pid=None, max_depth=10): + """Get the shell that the supplied pid or os.getpid() is running in.""" + pid = str(pid or os.getpid()) + for proc_args, _, _ in _iter_process_parents(pid, max_depth): + shell = _get_shell(*proc_args) + if shell: + return shell + return None diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py new file mode 100644 index 0000000000000000000000000000000000000000..adc49e6e7a9d3edf062c55e0078136899f78d30d --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/_core.py @@ -0,0 +1,3 @@ +import collections + +Process = collections.namedtuple("Process", "args pid ppid") diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py new file mode 100644 index 0000000000000000000000000000000000000000..950f63228e5b328f82b70da8851ec60c6a2ff029 --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/proc.py @@ -0,0 +1,83 @@ +import io +import os +import re +import sys + +from ._core import Process + +# FreeBSD: https://www.freebsd.org/cgi/man.cgi?query=procfs +# NetBSD: https://man.netbsd.org/NetBSD-9.3-STABLE/mount_procfs.8 +# DragonFlyBSD: https://www.dragonflybsd.org/cgi/web-man?command=procfs +BSD_STAT_PPID = 2 + +# See https://docs.kernel.org/filesystems/proc.html +LINUX_STAT_PPID = 3 + +STAT_PATTERN = 
re.compile(r"\(.+\)|\S+") + + +def detect_proc(): + """Detect /proc filesystem style. + + This checks the /proc/{pid} directory for possible formats. Returns one of + the following as str: + + * `stat`: Linux-style, i.e. ``/proc/{pid}/stat``. + * `status`: BSD-style, i.e. ``/proc/{pid}/status``. + """ + pid = os.getpid() + for name in ("stat", "status"): + if os.path.exists(os.path.join("/proc", str(pid), name)): + return name + raise ProcFormatError("unsupported proc format") + + +def _use_bsd_stat_format(): + try: + return os.uname().sysname.lower() in ("freebsd", "netbsd", "dragonfly") + except Exception: + return False + + +def _get_ppid(pid, name): + path = os.path.join("/proc", str(pid), name) + with io.open(path, encoding="ascii", errors="replace") as f: + parts = STAT_PATTERN.findall(f.read()) + # We only care about TTY and PPID -- both are numbers. + if _use_bsd_stat_format(): + return parts[BSD_STAT_PPID] + return parts[LINUX_STAT_PPID] + + +def _get_cmdline(pid): + path = os.path.join("/proc", str(pid), "cmdline") + encoding = sys.getfilesystemencoding() or "utf-8" + with io.open(path, encoding=encoding, errors="replace") as f: + # XXX: Command line arguments can be arbitrary byte sequences, not + # necessarily decodable. For Shellingham's purpose, however, we don't + # care. (pypa/pipenv#2820) + # cmdline appends an extra NULL at the end, hence the [:-1]. + return tuple(f.read().split("\0")[:-1]) + + +class ProcFormatError(EnvironmentError): + pass + + +def iter_process_parents(pid, max_depth=10): + """Try to look up the process tree via the /proc interface.""" + stat_name = detect_proc() + + # Inner generator function so we correctly throw an error eagerly if proc + # is not supported, rather than on the first call to the iterator. This + # allows the call site detects the correct implementation. 
+ def _iter_process_parents(pid, max_depth): + for _ in range(max_depth): + ppid = _get_ppid(pid, stat_name) + args = _get_cmdline(pid) + yield Process(args=args, pid=pid, ppid=ppid) + if ppid == "0": + break + pid = ppid + + return _iter_process_parents(pid, max_depth) diff --git a/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py new file mode 100644 index 0000000000000000000000000000000000000000..3bc39a74a56390c263e63bfead028f6bce4df3cb --- /dev/null +++ b/.cache/uv/archive-v0/9bo5BDbGReWchYHFXzpbY/shellingham/posix/ps.py @@ -0,0 +1,51 @@ +import errno +import subprocess +import sys + +from ._core import Process + + +class PsNotAvailable(EnvironmentError): + pass + + +def iter_process_parents(pid, max_depth=10): + """Try to look up the process tree via the output of `ps`.""" + try: + cmd = ["ps", "-ww", "-o", "pid=", "-o", "ppid=", "-o", "args="] + output = subprocess.check_output(cmd) + except OSError as e: # Python 2-compatible FileNotFoundError. + if e.errno != errno.ENOENT: + raise + raise PsNotAvailable("ps not found") + except subprocess.CalledProcessError as e: + # `ps` can return 1 if the process list is completely empty. + # (sarugaku/shellingham#15) + if not e.output.strip(): + return + raise + if not isinstance(output, str): + encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() + output = output.decode(encoding) + + processes_mapping = {} + for line in output.split("\n"): + try: + _pid, ppid, args = line.strip().split(None, 2) + # XXX: This is not right, but we are really out of options. + # ps does not offer a sane way to decode the argument display, + # and this is "Good Enough" for obtaining shell names. Hopefully + # people don't name their shell with a space, or have something + # like "/usr/bin/xonsh is uber". 
(sarugaku/shellingham#14) + args = tuple(a.strip() for a in args.split(" ")) + except ValueError: + continue + processes_mapping[_pid] = Process(args=args, pid=_pid, ppid=ppid) + + for _ in range(max_depth): + try: + process = processes_mapping[pid] + except KeyError: + return + yield process + pid = process.ppid diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3200e601f970271fdde3fcc74f9af4423655a79d --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/METADATA @@ -0,0 +1,107 @@ +Metadata-Version: 2.4 +Name: packaging +Version: 26.0 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-Expression: Apache-2.0 OR BSD-2-Clause +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. 
start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, metadata, lockfiles, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Metadata +- Lockfiles +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. 
_`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..7ac212dd2e350a114166bbe59643b3a37d7e1069 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/RECORD @@ -0,0 +1,24 @@ +packaging/__init__.py,sha256=y4lVbpeBzCGk-IPDw5BGBZ_b0P3ukEEJZAbGYc6Ey8c,494 +packaging/_elffile.py,sha256=-sKkptYqzYw2-x3QByJa5mB4rfPWu1pxkZHRx1WAFCY,3211 +packaging/_manylinux.py,sha256=Hf6nB0cOrayEs96-p3oIXAgGnFquv20DO5l-o2_Xnv0,9559 +packaging/_musllinux.py,sha256=Z6swjH3MA7XS3qXnmMN7QPhqP3fnoYI0eQ18e9-HgAE,2707 +packaging/_parser.py,sha256=U_DajsEx2VoC_F46fSVV3hDKNCWoQYkPkasO3dld0ig,10518 +packaging/_structures.py,sha256=Hn49Ta8zV9Wo8GiCL8Nl2ARZY983Un3pruZGVNldPwE,1514 +packaging/_tokenizer.py,sha256=M8EwNIdXeL9NMFuFrQtiOKwjka_xFx8KjRQnfE8O_z8,5421 +packaging/markers.py,sha256=ZX-cLvW1S3cZcEc0fHI4z7zSx5U2T19yMpDP_mE-CYw,12771 +packaging/metadata.py,sha256=CWVZpN_HfoYMSSDuCP7igOvGgqA9AOmpW8f3qTisfnc,39360 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/pylock.py,sha256=-R1uNfJ4PaLto7Mg62YsGOHgvskuiIEqPwxOywl42Jk,22537 +packaging/requirements.py,sha256=PMCAWD8aNMnVD-6uZMedhBuAVX2573eZ4yPBLXmz04I,2870 +packaging/specifiers.py,sha256=EPNPimY_zFivthv1vdjZYz5IqkKGsnKR2yKh-EVyvZw,40797 +packaging/tags.py,sha256=cXLV1pJD3UtJlDg7Wz3zrfdQhRZqr8jumSAKKAAd2xE,22856 +packaging/utils.py,sha256=N4c6oZzFJy6klTZ3AnkNz7sSkJesuFWPp68LA3B5dAo,5040 +packaging/version.py,sha256=7XWlL2IDYLwDYC0ht6cFEhapLwLWbmyo4rb7sEFj0x8,23272 
+packaging/licenses/__init__.py,sha256=TwXLHZCXwSgdFwRLPxW602T6mSieunSFHM6fp8pgW78,5819 +packaging/licenses/_spdx.py,sha256=WW7DXiyg68up_YND_wpRYlr1SHhiV4FfJLQffghhMxQ,51122 +packaging-26.0.dist-info/licenses/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-26.0.dist-info/licenses/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-26.0.dist-info/licenses/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-26.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82 +packaging-26.0.dist-info/METADATA,sha256=M2K7fWom2iliuo2qpHhc0LrKwhq6kIoRlcyPWVgKJlo,3309 +packaging-26.0.dist-info/RECORD,, diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d8b9936dad9ab2513fa6979f411560d3b6b57e37 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.12.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. 
diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD new file mode 100644 index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging-26.0.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..21695a74b5107c96ba4bb2cbca6b7f259dacd330 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/__init__.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "26.0" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = f"2014 {__author__}" diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py new file mode 100644 index 0000000000000000000000000000000000000000..497b0645217512ae2ba8ff61341fd2bbfa3648cd --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_elffile.py @@ -0,0 +1,108 @@ +""" +ELF file parser. + +This provides a class ``ELFFile`` that parses an ELF executable in a similar +interface to ``ZipFile``. Only the read interface is implemented. + +ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +""" + +from __future__ import annotations + +import enum +import os +import struct +from typing import IO + + +class ELFInvalid(ValueError): + pass + + +class EIClass(enum.IntEnum): + C32 = 1 + C64 = 2 + + +class EIData(enum.IntEnum): + Lsb = 1 + Msb = 2 + + +class EMachine(enum.IntEnum): + I386 = 3 + S390 = 22 + Arm = 40 + X8664 = 62 + AArc64 = 183 + + +class ELFFile: + """ + Representation of an ELF executable. + """ + + def __init__(self, f: IO[bytes]) -> None: + self._f = f + + try: + ident = self._read("16B") + except struct.error as e: + raise ELFInvalid("unable to parse identification") from e + magic = bytes(ident[:4]) + if magic != b"\x7fELF": + raise ELFInvalid(f"invalid magic: {magic!r}") + + self.capacity = ident[4] # Format for program header (bitness). + self.encoding = ident[5] # Data structure encoding (endianness). + + try: + # e_fmt: Format for program header. + # p_fmt: Format for section header. + # p_idx: Indexes to find p_type, p_offset, and p_filesz. 
+ e_fmt, self._p_fmt, self._p_idx = { + (1, 1): ("HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB. + (2, 1): ("HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB. + }[(self.capacity, self.encoding)] + except KeyError as e: + raise ELFInvalid( + f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})" + ) from e + + try: + ( + _, + self.machine, # Architecture type. + _, + _, + self._e_phoff, # Offset of program header. + _, + self.flags, # Processor-specific flags. + _, + self._e_phentsize, # Size of section. + self._e_phnum, # Number of sections. + ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> str | None: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. 
+ continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py new file mode 100644 index 0000000000000000000000000000000000000000..0e79e8a882be74fe76c80ccf49a9cd68fb636fd4 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_manylinux.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Generator, Iterator, NamedTuple, Sequence + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + +_ALLOWED_ARCHS = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", +} + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then. 
+@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + return any(arch in _ALLOWED_ARCHS for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> str | None: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. This strategy is used in the standard library + # platform module. 
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 + try: + # Should be a string like "glibc 2.17". + version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") + assert version_string is not None + _, version = version_string.rsplit() + except (AssertionError, AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def _glibc_version_string_ctypes() -> str | None: + """ + Fallback implementation of glibc_version_string using ctypes. + """ + try: + import ctypes # noqa: PLC0415 + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + # + # We must also handle the special case where the executable is not a + # dynamically linked executable. This can occur when using musl libc, + # for example. In this situation, dlopen() will error, leading to an + # OSError. Interestingly, at least in the case of musl, there is no + # errno set on the OSError. The single string argument used to construct + # OSError comes from libc itself and is therefore not portable to + # hard code here. In any case, failure to call dlopen() means we + # can proceed, so we bail on our attempt. + try: + process_namespace = ctypes.CDLL(None) + except OSError: + return None + + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. 
+ return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str: str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +def _glibc_version_string() -> str | None: + """Returns glibc version string, or None if not using glibc.""" + return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> _GLibCVersion: + """Parse glibc version. + + We use a regexp instead of str.split because we want to discard any + random junk that might come after the minor version -- this might happen + in patched/forked versions of glibc (e.g. Linaro's version of glibc + uses version strings like "2.20-2014.11"). See gh-3588. + """ + m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) + if not m: + warnings.warn( + f"Expected glibc version with 2 components major.minor, got: {version_str}", + RuntimeWarning, + stacklevel=2, + ) + return _GLibCVersion(-1, -1) + return _GLibCVersion(int(m.group("major")), int(m.group("minor"))) + + +@functools.lru_cache +def _get_glibc_version() -> _GLibCVersion: + version_str = _glibc_version_string() + if version_str is None: + return _GLibCVersion(-1, -1) + return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(arch: str, version: _GLibCVersion) -> bool: + sys_glibc = _get_glibc_version() + if sys_glibc < version: + return False + # Check for presence of _manylinux module. 
+ try: + import _manylinux # noqa: PLC0415 + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5) and hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12) and hasattr( + _manylinux, "manylinux2010_compatible" + ): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17) and hasattr( + _manylinux, "manylinux2014_compatible" + ): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP: dict[_GLibCVersion, str] = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + _GLibCVersion(2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + _GLibCVersion(2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + _GLibCVersion(2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. 
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + if _is_compatible(arch, glibc_version): + yield "manylinux_{}_{}_{}".format(*glibc_version, arch) + + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py new file mode 100644 index 0000000000000000000000000000000000000000..4e8116a79ca80d60657542a23b4bbcbc3c518eaf --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_musllinux.py @@ -0,0 +1,85 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. 
+""" + +from __future__ import annotations + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> _MuslVersion | None: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], check=False, stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. 
+ return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..f6c1f5cd226b926f96a3bb1e9fb0f18d1bd021c9 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_parser.py @@ -0,0 +1,365 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. +""" + +from __future__ import annotations + +import ast +from typing import List, Literal, NamedTuple, Sequence, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + __slots__ = ("value",) + + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}({self.value!r})>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + __slots__ = () + + def serialize(self) -> str: + return str(self) + + +class Value(Node): + __slots__ = () + + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + __slots__ = () + + def serialize(self) -> str: + return str(self) + + +MarkerLogical = Literal["and", "or"] +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = List[Union["MarkerList", MarkerAtom, 
MarkerLogical]] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: list[str] + specifier: str + marker: MarkerList | None + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> tuple[str, str, MarkerList | None]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. 
+ if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=url_start, + expected="semicolon (after URL and whitespace)", + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + expected=( + "comma (within version specifier), semicolon (after version specifier)" + if specifier + else "semicolon (after name with no version specifier)" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, expected: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? + """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected {expected} or end", + span_start=span_start, + span_end=None, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> list[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? 
+ """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: list[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? 
+ """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? 
+ """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? + """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: # noqa: RET503 + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return 
Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..225e2eee01238571c50595eb104e0b70d5f503c4 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_structures.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import typing + + +@typing.final +class InfinityType: + __slots__ = () + + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +@typing.final +class NegativeInfinityType: + __slots__ = () + + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git 
a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d20dd3f56f880a92db7409a3e1335cb282a8f2 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/_tokenizer.py @@ -0,0 +1,193 @@ +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass +from typing import Generator, Mapping, NoReturn + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return f"{self.message}\n {self.source}\n {marker}" + + +DEFAULT_RULES: dict[str, re.Pattern[str]] = { + "LEFT_PARENTHESIS": re.compile(r"\("), + "RIGHT_PARENTHESIS": re.compile(r"\)"), + "LEFT_BRACKET": re.compile(r"\["), + "RIGHT_BRACKET": re.compile(r"\]"), + "SEMICOLON": re.compile(r";"), + "COMMA": re.compile(r","), + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": re.compile(r"(===|==|~=|!=|<=|>=|<|>)"), + "BOOLOP": re.compile(r"\b(or|and)\b"), + "IN": re.compile(r"\bin\b"), + "NOT": re.compile(r"\bnot\b"), + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extras? 
+ |dependency_groups + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": re.compile(r"\@"), + "URL": re.compile(r"[^ \t]+"), + "IDENTIFIER": re.compile(r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b"), + "VERSION_PREFIX_TRAIL": re.compile(r"\.\*"), + "VERSION_LOCAL_LABEL_TRAIL": re.compile(r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*"), + "WS": re.compile(r"[ \t]+"), + "END": re.compile(r"$"), +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. + """ + + def __init__( + self, + source: str, + *, + rules: Mapping[str, re.Pattern[str]], + ) -> None: + self.source = source + self.rules = rules + self.next_token: Token | None = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert self.next_token is None, ( + f"Cannot check for {name!r}, already have {self.next_token!r}" + ) + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. 
+ """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: int | None = None, + span_end: int | None = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Generator[None, None, None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..335b275fa7575b0a7c525a713fbe0252ad2d956f --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/__init__.py @@ -0,0 +1,147 @@ +####################################################################################### +# +# Adapted from: +# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py +# +# MIT License +# +# Copyright (c) 2017-present Ofek Lev +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of this +# software and associated documentation files (the "Software"), to deal 
in the Software +# without restriction, including without limitation the rights to use, copy, modify, +# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be included in all copies +# or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE +# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# +# With additional allowance of arbitrary `LicenseRef-` identifiers, not just +# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`. +# +####################################################################################### +from __future__ import annotations + +import re +from typing import NewType, cast + +from ._spdx import EXCEPTIONS, LICENSES + +__all__ = [ + "InvalidLicenseExpression", + "NormalizedLicenseExpression", + "canonicalize_license_expression", +] + +license_ref_allowed = re.compile("^[A-Za-z0-9.-]*$") + +NormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str) + + +class InvalidLicenseExpression(ValueError): + """Raised when a license-expression string is invalid + + >>> canonicalize_license_expression("invalid") + Traceback (most recent call last): + ... 
+ packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid' + """ + + +def canonicalize_license_expression( + raw_license_expression: str, +) -> NormalizedLicenseExpression: + if not raw_license_expression: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + + # Pad any parentheses so tokenization can be achieved by merely splitting on + # whitespace. + license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ") + licenseref_prefix = "LicenseRef-" + license_refs = { + ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :] + for ref in license_expression.split() + if ref.lower().startswith(licenseref_prefix.lower()) + } + + # Normalize to lower case so we can look up licenses/exceptions + # and so boolean operators are Python-compatible. + license_expression = license_expression.lower() + + tokens = license_expression.split() + + # Rather than implementing a parenthesis/boolean logic parser, create an + # expression that Python can parse. Everything that is not involved with the + # grammar itself is replaced with the placeholder `False` and the resultant + # expression should become a valid Python expression. 
+ python_tokens = [] + for token in tokens: + if token not in {"or", "and", "with", "(", ")"}: + python_tokens.append("False") + elif token == "with": + python_tokens.append("or") + elif ( + token == "(" + and python_tokens + and python_tokens[-1] not in {"or", "and", "("} + ) or (token == ")" and python_tokens and python_tokens[-1] == "("): + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) + else: + python_tokens.append(token) + + python_expression = " ".join(python_tokens) + try: + compile(python_expression, "", "eval") + except SyntaxError: + message = f"Invalid license expression: {raw_license_expression!r}" + raise InvalidLicenseExpression(message) from None + + # Take a final pass to check for unknown licenses/exceptions. + normalized_tokens = [] + for token in tokens: + if token in {"or", "and", "with", "(", ")"}: + normalized_tokens.append(token.upper()) + continue + + if normalized_tokens and normalized_tokens[-1] == "WITH": + if token not in EXCEPTIONS: + message = f"Unknown license exception: {token!r}" + raise InvalidLicenseExpression(message) + + normalized_tokens.append(EXCEPTIONS[token]["id"]) + else: + if token.endswith("+"): + final_token = token[:-1] + suffix = "+" + else: + final_token = token + suffix = "" + + if final_token.startswith("licenseref-"): + if not license_ref_allowed.match(final_token): + message = f"Invalid licenseref: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(license_refs[final_token] + suffix) + else: + if final_token not in LICENSES: + message = f"Unknown license: {final_token!r}" + raise InvalidLicenseExpression(message) + normalized_tokens.append(LICENSES[final_token]["id"] + suffix) + + normalized_expression = " ".join(normalized_tokens) + + return cast( + "NormalizedLicenseExpression", + normalized_expression.replace("( ", "(").replace(" )", ")"), + ) diff --git 
a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py new file mode 100644 index 0000000000000000000000000000000000000000..a277af28220b6dbe4599471104d1c7a2bd1e1288 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/licenses/_spdx.py @@ -0,0 +1,799 @@ + +from __future__ import annotations + +from typing import TypedDict + +class SPDXLicense(TypedDict): + id: str + deprecated: bool + +class SPDXException(TypedDict): + id: str + deprecated: bool + + +VERSION = '3.27.0' + +LICENSES: dict[str, SPDXLicense] = { + '0bsd': {'id': '0BSD', 'deprecated': False}, + '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False}, + 'aal': {'id': 'AAL', 'deprecated': False}, + 'abstyles': {'id': 'Abstyles', 'deprecated': False}, + 'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False}, + 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False}, + 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False}, + 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False}, + 'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False}, + 'adsl': {'id': 'ADSL', 'deprecated': False}, + 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False}, + 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False}, + 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False}, + 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False}, + 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False}, + 'afmparse': {'id': 'Afmparse', 'deprecated': False}, + 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True}, + 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False}, + 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False}, + 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True}, + 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False}, + 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False}, + 'aladdin': {'id': 'Aladdin', 'deprecated': False}, + 'amd-newlib': {'id': 
'AMD-newlib', 'deprecated': False}, + 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False}, + 'aml': {'id': 'AML', 'deprecated': False}, + 'aml-glslang': {'id': 'AML-glslang', 'deprecated': False}, + 'ampas': {'id': 'AMPAS', 'deprecated': False}, + 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False}, + 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': False}, + 'any-osi': {'id': 'any-OSI', 'deprecated': False}, + 'any-osi-perl-modules': {'id': 'any-OSI-perl-modules', 'deprecated': False}, + 'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False}, + 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False}, + 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False}, + 'apafml': {'id': 'APAFML', 'deprecated': False}, + 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False}, + 'app-s2p': {'id': 'App-s2p', 'deprecated': False}, + 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False}, + 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False}, + 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False}, + 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False}, + 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False}, + 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False}, + 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False}, + 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False}, + 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False}, + 'artistic-dist': {'id': 'Artistic-dist', 'deprecated': False}, + 'aspell-ru': {'id': 'Aspell-RU', 'deprecated': False}, + 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False}, + 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False}, + 'baekmuk': {'id': 'Baekmuk', 'deprecated': False}, + 'bahyph': {'id': 'Bahyph', 'deprecated': False}, + 'barr': {'id': 'Barr', 'deprecated': False}, + 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False}, + 'beerware': {'id': 'Beerware', 'deprecated': False}, + 'bitstream-charter': {'id': 
'Bitstream-Charter', 'deprecated': False}, + 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False}, + 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False}, + 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False}, + 'blessing': {'id': 'blessing', 'deprecated': False}, + 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False}, + 'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False}, + 'boehm-gc-without-fee': {'id': 'Boehm-GC-without-fee', 'deprecated': False}, + 'borceux': {'id': 'Borceux', 'deprecated': False}, + 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False}, + 'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False}, + 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False}, + 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False}, + 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False}, + 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False}, + 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True}, + 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True}, + 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False}, + 'bsd-2-clause-pkgconf-disclaimer': {'id': 'BSD-2-Clause-pkgconf-disclaimer', 'deprecated': False}, + 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False}, + 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False}, + 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False}, + 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False}, + 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False}, + 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False}, + 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False}, + 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False}, + 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 
'deprecated': False}, + 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False}, + 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False}, + 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False}, + 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False}, + 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False}, + 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': False}, + 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False}, + 'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False}, + 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False}, + 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False}, + 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False}, + 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False}, + 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False}, + 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False}, + 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False}, + 'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False}, + 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False}, + 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False}, + 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False}, + 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True}, + 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False}, + 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False}, + 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False}, + 'cal-1.0-combined-work-exception': {'id': 
'CAL-1.0-Combined-Work-Exception', 'deprecated': False}, + 'caldera': {'id': 'Caldera', 'deprecated': False}, + 'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False}, + 'catharon': {'id': 'Catharon', 'deprecated': False}, + 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False}, + 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False}, + 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False}, + 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False}, + 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False}, + 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False}, + 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False}, + 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False}, + 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 'deprecated': False}, + 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False}, + 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False}, + 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False}, + 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False}, + 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False}, + 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False}, + 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False}, + 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False}, + 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False}, + 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False}, + 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False}, + 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False}, + 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False}, + 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False}, + 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False}, + 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0': 
{'id': 'CC-BY-NC-SA-2.0', 'deprecated': False}, + 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False}, + 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False}, + 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False}, + 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False}, + 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False}, + 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False}, + 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False}, + 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': False}, + 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False}, + 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False}, + 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False}, + 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False}, + 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False}, + 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False}, + 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False}, + 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False}, + 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False}, + 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False}, + 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False}, + 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False}, + 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False}, + 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False}, + 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False}, + 'cc-pdm-1.0': {'id': 'CC-PDM-1.0', 'deprecated': False}, + 'cc-sa-1.0': {'id': 'CC-SA-1.0', 'deprecated': False}, + 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False}, + 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False}, + 'cddl-1.1': {'id': 'CDDL-1.1', 
'deprecated': False}, + 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False}, + 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False}, + 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False}, + 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False}, + 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False}, + 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False}, + 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False}, + 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False}, + 'cecill-b': {'id': 'CECILL-B', 'deprecated': False}, + 'cecill-c': {'id': 'CECILL-C', 'deprecated': False}, + 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False}, + 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False}, + 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False}, + 'cern-ohl-s-2.0': {'id': 'CERN-OHL-S-2.0', 'deprecated': False}, + 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False}, + 'cfitsio': {'id': 'CFITSIO', 'deprecated': False}, + 'check-cvs': {'id': 'check-cvs', 'deprecated': False}, + 'checkmk': {'id': 'checkmk', 'deprecated': False}, + 'clartistic': {'id': 'ClArtistic', 'deprecated': False}, + 'clips': {'id': 'Clips', 'deprecated': False}, + 'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False}, + 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False}, + 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False}, + 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False}, + 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False}, + 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False}, + 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False}, + 'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False}, + 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False}, + 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False}, + 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': 
False}, + 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False}, + 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False}, + 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False}, + 'cronyx': {'id': 'Cronyx', 'deprecated': False}, + 'crossword': {'id': 'Crossword', 'deprecated': False}, + 'cryptoswift': {'id': 'CryptoSwift', 'deprecated': False}, + 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False}, + 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False}, + 'cube': {'id': 'Cube', 'deprecated': False}, + 'curl': {'id': 'curl', 'deprecated': False}, + 'cve-tou': {'id': 'cve-tou', 'deprecated': False}, + 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False}, + 'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False}, + 'diffmark': {'id': 'diffmark', 'deprecated': False}, + 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False}, + 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False}, + 'doc': {'id': 'DOC', 'deprecated': False}, + 'docbook-dtd': {'id': 'DocBook-DTD', 'deprecated': False}, + 'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False}, + 'docbook-stylesheet': {'id': 'DocBook-Stylesheet', 'deprecated': False}, + 'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False}, + 'dotseqn': {'id': 'Dotseqn', 'deprecated': False}, + 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False}, + 'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False}, + 'dsdp': {'id': 'DSDP', 'deprecated': False}, + 'dtoa': {'id': 'dtoa', 'deprecated': False}, + 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False}, + 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False}, + 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False}, + 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True}, + 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False}, + 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False}, + 'egenix': {'id': 'eGenix', 'deprecated': False}, + 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False}, + 'entessa': {'id': 'Entessa', 'deprecated': False}, + 'epics': {'id': 'EPICS', 
'deprecated': False}, + 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False}, + 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False}, + 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False}, + 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False}, + 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False}, + 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False}, + 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False}, + 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False}, + 'eurosym': {'id': 'Eurosym', 'deprecated': False}, + 'fair': {'id': 'Fair', 'deprecated': False}, + 'fbm': {'id': 'FBM', 'deprecated': False}, + 'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False}, + 'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False}, + 'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False}, + 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False}, + 'freeimage': {'id': 'FreeImage', 'deprecated': False}, + 'fsfap': {'id': 'FSFAP', 'deprecated': False}, + 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False}, + 'fsful': {'id': 'FSFUL', 'deprecated': False}, + 'fsfullr': {'id': 'FSFULLR', 'deprecated': False}, + 'fsfullrsd': {'id': 'FSFULLRSD', 'deprecated': False}, + 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False}, + 'fsl-1.1-alv2': {'id': 'FSL-1.1-ALv2', 'deprecated': False}, + 'fsl-1.1-mit': {'id': 'FSL-1.1-MIT', 'deprecated': False}, + 'ftl': {'id': 'FTL', 'deprecated': False}, + 'furuseth': {'id': 'Furuseth', 'deprecated': False}, + 'fwlw': {'id': 'fwlw', 'deprecated': False}, + 'game-programming-gems': {'id': 'Game-Programming-Gems', 'deprecated': False}, + 'gcr-docs': {'id': 'GCR-docs', 'deprecated': False}, + 'gd': {'id': 'GD', 'deprecated': False}, + 'generic-xts': {'id': 'generic-xts', 'deprecated': False}, + 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True}, + 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False}, + 'gfdl-1.1-invariants-or-later': {'id': 
'GFDL-1.1-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False}, + 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False}, + 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False}, + 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True}, + 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False}, + 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False}, + 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False}, + 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False}, + 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True}, + 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False}, + 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False}, + 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False}, + 'gfdl-1.3-only': {'id': 'GFDL-1.3-only', 'deprecated': False}, + 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False}, + 'giftware': {'id': 'Giftware', 'deprecated': False}, + 'gl2ps': {'id': 'GL2PS', 'deprecated': False}, + 'glide': {'id': 'Glide', 'deprecated': False}, + 'glulxe': {'id': 'Glulxe', 'deprecated': False}, + 'glwtpl': {'id': 'GLWTPL', 'deprecated': False}, + 'gnuplot': {'id': 'gnuplot', 'deprecated': False}, + 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True}, + 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True}, + 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False}, 
+ 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False}, + 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True}, + 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True}, + 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False}, + 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False}, + 'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True}, + 'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True}, + 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True}, + 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True}, + 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True}, + 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True}, + 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False}, + 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False}, + 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True}, + 'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True}, + 'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False}, + 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False}, + 'gtkbook': {'id': 'gtkbook', 'deprecated': False}, + 'gutmann': {'id': 'Gutmann', 'deprecated': False}, + 'haskellreport': {'id': 'HaskellReport', 'deprecated': False}, + 'hdf5': {'id': 'HDF5', 'deprecated': False}, + 'hdparm': {'id': 'hdparm', 'deprecated': False}, + 'hidapi': {'id': 'HIDAPI', 'deprecated': False}, + 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False}, + 'hp-1986': {'id': 'HP-1986', 'deprecated': False}, + 'hp-1989': {'id': 'HP-1989', 'deprecated': False}, + 'hpnd': {'id': 'HPND', 'deprecated': False}, + 'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False}, + 'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False}, + 
'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False}, + 'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False}, + 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False}, + 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False}, + 'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False}, + 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False}, + 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False}, + 'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False}, + 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False}, + 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False}, + 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False}, + 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False}, + 'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False}, + 'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False}, + 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False}, + 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False}, + 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False}, + 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False}, + 'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False}, + 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False}, + 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False}, + 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False}, + 'icu': {'id': 'ICU', 'deprecated': False}, + 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False}, + 'ijg': {'id': 'IJG', 'deprecated': False}, + 'ijg-short': {'id': 'IJG-short', 'deprecated': False}, + 'imagemagick': {'id': 
'ImageMagick', 'deprecated': False}, + 'imatix': {'id': 'iMatix', 'deprecated': False}, + 'imlib2': {'id': 'Imlib2', 'deprecated': False}, + 'info-zip': {'id': 'Info-ZIP', 'deprecated': False}, + 'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False}, + 'innosetup': {'id': 'InnoSetup', 'deprecated': False}, + 'intel': {'id': 'Intel', 'deprecated': False}, + 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False}, + 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False}, + 'ipa': {'id': 'IPA', 'deprecated': False}, + 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False}, + 'isc': {'id': 'ISC', 'deprecated': False}, + 'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False}, + 'jam': {'id': 'Jam', 'deprecated': False}, + 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False}, + 'jove': {'id': 'jove', 'deprecated': False}, + 'jpl-image': {'id': 'JPL-image', 'deprecated': False}, + 'jpnic': {'id': 'JPNIC', 'deprecated': False}, + 'json': {'id': 'JSON', 'deprecated': False}, + 'kastrup': {'id': 'Kastrup', 'deprecated': False}, + 'kazlib': {'id': 'Kazlib', 'deprecated': False}, + 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False}, + 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False}, + 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False}, + 'latex2e': {'id': 'Latex2e', 'deprecated': False}, + 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False}, + 'leptonica': {'id': 'Leptonica', 'deprecated': False}, + 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True}, + 'lgpl-2.0+': {'id': 'LGPL-2.0+', 'deprecated': True}, + 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False}, + 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False}, + 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True}, + 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True}, + 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False}, + 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False}, + 'lgpl-3.0': {'id': 'LGPL-3.0', 
'deprecated': True}, + 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True}, + 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False}, + 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False}, + 'lgpllr': {'id': 'LGPLLR', 'deprecated': False}, + 'libpng': {'id': 'Libpng', 'deprecated': False}, + 'libpng-1.6.35': {'id': 'libpng-1.6.35', 'deprecated': False}, + 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False}, + 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False}, + 'libtiff': {'id': 'libtiff', 'deprecated': False}, + 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False}, + 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False}, + 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False}, + 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False}, + 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False}, + 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False}, + 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False}, + 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False}, + 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False}, + 'loop': {'id': 'LOOP', 'deprecated': False}, + 'lpd-document': {'id': 'LPD-document', 'deprecated': False}, + 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False}, + 'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False}, + 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False}, + 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False}, + 'lppl-1.2': {'id': 'LPPL-1.2', 'deprecated': False}, + 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False}, + 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False}, + 'lsof': {'id': 'lsof', 'deprecated': False}, + 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False}, + 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False}, + 'lzma-sdk-9.22': {'id': 
'LZMA-SDK-9.22', 'deprecated': False}, + 'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False}, + 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False}, + 'magaz': {'id': 'magaz', 'deprecated': False}, + 'mailprio': {'id': 'mailprio', 'deprecated': False}, + 'makeindex': {'id': 'MakeIndex', 'deprecated': False}, + 'man2html': {'id': 'man2html', 'deprecated': False}, + 'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False}, + 'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False}, + 'metamail': {'id': 'metamail', 'deprecated': False}, + 'minpack': {'id': 'Minpack', 'deprecated': False}, + 'mips': {'id': 'MIPS', 'deprecated': False}, + 'miros': {'id': 'MirOS', 'deprecated': False}, + 'mit': {'id': 'MIT', 'deprecated': False}, + 'mit-0': {'id': 'MIT-0', 'deprecated': False}, + 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False}, + 'mit-click': {'id': 'MIT-Click', 'deprecated': False}, + 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False}, + 'mit-enna': {'id': 'MIT-enna', 'deprecated': False}, + 'mit-feh': {'id': 'MIT-feh', 'deprecated': False}, + 'mit-festival': {'id': 'MIT-Festival', 'deprecated': False}, + 'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False}, + 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False}, + 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False}, + 'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False}, + 'mit-wu': {'id': 'MIT-Wu', 'deprecated': False}, + 'mitnfa': {'id': 'MITNFA', 'deprecated': False}, + 'mmixware': {'id': 'MMIXware', 'deprecated': False}, + 'motosoto': {'id': 'Motosoto', 'deprecated': False}, + 'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False}, + 'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False}, + 'mpich2': {'id': 'mpich2', 'deprecated': False}, + 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False}, + 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False}, + 
'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False}, + 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False}, + 'mplus': {'id': 'mplus', 'deprecated': False}, + 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False}, + 'ms-pl': {'id': 'MS-PL', 'deprecated': False}, + 'ms-rl': {'id': 'MS-RL', 'deprecated': False}, + 'mtll': {'id': 'MTLL', 'deprecated': False}, + 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False}, + 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False}, + 'multics': {'id': 'Multics', 'deprecated': False}, + 'mup': {'id': 'Mup', 'deprecated': False}, + 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False}, + 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False}, + 'naumen': {'id': 'Naumen', 'deprecated': False}, + 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False}, + 'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False}, + 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False}, + 'ncl': {'id': 'NCL', 'deprecated': False}, + 'ncsa': {'id': 'NCSA', 'deprecated': False}, + 'net-snmp': {'id': 'Net-SNMP', 'deprecated': True}, + 'netcdf': {'id': 'NetCDF', 'deprecated': False}, + 'newsletr': {'id': 'Newsletr', 'deprecated': False}, + 'ngpl': {'id': 'NGPL', 'deprecated': False}, + 'ngrep': {'id': 'ngrep', 'deprecated': False}, + 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False}, + 'nist-pd': {'id': 'NIST-PD', 'deprecated': False}, + 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False}, + 'nist-software': {'id': 'NIST-Software', 'deprecated': False}, + 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False}, + 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False}, + 'nlpl': {'id': 'NLPL', 'deprecated': False}, + 'nokia': {'id': 'Nokia', 'deprecated': False}, + 'nosl': {'id': 'NOSL', 'deprecated': False}, + 'noweb': {'id': 'Noweb', 'deprecated': False}, + 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False}, + 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False}, + 'nposl-3.0': {'id': 'NPOSL-3.0', 
'deprecated': False}, + 'nrl': {'id': 'NRL', 'deprecated': False}, + 'ntia-pd': {'id': 'NTIA-PD', 'deprecated': False}, + 'ntp': {'id': 'NTP', 'deprecated': False}, + 'ntp-0': {'id': 'NTP-0', 'deprecated': False}, + 'nunit': {'id': 'Nunit', 'deprecated': True}, + 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False}, + 'oar': {'id': 'OAR', 'deprecated': False}, + 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False}, + 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False}, + 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False}, + 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False}, + 'offis': {'id': 'OFFIS', 'deprecated': False}, + 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False}, + 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False}, + 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False}, + 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False}, + 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False}, + 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False}, + 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False}, + 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False}, + 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False}, + 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False}, + 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False}, + 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False}, + 'ogtsl': {'id': 'OGTSL', 'deprecated': False}, + 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False}, + 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False}, + 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False}, + 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False}, + 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False}, + 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False}, + 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False}, + 'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False}, + 'oldap-2.2.1': {'id': 'OLDAP-2.2.1', 'deprecated': False}, + 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False}, + 
'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False}, + 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False}, + 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False}, + 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False}, + 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False}, + 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False}, + 'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False}, + 'oml': {'id': 'OML', 'deprecated': False}, + 'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False}, + 'openssl': {'id': 'OpenSSL', 'deprecated': False}, + 'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False}, + 'openvision': {'id': 'OpenVision', 'deprecated': False}, + 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False}, + 'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False}, + 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False}, + 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False}, + 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False}, + 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False}, + 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False}, + 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False}, + 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False}, + 'padl': {'id': 'PADL', 'deprecated': False}, + 'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False}, + 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False}, + 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False}, + 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False}, + 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False}, + 'pixar': {'id': 'Pixar', 'deprecated': False}, + 'pkgconf': {'id': 'pkgconf', 'deprecated': False}, + 'plexus': {'id': 'Plexus', 'deprecated': False}, + 'pnmstitch': {'id': 'pnmstitch', 'deprecated': False}, + 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False}, + 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0', 'deprecated': False}, + 'postgresql': {'id': 'PostgreSQL', 'deprecated': False}, + 'ppl': {'id': 
'PPL', 'deprecated': False}, + 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False}, + 'psfrag': {'id': 'psfrag', 'deprecated': False}, + 'psutils': {'id': 'psutils', 'deprecated': False}, + 'python-2.0': {'id': 'Python-2.0', 'deprecated': False}, + 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False}, + 'python-ldap': {'id': 'python-ldap', 'deprecated': False}, + 'qhull': {'id': 'Qhull', 'deprecated': False}, + 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False}, + 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False}, + 'radvd': {'id': 'radvd', 'deprecated': False}, + 'rdisc': {'id': 'Rdisc', 'deprecated': False}, + 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False}, + 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False}, + 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False}, + 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False}, + 'rsa-md': {'id': 'RSA-MD', 'deprecated': False}, + 'rscpl': {'id': 'RSCPL', 'deprecated': False}, + 'ruby': {'id': 'Ruby', 'deprecated': False}, + 'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False}, + 'sax-pd': {'id': 'SAX-PD', 'deprecated': False}, + 'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False}, + 'saxpath': {'id': 'Saxpath', 'deprecated': False}, + 'scea': {'id': 'SCEA', 'deprecated': False}, + 'schemereport': {'id': 'SchemeReport', 'deprecated': False}, + 'sendmail': {'id': 'Sendmail', 'deprecated': False}, + 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False}, + 'sendmail-open-source-1.1': {'id': 'Sendmail-Open-Source-1.1', 'deprecated': False}, + 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False}, + 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False}, + 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False}, + 'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False}, + 'sgp4': {'id': 'SGP4', 'deprecated': False}, + 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False}, + 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False}, + 'simpl-2.0': {'id': 'SimPL-2.0', 'deprecated': False}, + 'sissl': 
{'id': 'SISSL', 'deprecated': False}, + 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False}, + 'sl': {'id': 'SL', 'deprecated': False}, + 'sleepycat': {'id': 'Sleepycat', 'deprecated': False}, + 'smail-gpl': {'id': 'SMAIL-GPL', 'deprecated': False}, + 'smlnj': {'id': 'SMLNJ', 'deprecated': False}, + 'smppl': {'id': 'SMPPL', 'deprecated': False}, + 'snia': {'id': 'SNIA', 'deprecated': False}, + 'snprintf': {'id': 'snprintf', 'deprecated': False}, + 'sofa': {'id': 'SOFA', 'deprecated': False}, + 'softsurfer': {'id': 'softSurfer', 'deprecated': False}, + 'soundex': {'id': 'Soundex', 'deprecated': False}, + 'spencer-86': {'id': 'Spencer-86', 'deprecated': False}, + 'spencer-94': {'id': 'Spencer-94', 'deprecated': False}, + 'spencer-99': {'id': 'Spencer-99', 'deprecated': False}, + 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False}, + 'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False}, + 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False}, + 'ssh-short': {'id': 'SSH-short', 'deprecated': False}, + 'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False}, + 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False}, + 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True}, + 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False}, + 'sul-1.0': {'id': 'SUL-1.0', 'deprecated': False}, + 'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False}, + 'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False}, + 'sunpro': {'id': 'SunPro', 'deprecated': False}, + 'swl': {'id': 'SWL', 'deprecated': False}, + 'swrule': {'id': 'swrule', 'deprecated': False}, + 'symlinks': {'id': 'Symlinks', 'deprecated': False}, + 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False}, + 'tcl': {'id': 'TCL', 'deprecated': False}, + 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False}, + 'termreadkey': {'id': 'TermReadKey', 'deprecated': False}, + 'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False}, + 'thirdeye': {'id': 'ThirdEye', 'deprecated': False}, + 
'threeparttable': {'id': 'threeparttable', 'deprecated': False}, + 'tmate': {'id': 'TMate', 'deprecated': False}, + 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': False}, + 'tosl': {'id': 'TOSL', 'deprecated': False}, + 'tpdl': {'id': 'TPDL', 'deprecated': False}, + 'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False}, + 'trustedqsl': {'id': 'TrustedQSL', 'deprecated': False}, + 'ttwl': {'id': 'TTWL', 'deprecated': False}, + 'ttyp0': {'id': 'TTYP0', 'deprecated': False}, + 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False}, + 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False}, + 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False}, + 'ucar': {'id': 'UCAR', 'deprecated': False}, + 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False}, + 'ulem': {'id': 'ulem', 'deprecated': False}, + 'umich-merit': {'id': 'UMich-Merit', 'deprecated': False}, + 'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False}, + 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False}, + 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False}, + 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False}, + 'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False}, + 'unlicense': {'id': 'Unlicense', 'deprecated': False}, + 'unlicense-libtelnet': {'id': 'Unlicense-libtelnet', 'deprecated': False}, + 'unlicense-libwhirlpool': {'id': 'Unlicense-libwhirlpool', 'deprecated': False}, + 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False}, + 'urt-rle': {'id': 'URT-RLE', 'deprecated': False}, + 'vim': {'id': 'Vim', 'deprecated': False}, + 'vostrom': {'id': 'VOSTROM', 'deprecated': False}, + 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False}, + 'w3c': {'id': 'W3C', 'deprecated': False}, + 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False}, + 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False}, + 'w3m': {'id': 'w3m', 'deprecated': False}, + 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False}, + 'widget-workshop': {'id': 'Widget-Workshop', 
'deprecated': False}, + 'wsuipa': {'id': 'Wsuipa', 'deprecated': False}, + 'wtfpl': {'id': 'WTFPL', 'deprecated': False}, + 'wwl': {'id': 'wwl', 'deprecated': False}, + 'wxwindows': {'id': 'wxWindows', 'deprecated': True}, + 'x11': {'id': 'X11', 'deprecated': False}, + 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False}, + 'x11-swapped': {'id': 'X11-swapped', 'deprecated': False}, + 'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False}, + 'xerox': {'id': 'Xerox', 'deprecated': False}, + 'xfig': {'id': 'Xfig', 'deprecated': False}, + 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False}, + 'xinetd': {'id': 'xinetd', 'deprecated': False}, + 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False}, + 'xlock': {'id': 'xlock', 'deprecated': False}, + 'xnet': {'id': 'Xnet', 'deprecated': False}, + 'xpp': {'id': 'xpp', 'deprecated': False}, + 'xskat': {'id': 'XSkat', 'deprecated': False}, + 'xzoom': {'id': 'xzoom', 'deprecated': False}, + 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False}, + 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False}, + 'zed': {'id': 'Zed', 'deprecated': False}, + 'zeeff': {'id': 'Zeeff', 'deprecated': False}, + 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False}, + 'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False}, + 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False}, + 'zlib': {'id': 'Zlib', 'deprecated': False}, + 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False}, + 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False}, + 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False}, + 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False}, +} + +EXCEPTIONS: dict[str, SPDXException] = { + '389-exception': {'id': '389-exception', 'deprecated': False}, + 'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False}, + 'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False}, + 
'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False}, + 'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False}, + 'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False}, + 'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False}, + 'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 'deprecated': False}, + 'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False}, + 'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False}, + 'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False}, + 'cgal-linking-exception': {'id': 'CGAL-linking-exception', 'deprecated': False}, + 'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False}, + 'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False}, + 'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False}, + 'digia-qt-lgpl-exception-1.1': {'id': 'Digia-Qt-LGPL-exception-1.1', 'deprecated': False}, + 'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False}, + 'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False}, + 'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False}, + 'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False}, + 'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False}, + 'fmt-exception': {'id': 'fmt-exception', 'deprecated': False}, + 'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False}, + 'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False}, + 'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False}, + 'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False}, + 'gmsh-exception': {'id': 
'Gmsh-exception', 'deprecated': False}, + 'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False}, + 'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False}, + 'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False}, + 'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False}, + 'gpl-3.0-389-ds-base-exception': {'id': 'GPL-3.0-389-ds-base-exception', 'deprecated': False}, + 'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False}, + 'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False}, + 'gpl-3.0-linking-source-exception': {'id': 'GPL-3.0-linking-source-exception', 'deprecated': False}, + 'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False}, + 'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False}, + 'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False}, + 'harbour-exception': {'id': 'harbour-exception', 'deprecated': False}, + 'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False}, + 'independent-modules-exception': {'id': 'Independent-modules-exception', 'deprecated': False}, + 'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False}, + 'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False}, + 'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False}, + 'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False}, + 'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False}, + 'llgpl': {'id': 'LLGPL', 'deprecated': False}, + 'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False}, + 'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False}, + 'mif-exception': {'id': 'mif-exception', 'deprecated': False}, + 'mxml-exception': {'id': 'mxml-exception', 'deprecated': False}, + 'nokia-qt-exception-1.1': {'id': 
'Nokia-Qt-exception-1.1', 'deprecated': True}, + 'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False}, + 'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False}, + 'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False}, + 'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False}, + 'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False}, + 'polyparse-exception': {'id': 'polyparse-exception', 'deprecated': False}, + 'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False}, + 'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False}, + 'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False}, + 'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False}, + 'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False}, + 'romic-exception': {'id': 'romic-exception', 'deprecated': False}, + 'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False}, + 'sane-exception': {'id': 'SANE-exception', 'deprecated': False}, + 'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False}, + 'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False}, + 'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False}, + 'swi-exception': {'id': 'SWI-exception', 'deprecated': False}, + 'swift-exception': {'id': 'Swift-exception', 'deprecated': False}, + 'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False}, + 'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False}, + 'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False}, + 'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False}, + 'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False}, + 'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 
'deprecated': False}, + 'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False}, +} diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..ca3706fe492f4cf0762f7734d84c2d269f88bbc5 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/markers.py @@ -0,0 +1,388 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import operator +import os +import platform +import sys +from typing import AbstractSet, Callable, Literal, Mapping, TypedDict, Union, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "Environment", + "EvaluateContext", + "InvalidMarker", + "Marker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "default_environment", +] + +Operator = Callable[[str, Union[str, AbstractSet[str]]], bool] +EvaluateContext = Literal["metadata", "lock_file", "requirement"] +MARKERS_ALLOWING_SET = {"extras", "dependency_groups"} +MARKERS_REQUIRING_VERSION = { + "implementation_version", + "platform_release", + "python_full_version", + "python_version", +} + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. 
+ """ + + +def _normalize_extras( + result: MarkerList | MarkerAtom | str, +) -> MarkerList | MarkerAtom | str: + if not isinstance(result, tuple): + return result + + lhs, op, rhs = result + if isinstance(lhs, Variable) and lhs.value == "extra": + normalized_extra = canonicalize_name(rhs.value) + rhs = Value(normalized_extra) + elif isinstance(rhs, Variable) and rhs.value == "extra": + normalized_extra = canonicalize_name(lhs.value) + lhs = Value(normalized_extra) + return lhs, op, rhs + + +def _normalize_extra_values(results: MarkerList) -> MarkerList: + """ + Normalize extra values. + """ + + return [_normalize_extras(r) for r in results] + + +def _format_marker( + marker: list[str] | MarkerAtom | str, first: bool | None = True +) -> str: + assert isinstance(marker, (list, tuple, str)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators: dict[str, Operator] = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": lambda _lhs, _rhs: False, + "<=": operator.eq, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.eq, + ">": lambda _lhs, _rhs: False, +} + + +def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str], *, key: str) -> bool: + op_str = op.serialize() + if key in MARKERS_REQUIRING_VERSION: + try: + spec = Specifier(f"{op_str}{rhs}") + except InvalidSpecifier: + pass + else: + return spec.contains(lhs, prereleases=True) + + oper: Operator | None = _operators.get(op_str) + if oper is None: + raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + + return oper(lhs, rhs) + + +def _normalize( + lhs: str, rhs: str | AbstractSet[str], key: str +) -> tuple[str, str | AbstractSet[str]]: + # PEP 685 - Comparison of extra names for optional distribution dependencies + # https://peps.python.org/pep-0685/ + # > When comparing extra names, tools MUST normalize the names being + # > compared using the semantics outlined in PEP 503 for names + if key == "extra": + assert isinstance(rhs, str), "extra value must be a string" + # Both sides are normalized at this point already + return (lhs, rhs) + if key in MARKERS_ALLOWING_SET: + if isinstance(rhs, str): # pragma: no cover + return (canonicalize_name(lhs), canonicalize_name(rhs)) + else: + return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs}) + + # other environment markers don't have such standards + return lhs, rhs + + +def _evaluate_markers( + 
markers: MarkerList, environment: dict[str, str | AbstractSet[str]] +) -> bool: + groups: list[list[bool]] = [[]] + + for marker in markers: + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + environment_key = lhs.value + lhs_value = environment[environment_key] + rhs_value = rhs.value + else: + lhs_value = lhs.value + environment_key = rhs.value + rhs_value = environment[environment_key] + + assert isinstance(lhs_value, str), "lhs must be a string" + lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key) + groups[-1].append(_eval_op(lhs_value, op, rhs_value, key=environment_key)) + elif marker == "or": + groups.append([]) + elif marker == "and": + pass + else: # pragma: nocover + raise TypeError(f"Unexpected marker {marker!r}") + + return any(all(item) for item in groups) + + +def format_full_version(info: sys._version_info) -> str: + version = f"{info.major}.{info.minor}.{info.micro}" + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment() -> Environment: + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker: + def __init__(self, marker: str) -> None: + # Note: We create a Marker object without calling this constructor in + # packaging.requirements.Requirement. 
If any additional logic is + # added here, make sure to mirror/adapt Requirement. + + # If this fails and throws an error, the repr still expects _markers to + # be defined. + self._markers: MarkerList = [] + + try: + self._markers = _normalize_extra_values(_parse_marker(marker)) + # The attribute `_markers` can be described in terms of a recursive type: + # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]] + # + # For example, the following expression: + # python_version > "3.6" or (python_version == "3.6" and os_name == "unix") + # + # is parsed into: + # [ + # (, ')>, ), + # 'and', + # [ + # (, , ), + # 'or', + # (, , ) + # ] + # ] + except ParserSyntaxError as e: + raise InvalidMarker(str(e)) from e + + def __str__(self) -> str: + return _format_marker(self._markers) + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def __hash__(self) -> int: + return hash(str(self)) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Marker): + return NotImplemented + + return str(self) == str(other) + + def evaluate( + self, + environment: Mapping[str, str | AbstractSet[str]] | None = None, + context: EvaluateContext = "metadata", + ) -> bool: + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. The *context* parameter specifies what + context the markers are being evaluated for, which influences what markers + are considered valid. Acceptable values are "metadata" (for core metadata; + default), "lock_file", and "requirement" (i.e. all other situations). + + The environment is determined from the current Python process. 
+ """ + current_environment = cast( + "dict[str, str | AbstractSet[str]]", default_environment() + ) + if context == "lock_file": + current_environment.update( + extras=frozenset(), dependency_groups=frozenset() + ) + elif context == "metadata": + current_environment["extra"] = "" + + if environment is not None: + current_environment.update(environment) + if "extra" in current_environment: + # The API used to allow setting extra to None. We need to handle + # this case for backwards compatibility. Also skip running + # normalize name if extra is empty. + extra = cast("str | None", current_environment["extra"]) + current_environment["extra"] = canonicalize_name(extra) if extra else "" + + return _evaluate_markers( + self._markers, _repair_python_full_version(current_environment) + ) + + +def _repair_python_full_version( + env: dict[str, str | AbstractSet[str]], +) -> dict[str, str | AbstractSet[str]]: + """ + Work around platform.python_version() returning something that is not PEP 440 + compliant for non-tagged Python builds. + """ + python_full_version = cast("str", env["python_full_version"]) + if python_full_version.endswith("+"): + env["python_full_version"] = f"{python_full_version}local" + return env diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..253f6b1b7ebd711fdc6bbbab3b56897061bab515 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/metadata.py @@ -0,0 +1,978 @@ +from __future__ import annotations + +import email.feedparser +import email.header +import email.message +import email.parser +import email.policy +import keyword +import pathlib +import sys +import typing +from typing import ( + Any, + Callable, + Generic, + Literal, + TypedDict, + cast, +) + +from . import licenses, requirements, specifiers, utils +from . 
import version as version_module + +if typing.TYPE_CHECKING: + from .licenses import NormalizedLicenseExpression + +T = typing.TypeVar("T") + + +if sys.version_info >= (3, 11): # pragma: no cover + ExceptionGroup = ExceptionGroup # noqa: F821 +else: # pragma: no cover + + class ExceptionGroup(Exception): + """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. + + If :external:exc:`ExceptionGroup` is already defined by Python itself, + that version is used instead. + """ + + message: str + exceptions: list[Exception] + + def __init__(self, message: str, exceptions: list[Exception]) -> None: + self.message = message + self.exceptions = exceptions + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})" + + +class InvalidMetadata(ValueError): + """A metadata field contains invalid data.""" + + field: str + """The name of the field that contains invalid data.""" + + def __init__(self, field: str, message: str) -> None: + self.field = field + super().__init__(message) + + +# The RawMetadata class attempts to make as few assumptions about the underlying +# serialization formats as possible. The idea is that as long as a serialization +# formats offer some very basic primitives in *some* way then we can support +# serializing to and from that format. +class RawMetadata(TypedDict, total=False): + """A dictionary of raw core metadata. + + Each field in core metadata maps to a key of this dictionary (when data is + provided). The key is lower-case and underscores are used instead of dashes + compared to the equivalent core metadata field. Any core metadata field that + can be specified multiple times or can hold multiple values in a single + field have a key with a plural name. See :class:`Metadata` whose attributes + match the keys of this dictionary. + + Core metadata fields that can be specified multiple times are stored as a + list or dict depending on which is appropriate for the field. 
Any fields + which hold multiple values in a single field are stored as a list. + + """ + + # Metadata 1.0 - PEP 241 + metadata_version: str + name: str + version: str + platforms: list[str] + summary: str + description: str + keywords: list[str] + home_page: str + author: str + author_email: str + license: str + + # Metadata 1.1 - PEP 314 + supported_platforms: list[str] + download_url: str + classifiers: list[str] + requires: list[str] + provides: list[str] + obsoletes: list[str] + + # Metadata 1.2 - PEP 345 + maintainer: str + maintainer_email: str + requires_dist: list[str] + provides_dist: list[str] + obsoletes_dist: list[str] + requires_python: str + requires_external: list[str] + project_urls: dict[str, str] + + # Metadata 2.0 + # PEP 426 attempted to completely revamp the metadata format + # but got stuck without ever being able to build consensus on + # it and ultimately ended up withdrawn. + # + # However, a number of tools had started emitting METADATA with + # `2.0` Metadata-Version, so for historical reasons, this version + # was skipped. + + # Metadata 2.1 - PEP 566 + description_content_type: str + provides_extra: list[str] + + # Metadata 2.2 - PEP 643 + dynamic: list[str] + + # Metadata 2.3 - PEP 685 + # No new fields were added in PEP 685, just some edge case were + # tightened up to provide better interoperability. + + # Metadata 2.4 - PEP 639 + license_expression: str + license_files: list[str] + + # Metadata 2.5 - PEP 794 + import_names: list[str] + import_namespaces: list[str] + + +# 'keywords' is special as it's a string in the core metadata spec, but we +# represent it as a list. 
+_STRING_FIELDS = { + "author", + "author_email", + "description", + "description_content_type", + "download_url", + "home_page", + "license", + "license_expression", + "maintainer", + "maintainer_email", + "metadata_version", + "name", + "requires_python", + "summary", + "version", +} + +_LIST_FIELDS = { + "classifiers", + "dynamic", + "license_files", + "obsoletes", + "obsoletes_dist", + "platforms", + "provides", + "provides_dist", + "provides_extra", + "requires", + "requires_dist", + "requires_external", + "supported_platforms", + "import_names", + "import_namespaces", +} + +_DICT_FIELDS = { + "project_urls", +} + + +def _parse_keywords(data: str) -> list[str]: + """Split a string of comma-separated keywords into a list of keywords.""" + return [k.strip() for k in data.split(",")] + + +def _parse_project_urls(data: list[str]) -> dict[str, str]: + """Parse a list of label/URL string pairings separated by a comma.""" + urls = {} + for pair in data: + # Our logic is slightly tricky here as we want to try and do + # *something* reasonable with malformed data. + # + # The main thing that we have to worry about, is data that does + # not have a ',' at all to split the label from the Value. There + # isn't a singular right answer here, and we will fail validation + # later on (if the caller is validating) so it doesn't *really* + # matter, but since the missing value has to be an empty str + # and our return value is dict[str, str], if we let the key + # be the missing value, then they'd have multiple '' values that + # overwrite each other in a accumulating dict. + # + # The other potential issue is that it's possible to have the + # same label multiple times in the metadata, with no solid "right" + # answer with what to do in that case. As such, we'll do the only + # thing we can, which is treat the field as unparsable and add it + # to our list of unparsed fields. 
+ # + # TODO: The spec doesn't say anything about if the keys should be + # considered case sensitive or not... logically they should + # be case-preserving and case-insensitive, but doing that + # would open up more cases where we might have duplicate + # entries. + label, _, url = (s.strip() for s in pair.partition(",")) + + if label in urls: + # The label already exists in our set of urls, so this field + # is unparsable, and we can just add the whole thing to our + # unparsable data and stop processing it. + raise KeyError("duplicate labels in project urls") + urls[label] = url + + return urls + + +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: + """Get the body of the message.""" + # If our source is a str, then our caller has managed encodings for us, + # and we don't need to deal with it. + if isinstance(source, str): + payload = msg.get_payload() + assert isinstance(payload, str) + return payload + # If our source is a bytes, then we're managing the encoding and we need + # to deal with it. + else: + bpayload = msg.get_payload(decode=True) + assert isinstance(bpayload, bytes) + try: + return bpayload.decode("utf8", "strict") + except UnicodeDecodeError as exc: + raise ValueError("payload in an invalid encoding") from exc + + +# The various parse_FORMAT functions here are intended to be as lenient as +# possible in their parsing, while still returning a correctly typed +# RawMetadata. +# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. 
+_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "import-name": "import_names", + "import-namespace": "import_namespaces", + "keywords": "keywords", + "license": "license", + "license-expression": "license_expression", + "license-file": "license_files", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +# This class is for writing RFC822 messages +class RFC822Policy(email.policy.EmailPolicy): + """ + This is :class:`email.policy.EmailPolicy`, but with a simple ``header_store_parse`` + implementation that handles multi-line values, and some nice defaults. + """ + + utf8 = True + mangle_from_ = False + max_line_length = 0 + + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + size = len(name) + 2 + value = value.replace("\n", "\n" + " " * size) + return (name, value) + + +# This class is for writing RFC822 messages +class RFC822Message(email.message.EmailMessage): + """ + This is :class:`email.message.EmailMessage` with two small changes: it defaults to + our `RFC822Policy`, and it correctly writes unicode when being called + with `bytes()`. 
+ """ + + def __init__(self) -> None: + super().__init__(policy=RFC822Policy()) + + def as_bytes( + self, unixfrom: bool = False, policy: email.policy.Policy | None = None + ) -> bytes: + """ + Return the bytes representation of the message. + + This handles unicode encoding. + """ + return self.as_string(unixfrom, policy=policy).encode("utf-8") + + +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. Finally, + any fields that are expected to appear only once but are repeated are + included in this dict. + + """ + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} + + if isinstance(data, str): + parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) + else: + parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data) + + # We have to wrap parsed.keys() in a set, because in the case of multiple + # values for a key (a list), the key will appear multiple times in the + # list of keys, but we're avoiding that by using get_all(). + for name_with_case in frozenset(parsed.keys()): + # Header names in RFC are case insensitive, so we'll normalize to all + # lower case to make comparisons easier. 
+ name = name_with_case.lower() + + # We use get_all() here, even for fields that aren't multiple use, + # because otherwise someone could have e.g. two Name fields, and we + # would just silently ignore it rather than doing something about it. + headers = parsed.get_all(name) or [] + + # The way the email module works when parsing bytes is that it + # unconditionally decodes the bytes as ascii using the surrogateescape + # handler. When you pull that data back out (such as with get_all() ), + # it looks to see if the str has any surrogate escapes, and if it does + # it wraps it in a Header object instead of returning the string. + # + # As such, we'll look for those Header objects, and fix up the encoding. + value = [] + # Flag if we have run into any issues processing the headers, thus + # signalling that the data belongs in 'unparsed'. + valid_encoding = True + for h in headers: + # It's unclear if this can return more types than just a Header or + # a str, so we'll just assert here to make sure. + assert isinstance(h, (email.header.Header, str)) + + # If it's a header object, we need to do our little dance to get + # the real data out of it. In cases where there is invalid data + # we're going to end up with mojibake, but there's no obvious, good + # way around that without reimplementing parts of the Header object + # ourselves. + # + # That should be fine since, if mojibacked happens, this key is + # going into the unparsed dict anyways. + if isinstance(h, email.header.Header): + # The Header object stores it's data as chunks, and each chunk + # can be independently encoded, so we'll need to check each + # of them. + chunks: list[tuple[bytes, str | None]] = [] + for binary, _encoding in email.header.decode_header(h): + try: + binary.decode("utf8", "strict") + except UnicodeDecodeError: + # Enable mojibake. 
+ encoding = "latin1" + valid_encoding = False + else: + encoding = "utf8" + chunks.append((binary, encoding)) + + # Turn our chunks back into a Header object, then let that + # Header object do the right thing to turn them into a + # string for us. + value.append(str(email.header.make_header(chunks))) + # This is already a string, so just add it. + else: + value.append(h) + + # We've processed all of our values to get them into a list of str, + # but we may have mojibake data, in which case this is an unparsed + # field. + if not valid_encoding: + unparsed[name] = value + continue + + raw_name = _EMAIL_TO_RAW_MAPPING.get(name) + if raw_name is None: + # This is a bit of a weird situation, we've encountered a key that + # we don't know what it means, so we don't know whether it's meant + # to be a list or not. + # + # Since we can't really tell one way or another, we'll just leave it + # as a list, even though it may be a single item list, because that's + # what makes the most sense for email headers. + unparsed[name] = value + continue + + # If this is one of our string fields, then we'll check to see if our + # value is a list of a single item. If it is then we'll assume that + # it was emitted as a single string, and unwrap the str from inside + # the list. + # + # If it's any other kind of data, then we haven't the faintest clue + # what we should parse it as, and we have to just add it to our list + # of unparsed stuff. + if raw_name in _STRING_FIELDS and len(value) == 1: + raw[raw_name] = value[0] + # If this is import_names, we need to special case the empty field + # case, which converts to an empty list instead of None. We can't let + # the empty case slip through, as it will fail validation. + elif raw_name == "import_names" and value == [""]: + raw[raw_name] = [] + # If this is one of our list of string fields, then we can just assign + # the value, since email *only* has strings, and our get_all() call + # above ensures that this is a list. 
+ elif raw_name in _LIST_FIELDS: + raw[raw_name] = value + # Special Case: Keywords + # The keywords field is implemented in the metadata spec as a str, + # but it conceptually is a list of strings, and is serialized using + # ", ".join(keywords), so we'll do some light data massaging to turn + # this into what it logically is. + elif raw_name == "keywords" and len(value) == 1: + raw[raw_name] = _parse_keywords(value[0]) + # Special Case: Project-URL + # The project urls is implemented in the metadata spec as a list of + # specially-formatted strings that represent a key and a value, which + # is fundamentally a mapping, however the email format doesn't support + # mappings in a sane way, so it was crammed into a list of strings + # instead. + # + # We will do a little light data massaging to turn this into a map as + # it logically should be. + elif raw_name == "project_urls": + try: + raw[raw_name] = _parse_project_urls(value) + except KeyError: + unparsed[name] = value + # Nothing that we've done has managed to parse this, so it'll just + # throw it in our unparsable data and move on. + else: + unparsed[name] = value + + # We need to support getting the Description from the message payload in + # addition to getting it from the the headers. This does mean, though, there + # is the possibility of it being set both ways, in which case we put both + # in 'unparsed' since we don't know which is right. + try: + payload = _get_payload(parsed, data) + except ValueError: + unparsed.setdefault("description", []).append( + parsed.get_payload(decode=isinstance(data, bytes)) # type: ignore[call-overload] + ) + else: + if payload: + # Check to see if we've already got a description, if so then both + # it, and this body move to unparsable. 
+ if "description" in raw: + description_header = cast("str", raw.pop("description")) + unparsed.setdefault("description", []).extend( + [description_header, payload] + ) + elif "description" in unparsed: + unparsed["description"].append(payload) + else: + raw["description"] = payload + + # We need to cast our `raw` to a metadata, because a TypedDict only support + # literal key names, but we're computing our key names on purpose, but the + # way this function is implemented, our `TypedDict` can only have valid key + # names. + return cast("RawMetadata", raw), unparsed + + +_NOT_FOUND = object() + + +# Keep the two values in sync. +_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"] +_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4", "2.5"] + +_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"]) + + +class _Validator(Generic[T]): + """Validate a metadata field. + + All _process_*() methods correspond to a core metadata field. The method is + called with the field's raw value. If the raw value is valid it is returned + in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field). + If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause + as appropriate). + """ + + name: str + raw_name: str + added: _MetadataVersion + + def __init__( + self, + *, + added: _MetadataVersion = "1.0", + ) -> None: + self.added = added + + def __set_name__(self, _owner: Metadata, name: str) -> None: + self.name = name + self.raw_name = _RAW_TO_EMAIL_MAPPING[name] + + def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T: + # With Python 3.8, the caching can be replaced with functools.cached_property(). + # No need to check the cache as attribute lookup will resolve into the + # instance's __dict__ before __get__ is called. 
+ cache = instance.__dict__ + value = instance._raw.get(self.name) + + # To make the _process_* methods easier, we'll check if the value is None + # and if this field is NOT a required attribute, and if both of those + # things are true, we'll skip the the converter. This will mean that the + # converters never have to deal with the None union. + if self.name in _REQUIRED_ATTRS or value is not None: + try: + converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}") + except AttributeError: + pass + else: + value = converter(value) + + cache[self.name] = value + try: + del instance._raw[self.name] # type: ignore[misc] + except KeyError: + pass + + return cast("T", value) + + def _invalid_metadata( + self, msg: str, cause: Exception | None = None + ) -> InvalidMetadata: + exc = InvalidMetadata( + self.raw_name, msg.format_map({"field": repr(self.raw_name)}) + ) + exc.__cause__ = cause + return exc + + def _process_metadata_version(self, value: str) -> _MetadataVersion: + # Implicitly makes Metadata-Version required. + if value not in _VALID_METADATA_VERSIONS: + raise self._invalid_metadata(f"{value!r} is not a valid metadata version") + return cast("_MetadataVersion", value) + + def _process_name(self, value: str) -> str: + if not value: + raise self._invalid_metadata("{field} is a required field") + # Validate the name as a side-effect. 
+ try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. 
+ if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: list[str]) -> list[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{dynamic_field!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata( + f"{dynamic_field!r} is not a valid dynamic field" + ) + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: list[str], + ) -> list[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_requires_dist( + self, + value: list[str], + ) -> list[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata( + f"{req!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return reqs + + def _process_license_expression(self, value: str) -> NormalizedLicenseExpression: + try: + return licenses.canonicalize_license_expression(value) + except ValueError as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_license_files(self, value: list[str]) -> 
list[str]: + paths = [] + for path in value: + if ".." in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "parent directory indicators are not allowed" + ) + if "*" in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be resolved" + ) + if ( + pathlib.PurePosixPath(path).is_absolute() + or pathlib.PureWindowsPath(path).is_absolute() + ): + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be relative" + ) + if pathlib.PureWindowsPath(path).as_posix() != path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must use '/' delimiter" + ) + paths.append(path) + return paths + + def _process_import_names(self, value: list[str]) -> list[str]: + for import_name in value: + name, semicolon, private = import_name.partition(";") + name = name.rstrip() + for identifier in name.split("."): + if not identifier.isidentifier(): + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}; " + f"{identifier!r} is not a valid identifier" + ) + elif keyword.iskeyword(identifier): + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}; " + f"{identifier!r} is a keyword" + ) + if semicolon and private.lstrip() != "private": + raise self._invalid_metadata( + f"{import_name!r} is invalid for {{field}}; " + "the only valid option is 'private'" + ) + return value + + _process_import_namespaces = _process_import_names + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: + """Create an instance from :class:`RawMetadata`. 
+ + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: list[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. + try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + f"{field} introduced in metadata version " + f"{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. 
+ """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + # `name` is not normalized/typed to NormalizedName so as to provide access to + # the original/raw name. + name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[list[str] | None] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[str | None] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` 
(validated)""" + keywords: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[str | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-license`""" + license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator( + added="2.4" + ) + """:external:ref:`core-metadata-license-expression`""" + license_files: _Validator[list[str] | None] = _Validator(added="2.4") + """:external:ref:`core-metadata-license-file`""" + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. 
+ requires_external: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. + provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + import_names: _Validator[list[str] | None] = _Validator(added="2.5") + """:external:ref:`core-metadata-import-name`""" + import_namespaces: _Validator[list[str] | None] = _Validator(added="2.5") + """:external:ref:`core-metadata-import-namespace`""" + requires: _Validator[list[str] | None] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[list[str] | None] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" + + def as_rfc822(self) -> RFC822Message: + """ + Return an RFC822 message with the metadata. + """ + message = RFC822Message() + self._write_metadata(message) + return message + + def _write_metadata(self, message: RFC822Message) -> None: + """ + Return an RFC822 message with the metadata. 
+ """ + for name, validator in self.__class__.__dict__.items(): + if isinstance(validator, _Validator) and name != "description": + value = getattr(self, name) + email_name = _RAW_TO_EMAIL_MAPPING[name] + if value is not None: + if email_name == "project-url": + for label, url in value.items(): + message[email_name] = f"{label}, {url}" + elif email_name == "keywords": + message[email_name] = ",".join(value) + elif email_name == "import-name" and value == []: + message[email_name] = "" + elif isinstance(value, list): + for item in value: + message[email_name] = str(item) + else: + message[email_name] = str(value) + + # The description is a special case because it is in the body of the message. + if self.description is not None: + message.set_payload(self.description) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/py.typed b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py new file mode 100644 index 0000000000000000000000000000000000000000..a564f15246ad65038029f8fefb48621fa64a3abd --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/pylock.py @@ -0,0 +1,635 @@ +from __future__ import annotations + +import dataclasses +import logging +import re +from collections.abc import Mapping, Sequence +from dataclasses import dataclass +from datetime import datetime +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Protocol, + TypeVar, +) + +from .markers import Marker +from .specifiers import SpecifierSet +from .utils import NormalizedName, is_normalized_name +from .version import Version + +if TYPE_CHECKING: # pragma: no cover + from pathlib import Path + + from typing_extensions import Self + +_logger = logging.getLogger(__name__) + +__all__ = [ + "Package", + 
"PackageArchive", + "PackageDirectory", + "PackageSdist", + "PackageVcs", + "PackageWheel", + "Pylock", + "PylockUnsupportedVersionError", + "PylockValidationError", + "is_valid_pylock_path", +] + +_T = TypeVar("_T") +_T2 = TypeVar("_T2") + + +class _FromMappingProtocol(Protocol): # pragma: no cover + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: ... + + +_FromMappingProtocolT = TypeVar("_FromMappingProtocolT", bound=_FromMappingProtocol) + + +_PYLOCK_FILE_NAME_RE = re.compile(r"^pylock\.([^.]+)\.toml$") + + +def is_valid_pylock_path(path: Path) -> bool: + """Check if the given path is a valid pylock file path.""" + return path.name == "pylock.toml" or bool(_PYLOCK_FILE_NAME_RE.match(path.name)) + + +def _toml_key(key: str) -> str: + return key.replace("_", "-") + + +def _toml_value(key: str, value: Any) -> Any: # noqa: ANN401 + if isinstance(value, (Version, Marker, SpecifierSet)): + return str(value) + if isinstance(value, Sequence) and key == "environments": + return [str(v) for v in value] + return value + + +def _toml_dict_factory(data: list[tuple[str, Any]]) -> dict[str, Any]: + return { + _toml_key(key): _toml_value(key, value) + for key, value in data + if value is not None + } + + +def _get(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T | None: + """Get a value from the dictionary and verify it's the expected type.""" + if (value := d.get(key)) is None: + return None + if not isinstance(value, expected_type): + raise PylockValidationError( + f"Unexpected type {type(value).__name__} " + f"(expected {expected_type.__name__})", + context=key, + ) + return value + + +def _get_required(d: Mapping[str, Any], expected_type: type[_T], key: str) -> _T: + """Get a required value from the dictionary and verify it's the expected type.""" + if (value := _get(d, expected_type, key)) is None: + raise _PylockRequiredKeyError(key) + return value + + +def _get_sequence( + d: Mapping[str, Any], expected_item_type: type[_T], key: str +) -> 
Sequence[_T] | None: + """Get a list value from the dictionary and verify it's the expected items type.""" + if (value := _get(d, Sequence, key)) is None: # type: ignore[type-abstract] + return None + if isinstance(value, (str, bytes)): + # special case: str and bytes are Sequences, but we want to reject it + raise PylockValidationError( + f"Unexpected type {type(value).__name__} (expected Sequence)", + context=key, + ) + for i, item in enumerate(value): + if not isinstance(item, expected_item_type): + raise PylockValidationError( + f"Unexpected type {type(item).__name__} " + f"(expected {expected_item_type.__name__})", + context=f"{key}[{i}]", + ) + return value + + +def _get_as( + d: Mapping[str, Any], + expected_type: type[_T], + target_type: Callable[[_T], _T2], + key: str, +) -> _T2 | None: + """Get a value from the dictionary, verify it's the expected type, + and convert to the target type. + + This assumes the target_type constructor accepts the value. + """ + if (value := _get(d, expected_type, key)) is None: + return None + try: + return target_type(value) + except Exception as e: + raise PylockValidationError(e, context=key) from e + + +def _get_required_as( + d: Mapping[str, Any], + expected_type: type[_T], + target_type: Callable[[_T], _T2], + key: str, +) -> _T2: + """Get a required value from the dict, verify it's the expected type, + and convert to the target type.""" + if (value := _get_as(d, expected_type, target_type, key)) is None: + raise _PylockRequiredKeyError(key) + return value + + +def _get_sequence_as( + d: Mapping[str, Any], + expected_item_type: type[_T], + target_item_type: Callable[[_T], _T2], + key: str, +) -> list[_T2] | None: + """Get list value from dictionary and verify expected items type.""" + if (value := _get_sequence(d, expected_item_type, key)) is None: + return None + result = [] + try: + for item in value: + typed_item = target_item_type(item) + result.append(typed_item) + except Exception as e: + raise 
PylockValidationError(e, context=f"{key}[{len(result)}]") from e + return result + + +def _get_object( + d: Mapping[str, Any], target_type: type[_FromMappingProtocolT], key: str +) -> _FromMappingProtocolT | None: + """Get a dictionary value from the dictionary and convert it to a dataclass.""" + if (value := _get(d, Mapping, key)) is None: # type: ignore[type-abstract] + return None + try: + return target_type._from_dict(value) + except Exception as e: + raise PylockValidationError(e, context=key) from e + + +def _get_sequence_of_objects( + d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str +) -> list[_FromMappingProtocolT] | None: + """Get a list value from the dictionary and convert its items to a dataclass.""" + if (value := _get_sequence(d, Mapping, key)) is None: # type: ignore[type-abstract] + return None + result: list[_FromMappingProtocolT] = [] + try: + for item in value: + typed_item = target_item_type._from_dict(item) + result.append(typed_item) + except Exception as e: + raise PylockValidationError(e, context=f"{key}[{len(result)}]") from e + return result + + +def _get_required_sequence_of_objects( + d: Mapping[str, Any], target_item_type: type[_FromMappingProtocolT], key: str +) -> Sequence[_FromMappingProtocolT]: + """Get a required list value from the dictionary and convert its items to a + dataclass.""" + if (result := _get_sequence_of_objects(d, target_item_type, key)) is None: + raise _PylockRequiredKeyError(key) + return result + + +def _validate_normalized_name(name: str) -> NormalizedName: + """Validate that a string is a NormalizedName.""" + if not is_normalized_name(name): + raise PylockValidationError(f"Name {name!r} is not normalized") + return NormalizedName(name) + + +def _validate_path_url(path: str | None, url: str | None) -> None: + if not path and not url: + raise PylockValidationError("path or url must be provided") + + +def _validate_hashes(hashes: Mapping[str, Any]) -> Mapping[str, Any]: + if not 
hashes: + raise PylockValidationError("At least one hash must be provided") + if not all(isinstance(hash_val, str) for hash_val in hashes.values()): + raise PylockValidationError("Hash values must be strings") + return hashes + + +class PylockValidationError(Exception): + """Raised when when input data is not spec-compliant.""" + + context: str | None = None + message: str + + def __init__( + self, + cause: str | Exception, + *, + context: str | None = None, + ) -> None: + if isinstance(cause, PylockValidationError): + if cause.context: + self.context = ( + f"{context}.{cause.context}" if context else cause.context + ) + else: + self.context = context + self.message = cause.message + else: + self.context = context + self.message = str(cause) + + def __str__(self) -> str: + if self.context: + return f"{self.message} in {self.context!r}" + return self.message + + +class _PylockRequiredKeyError(PylockValidationError): + def __init__(self, key: str) -> None: + super().__init__("Missing required value", context=key) + + +class PylockUnsupportedVersionError(PylockValidationError): + """Raised when encountering an unsupported `lock_version`.""" + + +@dataclass(frozen=True, init=False) +class PackageVcs: + type: str + url: str | None = None + path: str | None = None + requested_revision: str | None = None + commit_id: str # type: ignore[misc] + subdirectory: str | None = None + + def __init__( + self, + *, + type: str, + url: str | None = None, + path: str | None = None, + requested_revision: str | None = None, + commit_id: str, + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "type", type) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "requested_revision", requested_revision) + object.__setattr__(self, "commit_id", commit_id) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, 
d: Mapping[str, Any]) -> Self: + package_vcs = cls( + type=_get_required(d, str, "type"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + requested_revision=_get(d, str, "requested-revision"), + commit_id=_get_required(d, str, "commit-id"), + subdirectory=_get(d, str, "subdirectory"), + ) + _validate_path_url(package_vcs.path, package_vcs.url) + return package_vcs + + +@dataclass(frozen=True, init=False) +class PackageDirectory: + path: str + editable: bool | None = None + subdirectory: str | None = None + + def __init__( + self, + *, + path: str, + editable: bool | None = None, + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "path", path) + object.__setattr__(self, "editable", editable) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + return cls( + path=_get_required(d, str, "path"), + editable=_get(d, bool, "editable"), + subdirectory=_get(d, str, "subdirectory"), + ) + + +@dataclass(frozen=True, init=False) +class PackageArchive: + url: str | None = None + path: str | None = None + size: int | None = None + upload_time: datetime | None = None + hashes: Mapping[str, str] # type: ignore[misc] + subdirectory: str | None = None + + def __init__( + self, + *, + url: str | None = None, + path: str | None = None, + size: int | None = None, + upload_time: datetime | None = None, + hashes: Mapping[str, str], + subdirectory: str | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "hashes", hashes) + object.__setattr__(self, "subdirectory", subdirectory) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_archive = cls( + 
url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + upload_time=_get(d, datetime, "upload-time"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + subdirectory=_get(d, str, "subdirectory"), + ) + _validate_path_url(package_archive.path, package_archive.url) + return package_archive + + +@dataclass(frozen=True, init=False) +class PackageSdist: + name: str | None = None + upload_time: datetime | None = None + url: str | None = None + path: str | None = None + size: int | None = None + hashes: Mapping[str, str] # type: ignore[misc] + + def __init__( + self, + *, + name: str | None = None, + upload_time: datetime | None = None, + url: str | None = None, + path: str | None = None, + size: int | None = None, + hashes: Mapping[str, str], + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "hashes", hashes) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_sdist = cls( + name=_get(d, str, "name"), + upload_time=_get(d, datetime, "upload-time"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + ) + _validate_path_url(package_sdist.path, package_sdist.url) + return package_sdist + + +@dataclass(frozen=True, init=False) +class PackageWheel: + name: str | None = None + upload_time: datetime | None = None + url: str | None = None + path: str | None = None + size: int | None = None + hashes: Mapping[str, str] # type: ignore[misc] + + def __init__( + self, + *, + name: str | None = None, + upload_time: datetime | None = None, + url: str | None = None, + path: str | None 
= None, + size: int | None = None, + hashes: Mapping[str, str], + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "upload_time", upload_time) + object.__setattr__(self, "url", url) + object.__setattr__(self, "path", path) + object.__setattr__(self, "size", size) + object.__setattr__(self, "hashes", hashes) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package_wheel = cls( + name=_get(d, str, "name"), + upload_time=_get(d, datetime, "upload-time"), + url=_get(d, str, "url"), + path=_get(d, str, "path"), + size=_get(d, int, "size"), + hashes=_get_required_as(d, Mapping, _validate_hashes, "hashes"), # type: ignore[type-abstract] + ) + _validate_path_url(package_wheel.path, package_wheel.url) + return package_wheel + + +@dataclass(frozen=True, init=False) +class Package: + name: NormalizedName + version: Version | None = None + marker: Marker | None = None + requires_python: SpecifierSet | None = None + dependencies: Sequence[Mapping[str, Any]] | None = None + vcs: PackageVcs | None = None + directory: PackageDirectory | None = None + archive: PackageArchive | None = None + index: str | None = None + sdist: PackageSdist | None = None + wheels: Sequence[PackageWheel] | None = None + attestation_identities: Sequence[Mapping[str, Any]] | None = None + tool: Mapping[str, Any] | None = None + + def __init__( + self, + *, + name: NormalizedName, + version: Version | None = None, + marker: Marker | None = None, + requires_python: SpecifierSet | None = None, + dependencies: Sequence[Mapping[str, Any]] | None = None, + vcs: PackageVcs | None = None, + directory: PackageDirectory | None = None, + archive: PackageArchive | None = None, + index: str | None = None, + sdist: PackageSdist | None = None, + wheels: Sequence[PackageWheel] | None = None, + attestation_identities: Sequence[Mapping[str, Any]] | None = None, + tool: Mapping[str, Any] | None = None, + 
) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "name", name) + object.__setattr__(self, "version", version) + object.__setattr__(self, "marker", marker) + object.__setattr__(self, "requires_python", requires_python) + object.__setattr__(self, "dependencies", dependencies) + object.__setattr__(self, "vcs", vcs) + object.__setattr__(self, "directory", directory) + object.__setattr__(self, "archive", archive) + object.__setattr__(self, "index", index) + object.__setattr__(self, "sdist", sdist) + object.__setattr__(self, "wheels", wheels) + object.__setattr__(self, "attestation_identities", attestation_identities) + object.__setattr__(self, "tool", tool) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + package = cls( + name=_get_required_as(d, str, _validate_normalized_name, "name"), + version=_get_as(d, str, Version, "version"), + requires_python=_get_as(d, str, SpecifierSet, "requires-python"), + dependencies=_get_sequence(d, Mapping, "dependencies"), # type: ignore[type-abstract] + marker=_get_as(d, str, Marker, "marker"), + vcs=_get_object(d, PackageVcs, "vcs"), + directory=_get_object(d, PackageDirectory, "directory"), + archive=_get_object(d, PackageArchive, "archive"), + index=_get(d, str, "index"), + sdist=_get_object(d, PackageSdist, "sdist"), + wheels=_get_sequence_of_objects(d, PackageWheel, "wheels"), + attestation_identities=_get_sequence(d, Mapping, "attestation-identities"), # type: ignore[type-abstract] + tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract] + ) + distributions = bool(package.sdist) + len(package.wheels or []) + direct_urls = ( + bool(package.vcs) + bool(package.directory) + bool(package.archive) + ) + if distributions > 0 and direct_urls > 0: + raise PylockValidationError( + "None of vcs, directory, archive must be set if sdist or wheels are set" + ) + if distributions == 0 and direct_urls != 1: + raise PylockValidationError( + "Exactly one of vcs, 
directory, archive must be set " + "if sdist and wheels are not set" + ) + try: + for i, attestation_identity in enumerate( # noqa: B007 + package.attestation_identities or [] + ): + _get_required(attestation_identity, str, "kind") + except Exception as e: + raise PylockValidationError( + e, context=f"attestation-identities[{i}]" + ) from e + return package + + @property + def is_direct(self) -> bool: + return not (self.sdist or self.wheels) + + +@dataclass(frozen=True, init=False) +class Pylock: + """A class representing a pylock file.""" + + lock_version: Version + environments: Sequence[Marker] | None = None + requires_python: SpecifierSet | None = None + extras: Sequence[NormalizedName] | None = None + dependency_groups: Sequence[str] | None = None + default_groups: Sequence[str] | None = None + created_by: str # type: ignore[misc] + packages: Sequence[Package] # type: ignore[misc] + tool: Mapping[str, Any] | None = None + + def __init__( + self, + *, + lock_version: Version, + environments: Sequence[Marker] | None = None, + requires_python: SpecifierSet | None = None, + extras: Sequence[NormalizedName] | None = None, + dependency_groups: Sequence[str] | None = None, + default_groups: Sequence[str] | None = None, + created_by: str, + packages: Sequence[Package], + tool: Mapping[str, Any] | None = None, + ) -> None: + # In Python 3.10+ make dataclass kw_only=True and remove __init__ + object.__setattr__(self, "lock_version", lock_version) + object.__setattr__(self, "environments", environments) + object.__setattr__(self, "requires_python", requires_python) + object.__setattr__(self, "extras", extras) + object.__setattr__(self, "dependency_groups", dependency_groups) + object.__setattr__(self, "default_groups", default_groups) + object.__setattr__(self, "created_by", created_by) + object.__setattr__(self, "packages", packages) + object.__setattr__(self, "tool", tool) + + @classmethod + def _from_dict(cls, d: Mapping[str, Any]) -> Self: + pylock = cls( + 
lock_version=_get_required_as(d, str, Version, "lock-version"), + environments=_get_sequence_as(d, str, Marker, "environments"), + extras=_get_sequence_as(d, str, _validate_normalized_name, "extras"), + dependency_groups=_get_sequence(d, str, "dependency-groups"), + default_groups=_get_sequence(d, str, "default-groups"), + created_by=_get_required(d, str, "created-by"), + requires_python=_get_as(d, str, SpecifierSet, "requires-python"), + packages=_get_required_sequence_of_objects(d, Package, "packages"), + tool=_get(d, Mapping, "tool"), # type: ignore[type-abstract] + ) + if not Version("1") <= pylock.lock_version < Version("2"): + raise PylockUnsupportedVersionError( + f"pylock version {pylock.lock_version} is not supported" + ) + if pylock.lock_version > Version("1.0"): + _logger.warning( + "pylock minor version %s is not supported", pylock.lock_version + ) + return pylock + + @classmethod + def from_dict(cls, d: Mapping[str, Any], /) -> Self: + """Create and validate a Pylock instance from a TOML dictionary. + + Raises :class:`PylockValidationError` if the input data is not + spec-compliant. + """ + return cls._from_dict(d) + + def to_dict(self) -> Mapping[str, Any]: + """Convert the Pylock instance to a TOML dictionary.""" + return dataclasses.asdict(self, dict_factory=_toml_dict_factory) + + def validate(self) -> None: + """Validate the Pylock instance against the specification. + + Raises :class:`PylockValidationError` otherwise.""" + self.from_dict(self.to_dict()) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py new file mode 100644 index 0000000000000000000000000000000000000000..3079be69bf880f47e64dbf62993f0e54754b7315 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/requirements.py @@ -0,0 +1,86 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. +from __future__ import annotations + +from typing import Iterator + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string: str) -> None: + try: + parsed = _parse_requirement(requirement_string) + except ParserSyntaxError as e: + raise InvalidRequirement(str(e)) from e + + self.name: str = parsed.name + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) + self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) + self.marker: Marker | None = None + if parsed.marker is not None: + self.marker = Marker.__new__(Marker) + self.marker._markers = _normalize_extra_values(parsed.marker) + + def _iter_parts(self, name: str) -> Iterator[str]: + yield name + + if self.extras: + formatted_extras = ",".join(sorted(self.extras)) + yield f"[{formatted_extras}]" + + if self.specifier: + yield str(self.specifier) + + if self.url: + yield f" @ {self.url}" + if self.marker: + yield " " + + if self.marker: + yield f"; {self.marker}" + + def __str__(self) -> str: + return "".join(self._iter_parts(self.name)) + + def __repr__(self) -> str: + return 
f"<{self.__class__.__name__}('{self}')>" + + def __hash__(self) -> int: + return hash(tuple(self._iter_parts(canonicalize_name(self.name)))) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Requirement): + return NotImplemented + + return ( + canonicalize_name(self.name) == canonicalize_name(other.name) + and self.extras == other.extras + and self.specifier == other.specifier + and self.url == other.url + and self.marker == other.marker + ) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..5d26b0d1ae2d21b77e24b692d5a7e1fd01296edc --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/specifiers.py @@ -0,0 +1,1068 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. 
testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" + +from __future__ import annotations + +import abc +import itertools +import re +from typing import Callable, Final, Iterable, Iterator, TypeVar, Union + +from .utils import canonicalize_version +from .version import InvalidVersion, Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] + + +def _coerce_version(version: UnparsedVersion) -> Version | None: + if not isinstance(version, Version): + try: + version = Version(version) + except InvalidVersion: + return None + return version + + +def _public_version(version: Version) -> Version: + return version.__replace__(local=None) + + +def _base_version(version: Version) -> Version: + return version.__replace__(pre=None, post=None, dev=None, local=None) + + +class InvalidSpecifier(ValueError): + """ + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' + """ + + +class BaseSpecifier(metaclass=abc.ABCMeta): + __slots__ = () + __match_args__ = ("_str",) + + @property + def _str(self) -> str: + """Internal property for match_args""" + return str(self) + + @abc.abstractmethod + def __str__(self) -> str: + """ + Returns the str representation of this Specifier-like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Returns a hash value for this Specifier-like object. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Returns a boolean representing whether or not the two Specifier-like + objects are equal. + + :param other: The other object to check against. 
+ """ + + @property + @abc.abstractmethod + def prereleases(self) -> bool | None: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter # noqa: B027 + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. + """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: bool | None = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + __slots__ = ("_prereleases", "_spec", "_spec_version") + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. 
+ (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: + """Initialize a Specifier instance. 
+ + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.fullmatch(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: {spec!r}") + + self._spec: tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + # Specifier version cache + self._spec_version: tuple[str, Version] | None = None + + def _get_spec_version(self, version: str) -> Version | None: + """One element cache, as only one spec Version is needed per Specifier.""" + if self._spec_version is not None and self._spec_version[0] == version: + return self._spec_version[1] + + version_specifier = _coerce_version(version) + if version_specifier is None: + return None + + self._spec_version = (version, version_specifier) + return version_specifier + + def _require_spec_version(self, version: str) -> Version: + """Get spec version, asserting it's valid (not for === operator). + + This method should only be called for operators where version + strings are guaranteed to be valid PEP 440 versions (not ===). + """ + spec_version = self._get_spec_version(version) + assert spec_version is not None + return spec_version + + @property + def prereleases(self) -> bool | None: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Only the "!=" operator does not imply prereleases when + # the version in the specifier is a prerelease. 
+ operator, version_str = self._spec + if operator != "!=": + # The == specifier with trailing .* cannot include prereleases + # e.g. "==1.0a1.*" is not valid. + if operator == "==" and version_str.endswith(".*"): + return False + + # "===" can have arbitrary string versions, so we cannot parse + # those, we take prereleases as unknown (None) for those. + version = self._get_spec_version(version_str) + if version is None: + return None + + # For all other operators, use the check if spec Version + # object implies pre-releases. + if version.is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool | None) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. 
+ + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> tuple[str, str]: + operator, version = self._spec + if operator == "===" or version.endswith(".*"): + return operator, version + + spec_version = self._require_spec_version(version) + + canonical_version = canonicalize_version( + spec_version, strip_trailing_zero=(operator != "~=") + ) + + return operator, canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. 
+ + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + _public_version(prospective), strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = self._require_spec_version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+ if not spec_version.local: + prospective = _public_version(prospective) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return _public_version(prospective) <= self._require_spec_version(spec) + + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return _public_version(prospective) >= self._require_spec_version(spec) + + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = self._require_spec_version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if ( + not spec.is_prerelease + and prospective.is_prerelease + and _base_version(prospective) == _base_version(spec) + ): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. 
+ return True + + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = self._require_spec_version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if ( + not spec.is_postrelease + and prospective.is_postrelease + and _base_version(prospective) == _base_version(spec) + ): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None and _base_version( + prospective + ) == _base_version(spec): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version | str, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + def __contains__(self, item: str | Version) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. 
+ + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + True + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) + + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it will follow the recommendation from + :pep:`440` and match prereleases, as there are no other versions. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + True + >>> Specifier(">=1.2.3", prereleases=False).contains("1.3.0a1") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + True + """ + + return bool(list(self.filter([item], prereleases=prereleases))) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifier. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases if there are no other versions. 
+ + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', ] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + prereleases_versions = [] + found_non_prereleases = False + + # Determine if to include prereleases by default + include_prereleases = ( + prereleases if prereleases is not None else self.prereleases + ) + + # Get the matching operator + operator_callable = self._get_operator(self.operator) + + # Filter versions + for version in iterable: + parsed_version = _coerce_version(version) + if parsed_version is None: + # === operator can match arbitrary (non-version) strings + if self.operator == "===" and self._compare_arbitrary( + version, self.version + ): + yield version + elif operator_callable(parsed_version, self.version): + # If it's not a prerelease or prereleases are allowed, yield it directly + if not parsed_version.is_prerelease or include_prereleases: + found_non_prereleases = True + yield version + # Otherwise collect prereleases for potential later use + elif prereleases is None and self._prereleases is not False: + prereleases_versions.append(version) + + # If no non-prereleases were found and prereleases weren't + # explicitly forbidden, yield the collected prereleases + if ( + not found_non_prereleases + and prereleases is None + and self._prereleases is not False + ): + yield from prereleases_versions + + +_prefix_regex = re.compile(r"([0-9]+)((?:a|b|c|rc)[0-9]+)") + + +def _version_split(version: str) -> list[str]: + """Split version into components. + + The split components are intended for version comparison. 
The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: list[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.fullmatch(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: list[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. + """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. 
+ """ + + __slots__ = ("_prereleases", "_specs") + + def __init__( + self, + specifiers: str | Iterable[Specifier] = "", + prereleases: bool | None = None, + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + May also be an iterable of ``Specifier`` instances, which will be used + as is. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable than this exception will be + raised. + """ + + if isinstance(specifiers, str): + # Split on `,` to break each individual specifier into its own item, and + # strip each item to remove leading/trailing whitespace. + split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Make each individual specifier a Specifier and save in a frozen set + # for later. + self._specs = frozenset(map(Specifier, split_specifiers)) + else: + # Save the supplied specifiers in a frozen set. + self._specs = frozenset(specifiers) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + @property + def prereleases(self) -> bool | None: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. 
+ if any(s.prereleases for s in self._specs): + return True + + return None + + @prereleases.setter + def prereleases(self, value: bool | None) -> None: + self._prereleases = value + + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + =1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"" + + def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. 
+ + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + =1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + =1.0.0')> + """ + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif ( + self._prereleases is not None and other._prereleases is None + ) or self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + """Whether or not the two SpecifierSet-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" + return len(self._specs) + + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. 
+ + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) + + def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, + item: UnparsedVersion, + prereleases: bool | None = None, + installed: bool | None = None, + ) -> bool: + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases, as there are no other versions. + :param installed: + Whether or not the item is installed. If set to ``True``, it will + accept prerelease versions even if the specifier does not allow them. 
+ + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False).contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + version = _coerce_version(item) + + if version is not None and installed and version.is_prerelease: + prereleases = True + + check_item = item if version is None else version + return bool(list(self.filter([check_item], prereleases=prereleases))) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable, that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will follow the recommendation from :pep:`440` + and match prereleases if there are no other versions. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', ] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. 
+ + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None and self.prereleases is not None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + # When prereleases is None, we need to let all versions through + # the individual filters, then decide about prereleases at the end + # based on whether any non-prereleases matched ALL specs. + for spec in self._specs: + iterable = spec.filter( + iterable, prereleases=True if prereleases is None else prereleases + ) + + if prereleases is not None: + # If we have a forced prereleases value, + # we can immediately return the iterator. + return iter(iterable) + else: + # Handle empty SpecifierSet cases where prereleases is not None. + if prereleases is True: + return iter(iterable) + + if prereleases is False: + return ( + item + for item in iterable + if (version := _coerce_version(item)) is None + or not version.is_prerelease + ) + + # Finally if prereleases is None, apply PEP 440 logic: + # exclude prereleases unless there are no final releases that matched. 
+ filtered_items: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] + found_final_release = False + + for item in iterable: + parsed_version = _coerce_version(item) + # Arbitrary strings are always included as it is not + # possible to determine if they are prereleases, + # and they have already passed all specifiers. + if parsed_version is None: + filtered_items.append(item) + found_prereleases.append(item) + elif parsed_version.is_prerelease: + found_prereleases.append(item) + else: + filtered_items.append(item) + found_final_release = True + + return iter(filtered_items if found_final_release else found_prereleases) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..5ef27c897a4df35a2a6923b608a5e04a0a38b9ee --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/tags.py @@ -0,0 +1,651 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Any, + Iterable, + Iterator, + Sequence, + Tuple, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +AppleVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. 
+ """ + + __slots__ = ["_abi", "_hash", "_interpreter", "_platform"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + def __setstate__(self, state: tuple[None, dict[str, Any]]) -> None: + # The cached _hash is wrong when unpickling. + _, slots = state + for k, v in slots.items(): + setattr(self, k, v) + self._hash = hash((self._interpreter, self._abi, self._platform)) + + +def parse_tag(tag: str) -> frozenset[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + + Returning a set is required due to the possibility that the tag is a + compressed tag set. 
+ """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + +def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + +def _is_threaded_cpython(abis: list[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + +def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. 
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): + threading = "t" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}{threading}") + abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") + return abis + + +def cpython_tags( + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp-- + - cp-abi3- + - cp-none- + - cp-abi3- # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABItag will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning. + """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + abis = _cpython_abis(python_version, warn) if len(python_version) > 1 else [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. 
+ for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: # noqa: PERF203 + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + threading = _is_threaded_cpython(abis) + use_abi3 = _abi3_applies(python_version, threading) + if use_abi3: + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if use_abi3: + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + version = _version_nodot((python_version[0], minor_version)) + interpreter = f"cp{version}" + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> list[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. 
+ return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? + abi = soabi + else: + return [] + return [_normalize_string(abi)] + + +def generic_tags( + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = f"{interp_name}{interp_version}" + abis = _generic_abi() if abis is None else list(abis) + platforms = list(platforms or platform_tags()) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. + """ + if len(py_version) > 1: + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, +) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. 
+ + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: AppleVersion | None = None, arch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. 
+ """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + + if arch is None: + arch = _mac_arch(cpu_arch) + + if (10, 0) <= version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. + major_version = 10 + for minor_version in range(version[1], -1, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + minor_version = 0 + for major_version in range(version[0], 10, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. 
+ major_version = 10 + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + else: + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_format = "universal2" + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + +def ios_platforms( + version: AppleVersion | None = None, multiarch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for an iOS system. + + :param version: A two-item tuple specifying the iOS version to generate + platform tags for. Defaults to the current iOS version. + :param multiarch: The CPU architecture+ABI to generate platform tags for - + (the value used by `sys.implementation._multiarch` e.g., + `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current + multiarch value. + """ + if version is None: + # if iOS is the current platform, ios_ver *must* be defined. However, + # it won't exist for CPython versions before 3.13, which causes a mypy + # error. + _, release, _, _ = platform.ios_ver() # type: ignore[attr-defined, unused-ignore] + version = cast("AppleVersion", tuple(map(int, release.split(".")[:2]))) + + if multiarch is None: + multiarch = sys.implementation._multiarch + multiarch = multiarch.replace("-", "_") + + ios_platform_template = "ios_{major}_{minor}_{multiarch}" + + # Consider any iOS major.minor version from the version requested, down to + # 12.0. 12.0 is the first iOS version that is known to have enough features + # to support CPython. Consider every possible minor release up to X.9. 
There + # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra + # candidates that won't ever match doesn't really hurt, and it saves us from + # having to keep an explicit list of known iOS versions in the code. Return + # the results descending order of version number. + + # If the requested major version is less than 12, there won't be any matches. + if version[0] < 12: + return + + # Consider the actual X.Y version that was requested. + yield ios_platform_template.format( + major=version[0], minor=version[1], multiarch=multiarch + ) + + # Consider every minor version from X.0 to the minor version prior to the + # version requested by the platform. + for minor in range(version[1] - 1, -1, -1): + yield ios_platform_template.format( + major=version[0], minor=minor, multiarch=multiarch + ) + + for major in range(version[0] - 1, 11, -1): + for minor in range(9, -1, -1): + yield ios_platform_template.format( + major=major, minor=minor, multiarch=multiarch + ) + + +def android_platforms( + api_level: int | None = None, abi: str | None = None +) -> Iterator[str]: + """ + Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on + non-Android platforms, the ``api_level`` and ``abi`` arguments are required. + + :param int api_level: The maximum `API level + `__ to return. Defaults + to the current system's version, as returned by ``platform.android_ver``. + :param str abi: The `Android ABI `__, + e.g. ``arm64_v8a``. Defaults to the current system's ABI , as returned by + ``sysconfig.get_platform``. Hyphens and periods will be replaced with + underscores. + """ + if platform.system() != "Android" and (api_level is None or abi is None): + raise TypeError( + "on non-Android platforms, the api_level and abi arguments are required" + ) + + if api_level is None: + # Python 3.13 was the first version to return platform.system() == "Android", + # and also the first version to define platform.android_ver(). 
+ api_level = platform.android_ver().api_level # type: ignore[attr-defined] + + if abi is None: + abi = sysconfig.get_platform().split("-")[-1] + abi = _normalize_string(abi) + + # 16 is the minimum API level known to have enough features to support CPython + # without major patching. Yield every API level from the maximum down to the + # minimum, inclusive. + min_api_level = 16 + for ver in range(api_level, min_api_level - 1, -1): + yield f"android_{ver}_{abi}" + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if not linux.startswith("linux_"): + # we should never be here, just yield the sysconfig one and return + yield linux + return + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv8l" + _, arch = linux.split("_", 1) + archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) + yield from _manylinux.platform_tags(archs) + yield from _musllinux.platform_tags(archs) + for arch in archs: + yield f"linux_{arch}" + + +def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "iOS": + return ios_platforms() + elif platform.system() == "Android": + return android_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + +def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter. 
+ """ + version = _get_config_var("py_version_nodot", warn=warn) + return str(version) if version else _version_nodot(sys.version_info[:2]) + + +def _version_nodot(version: PythonVersion) -> str: + return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + + interp_name = interpreter_name() + if interp_name == "cp": + yield from cpython_tags(warn=warn) + else: + yield from generic_tags() + + if interp_name == "pp": + interp = "pp3" + elif interp_name == "cp": + interp = "cp" + interpreter_version(warn=warn) + else: + interp = None + yield from compatible_tags(interpreter=interp) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c41c8137f2679a0fac21bb845596e231ae88dbd8 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/utils.py @@ -0,0 +1,158 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import re +from typing import NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version, _TrimmedRelease + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidName(ValueError): + """ + An invalid distribution name; users should refer to the packaging user guide. + """ + + +class InvalidWheelFilename(ValueError): + """ + An invalid wheel filename was found, users should refer to PEP 427. 
+ """ + + +class InvalidSdistFilename(ValueError): + """ + An invalid sdist filename was found, users should refer to the packaging user guide. + """ + + +# Core metadata spec for `Name` +_validate_regex = re.compile(r"[A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9]", re.IGNORECASE) +_normalized_regex = re.compile(r"[a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9]") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName: + if validate and not _validate_regex.fullmatch(name): + raise InvalidName(f"name is invalid: {name!r}") + # Ensure all ``.`` and ``_`` are ``-`` + # Emulates ``re.sub(r"[-_.]+", "-", name).lower()`` from PEP 503 + # Much faster than re, and even faster than str.translate + value = name.lower().replace("_", "-").replace(".", "-") + # Condense repeats (faster than regex) + while "--" in value: + value = value.replace("--", "-") + return cast("NormalizedName", value) + + +def is_normalized_name(name: str) -> bool: + return _normalized_regex.fullmatch(name) is not None + + +def canonicalize_version( + version: Version | str, *, strip_trailing_zero: bool = True +) -> str: + """ + Return a canonical form of a version as a string. + + >>> canonicalize_version('1.0.1') + '1.0.1' + + Per PEP 625, versions may have multiple canonical forms, differing + only by trailing zeros. + + >>> canonicalize_version('1.0.0') + '1' + >>> canonicalize_version('1.0.0', strip_trailing_zero=False) + '1.0.0' + + Invalid versions are returned unaltered. 
+ + >>> canonicalize_version('foo bar baz') + 'foo bar baz' + """ + if isinstance(version, str): + try: + version = Version(version) + except InvalidVersion: + return str(version) + return str(_TrimmedRelease(version) if strip_trailing_zero else version) + + +def parse_wheel_filename( + filename: str, +) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]: + if not filename.endswith(".whl"): + raise InvalidWheelFilename( + f"Invalid wheel filename (extension must be '.whl'): {filename!r}" + ) + + filename = filename[:-4] + dashes = filename.count("-") + if dashes not in (4, 5): + raise InvalidWheelFilename( + f"Invalid wheel filename (wrong number of parts): {filename!r}" + ) + + parts = filename.split("-", dashes - 2) + name_part = parts[0] + # See PEP 427 for the rules on escaping the project name. + if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: + raise InvalidWheelFilename(f"Invalid project name: {filename!r}") + name = canonicalize_name(name_part) + + try: + version = Version(parts[1]) + except InvalidVersion as e: + raise InvalidWheelFilename( + f"Invalid wheel filename (invalid version): {filename!r}" + ) from e + + if dashes == 5: + build_part = parts[2] + build_match = _build_tag_regex.match(build_part) + if build_match is None: + raise InvalidWheelFilename( + f"Invalid build number: {build_part} in {filename!r}" + ) + build = cast("BuildTag", (int(build_match.group(1)), build_match.group(2))) + else: + build = () + tags = parse_tag(parts[-1]) + return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]: + if filename.endswith(".tar.gz"): + file_stem = filename[: -len(".tar.gz")] + elif filename.endswith(".zip"): + file_stem = filename[: -len(".zip")] + else: + raise InvalidSdistFilename( + f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" + f" {filename!r}" + ) + + # We are requiring a PEP 440 version, which cannot contain dashes, + # so we 
split on the last dash. + name_part, sep, version_part = file_stem.rpartition("-") + if not sep: + raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}") + + name = canonicalize_name(name_part) + + try: + version = Version(version_part) + except InvalidVersion as e: + raise InvalidSdistFilename( + f"Invalid sdist filename (invalid version): {filename!r}" + ) from e + + return (name, version) diff --git a/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py new file mode 100644 index 0000000000000000000000000000000000000000..1206c462d4fcaa670a816e201bb88b27dfc9cf88 --- /dev/null +++ b/.cache/uv/archive-v0/DbqlmhBm6Ad3c4NdZljCm/packaging/version.py @@ -0,0 +1,792 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.version import parse, Version +""" + +from __future__ import annotations + +import re +import sys +import typing +from typing import ( + Any, + Callable, + Literal, + NamedTuple, + SupportsInt, + Tuple, + TypedDict, + Union, +) + +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType + +if typing.TYPE_CHECKING: + from typing_extensions import Self, Unpack + +if sys.version_info >= (3, 13): # pragma: no cover + from warnings import deprecated as _deprecated +elif typing.TYPE_CHECKING: + from typing_extensions import deprecated as _deprecated +else: # pragma: no cover + import functools + import warnings + + def _deprecated(message: str) -> object: + def decorator(func: object) -> object: + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + warnings.warn( + message, + category=DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + return decorator + + +_LETTER_NORMALIZATION = { + "alpha": "a", + 
"beta": "b", + "c": "rc", + "pre": "rc", + "preview": "rc", + "rev": "post", + "r": "post", +} + +__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"] + +LocalType = Tuple[Union[int, str], ...] + +CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]] +CmpLocalType = Union[ + NegativeInfinityType, + Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...], +] +CmpKey = Tuple[ + int, + Tuple[int, ...], + CmpPrePostDevType, + CmpPrePostDevType, + CmpPrePostDevType, + CmpLocalType, +] +VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool] + + +class _VersionReplace(TypedDict, total=False): + epoch: int | None + release: tuple[int, ...] | None + pre: tuple[Literal["a", "b", "rc"], int] | None + post: int | None + dev: int | None + local: str | None + + +def parse(version: str) -> Version: + """Parse the given version string. + + >>> parse('1.0.dev1') + + + :param version: The version string to parse. + :raises InvalidVersion: When the version string is not a valid version. + """ + return Version(version) + + +class InvalidVersion(ValueError): + """Raised when a version string is not a valid version. + + >>> Version("invalid") + Traceback (most recent call last): + ... + packaging.version.InvalidVersion: Invalid version: 'invalid' + """ + + +class _BaseVersion: + __slots__ = () + + # This can also be a normal member (see the packaging_legacy package); + # we are just requiring it to be readable. Actually defining a property + # has runtime effect on subclasses, so it's typing only. + if typing.TYPE_CHECKING: + + @property + def _key(self) -> tuple[Any, ...]: ... + + def __hash__(self) -> int: + return hash(self._key) + + # Please keep the duplicated `isinstance` check + # in the six comparisons hereunder + # unless you find a way to avoid adding overhead function calls. 
+ def __lt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key < other._key + + def __le__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key <= other._key + + def __eq__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key == other._key + + def __ge__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key >= other._key + + def __gt__(self, other: _BaseVersion) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key > other._key + + def __ne__(self, other: object) -> bool: + if not isinstance(other, _BaseVersion): + return NotImplemented + + return self._key != other._key + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse + +# Note that ++ doesn't behave identically on CPython and PyPy, so not using it here +_VERSION_PATTERN = r""" + v?+ # optional leading v + (?: + (?:(?P[0-9]+)!)?+ # epoch + (?P[0-9]+(?:\.[0-9]+)*+) # release segment + (?P
                                          # pre-release
+            [._-]?+
+            (?Palpha|a|beta|b|preview|pre|c|rc)
+            [._-]?+
+            (?P[0-9]+)?
+        )?+
+        (?P                                         # post release
+            (?:-(?P[0-9]+))
+            |
+            (?:
+                [._-]?
+                (?Ppost|rev|r)
+                [._-]?
+                (?P[0-9]+)?
+            )
+        )?+
+        (?P                                          # dev release
+            [._-]?+
+            (?Pdev)
+            [._-]?+
+            (?P[0-9]+)?
+        )?+
+    )
+    (?:\+
+        (?P                                        # local version
+            [a-z0-9]+
+            (?:[._-][a-z0-9]+)*+
+        )
+    )?+
+"""
+
+_VERSION_PATTERN_OLD = _VERSION_PATTERN.replace("*+", "*").replace("?+", "?")
+
+# Possessive qualifiers were added in Python 3.11.
+# CPython 3.11.0-3.11.4 had a bug: https://github.com/python/cpython/pull/107795
+# Older PyPy also had a bug.
+VERSION_PATTERN = (
+    _VERSION_PATTERN_OLD
+    if (sys.implementation.name == "cpython" and sys.version_info < (3, 11, 5))
+    or (sys.implementation.name == "pypy" and sys.version_info < (3, 11, 13))
+    or sys.version_info < (3, 11)
+    else _VERSION_PATTERN
+)
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+# Validation pattern for local version in replace()
+_LOCAL_PATTERN = re.compile(r"[a-z0-9]+(?:[._-][a-z0-9]+)*", re.IGNORECASE)
+
+
+def _validate_epoch(value: object, /) -> int:
+    epoch = value or 0
+    if isinstance(epoch, int) and epoch >= 0:
+        return epoch
+    msg = f"epoch must be non-negative integer, got {epoch}"
+    raise InvalidVersion(msg)
+
+
+def _validate_release(value: object, /) -> tuple[int, ...]:
+    release = (0,) if value is None else value
+    if (
+        isinstance(release, tuple)
+        and len(release) > 0
+        and all(isinstance(i, int) and i >= 0 for i in release)
+    ):
+        return release
+    msg = f"release must be a non-empty tuple of non-negative integers, got {release}"
+    raise InvalidVersion(msg)
+
+
+def _validate_pre(value: object, /) -> tuple[Literal["a", "b", "rc"], int] | None:
+    if value is None:
+        return value
+    if (
+        isinstance(value, tuple)
+        and len(value) == 2
+        and value[0] in ("a", "b", "rc")
+        and isinstance(value[1], int)
+        and value[1] >= 0
+    ):
+        return value
+    msg = f"pre must be a tuple of ('a'|'b'|'rc', non-negative int), got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_post(value: object, /) -> tuple[Literal["post"], int] | None:
+    if value is None:
+        return value
+    if isinstance(value, int) and value >= 0:
+        return ("post", value)
+    msg = f"post must be non-negative integer, got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_dev(value: object, /) -> tuple[Literal["dev"], int] | None:
+    if value is None:
+        return value
+    if isinstance(value, int) and value >= 0:
+        return ("dev", value)
+    msg = f"dev must be non-negative integer, got {value}"
+    raise InvalidVersion(msg)
+
+
+def _validate_local(value: object, /) -> LocalType | None:
+    if value is None:
+        return value
+    if isinstance(value, str) and _LOCAL_PATTERN.fullmatch(value):
+        return _parse_local_version(value)
+    msg = f"local must be a valid version string, got {value!r}"
+    raise InvalidVersion(msg)
+
+
+# Backward compatibility for internals before 26.0. Do not use.
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    
+    >>> v2
+    
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    __slots__ = ("_dev", "_epoch", "_key_cache", "_local", "_post", "_pre", "_release")
+    __match_args__ = ("_str",)
+
+    _regex = re.compile(r"\s*" + VERSION_PATTERN + r"\s*", re.VERBOSE | re.IGNORECASE)
+
+    _epoch: int
+    _release: tuple[int, ...]
+    _dev: tuple[str, int] | None
+    _pre: tuple[str, int] | None
+    _post: tuple[str, int] | None
+    _local: LocalType | None
+
+    _key_cache: CmpKey | None
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+        # Validate the version and parse it into pieces
+        match = self._regex.fullmatch(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: {version!r}")
+        self._epoch = int(match.group("epoch")) if match.group("epoch") else 0
+        self._release = tuple(map(int, match.group("release").split(".")))
+        self._pre = _parse_letter_version(match.group("pre_l"), match.group("pre_n"))
+        self._post = _parse_letter_version(
+            match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+        )
+        self._dev = _parse_letter_version(match.group("dev_l"), match.group("dev_n"))
+        self._local = _parse_local_version(match.group("local"))
+
+        # Key which will be used for sorting
+        self._key_cache = None
+
+    def __replace__(self, **kwargs: Unpack[_VersionReplace]) -> Self:
+        epoch = _validate_epoch(kwargs["epoch"]) if "epoch" in kwargs else self._epoch
+        release = (
+            _validate_release(kwargs["release"])
+            if "release" in kwargs
+            else self._release
+        )
+        pre = _validate_pre(kwargs["pre"]) if "pre" in kwargs else self._pre
+        post = _validate_post(kwargs["post"]) if "post" in kwargs else self._post
+        dev = _validate_dev(kwargs["dev"]) if "dev" in kwargs else self._dev
+        local = _validate_local(kwargs["local"]) if "local" in kwargs else self._local
+
+        if (
+            epoch == self._epoch
+            and release == self._release
+            and pre == self._pre
+            and post == self._post
+            and dev == self._dev
+            and local == self._local
+        ):
+            return self
+
+        new_version = self.__class__.__new__(self.__class__)
+        new_version._key_cache = None
+        new_version._epoch = epoch
+        new_version._release = release
+        new_version._pre = pre
+        new_version._post = post
+        new_version._dev = dev
+        new_version._local = local
+
+        return new_version
+
+    @property
+    def _key(self) -> CmpKey:
+        if self._key_cache is None:
+            self._key_cache = _cmpkey(
+                self._epoch,
+                self._release,
+                self._pre,
+                self._post,
+                self._dev,
+                self._local,
+            )
+        return self._key_cache
+
+    @property
+    @_deprecated("Version._version is private and will be removed soon")
+    def _version(self) -> _Version:
+        return _Version(
+            self._epoch, self._release, self._dev, self._pre, self._post, self._local
+        )
+
+    @_version.setter
+    @_deprecated("Version._version is private and will be removed soon")
+    def _version(self, value: _Version) -> None:
+        self._epoch = value.epoch
+        self._release = value.release
+        self._dev = value.dev
+        self._pre = value.pre
+        self._post = value.post
+        self._local = value.local
+        self._key_cache = None
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        
+        """
+        return f""
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        # This is a hot function, so not calling self.base_version
+        version = ".".join(map(str, self.release))
+
+        # Epoch
+        if self.epoch:
+            version = f"{self.epoch}!{version}"
+
+        # Pre-release
+        if self.pre is not None:
+            version += "".join(map(str, self.pre))
+
+        # Post-release
+        if self.post is not None:
+            version += f".post{self.post}"
+
+        # Development release
+        if self.dev is not None:
+            version += f".dev{self.dev}"
+
+        # Local version segment
+        if self.local is not None:
+            version += f"+{self.local}"
+
+        return version
+
+    @property
+    def _str(self) -> str:
+        """Internal property for match_args"""
+        return str(self)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._pre
+
+    @property
+    def post(self) -> int | None:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._post[1] if self._post else None
+
+    @property
+    def dev(self) -> int | None:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._dev[1] if self._dev else None
+
+    @property
+    def local(self) -> str | None:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._local:
+            return ".".join(str(x) for x in self._local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        release_segment = ".".join(map(str, self.release))
+        return f"{self.epoch}!{release_segment}" if self.epoch else release_segment
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
class _TrimmedRelease(Version):
    """A ``Version`` whose ``release`` drops trailing zero components."""

    __slots__ = ()

    def __init__(self, version: str | Version) -> None:
        if not isinstance(version, Version):
            super().__init__(version)  # pragma: no cover
            return
        # Copy the already-parsed state instead of re-parsing the string.
        for attr in (
            "_epoch",
            "_release",
            "_dev",
            "_pre",
            "_post",
            "_local",
            "_key_cache",
        ):
            setattr(self, attr, getattr(version, attr))

    @property
    def release(self) -> tuple[int, ...]:
        """
        Release segment without any trailing zeros (one component is
        always kept).

        >>> _TrimmedRelease('1.0.0').release
        (1,)
        >>> _TrimmedRelease('0.0').release
        (0,)
        """
        rel = super().release
        end = len(rel)
        while end > 1 and not rel[end - 1]:
            end -= 1
        return rel[:end] if end != len(rel) else rel
+
+
+def _parse_letter_version(
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+    if letter:
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        letter = _LETTER_NORMALIZATION.get(letter, letter)
+
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        return letter, int(number or 0)
+
+    if number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        return "post", int(number)
+
+    return None
+
+
# Separator characters ('.', '_', '-') allowed between the parts of a
# local version segment; used by _parse_local_version to split the text.
_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. We will use this for our sorting key.
+    len_release = len(release)
+    i = len_release
+    while i and release[i - 1] == 0:
+        i -= 1
+    _release = release if i == len_release else release[:i]
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..8fc14ea2b68c0df97b52da5802a44e0083973d26
--- /dev/null
+++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/METADATA
@@ -0,0 +1,412 @@
+Metadata-Version: 2.4
+Name: typer
+Version: 0.24.1
+Summary: Typer, build great CLIs. Easy to code. Based on Python type hints.
+Author-Email: =?utf-8?q?Sebasti=C3=A1n_Ram=C3=ADrez?= 
+License-Expression: MIT
+License-File: LICENSE
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Software Development
+Classifier: Typing :: Typed
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
+Project-URL: Homepage, https://github.com/fastapi/typer
+Project-URL: Documentation, https://typer.tiangolo.com
+Project-URL: Repository, https://github.com/fastapi/typer
+Project-URL: Issues, https://github.com/fastapi/typer/issues
+Project-URL: Changelog, https://typer.tiangolo.com/release-notes/
+Requires-Python: >=3.10
+Requires-Dist: click>=8.2.1
+Requires-Dist: shellingham>=1.3.0
+Requires-Dist: rich>=12.3.0
+Requires-Dist: annotated-doc>=0.0.2
+Description-Content-Type: text/markdown
+
+

+ Typer + +

+

+ Typer, build great CLIs. Easy to code. Based on Python type hints. +

+

+ + Test + + + Publish + + + Coverage + + Package version + +

+ +--- + +**Documentation**: https://typer.tiangolo.com + +**Source Code**: https://github.com/fastapi/typer + +--- + +Typer is a library for building CLI applications that users will **love using** and developers will **love creating**. Based on Python type hints. + +It's also a command line tool to run scripts, automatically converting them to CLI applications. + +The key features are: + +* **Intuitive to write**: Great editor support. Completion everywhere. Less time debugging. Designed to be easy to use and learn. Less time reading docs. +* **Easy to use**: It's easy to use for the final users. Automatic help, and automatic completion for all shells. +* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs. +* **Start simple**: The simplest example adds only 2 lines of code to your app: **1 import, 1 function call**. +* **Grow large**: Grow in complexity as much as you want, create arbitrarily complex trees of commands and groups of subcommands, with options and arguments. +* **Run scripts**: Typer includes a `typer` command/program that you can use to run scripts, automatically converting them to CLIs, even if they don't use Typer internally. + +## 2026 February - Typer developer survey + +Help us define Typer's future by filling the Typer developer survey. ✨ + +## FastAPI of CLIs + +**Typer** is FastAPI's little sibling, it's the FastAPI of CLIs. + +## Installation + +Create and activate a virtual environment and then install **Typer**: + +
+ +```console +$ pip install typer +---> 100% +Successfully installed typer rich shellingham +``` + +
+ +## Example + +### The absolute minimum + +* Create a file `main.py` with: + +```Python +def main(name: str): + print(f"Hello {name}") +``` + +This script doesn't even use Typer internally. But you can use the `typer` command to run it as a CLI application. + +### Run it + +Run your application with the `typer` command: + +
+ +```console +// Run your application +$ typer main.py run + +// You get a nice error, you are missing NAME +Usage: typer [PATH_OR_MODULE] run [OPTIONS] NAME +Try 'typer [PATH_OR_MODULE] run --help' for help. +╭─ Error ───────────────────────────────────────────╮ +│ Missing argument 'NAME'. │ +╰───────────────────────────────────────────────────╯ + + +// You get a --help for free +$ typer main.py run --help + +Usage: typer [PATH_OR_MODULE] run [OPTIONS] NAME + +Run the provided Typer app. + +╭─ Arguments ───────────────────────────────────────╮ +│ * name TEXT [default: None] [required] | +╰───────────────────────────────────────────────────╯ +╭─ Options ─────────────────────────────────────────╮ +│ --help Show this message and exit. │ +╰───────────────────────────────────────────────────╯ + +// Now pass the NAME argument +$ typer main.py run Camila + +Hello Camila + +// It works! 🎉 +``` + +
+ +This is the simplest use case, not even using Typer internally, but it can already be quite useful for simple scripts. + +**Note**: auto-completion works when you create a Python package and run it with `--install-completion` or when you use the `typer` command. + +## Use Typer in your code + +Now let's start using Typer in your own code, update `main.py` with: + +```Python +import typer + + +def main(name: str): + print(f"Hello {name}") + + +if __name__ == "__main__": + typer.run(main) +``` + +Now you could run it with Python directly: + +
+ +```console +// Run your application +$ python main.py + +// You get a nice error, you are missing NAME +Usage: main.py [OPTIONS] NAME +Try 'main.py --help' for help. +╭─ Error ───────────────────────────────────────────╮ +│ Missing argument 'NAME'. │ +╰───────────────────────────────────────────────────╯ + + +// You get a --help for free +$ python main.py --help + +Usage: main.py [OPTIONS] NAME + +╭─ Arguments ───────────────────────────────────────╮ +│ * name TEXT [default: None] [required] | +╰───────────────────────────────────────────────────╯ +╭─ Options ─────────────────────────────────────────╮ +│ --help Show this message and exit. │ +╰───────────────────────────────────────────────────╯ + +// Now pass the NAME argument +$ python main.py Camila + +Hello Camila + +// It works! 🎉 +``` + +
+ +**Note**: you can also call this same script with the `typer` command, but you don't need to. + +## Example upgrade + +This was the simplest example possible. + +Now let's see one a bit more complex. + +### An example with two subcommands + +Modify the file `main.py`. + +Create a `typer.Typer()` app, and create two subcommands with their parameters. + +```Python hl_lines="3 6 11 20" +import typer + +app = typer.Typer() + + +@app.command() +def hello(name: str): + print(f"Hello {name}") + + +@app.command() +def goodbye(name: str, formal: bool = False): + if formal: + print(f"Goodbye Ms. {name}. Have a good day.") + else: + print(f"Bye {name}!") + + +if __name__ == "__main__": + app() +``` + +And that will: + +* Explicitly create a `typer.Typer` app. + * The previous `typer.run` actually creates one implicitly for you. +* Add two subcommands with `@app.command()`. +* Execute the `app()` itself, as if it was a function (instead of `typer.run`). + +### Run the upgraded example + +Check the new help: + +
+ +```console +$ python main.py --help + + Usage: main.py [OPTIONS] COMMAND [ARGS]... + +╭─ Options ─────────────────────────────────────────╮ +│ --install-completion Install completion │ +│ for the current │ +│ shell. │ +│ --show-completion Show completion for │ +│ the current shell, │ +│ to copy it or │ +│ customize the │ +│ installation. │ +│ --help Show this message │ +│ and exit. │ +╰───────────────────────────────────────────────────╯ +╭─ Commands ────────────────────────────────────────╮ +│ goodbye │ +│ hello │ +╰───────────────────────────────────────────────────╯ + +// When you create a package you get ✨ auto-completion ✨ for free, installed with --install-completion + +// You have 2 subcommands (the 2 functions): goodbye and hello +``` + +
+ +Now check the help for the `hello` command: + +
+ +```console +$ python main.py hello --help + + Usage: main.py hello [OPTIONS] NAME + +╭─ Arguments ───────────────────────────────────────╮ +│ * name TEXT [default: None] [required] │ +╰───────────────────────────────────────────────────╯ +╭─ Options ─────────────────────────────────────────╮ +│ --help Show this message and exit. │ +╰───────────────────────────────────────────────────╯ +``` + +
+ +And now check the help for the `goodbye` command: + +
+ +```console +$ python main.py goodbye --help + + Usage: main.py goodbye [OPTIONS] NAME + +╭─ Arguments ───────────────────────────────────────╮ +│ * name TEXT [default: None] [required] │ +╰───────────────────────────────────────────────────╯ +╭─ Options ─────────────────────────────────────────╮ +│ --formal --no-formal [default: no-formal] │ +│ --help Show this message │ +│ and exit. │ +╰───────────────────────────────────────────────────╯ + +// Automatic --formal and --no-formal for the bool option 🎉 +``` + +
+ +Now you can try out the new command line application: + +
+ +```console +// Use it with the hello command + +$ python main.py hello Camila + +Hello Camila + +// And with the goodbye command + +$ python main.py goodbye Camila + +Bye Camila! + +// And with --formal + +$ python main.py goodbye --formal Camila + +Goodbye Ms. Camila. Have a good day. +``` + +
+ +**Note**: If your app only has one command, by default the command name is **omitted** in usage: `python main.py Camila`. However, when there are multiple commands, you must **explicitly include the command name**: `python main.py hello Camila`. See [One or Multiple Commands](https://typer.tiangolo.com/tutorial/commands/one-or-multiple/) for more details. + +### Recap + +In summary, you declare **once** the types of parameters (*CLI arguments* and *CLI options*) as function parameters. + +You do that with standard modern Python types. + +You don't have to learn a new syntax, the methods or classes of a specific library, etc. + +Just standard **Python**. + +For example, for an `int`: + +```Python +total: int +``` + +or for a `bool` flag: + +```Python +force: bool +``` + +And similarly for **files**, **paths**, **enums** (choices), etc. And there are tools to create **groups of subcommands**, add metadata, extra **validation**, etc. + +**You get**: great editor support, including **completion** and **type checks** everywhere. + +**Your users get**: automatic **`--help`**, **auto-completion** in their terminal (Bash, Zsh, Fish, PowerShell) when they install your package or when using the `typer` command. + +For a more complete example including more features, see the Tutorial - User Guide. + +## Dependencies + +**Typer** stands on the shoulders of giants. It has three required dependencies: + +* Click: a popular tool for building CLIs in Python. Typer is based on it. +* rich: to show nicely formatted errors automatically. +* shellingham: to automatically detect the current shell when installing completion. + +### `typer-slim` + +There used to be a slimmed-down version of Typer called `typer-slim`, which didn't include the dependencies `rich` and `shellingham`, nor the `typer` command. + +However, since version 0.22.0, we have stopped supporting this, and `typer-slim` now simply installs (all of) Typer. 
+ +If you want to disable Rich globally, you can set an environmental variable `TYPER_USE_RICH` to `False` or `0`. + +## License + +This project is licensed under the terms of the MIT license. diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..52219d571f39b0935f47864ba03515bb96fb2f46 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/RECORD @@ -0,0 +1,22 @@ +typer-0.24.1.dist-info/METADATA,sha256=V4OWoWjBhPNcoIaOxhr1cszo69nePKOHMRXERkMscKs,16057 +typer-0.24.1.dist-info/WHEEL,sha256=Wb0ASbVj8JvWHpOiIpPi7ucfIgJeCi__PzivviEAQFc,90 +typer-0.24.1.dist-info/entry_points.txt,sha256=YO13ByiqWeuas9V0JADLUARZFUe_cwU_7wmTNvxBYQ8,57 +typer-0.24.1.dist-info/licenses/LICENSE,sha256=WJks68-N-25AxOIRLtEhJsJDZm3KORKj14t-ysSFnUk,1086 +typer/__init__.py,sha256=WOelHJu4PW0hk9nfjEX0Qxssb58NCh1km_Xq5LY_33s,1596 +typer/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +typer/_completion_classes.py,sha256=R9v4D8pJ_-n8fLOuyxrRSu7sP5lpXIy5fsLUW8zwsDU,7039 +typer/_completion_shared.py,sha256=-uhCUIMc2S1ywdB-fBSSccH70mIBEsVTxHomcmy-klE,9129 +typer/_types.py,sha256=0lcBDLcsxqr1sxTsqObj_u0Dfa37lWJYUY4PNkX4QlA,974 +typer/_typing.py,sha256=QOw5o-B2L--C3ly2DQH6aUwag6x5brV5FhVaBZ5gzMg,1727 +typer/cli.py,sha256=icRbazvdRdbYeaidPZOmJDOzrP3RAa7vj2INVV9Zb8Q,10183 +typer/colors.py,sha256=e42j8uB520hLpX5C_0fiR3OOoIFMbhO3ADZvv6hlAV8,430 +typer/completion.py,sha256=FRTR9hP_IPdJp-4GXPOq0btXo5SvgAtLVfS3ZkAMpgQ,4793 +typer/core.py,sha256=O5NywSwHPyYbLhZkPYSfwIj7Za2hPnoPP4xPXRa97a0,27947 +typer/main.py,sha256=xyNex-QfGUi-enu9j9rl-_wofApxs5VwdpCthAUAAkk,69005 +typer/models.py,sha256=OwPG3MAXiUD5ih3p8eNVciXUsL07UIJfNWy3JiNpDfg,19843 +typer/params.py,sha256=AovViRtl-VvUIXnmKKpnxoWK9_gHUbyQgXxxv3h_7lI,59713 +typer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+typer/rich_utils.py,sha256=RTyeoxwz16ZZXYbwoEixB_LSEnqpoStG_TGCRTz6zFQ,25424 +typer/testing.py,sha256=-ovLNjUNNEFCJoau-41iTJIobsjPbqyTrRq7-8ac4z4,871 +typer/utils.py,sha256=wnJ1DWXBFMnxLHaMN_HDYntxLRby0K-rux63aokHInI,7599 +typer-0.24.1.dist-info/RECORD,, diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..18c430c1f6411c0532096b13de4384ab50a79abd --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.7) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..ca44c05a071b87fe02a3724bd895838305fe4e5d --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +typer = typer.cli:main + +[gui_scripts] + diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..a7694736cf37716aafec14b24aa8d6316ebe07a3 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer-0.24.1.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Sebastián Ramírez + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the 
Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cb0921c06c9c0eddcb32ef016236ea98c2a42375 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__init__.py @@ -0,0 +1,39 @@ +"""Typer, build great CLIs. Easy to code. 
Based on Python type hints.""" + +__version__ = "0.24.1" + +from shutil import get_terminal_size as get_terminal_size + +from click.exceptions import Abort as Abort +from click.exceptions import BadParameter as BadParameter +from click.exceptions import Exit as Exit +from click.termui import clear as clear +from click.termui import confirm as confirm +from click.termui import echo_via_pager as echo_via_pager +from click.termui import edit as edit +from click.termui import getchar as getchar +from click.termui import pause as pause +from click.termui import progressbar as progressbar +from click.termui import prompt as prompt +from click.termui import secho as secho +from click.termui import style as style +from click.termui import unstyle as unstyle +from click.utils import echo as echo +from click.utils import format_filename as format_filename +from click.utils import get_app_dir as get_app_dir +from click.utils import get_binary_stream as get_binary_stream +from click.utils import get_text_stream as get_text_stream +from click.utils import open_file as open_file + +from . 
import colors as colors +from .main import Typer as Typer +from .main import launch as launch +from .main import run as run +from .models import CallbackParam as CallbackParam +from .models import Context as Context +from .models import FileBinaryRead as FileBinaryRead +from .models import FileBinaryWrite as FileBinaryWrite +from .models import FileText as FileText +from .models import FileTextWrite as FileTextWrite +from .params import Argument as Argument +from .params import Option as Option diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..4e28416e104515e90fca4b69cc60d0c61fd15d61 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py new file mode 100644 index 0000000000000000000000000000000000000000..8548fb4d6a3e2e274811b0935276e1c0e6f2a9a0 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_classes.py @@ -0,0 +1,199 @@ +import importlib.util +import os +import re +import sys +from typing import Any + +import click +import click.parser +import click.shell_completion +from click.shell_completion import split_arg_string as click_split_arg_string + +from ._completion_shared import ( + COMPLETION_SCRIPT_BASH, + COMPLETION_SCRIPT_FISH, + COMPLETION_SCRIPT_POWER_SHELL, + COMPLETION_SCRIPT_ZSH, + Shells, +) + + +def _sanitize_help_text(text: str) -> str: + """Sanitizes the help text by removing rich tags""" + if not importlib.util.find_spec("rich"): + return text + from . 
import rich_utils + + return rich_utils.rich_render_text(text) + + +class BashComplete(click.shell_completion.BashComplete): + name = Shells.bash.value + source_template = COMPLETION_SCRIPT_BASH + + def source_vars(self) -> dict[str, Any]: + return { + "complete_func": self.func_name, + "autocomplete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def get_completion_args(self) -> tuple[list[str], str]: + cwords = click_split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: click.shell_completion.CompletionItem) -> str: + # TODO: Explore replicating the new behavior from Click, with item types and + # triggering completion for files and directories + # return f"{item.type},{item.value}" + return f"{item.value}" + + def complete(self) -> str: + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class ZshComplete(click.shell_completion.ZshComplete): + name = Shells.zsh.value + source_template = COMPLETION_SCRIPT_ZSH + + def source_vars(self) -> dict[str, Any]: + return { + "complete_func": self.func_name, + "autocomplete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def get_completion_args(self) -> tuple[list[str], str]: + completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "") + cwords = click_split_arg_string(completion_args) + args = cwords[1:] + if args and not completion_args.endswith(" "): + incomplete = args[-1] + args = args[:-1] + else: + incomplete = "" + return args, incomplete + + def format_completion(self, item: click.shell_completion.CompletionItem) -> str: + def escape(s: str) -> str: + return ( + s.replace('"', '""') + .replace("'", "''") + .replace("$", "\\$") + .replace("`", "\\`") + 
.replace(":", r"\\:") + ) + + # TODO: Explore replicating the new behavior from Click, pay attention to + # the difference with and without escape + # return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + if item.help: + return f'"{escape(item.value)}":"{_sanitize_help_text(escape(item.help))}"' + else: + return f'"{escape(item.value)}"' + + def complete(self) -> str: + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + res = [self.format_completion(item) for item in completions] + if res: + args_str = "\n".join(res) + return f"_arguments '*: :(({args_str}))'" + else: + return "_files" + + +class FishComplete(click.shell_completion.FishComplete): + name = Shells.fish.value + source_template = COMPLETION_SCRIPT_FISH + + def source_vars(self) -> dict[str, Any]: + return { + "complete_func": self.func_name, + "autocomplete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def get_completion_args(self) -> tuple[list[str], str]: + completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "") + cwords = click_split_arg_string(completion_args) + args = cwords[1:] + if args and not completion_args.endswith(" "): + incomplete = args[-1] + args = args[:-1] + else: + incomplete = "" + return args, incomplete + + def format_completion(self, item: click.shell_completion.CompletionItem) -> str: + # TODO: Explore replicating the new behavior from Click, pay attention to + # the difference with and without formatted help + # if item.help: + # return f"{item.type},{item.value}\t{item.help}" + + # return f"{item.type},{item.value} + if item.help: + formatted_help = re.sub(r"\s", " ", item.help) + return f"{item.value}\t{_sanitize_help_text(formatted_help)}" + else: + return f"{item.value}" + + def complete(self) -> str: + complete_action = os.getenv("_TYPER_COMPLETE_FISH_ACTION", "") + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + show_args = 
[self.format_completion(item) for item in completions] + if complete_action == "get-args": + if show_args: + return "\n".join(show_args) + elif complete_action == "is-args": + if show_args: + # Activate complete args (no files) + sys.exit(0) + else: + # Deactivate complete args (allow files) + sys.exit(1) + return "" # pragma: no cover + + +class PowerShellComplete(click.shell_completion.ShellComplete): + name = Shells.powershell.value + source_template = COMPLETION_SCRIPT_POWER_SHELL + + def source_vars(self) -> dict[str, Any]: + return { + "complete_func": self.func_name, + "autocomplete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def get_completion_args(self) -> tuple[list[str], str]: + completion_args = os.getenv("_TYPER_COMPLETE_ARGS", "") + incomplete = os.getenv("_TYPER_COMPLETE_WORD_TO_COMPLETE", "") + cwords = click_split_arg_string(completion_args) + args = cwords[1:-1] if incomplete else cwords[1:] + return args, incomplete + + def format_completion(self, item: click.shell_completion.CompletionItem) -> str: + return f"{item.value}:::{_sanitize_help_text(item.help) if item.help else ' '}" + + +def completion_init() -> None: + click.shell_completion.add_completion_class(BashComplete, Shells.bash.value) + click.shell_completion.add_completion_class(ZshComplete, Shells.zsh.value) + click.shell_completion.add_completion_class(FishComplete, Shells.fish.value) + click.shell_completion.add_completion_class( + PowerShellComplete, Shells.powershell.value + ) + click.shell_completion.add_completion_class(PowerShellComplete, Shells.pwsh.value) diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py new file mode 100644 index 0000000000000000000000000000000000000000..5a81dcf68cddda410becd393e5cf0d7e9e83042e --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_completion_shared.py @@ -0,0 +1,252 @@ +import os +import re +import 
subprocess +from enum import Enum +from pathlib import Path + +import click +import shellingham + + +class Shells(str, Enum): + bash = "bash" + zsh = "zsh" + fish = "fish" + powershell = "powershell" + pwsh = "pwsh" + + +COMPLETION_SCRIPT_BASH = """ +%(complete_func)s() { + local IFS=$'\n' + COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + %(autocomplete_var)s=complete_bash $1 ) ) + return 0 +} + +complete -o default -F %(complete_func)s %(prog_name)s +""" + +COMPLETION_SCRIPT_ZSH = """ +#compdef %(prog_name)s + +%(complete_func)s() { + eval $(env _TYPER_COMPLETE_ARGS="${words[1,$CURRENT]}" %(autocomplete_var)s=complete_zsh %(prog_name)s) +} + +compdef %(complete_func)s %(prog_name)s +""" + +COMPLETION_SCRIPT_FISH = 'complete --command %(prog_name)s --no-files --arguments "(env %(autocomplete_var)s=complete_fish _TYPER_COMPLETE_FISH_ACTION=get-args _TYPER_COMPLETE_ARGS=(commandline -cp) %(prog_name)s)" --condition "env %(autocomplete_var)s=complete_fish _TYPER_COMPLETE_FISH_ACTION=is-args _TYPER_COMPLETE_ARGS=(commandline -cp) %(prog_name)s"' + +COMPLETION_SCRIPT_POWER_SHELL = """ +Import-Module PSReadLine +Set-PSReadLineKeyHandler -Chord Tab -Function MenuComplete +$scriptblock = { + param($wordToComplete, $commandAst, $cursorPosition) + $Env:%(autocomplete_var)s = "complete_powershell" + $Env:_TYPER_COMPLETE_ARGS = $commandAst.ToString() + $Env:_TYPER_COMPLETE_WORD_TO_COMPLETE = $wordToComplete + %(prog_name)s | ForEach-Object { + $commandArray = $_ -Split ":::" + $command = $commandArray[0] + $helpString = $commandArray[1] + [System.Management.Automation.CompletionResult]::new( + $command, $command, 'ParameterValue', $helpString) + } + $Env:%(autocomplete_var)s = "" + $Env:_TYPER_COMPLETE_ARGS = "" + $Env:_TYPER_COMPLETE_WORD_TO_COMPLETE = "" +} +Register-ArgumentCompleter -Native -CommandName %(prog_name)s -ScriptBlock $scriptblock +""" + +_completion_scripts = { + "bash": COMPLETION_SCRIPT_BASH, + "zsh": COMPLETION_SCRIPT_ZSH, 
+ "fish": COMPLETION_SCRIPT_FISH, + "powershell": COMPLETION_SCRIPT_POWER_SHELL, + "pwsh": COMPLETION_SCRIPT_POWER_SHELL, +} + +# TODO: Probably refactor this, copied from Click 7.x +_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]") + + +def get_completion_script(*, prog_name: str, complete_var: str, shell: str) -> str: + cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_")) + script = _completion_scripts.get(shell) + if script is None: + click.echo(f"Shell {shell} not supported.", err=True) + raise click.exceptions.Exit(1) + return ( + script + % { + "complete_func": f"_{cf_name}_completion", + "prog_name": prog_name, + "autocomplete_var": complete_var, + } + ).strip() + + +def install_bash(*, prog_name: str, complete_var: str, shell: str) -> Path: + # Ref: https://github.com/scop/bash-completion#faq + # It seems bash-completion is the official completion system for bash: + # Ref: https://www.gnu.org/software/bash/manual/html_node/A-Programmable-Completion-Example.html + # But installing in the locations from the docs doesn't seem to have effect + completion_path = Path.home() / ".bash_completions" / f"{prog_name}.sh" + rc_path = Path.home() / ".bashrc" + rc_path.parent.mkdir(parents=True, exist_ok=True) + rc_content = "" + if rc_path.is_file(): + rc_content = rc_path.read_text() + completion_init_lines = [f"source '{completion_path}'"] + for line in completion_init_lines: + if line not in rc_content: # pragma: no cover + rc_content += f"\n{line}" + rc_content += "\n" + rc_path.write_text(rc_content) + # Install completion + completion_path.parent.mkdir(parents=True, exist_ok=True) + script_content = get_completion_script( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + completion_path.write_text(script_content) + return completion_path + + +def install_zsh(*, prog_name: str, complete_var: str, shell: str) -> Path: + # Setup Zsh and load ~/.zfunc + zshrc_path = Path.home() / ".zshrc" + zshrc_path.parent.mkdir(parents=True, 
exist_ok=True) + zshrc_content = "" + if zshrc_path.is_file(): + zshrc_content = zshrc_path.read_text() + completion_line = "fpath+=~/.zfunc; autoload -Uz compinit; compinit" + if completion_line not in zshrc_content: + zshrc_content += f"\n{completion_line}\n" + style_line = "zstyle ':completion:*' menu select" + # TODO: consider setting the style only for the current program + # style_line = f"zstyle ':completion:*:*:{prog_name}:*' menu select" + # Install zstyle completion config only if the user doesn't have a customization + if "zstyle" not in zshrc_content: + zshrc_content += f"\n{style_line}\n" + zshrc_content = f"{zshrc_content.strip()}\n" + zshrc_path.write_text(zshrc_content) + # Install completion under ~/.zfunc/ + path_obj = Path.home() / f".zfunc/_{prog_name}" + path_obj.parent.mkdir(parents=True, exist_ok=True) + script_content = get_completion_script( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + path_obj.write_text(script_content) + return path_obj + + +def install_fish(*, prog_name: str, complete_var: str, shell: str) -> Path: + path_obj = Path.home() / f".config/fish/completions/{prog_name}.fish" + parent_dir: Path = path_obj.parent + parent_dir.mkdir(parents=True, exist_ok=True) + script_content = get_completion_script( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + path_obj.write_text(f"{script_content}\n") + return path_obj + + +def install_powershell(*, prog_name: str, complete_var: str, shell: str) -> Path: + subprocess.run( + [ + shell, + "-Command", + "Set-ExecutionPolicy", + "Unrestricted", + "-Scope", + "CurrentUser", + ] + ) + result = subprocess.run( + [shell, "-NoProfile", "-Command", "echo", "$profile"], + check=True, + stdout=subprocess.PIPE, + ) + if result.returncode != 0: # pragma: no cover + click.echo("Couldn't get PowerShell user profile", err=True) + raise click.exceptions.Exit(result.returncode) + path_str = "" + if isinstance(result.stdout, str): # pragma: no cover + path_str = 
result.stdout + if isinstance(result.stdout, bytes): + for encoding in ["windows-1252", "utf8", "cp850"]: + try: + path_str = result.stdout.decode(encoding) + break + except UnicodeDecodeError: # pragma: no cover + pass + if not path_str: # pragma: no cover + click.echo("Couldn't decode the path automatically", err=True) + raise click.exceptions.Exit(1) + path_obj = Path(path_str.strip()) + parent_dir: Path = path_obj.parent + parent_dir.mkdir(parents=True, exist_ok=True) + script_content = get_completion_script( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + with path_obj.open(mode="a") as f: + f.write(f"{script_content}\n") + return path_obj + + +def install( + shell: str | None = None, + prog_name: str | None = None, + complete_var: str | None = None, +) -> tuple[str, Path]: + prog_name = prog_name or click.get_current_context().find_root().info_name + assert prog_name + if complete_var is None: + complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper()) + test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION") + if shell is None and not test_disable_detection: + shell = _get_shell_name() + if shell == "bash": + installed_path = install_bash( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + return shell, installed_path + elif shell == "zsh": + installed_path = install_zsh( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + return shell, installed_path + elif shell == "fish": + installed_path = install_fish( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + return shell, installed_path + elif shell in {"powershell", "pwsh"}: + installed_path = install_powershell( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + return shell, installed_path + else: + click.echo(f"Shell {shell} is not supported.") + raise click.exceptions.Exit(1) + + +def _get_shell_name() -> str | None: + """Get the current shell name, if available. 
+ + The name will always be lowercase. If the shell cannot be detected, None is + returned. + """ + name: str | None # N.B. shellingham is untyped + try: + # N.B. detect_shell returns a tuple of (shell name, shell command). + # We only need the name. + name, _cmd = shellingham.detect_shell() # noqa: TID251 + except shellingham.ShellDetectionFailure: # pragma: no cover + name = None + + return name diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py new file mode 100644 index 0000000000000000000000000000000000000000..dc9fc63220d91b5c7ecc16170bb9de7afb135fb4 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_types.py @@ -0,0 +1,27 @@ +from enum import Enum +from typing import TypeVar + +import click + +ParamTypeValue = TypeVar("ParamTypeValue") + + +class TyperChoice(click.Choice[ParamTypeValue]): + def normalize_choice( + self, choice: ParamTypeValue, ctx: click.Context | None + ) -> str: + # Click 8.2.0 added a new method `normalize_choice` to the `Choice` class + # to support enums, but it uses the enum names, while Typer has always used the + # enum values. + # This class overrides that method to maintain the previous behavior. 
+ # In Click: + # normed_value = choice.name if isinstance(choice, Enum) else str(choice) + normed_value = str(choice.value) if isinstance(choice, Enum) else str(choice) + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(normed_value) + + if not self.case_sensitive: + normed_value = normed_value.casefold() + + return normed_value diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py new file mode 100644 index 0000000000000000000000000000000000000000..218f674c2220589738656294fdd8ec243965376f --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/_typing.py @@ -0,0 +1,73 @@ +# Copied from pydantic 1.9.2 (the latest version to support python 3.6.) +# https://github.com/pydantic/pydantic/blob/v1.9.2/pydantic/typing.py +# Reduced drastically to only include Typer-specific 3.9+ functionality +# mypy: ignore-errors + +import types +from collections.abc import Callable +from typing import ( + Annotated, + Any, + Literal, + Union, + get_args, + get_origin, + get_type_hints, +) + + +def is_union(tp: type[Any] | None) -> bool: + return tp is Union or tp is types.UnionType # noqa: E721 + + +__all__ = ( + "NoneType", + "is_none_type", + "is_callable_type", + "is_literal_type", + "all_literal_values", + "is_union", + "Annotated", + "Literal", + "get_args", + "get_origin", + "get_type_hints", +) + + +NoneType = None.__class__ + + +NONE_TYPES: tuple[Any, Any, Any] = (None, NoneType, Literal[None]) + + +def is_none_type(type_: Any) -> bool: + for none_type in NONE_TYPES: + if type_ is none_type: + return True + return False + + +def is_callable_type(type_: type[Any]) -> bool: + return type_ is Callable or get_origin(type_) is Callable + + +def is_literal_type(type_: type[Any]) -> bool: + return get_origin(type_) is Literal + + +def literal_values(type_: type[Any]) -> tuple[Any, ...]: + return get_args(type_) + + +def 
all_literal_values(type_: type[Any]) -> tuple[Any, ...]: + """ + This method is used to retrieve all Literal values as + Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) + e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` + """ + if not is_literal_type(type_): + return (type_,) + + values = literal_values(type_) + return tuple(x for value in values for x in all_literal_values(value)) diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..4b4356f8bdafcd5fc9e8de6c0c95f6f8f4f8bda1 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/cli.py @@ -0,0 +1,317 @@ +import importlib.util +import re +import sys +from pathlib import Path +from typing import Any + +import click +import typer +import typer.core +from click import Command, Group, Option + +from . import __version__ +from .core import HAS_RICH, MARKUP_MODE_KEY + +default_app_names = ("app", "cli", "main") +default_func_names = ("main", "cli", "app") + +app = typer.Typer() +utils_app = typer.Typer(help="Extra utility commands for Typer apps.") +app.add_typer(utils_app, name="utils") + + +class State: + def __init__(self) -> None: + self.app: str | None = None + self.func: str | None = None + self.file: Path | None = None + self.module: str | None = None + + +state = State() + + +def maybe_update_state(ctx: click.Context) -> None: + path_or_module = ctx.params.get("path_or_module") + if path_or_module: + file_path = Path(path_or_module) + if file_path.exists() and file_path.is_file(): + state.file = file_path + else: + if not re.fullmatch(r"[a-zA-Z_]\w*(\.[a-zA-Z_]\w*)*", path_or_module): + typer.echo( + f"Not a valid file or Python module: {path_or_module}", err=True + ) + sys.exit(1) + state.module = path_or_module + app_name = ctx.params.get("app") + if app_name: + state.app = app_name + func_name = ctx.params.get("func") 
+ if func_name: + state.func = func_name + + +class TyperCLIGroup(typer.core.TyperGroup): + def list_commands(self, ctx: click.Context) -> list[str]: + self.maybe_add_run(ctx) + return super().list_commands(ctx) + + def get_command(self, ctx: click.Context, name: str) -> Command | None: # ty: ignore[invalid-method-override] + self.maybe_add_run(ctx) + return super().get_command(ctx, name) + + def invoke(self, ctx: click.Context) -> Any: + self.maybe_add_run(ctx) + return super().invoke(ctx) + + def maybe_add_run(self, ctx: click.Context) -> None: + maybe_update_state(ctx) + maybe_add_run_to_cli(self) + + +def get_typer_from_module(module: Any) -> typer.Typer | None: + # Try to get defined app + if state.app: + obj = getattr(module, state.app, None) + if not isinstance(obj, typer.Typer): + typer.echo(f"Not a Typer object: --app {state.app}", err=True) + sys.exit(1) + return obj + # Try to get defined function + if state.func: + func_obj = getattr(module, state.func, None) + if not callable(func_obj): + typer.echo(f"Not a function: --func {state.func}", err=True) + raise typer.Exit(1) + sub_app = typer.Typer() + sub_app.command()(func_obj) + return sub_app + # Iterate and get a default object to use as CLI + local_names = dir(module) + local_names_set = set(local_names) + # Try to get a default Typer app + for name in default_app_names: + if name in local_names_set: + obj = getattr(module, name, None) + if isinstance(obj, typer.Typer): + return obj + # Try to get any Typer app + for name in local_names_set - set(default_app_names): + obj = getattr(module, name) + if isinstance(obj, typer.Typer): + return obj + # Try to get a default function + for func_name in default_func_names: + func_obj = getattr(module, func_name, None) + if callable(func_obj): + sub_app = typer.Typer() + sub_app.command()(func_obj) + return sub_app + # Try to get any func app + for func_name in local_names_set - set(default_func_names): + func_obj = getattr(module, func_name) + if 
callable(func_obj): + sub_app = typer.Typer() + sub_app.command()(func_obj) + return sub_app + return None + + +def get_typer_from_state() -> typer.Typer | None: + spec = None + if state.file: + module_name = state.file.name + spec = importlib.util.spec_from_file_location(module_name, str(state.file)) + elif state.module: + spec = importlib.util.find_spec(state.module) + if spec is None: + if state.file: + typer.echo(f"Could not import as Python file: {state.file}", err=True) + else: + typer.echo(f"Could not import as Python module: {state.module}", err=True) + sys.exit(1) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) # type: ignore + obj = get_typer_from_module(module) + return obj + + +def maybe_add_run_to_cli(cli: click.Group) -> None: + if "run" not in cli.commands: + if state.file or state.module: + obj = get_typer_from_state() + if obj: + obj._add_completion = False + click_obj = typer.main.get_command(obj) + click_obj.name = "run" + if not click_obj.help: + click_obj.help = "Run the provided Typer app." + cli.add_command(click_obj) + + +def print_version(ctx: click.Context, param: Option, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + typer.echo(f"Typer version: {__version__}") + raise typer.Exit() + + +@app.callback(cls=TyperCLIGroup, no_args_is_help=True) +def callback( + ctx: typer.Context, + *, + path_or_module: str = typer.Argument(None), + app: str = typer.Option(None, help="The typer app object/variable to use."), + func: str = typer.Option(None, help="The function to convert to Typer."), + version: bool = typer.Option( + False, + "--version", + help="Print version and exit.", + callback=print_version, + ), +) -> None: + """ + Run Typer scripts with completion, without having to create a package. 
+ + You probably want to install completion for the typer command: + + $ typer --install-completion + + https://typer.tiangolo.com/ + """ + maybe_update_state(ctx) + + +def get_docs_for_click( + *, + obj: Command, + ctx: typer.Context, + indent: int = 0, + name: str = "", + call_prefix: str = "", + title: str | None = None, +) -> str: + docs = "#" * (1 + indent) + command_name = name or obj.name + if call_prefix: + command_name = f"{call_prefix} {command_name}" + if not title: + title = f"`{command_name}`" if command_name else "CLI" + docs += f" {title}\n\n" + rich_markup_mode = None + if hasattr(ctx, "obj") and isinstance(ctx.obj, dict): + rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None) + to_parse: bool = bool(HAS_RICH and (rich_markup_mode == "rich")) + if obj.help: + docs += f"{_parse_html(to_parse, obj.help)}\n\n" + usage_pieces = obj.collect_usage_pieces(ctx) + if usage_pieces: + docs += "**Usage**:\n\n" + docs += "```console\n" + docs += "$ " + if command_name: + docs += f"{command_name} " + docs += f"{' '.join(usage_pieces)}\n" + docs += "```\n\n" + args = [] + opts = [] + for param in obj.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + if param.param_type_name == "argument": + args.append(rv) + elif param.param_type_name == "option": + opts.append(rv) + if args: + docs += "**Arguments**:\n\n" + for arg_name, arg_help in args: + docs += f"* `{arg_name}`" + if arg_help: + docs += f": {_parse_html(to_parse, arg_help)}" + docs += "\n" + docs += "\n" + if opts: + docs += "**Options**:\n\n" + for opt_name, opt_help in opts: + docs += f"* `{opt_name}`" + if opt_help: + docs += f": {_parse_html(to_parse, opt_help)}" + docs += "\n" + docs += "\n" + if obj.epilog: + docs += f"{obj.epilog}\n\n" + if isinstance(obj, Group): + group = obj + commands = group.list_commands(ctx) + if commands: + docs += "**Commands**:\n\n" + for command in commands: + command_obj = group.get_command(ctx, command) + assert command_obj + docs += f"* 
`{command_obj.name}`" + command_help = command_obj.get_short_help_str() + if command_help: + docs += f": {_parse_html(to_parse, command_help)}" + docs += "\n" + docs += "\n" + for command in commands: + command_obj = group.get_command(ctx, command) + assert command_obj + use_prefix = "" + if command_name: + use_prefix += f"{command_name}" + docs += get_docs_for_click( + obj=command_obj, ctx=ctx, indent=indent + 1, call_prefix=use_prefix + ) + return docs + + +def _parse_html(to_parse: bool, input_text: str) -> str: + if not to_parse: + return input_text + from . import rich_utils + + return rich_utils.rich_to_html(input_text) + + +@utils_app.command() +def docs( + ctx: typer.Context, + name: str = typer.Option("", help="The name of the CLI program to use in docs."), + output: Path | None = typer.Option( + None, + help="An output file to write docs to, like README.md.", + file_okay=True, + dir_okay=False, + ), + title: str | None = typer.Option( + None, + help="The title for the documentation page. If not provided, the name of " + "the program is used.", + ), +) -> None: + """ + Generate Markdown docs for a Typer app. 
+ """ + typer_obj = get_typer_from_state() + if not typer_obj: + typer.echo("No Typer app found", err=True) + raise typer.Abort() + if hasattr(typer_obj, "rich_markup_mode"): + if not hasattr(ctx, "obj") or ctx.obj is None: + ctx.ensure_object(dict) + if isinstance(ctx.obj, dict): + ctx.obj[MARKUP_MODE_KEY] = typer_obj.rich_markup_mode + click_obj = typer.main.get_command(typer_obj) + docs = get_docs_for_click(obj=click_obj, ctx=ctx, name=name, title=title) + clean_docs = f"{docs.strip()}\n" + if output: + output.write_text(clean_docs) + typer.echo(f"Docs saved to: {output}") + else: + typer.echo(clean_docs) + + +def main() -> Any: + return app() diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py new file mode 100644 index 0000000000000000000000000000000000000000..54e7b166cb1de83321a4965cc4915824b47a7f4f --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/colors.py @@ -0,0 +1,20 @@ +# Variable names to colors, just for completion +BLACK = "black" +RED = "red" +GREEN = "green" +YELLOW = "yellow" +BLUE = "blue" +MAGENTA = "magenta" +CYAN = "cyan" +WHITE = "white" + +RESET = "reset" + +BRIGHT_BLACK = "bright_black" +BRIGHT_RED = "bright_red" +BRIGHT_GREEN = "bright_green" +BRIGHT_YELLOW = "bright_yellow" +BRIGHT_BLUE = "bright_blue" +BRIGHT_MAGENTA = "bright_magenta" +BRIGHT_CYAN = "bright_cyan" +BRIGHT_WHITE = "bright_white" diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py new file mode 100644 index 0000000000000000000000000000000000000000..0d621e411d7bab9a6d4ab14102670adbcabaf962 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/completion.py @@ -0,0 +1,146 @@ +import os +import sys +from collections.abc import MutableMapping +from typing import Any + +import click + +from ._completion_classes import completion_init +from ._completion_shared import Shells, 
_get_shell_name, get_completion_script, install +from .models import ParamMeta +from .params import Option +from .utils import get_params_from_function + +_click_patched = False + + +def get_completion_inspect_parameters() -> tuple[ParamMeta, ParamMeta]: + completion_init() + test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION") + if not test_disable_detection: + parameters = get_params_from_function(_install_completion_placeholder_function) + else: + parameters = get_params_from_function( + _install_completion_no_auto_placeholder_function + ) + install_param, show_param = parameters.values() + return install_param, show_param + + +def install_callback(ctx: click.Context, param: click.Parameter, value: Any) -> Any: + if not value or ctx.resilient_parsing: + return value # pragma: no cover + if isinstance(value, str): + shell, path = install(shell=value) + else: + shell, path = install() + click.secho(f"{shell} completion installed in {path}", fg="green") + click.echo("Completion will take effect once you restart the terminal") + sys.exit(0) + + +def show_callback(ctx: click.Context, param: click.Parameter, value: Any) -> Any: + if not value or ctx.resilient_parsing: + return value # pragma: no cover + prog_name = ctx.find_root().info_name + assert prog_name + complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper()) + shell = "" + test_disable_detection = os.getenv("_TYPER_COMPLETE_TEST_DISABLE_SHELL_DETECTION") + if isinstance(value, str): + shell = value + elif not test_disable_detection: + detected_shell = _get_shell_name() + if detected_shell is not None: + shell = detected_shell + script_content = get_completion_script( + prog_name=prog_name, complete_var=complete_var, shell=shell + ) + click.echo(script_content) + sys.exit(0) + + +# Create a fake command function to extract the completion parameters +def _install_completion_placeholder_function( + install_completion: bool = Option( + None, + "--install-completion", 
+ callback=install_callback, + expose_value=False, + help="Install completion for the current shell.", + ), + show_completion: bool = Option( + None, + "--show-completion", + callback=show_callback, + expose_value=False, + help="Show completion for the current shell, to copy it or customize the installation.", + ), +) -> Any: + pass # pragma: no cover + + +def _install_completion_no_auto_placeholder_function( + install_completion: Shells = Option( + None, + callback=install_callback, + expose_value=False, + help="Install completion for the specified shell.", + ), + show_completion: Shells = Option( + None, + callback=show_callback, + expose_value=False, + help="Show completion for the specified shell, to copy it or customize the installation.", + ), +) -> Any: + pass # pragma: no cover + + +# Re-implement Click's shell_complete to add error message with: +# Invalid completion instruction +# To use 7.x instruction style for compatibility +# And to add extra error messages, for compatibility with Typer in previous versions +# This is only called in new Command method, only used by Click 8.x+ +def shell_complete( + cli: click.Command, + ctx_args: MutableMapping[str, Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + import click + import click.shell_completion + + if "_" not in instruction: + click.echo("Invalid completion instruction.", err=True) + return 1 + + # Click 8 changed the order/style of shell instructions from e.g. 
+ # source_bash to bash_source + # Typer override to preserve the old style for compatibility + # Original in Click 8.x commented: + # shell, _, instruction = instruction.partition("_") + instruction, _, shell = instruction.partition("_") + # Typer override end + + comp_cls = click.shell_completion.get_completion_class(shell) + + if comp_cls is None: + click.echo(f"Shell {shell} not supported.", err=True) + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + click.echo(comp.source()) + return 0 + + # Typer override to print the completion help msg with Rich + if instruction == "complete": + click.echo(comp.complete()) + return 0 + # Typer override end + + click.echo(f'Completion instruction "{instruction}" not supported.', err=True) + return 1 diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py new file mode 100644 index 0000000000000000000000000000000000000000..3e72d839893185826dfdc9e9d6d8006a279db48d --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/core.py @@ -0,0 +1,821 @@ +import errno +import inspect +import os +import sys +from collections.abc import Callable, MutableMapping, Sequence +from difflib import get_close_matches +from enum import Enum +from gettext import gettext as _ +from typing import ( + Any, + TextIO, + Union, + cast, +) + +import click +import click.core +import click.formatting +import click.shell_completion +import click.types +import click.utils + +from ._typing import Literal +from .utils import parse_boolean_env_var + +MarkupMode = Literal["markdown", "rich", None] +MARKUP_MODE_KEY = "TYPER_RICH_MARKUP_MODE" + +HAS_RICH = parse_boolean_env_var(os.getenv("TYPER_USE_RICH"), default=True) + +if HAS_RICH: + DEFAULT_MARKUP_MODE: MarkupMode = "rich" +else: + DEFAULT_MARKUP_MODE = None + + +# Copy from click.parser._split_opt +def _split_opt(opt: str) -> tuple[str, str]: + first = opt[:1] + if 
first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def _typer_param_setup_autocompletion_compat( + self: click.Parameter, + *, + autocompletion: Callable[ + [click.Context, list[str], str], list[tuple[str, str] | str] + ] + | None = None, +) -> None: + if self._custom_shell_complete is not None: + import warnings + + warnings.warn( + "In Typer, only the parameter 'autocompletion' is supported. " + "The support for 'shell_complete' is deprecated and will be removed in upcoming versions. ", + DeprecationWarning, + stacklevel=2, + ) + + if autocompletion is not None: + + def compat_autocompletion( + ctx: click.Context, param: click.core.Parameter, incomplete: str + ) -> list["click.shell_completion.CompletionItem"]: + from click.shell_completion import CompletionItem + + out = [] + + for c in autocompletion(ctx, [], incomplete): + if isinstance(c, tuple): + use_completion = CompletionItem(c[0], help=c[1]) + else: + assert isinstance(c, str) + use_completion = CompletionItem(c) + + if use_completion.value.startswith(incomplete): + out.append(use_completion) + + return out + + self._custom_shell_complete = compat_autocompletion + + +def _get_default_string( + obj: Union["TyperArgument", "TyperOption"], + *, + ctx: click.Context, + show_default_is_str: bool, + default_value: list[Any] | tuple[Any, ...] 
| str | Callable[..., Any] | Any, +) -> str: + # Extracted from click.core.Option.get_help_record() to be reused by + # rich_utils avoiding RegEx hacks + if show_default_is_str: + default_string = f"({obj.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join( + _get_default_string( + obj, ctx=ctx, show_default_is_str=show_default_is_str, default_value=d + ) + for d in default_value + ) + elif isinstance(default_value, Enum): + default_string = str(default_value.value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif isinstance(obj, TyperOption) and obj.is_bool_flag and obj.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. + # Typer override, original commented + # default_string = click.parser.split_opt( + # (self.opts if self.default else self.secondary_opts)[0] + # )[1] + if obj.default: + if obj.opts: + default_string = _split_opt(obj.opts[0])[1] + else: + default_string = str(default_value) + else: + default_string = _split_opt(obj.secondary_opts[0])[1] + # Typer override end + elif ( + isinstance(obj, TyperOption) + and obj.is_bool_flag + and not obj.secondary_opts + and not default_value + ): + default_string = "" + else: + default_string = str(default_value) + return default_string + + +def _extract_default_help_str( + obj: Union["TyperArgument", "TyperOption"], *, ctx: click.Context +) -> Any | Callable[[], Any] | None: + # Extracted from click.core.Option.get_help_record() to be reused by + # rich_utils avoiding RegEx hacks + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. 
+ resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = obj.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + return default_value + + +def _main( + self: click.Command, + *, + args: Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE, + **extra: Any, +) -> Any: + # Typer override, duplicated from click.main() to handle custom rich exceptions + # Verify that the environment is configured correctly, or reject + # further execution to avoid a broken script. + if args is None: + args = sys.argv[1:] + + # Covered in Click tests + if os.name == "nt" and windows_expand_args: # pragma: no cover + args = click.utils._expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = click.utils._detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! + # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except EOFError as e: + click.echo(file=sys.stderr) + raise click.Abort() from e + except KeyboardInterrupt as e: + raise click.exceptions.Exit(130) from e + except click.ClickException as e: + if not standalone_mode: + raise + # Typer override + if HAS_RICH and rich_markup_mode is not None: + from . 
import rich_utils + + rich_utils.rich_format_error(e) + else: + e.show() + # Typer override end + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = cast(TextIO, click.utils.PacifyFlushWrapper(sys.stdout)) + sys.stderr = cast(TextIO, click.utils.PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except click.exceptions.Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except click.Abort: + if not standalone_mode: + raise + # Typer override + if HAS_RICH and rich_markup_mode is not None: + from . 
import rich_utils + + rich_utils.rich_abort_error() + else: + click.echo(_("Aborted!"), file=sys.stderr) + # Typer override end + sys.exit(1) + + +class TyperArgument(click.core.Argument): + def __init__( + self, + *, + # Parameter + param_decls: list[str], + type: Any | None = None, + required: bool | None = None, + default: Any | None = None, + callback: Callable[..., Any] | None = None, + nargs: int | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + # TyperArgument + show_default: bool | str = True, + show_choices: bool = True, + show_envvar: bool = True, + help: str | None = None, + hidden: bool = False, + # Rich settings + rich_help_panel: str | None = None, + ): + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + self.hidden = hidden + self.rich_help_panel = rich_help_panel + + super().__init__( + param_decls=param_decls, + type=type, + required=required, + default=default, + callback=callback, + nargs=nargs, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + shell_complete=shell_complete, + ) + _typer_param_setup_autocompletion_compat(self, autocompletion=autocompletion) + + def _get_default_string( + self, + *, + ctx: click.Context, + show_default_is_str: bool, + default_value: list[Any] | tuple[Any, ...] 
| str | Callable[..., Any] | Any, + ) -> str: + return _get_default_string( + self, + ctx=ctx, + show_default_is_str=show_default_is_str, + default_value=default_value, + ) + + def _extract_default_help_str( + self, *, ctx: click.Context + ) -> Any | Callable[[], Any] | None: + return _extract_default_help_str(self, ctx=ctx) + + def get_help_record(self, ctx: click.Context) -> tuple[str, str] | None: + # Modified version of click.core.Option.get_help_record() + # to support Arguments + if self.hidden: + return None + name = self.make_metavar(ctx=ctx) + help = self.help or "" + extra = [] + if self.show_envvar: + envvar = self.envvar + # allow_from_autoenv is currently not supported in Typer for CLI Arguments + if envvar is not None: + var_str = ( + ", ".join(str(d) for d in envvar) + if isinstance(envvar, (list, tuple)) + else envvar + ) + extra.append(f"env var: {var_str}") + + # Typer override: + # Extracted to _extract_default_help_str() to allow re-using it in rich_utils + default_value = self._extract_default_help_str(ctx=ctx) + # Typer override end + + show_default_is_str = isinstance(self.show_default, str) + + if show_default_is_str or ( + default_value is not None and (self.show_default or ctx.show_default) + ): + # Typer override: + # Extracted to _get_default_string() to allow re-using it in rich_utils + default_string = self._get_default_string( + ctx=ctx, + show_default_is_str=show_default_is_str, + default_value=default_value, + ) + # Typer override end + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + if self.required: + extra.append(_("required")) + if extra: + extra_str = "; ".join(extra) + extra_str = f"[{extra_str}]" + rich_markup_mode = None + if hasattr(ctx, "obj") and isinstance(ctx.obj, dict): + rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None) + if HAS_RICH and rich_markup_mode == "rich": + # This is needed for when we want to export to HTML + from . 
import rich_utils + + extra_str = rich_utils.escape_before_html_export(extra_str) + + help = f"{help} {extra_str}" if help else f"{extra_str}" + return name, help + + def make_metavar(self, ctx: click.Context | None = None) -> str: + # Modified version of click.core.Argument.make_metavar() + # to include Argument name + if self.metavar is not None: + var = self.metavar + if not self.required and not var.startswith("["): + var = f"[{var}]" + return var + var = (self.name or "").upper() + if not self.required: + var = f"[{var}]" + type_var = self.type.get_metavar(self, ctx=ctx) # type: ignore[arg-type] + # type_var = self.type.get_metavar(self, ctx=ctx) + if type_var: + var += f":{type_var}" + if self.nargs != 1: + var += "..." + return var + + def value_is_missing(self, value: Any) -> bool: + return _value_is_missing(self, value) + + +class TyperOption(click.core.Option): + def __init__( + self, + *, + # Parameter + param_decls: list[str], + type: click.types.ParamType | Any | None = None, + required: bool | None = None, + default: Any | None = None, + callback: Callable[..., Any] | None = None, + nargs: int | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + # Option + show_default: bool | str = False, + prompt: bool | str = False, + confirmation_prompt: bool | str = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: bool | None = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + help: str | None = None, + hidden: bool = False, + show_choices: bool = 
True, + show_envvar: bool = False, + # Rich settings + rich_help_panel: str | None = None, + ): + super().__init__( + param_decls=param_decls, + type=type, + required=required, + default=default, + callback=callback, + nargs=nargs, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + show_default=show_default, + prompt=prompt, + confirmation_prompt=confirmation_prompt, + hide_input=hide_input, + is_flag=is_flag, + multiple=multiple, + count=count, + allow_from_autoenv=allow_from_autoenv, + help=help, + hidden=hidden, + show_choices=show_choices, + show_envvar=show_envvar, + prompt_required=prompt_required, + shell_complete=shell_complete, + ) + _typer_param_setup_autocompletion_compat(self, autocompletion=autocompletion) + self.rich_help_panel = rich_help_panel + + def _get_default_string( + self, + *, + ctx: click.Context, + show_default_is_str: bool, + default_value: list[Any] | tuple[Any, ...] | str | Callable[..., Any] | Any, + ) -> str: + return _get_default_string( + self, + ctx=ctx, + show_default_is_str=show_default_is_str, + default_value=default_value, + ) + + def _extract_default_help_str( + self, *, ctx: click.Context + ) -> Any | Callable[[], Any] | None: + return _extract_default_help_str(self, ctx=ctx) + + def make_metavar(self, ctx: click.Context | None = None) -> str: + return super().make_metavar(ctx=ctx) # type: ignore[arg-type] + + def get_help_record(self, ctx: click.Context) -> tuple[str, str] | None: + # Duplicate all of Click's logic only to modify a single line, to allow boolean + # flags with only names for False values as it's currently supported by Typer + # Ref: https://typer.tiangolo.com/tutorial/parameter-types/bool/#only-names-for-false + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = click.formatting.join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not 
self.is_flag and not self.count: + rv += f" {self.make_metavar(ctx=ctx)}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Typer override: + # Extracted to _extract_default() to allow re-using it in rich_utils + default_value = self._extract_default_help_str(ctx=ctx) + # Typer override end + + show_default_is_str = isinstance(self.show_default, str) + + if show_default_is_str or ( + default_value is not None and (self.show_default or ctx.show_default) + ): + # Typer override: + # Extracted to _get_default_string() to allow re-using it in rich_utils + default_string = self._get_default_string( + ctx=ctx, + show_default_is_str=show_default_is_str, + default_value=default_value, + ) + # Typer override end + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if isinstance(self.type, click.types._NumberRangeBase): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + extra_str = f"[{extra_str}]" + rich_markup_mode = None + if hasattr(ctx, "obj") and isinstance(ctx.obj, dict): + rich_markup_mode = ctx.obj.get(MARKUP_MODE_KEY, None) + if HAS_RICH and rich_markup_mode == "rich": + # This is needed for when we want to export to HTML + from . 
import rich_utils + + extra_str = rich_utils.escape_before_html_export(extra_str) + + help = f"{help} {extra_str}" if help else f"{extra_str}" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + def value_is_missing(self, value: Any) -> bool: + return _value_is_missing(self, value) + + +def _value_is_missing(param: click.Parameter, value: Any) -> bool: + if value is None: + return True + + # Click 8.3 and beyond + # if value is UNSET: + # return True + + if (param.nargs != 1 or param.multiple) and value == (): + return True # pragma: no cover + + return False + + +def _typer_format_options( + self: click.core.Command, *, ctx: click.Context, formatter: click.HelpFormatter +) -> None: + args = [] + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + if param.param_type_name == "argument": + args.append(rv) + elif param.param_type_name == "option": + opts.append(rv) + + if args: + with formatter.section(_("Arguments")): + formatter.write_dl(args) + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + +def _typer_main_shell_completion( + self: click.core.Command, + *, + ctx_args: MutableMapping[str, Any], + prog_name: str, + complete_var: str | None = None, +) -> None: + if complete_var is None: + complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + +class TyperCommand(click.core.Command): + def __init__( + self, + name: str | None, + *, + context_settings: dict[str, Any] | None = None, + callback: Callable[..., Any] | None = None, + params: list[click.Parameter] | None = None, + help: str | None = None, + epilog: str | None = None, + short_help: str | None = None, + options_metavar: str | None = "[OPTIONS]", + add_help_option: bool = True, + 
no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + # Rich settings + rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE, + rich_help_panel: str | None = None, + ) -> None: + super().__init__( + name=name, + context_settings=context_settings, + callback=callback, + params=params, + help=help, + epilog=epilog, + short_help=short_help, + options_metavar=options_metavar, + add_help_option=add_help_option, + no_args_is_help=no_args_is_help, + hidden=hidden, + deprecated=deprecated, + ) + self.rich_markup_mode: MarkupMode = rich_markup_mode + self.rich_help_panel = rich_help_panel + + def format_options( + self, ctx: click.Context, formatter: click.HelpFormatter + ) -> None: + _typer_format_options(self, ctx=ctx, formatter=formatter) + + def _main_shell_completion( + self, + ctx_args: MutableMapping[str, Any], + prog_name: str, + complete_var: str | None = None, + ) -> None: + _typer_main_shell_completion( + self, ctx_args=ctx_args, prog_name=prog_name, complete_var=complete_var + ) + + def main( + self, + args: Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: Any, + ) -> Any: + return _main( + self, + args=args, + prog_name=prog_name, + complete_var=complete_var, + standalone_mode=standalone_mode, + windows_expand_args=windows_expand_args, + rich_markup_mode=self.rich_markup_mode, + **extra, + ) + + def format_help(self, ctx: click.Context, formatter: click.HelpFormatter) -> None: + if not HAS_RICH or self.rich_markup_mode is None: + if not hasattr(ctx, "obj") or ctx.obj is None: + ctx.ensure_object(dict) + if isinstance(ctx.obj, dict): + ctx.obj[MARKUP_MODE_KEY] = self.rich_markup_mode + return super().format_help(ctx, formatter) + from . 
import rich_utils + + return rich_utils.rich_format_help( + obj=self, + ctx=ctx, + markup_mode=self.rich_markup_mode, + ) + + +class TyperGroup(click.core.Group): + def __init__( + self, + *, + name: str | None = None, + commands: dict[str, click.Command] | Sequence[click.Command] | None = None, + # Rich settings + rich_markup_mode: MarkupMode = DEFAULT_MARKUP_MODE, + rich_help_panel: str | None = None, + suggest_commands: bool = True, + **attrs: Any, + ) -> None: + super().__init__(name=name, commands=commands, **attrs) + self.rich_markup_mode: MarkupMode = rich_markup_mode + self.rich_help_panel = rich_help_panel + self.suggest_commands = suggest_commands + + def format_options( + self, ctx: click.Context, formatter: click.HelpFormatter + ) -> None: + _typer_format_options(self, ctx=ctx, formatter=formatter) + self.format_commands(ctx, formatter) + + def _main_shell_completion( + self, + ctx_args: MutableMapping[str, Any], + prog_name: str, + complete_var: str | None = None, + ) -> None: + _typer_main_shell_completion( + self, ctx_args=ctx_args, prog_name=prog_name, complete_var=complete_var + ) + + def resolve_command( + self, ctx: click.Context, args: list[str] + ) -> tuple[str | None, click.Command | None, list[str]]: + try: + return super().resolve_command(ctx, args) + except click.UsageError as e: + if self.suggest_commands: + available_commands = list(self.commands.keys()) + if available_commands and args: + typo = args[0] + matches = get_close_matches(typo, available_commands) + if matches: + suggestions = ", ".join(f"{m!r}" for m in matches) + message = e.message.rstrip(".") + e.message = f"{message}. Did you mean {suggestions}?" 
+ raise + + def main( + self, + args: Sequence[str] | None = None, + prog_name: str | None = None, + complete_var: str | None = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: Any, + ) -> Any: + return _main( + self, + args=args, + prog_name=prog_name, + complete_var=complete_var, + standalone_mode=standalone_mode, + windows_expand_args=windows_expand_args, + rich_markup_mode=self.rich_markup_mode, + **extra, + ) + + def format_help(self, ctx: click.Context, formatter: click.HelpFormatter) -> None: + if not HAS_RICH or self.rich_markup_mode is None: + return super().format_help(ctx, formatter) + from . import rich_utils + + return rich_utils.rich_format_help( + obj=self, + ctx=ctx, + markup_mode=self.rich_markup_mode, + ) + + def list_commands(self, ctx: click.Context) -> list[str]: + """Returns a list of subcommand names. + Note that in Click's Group class, these are sorted. + In Typer, we wish to maintain the original order of creation (cf Issue #933)""" + return [n for n, c in self.commands.items()] diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py new file mode 100644 index 0000000000000000000000000000000000000000..f4f21bb8444a80af5ad8ab82822fc972f0207b9c --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/main.py @@ -0,0 +1,2013 @@ +import inspect +import os +import platform +import shutil +import subprocess +import sys +import traceback +from collections.abc import Callable, Sequence +from datetime import datetime +from enum import Enum +from functools import update_wrapper +from pathlib import Path +from traceback import FrameSummary, StackSummary +from types import TracebackType +from typing import Annotated, Any +from uuid import UUID + +import click +from annotated_doc import Doc +from typer._types import TyperChoice + +from ._typing import get_args, get_origin, is_literal_type, is_union, literal_values +from .completion 
import get_completion_inspect_parameters +from .core import ( + DEFAULT_MARKUP_MODE, + HAS_RICH, + MarkupMode, + TyperArgument, + TyperCommand, + TyperGroup, + TyperOption, +) +from .models import ( + AnyType, + ArgumentInfo, + CommandFunctionType, + CommandInfo, + Default, + DefaultPlaceholder, + DeveloperExceptionConfig, + FileBinaryRead, + FileBinaryWrite, + FileText, + FileTextWrite, + NoneType, + OptionInfo, + ParameterInfo, + ParamMeta, + Required, + TyperInfo, + TyperPath, +) +from .utils import get_params_from_function + +_original_except_hook = sys.excepthook +_typer_developer_exception_attr_name = "__typer_developer_exception__" + + +def except_hook( + exc_type: type[BaseException], exc_value: BaseException, tb: TracebackType | None +) -> None: + exception_config: DeveloperExceptionConfig | None = getattr( + exc_value, _typer_developer_exception_attr_name, None + ) + standard_traceback = os.getenv( + "TYPER_STANDARD_TRACEBACK", os.getenv("_TYPER_STANDARD_TRACEBACK") + ) + if ( + standard_traceback + or not exception_config + or not exception_config.pretty_exceptions_enable + ): + _original_except_hook(exc_type, exc_value, tb) + return + typer_path = os.path.dirname(__file__) + click_path = os.path.dirname(click.__file__) + internal_dir_names = [typer_path, click_path] + exc = exc_value + if HAS_RICH: + from . 
import rich_utils + + rich_tb = rich_utils.get_traceback(exc, exception_config, internal_dir_names) + console_stderr = rich_utils._get_rich_console(stderr=True) + console_stderr.print(rich_tb) + return + tb_exc = traceback.TracebackException.from_exception(exc) + stack: list[FrameSummary] = [] + for frame in tb_exc.stack: + if any(frame.filename.startswith(path) for path in internal_dir_names): + if not exception_config.pretty_exceptions_short: + # Hide the line for internal libraries, Typer and Click + stack.append( + traceback.FrameSummary( + filename=frame.filename, + lineno=frame.lineno, + name=frame.name, + line="", + ) + ) + else: + stack.append(frame) + # Type ignore ref: https://github.com/python/typeshed/pull/8244 + final_stack_summary = StackSummary.from_list(stack) + tb_exc.stack = final_stack_summary + for line in tb_exc.format(): + print(line, file=sys.stderr) + return + + +def get_install_completion_arguments() -> tuple[click.Parameter, click.Parameter]: + install_param, show_param = get_completion_inspect_parameters() + click_install_param, _ = get_click_param(install_param) + click_show_param, _ = get_click_param(show_param) + return click_install_param, click_show_param + + +class Typer: + """ + `Typer` main class, the main entrypoint to use Typer. + + Read more in the + [Typer docs for First Steps](https://typer.tiangolo.com/tutorial/typer-app/). + + ## Example + + ```python + import typer + + app = typer.Typer() + ``` + """ + + def __init__( + self, + *, + name: Annotated[ + str | None, + Doc( + """ + The name of this application. + Mostly used to set the name for [subcommands](https://typer.tiangolo.com/tutorial/subcommands/), in which case it can be overridden by `add_typer(name=...)`. + + **Example** + + ```python + import typer + + app = typer.Typer(name="users") + ``` + """ + ), + ] = Default(None), + cls: Annotated[ + type[TyperGroup] | None, + Doc( + """ + The class of this app. 
Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`. + Otherwise, should be a subtype of `TyperGroup`. + + **Example** + + ```python + import typer + + app = typer.Typer(cls=TyperGroup) + ``` + """ + ), + ] = Default(None), + invoke_without_command: Annotated[ + bool, + Doc( + """ + By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided. + + **Example** + + ```python + import typer + + app = typer.Typer(invoke_without_command=True) + ``` + """ + ), + ] = Default(False), + no_args_is_help: Annotated[ + bool, + Doc( + """ + If this is set to `True`, running a command without any arguments will automatically show the help page. + + **Example** + + ```python + import typer + + app = typer.Typer(no_args_is_help=True) + ``` + """ + ), + ] = Default(False), + subcommand_metavar: Annotated[ + str | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + How to represent the subcommand argument in help. + """ + ), + ] = Default(None), + chain: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + Allow passing more than one subcommand argument. + """ + ), + ] = Default(False), + result_callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + A function to call after the group's and subcommand's callbacks. + """ + ), + ] = Default(None), + # Command + context_settings: Annotated[ + dict[Any, Any] | None, + Doc( + """ + Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/). 
+ Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context). + + **Example** + + ```python + import typer + + app = typer.Typer(context_settings={"help_option_names": ["-h", "--help"]}) + ``` + """ + ), + ] = Default(None), + callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Add a callback to the main Typer app. Can be overridden with `@app.callback()`. + See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/commands/callback/) for more details. + + **Example** + + ```python + import typer + + def callback(): + print("Running a command") + + app = typer.Typer(callback=callback) + ``` + """ + ), + ] = Default(None), + help: Annotated[ + str | None, + Doc( + """ + Help text for the main Typer app. + See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help, + and which one takes priority. + + **Example** + + ```python + import typer + + app = typer.Typer(help="Some help.") + ``` + """ + ), + ] = Default(None), + epilog: Annotated[ + str | None, + Doc( + """ + Text that will be printed right after the help text. + + **Example** + + ```python + import typer + + app = typer.Typer(epilog="May the force be with you") + ``` + """ + ), + ] = Default(None), + short_help: Annotated[ + str | None, + Doc( + """ + A shortened version of the help text that can be used e.g. in the help table listing subcommands. + When not defined, the normal `help` text will be used instead. + + **Example** + + ```python + import typer + + app = typer.Typer(help="A lot of explanation about user management", short_help="user management") + ``` + """ + ), + ] = Default(None), + options_metavar: Annotated[ + str, + Doc( + """ + In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`. 
+ Set `options_metavar` to change this into a different string. + + **Example** + + ```python + import typer + + app = typer.Typer(options_metavar="[OPTS]") + ``` + """ + ), + ] = Default("[OPTIONS]"), + add_help_option: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + By default each command registers a `--help` option. This can be disabled by this parameter. + """ + ), + ] = Default(True), + hidden: Annotated[ + bool, + Doc( + """ + Hide this command from help outputs. `False` by default. + + **Example** + + ```python + import typer + + app = typer.Typer(hidden=True) + ``` + """ + ), + ] = Default(False), + deprecated: Annotated[ + bool, + Doc( + """ + Mark this command as being deprecated in the help text. `False` by default. + + **Example** + + ```python + import typer + + app = typer.Typer(deprecated=True) + ``` + """ + ), + ] = Default(False), + add_completion: Annotated[ + bool, + Doc( + """ + Toggle whether or not to add the `--install-completion` and `--show-completion` options to the app. + Set to `True` by default. + + **Example** + + ```python + import typer + + app = typer.Typer(add_completion=False) + ``` + """ + ), + ] = True, + # Rich settings + rich_markup_mode: Annotated[ + MarkupMode, + Doc( + """ + Enable markup text if you have Rich installed. This can be set to `"markdown"`, `"rich"`, or `None`. + By default, `rich_markup_mode` is `None` if Rich is not installed, and `"rich"` if it is installed. + See [the tutorial on help formatting](https://typer.tiangolo.com/tutorial/commands/help/#rich-markdown-and-markup) for more information. + + **Example** + + ```python + import typer + + app = typer.Typer(rich_markup_mode="rich") + ``` + """ + ), + ] = DEFAULT_MARKUP_MODE, + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name of the command when the help is printed with Rich. 
+ + **Example** + + ```python + import typer + + app = typer.Typer(rich_help_panel="Utils and Configs") + ``` + """ + ), + ] = Default(None), + suggest_commands: Annotated[ + bool, + Doc( + """ + As of version 0.20.0, Typer provides [support for mistyped command names](https://typer.tiangolo.com/tutorial/commands/help/#suggest-commands) by printing helpful suggestions. + You can turn this setting off with `suggest_commands`: + + **Example** + + ```python + import typer + + app = typer.Typer(suggest_commands=False) + ``` + """ + ), + ] = True, + pretty_exceptions_enable: Annotated[ + bool, + Doc( + """ + If you want to disable [pretty exceptions with Rich](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-with-rich), + you can set `pretty_exceptions_enable` to `False`. When doing so, you will see the usual standard exception trace. + + **Example** + + ```python + import typer + + app = typer.Typer(pretty_exceptions_enable=False) + ``` + """ + ), + ] = True, + pretty_exceptions_show_locals: Annotated[ + bool, + Doc( + """ + If Rich is installed, [error messages](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-and-errors) + will be nicely printed. + + If you set `pretty_exceptions_show_locals=True` it will also include the values of local variables for easy debugging. + + However, if such a variable contains delicate information, you should consider leaving `pretty_exceptions_show_locals=False` + (the default) to `False` to enhance security. + + **Example** + + ```python + import typer + + app = typer.Typer(pretty_exceptions_show_locals=True) + ``` + """ + ), + ] = False, + pretty_exceptions_short: Annotated[ + bool, + Doc( + """ + By default, [pretty exceptions formatted with Rich](https://typer.tiangolo.com/tutorial/exceptions/#exceptions-with-rich) hide the long stack trace. 
+ If you want to show the full trace instead, you can set the parameter `pretty_exceptions_short` to `False`: + + **Example** + + ```python + import typer + + app = typer.Typer(pretty_exceptions_short=False) + ``` + """ + ), + ] = True, + ): + self._add_completion = add_completion + self.rich_markup_mode: MarkupMode = rich_markup_mode + self.rich_help_panel = rich_help_panel + self.suggest_commands = suggest_commands + self.pretty_exceptions_enable = pretty_exceptions_enable + self.pretty_exceptions_show_locals = pretty_exceptions_show_locals + self.pretty_exceptions_short = pretty_exceptions_short + self.info = TyperInfo( + name=name, + cls=cls, + invoke_without_command=invoke_without_command, + no_args_is_help=no_args_is_help, + subcommand_metavar=subcommand_metavar, + chain=chain, + result_callback=result_callback, + context_settings=context_settings, + callback=callback, + help=help, + epilog=epilog, + short_help=short_help, + options_metavar=options_metavar, + add_help_option=add_help_option, + hidden=hidden, + deprecated=deprecated, + ) + self.registered_groups: list[TyperInfo] = [] + self.registered_commands: list[CommandInfo] = [] + self.registered_callback: TyperInfo | None = None + + def callback( + self, + *, + cls: Annotated[ + type[TyperGroup] | None, + Doc( + """ + The class of this app. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`. + Otherwise, should be a subtype of `TyperGroup`. + """ + ), + ] = Default(None), + invoke_without_command: Annotated[ + bool, + Doc( + """ + By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided. + """ + ), + ] = Default(False), + no_args_is_help: Annotated[ + bool, + Doc( + """ + If this is set to `True`, running a command without any arguments will automatically show the help page. 
+ """ + ), + ] = Default(False), + subcommand_metavar: Annotated[ + str | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + How to represent the subcommand argument in help. + """ + ), + ] = Default(None), + chain: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + Allow passing more than one subcommand argument. + """ + ), + ] = Default(False), + result_callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + A function to call after the group's and subcommand's callbacks. + """ + ), + ] = Default(None), + # Command + context_settings: Annotated[ + dict[Any, Any] | None, + Doc( + """ + Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/). + Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context). + """ + ), + ] = Default(None), + help: Annotated[ + str | None, + Doc( + """ + Help text for the command. + See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help, + and which one takes priority. + """ + ), + ] = Default(None), + epilog: Annotated[ + str | None, + Doc( + """ + Text that will be printed right after the help text. + """ + ), + ] = Default(None), + short_help: Annotated[ + str | None, + Doc( + """ + A shortened version of the help text that can be used e.g. in the help table listing subcommands. + When not defined, the normal `help` text will be used instead. 
+ """ + ), + ] = Default(None), + options_metavar: Annotated[ + str | None, + Doc( + """ + In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`. + Set `options_metavar` to change this into a different string. When `None`, the default value will be used. + """ + ), + ] = Default(None), + add_help_option: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + By default each command registers a `--help` option. This can be disabled by this parameter. + """ + ), + ] = Default(True), + hidden: Annotated[ + bool, + Doc( + """ + Hide this command from help outputs. `False` by default. + """ + ), + ] = Default(False), + deprecated: Annotated[ + bool, + Doc( + """ + Mark this command as deprecated in the help text. `False` by default. + """ + ), + ] = Default(False), + # Rich settings + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name of the command when the help is printed with Rich. + """ + ), + ] = Default(None), + ) -> Callable[[CommandFunctionType], CommandFunctionType]: + """ + Using the decorator `@app.callback`, you can declare the CLI parameters for the main CLI application. + + Read more in the + [Typer docs for Callbacks](https://typer.tiangolo.com/tutorial/commands/callback/). + + ## Example + + ```python + import typer + + app = typer.Typer() + state = {"verbose": False} + + @app.callback() + def main(verbose: bool = False): + if verbose: + print("Will write verbose output") + state["verbose"] = True + + @app.command() + def delete(username: str): + # define subcommand + ... 
+ ``` + """ + + def decorator(f: CommandFunctionType) -> CommandFunctionType: + self.registered_callback = TyperInfo( + cls=cls, + invoke_without_command=invoke_without_command, + no_args_is_help=no_args_is_help, + subcommand_metavar=subcommand_metavar, + chain=chain, + result_callback=result_callback, + context_settings=context_settings, + callback=f, + help=help, + epilog=epilog, + short_help=short_help, + options_metavar=( + options_metavar or self._info_val_str("options_metavar") + ), + add_help_option=add_help_option, + hidden=hidden, + deprecated=deprecated, + rich_help_panel=rich_help_panel, + ) + return f + + return decorator + + def command( + self, + name: Annotated[ + str | None, + Doc( + """ + The name of this command. + """ + ), + ] = None, + *, + cls: Annotated[ + type[TyperCommand] | None, + Doc( + """ + The class of this command. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`. + Otherwise, should be a subtype of `TyperCommand`. + """ + ), + ] = None, + context_settings: Annotated[ + dict[Any, Any] | None, + Doc( + """ + Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/). + Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context). + """ + ), + ] = None, + help: Annotated[ + str | None, + Doc( + """ + Help text for the command. + See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help, + and which one takes priority. + """ + ), + ] = None, + epilog: Annotated[ + str | None, + Doc( + """ + Text that will be printed right after the help text. + """ + ), + ] = None, + short_help: Annotated[ + str | None, + Doc( + """ + A shortened version of the help text that can be used e.g. in the help table listing subcommands. 
+ When not defined, the normal `help` text will be used instead. + """ + ), + ] = None, + options_metavar: Annotated[ + str | None, + Doc( + """ + In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`. + Set `options_metavar` to change this into a different string. When `None`, the default value will be used. + """ + ), + ] = Default(None), + add_help_option: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + By default each command registers a `--help` option. This can be disabled by this parameter. + """ + ), + ] = True, + no_args_is_help: Annotated[ + bool, + Doc( + """ + If this is set to `True`, running a command without any arguments will automatically show the help page. + """ + ), + ] = False, + hidden: Annotated[ + bool, + Doc( + """ + Hide this command from help outputs. `False` by default. + """ + ), + ] = False, + deprecated: Annotated[ + bool, + Doc( + """ + Mark this command as deprecated in the help outputs. `False` by default. + """ + ), + ] = False, + # Rich settings + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name of the command when the help is printed with Rich. + """ + ), + ] = Default(None), + ) -> Callable[[CommandFunctionType], CommandFunctionType]: + """ + Using the decorator `@app.command`, you can define a subcommand of the previously defined Typer app. + + Read more in the + [Typer docs for Commands](https://typer.tiangolo.com/tutorial/commands/). 
+ + ## Example + + ```python + import typer + + app = typer.Typer() + + @app.command() + def create(): + print("Creating user: Hiro Hamada") + + @app.command() + def delete(): + print("Deleting user: Hiro Hamada") + ``` + """ + if cls is None: + cls = TyperCommand + + def decorator(f: CommandFunctionType) -> CommandFunctionType: + self.registered_commands.append( + CommandInfo( + name=name, + cls=cls, + context_settings=context_settings, + callback=f, + help=help, + epilog=epilog, + short_help=short_help, + options_metavar=( + options_metavar or self._info_val_str("options_metavar") + ), + add_help_option=add_help_option, + no_args_is_help=no_args_is_help, + hidden=hidden, + deprecated=deprecated, + # Rich settings + rich_help_panel=rich_help_panel, + ) + ) + return f + + return decorator + + def add_typer( + self, + typer_instance: "Typer", + *, + name: Annotated[ + str | None, + Doc( + """ + The name of this subcommand. + See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's name, + and which one takes priority. + """ + ), + ] = Default(None), + cls: Annotated[ + type[TyperGroup] | None, + Doc( + """ + The class of this subcommand. Mainly used when [using the Click library underneath](https://typer.tiangolo.com/tutorial/using-click/). Can usually be left at the default value `None`. + Otherwise, should be a subtype of `TyperGroup`. + """ + ), + ] = Default(None), + invoke_without_command: Annotated[ + bool, + Doc( + """ + By setting this to `True`, you can make sure a callback is executed even when no subcommand is provided. + """ + ), + ] = Default(False), + no_args_is_help: Annotated[ + bool, + Doc( + """ + If this is set to `True`, running a command without any arguments will automatically show the help page. 
+ """ + ), + ] = Default(False), + subcommand_metavar: Annotated[ + str | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + How to represent the subcommand argument in help. + """ + ), + ] = Default(None), + chain: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + Allow passing more than one subcommand argument. + """ + ), + ] = Default(False), + result_callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + A function to call after the group's and subcommand's callbacks. + """ + ), + ] = Default(None), + # Command + context_settings: Annotated[ + dict[Any, Any] | None, + Doc( + """ + Pass configurations for the [context](https://typer.tiangolo.com/tutorial/commands/context/). + Available configurations can be found in the docs for Click's `Context` [here](https://click.palletsprojects.com/en/stable/api/#context). + """ + ), + ] = Default(None), + callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Add a callback to this app. + See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/commands/callback/) for more details. + """ + ), + ] = Default(None), + help: Annotated[ + str | None, + Doc( + """ + Help text for the subcommand. + See [the tutorial about name and help](https://typer.tiangolo.com/tutorial/subcommands/name-and-help) for different ways of setting a command's help, + and which one takes priority. + """ + ), + ] = Default(None), + epilog: Annotated[ + str | None, + Doc( + """ + Text that will be printed right after the help text. + """ + ), + ] = Default(None), + short_help: Annotated[ + str | None, + Doc( + """ + A shortened version of the help text that can be used e.g. 
in the help table listing subcommands. + When not defined, the normal `help` text will be used instead. + """ + ), + ] = Default(None), + options_metavar: Annotated[ + str | None, + Doc( + """ + In the example usage string of the help text for a command, the default placeholder for various arguments is `[OPTIONS]`. + Set `options_metavar` to change this into a different string. When `None`, the default value will be used. + """ + ), + ] = Default(None), + add_help_option: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited + from Click and supported for compatibility. + + --- + + By default each command registers a `--help` option. This can be disabled by this parameter. + """ + ), + ] = Default(True), + hidden: Annotated[ + bool, + Doc( + """ + Hide this command from help outputs. `False` by default. + """ + ), + ] = Default(False), + deprecated: Annotated[ + bool, + Doc( + """ + Mark this command as deprecated in the help outputs. `False` by default. + """ + ), + ] = False, + # Rich settings + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name of the command when the help is printed with Rich. + """ + ), + ] = Default(None), + ) -> None: + """ + Add subcommands to the main app using `app.add_typer()`. + Subcommands may be defined in separate modules, ensuring clean separation of code by functionality. + + Read more in the + [Typer docs for SubCommands](https://typer.tiangolo.com/tutorial/subcommands/add-typer/). 
+ + ## Example + + ```python + import typer + + from .add import app as add_app + from .delete import app as delete_app + + app = typer.Typer() + + app.add_typer(add_app) + app.add_typer(delete_app) + ``` + """ + self.registered_groups.append( + TyperInfo( + typer_instance, + name=name, + cls=cls, + invoke_without_command=invoke_without_command, + no_args_is_help=no_args_is_help, + subcommand_metavar=subcommand_metavar, + chain=chain, + result_callback=result_callback, + context_settings=context_settings, + callback=callback, + help=help, + epilog=epilog, + short_help=short_help, + options_metavar=( + options_metavar or self._info_val_str("options_metavar") + ), + add_help_option=add_help_option, + hidden=hidden, + deprecated=deprecated, + rich_help_panel=rich_help_panel, + ) + ) + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + if sys.excepthook != except_hook: + sys.excepthook = except_hook + try: + return get_command(self)(*args, **kwargs) + except Exception as e: + # Set a custom attribute to tell the hook to show nice exceptions for user + # code. An alternative/first implementation was a custom exception with + # raise custom_exc from e + # but that means the last error shown is the custom exception, not the + # actual error. This trick improves developer experience by showing the + # actual error last. 
+ setattr( + e, + _typer_developer_exception_attr_name, + DeveloperExceptionConfig( + pretty_exceptions_enable=self.pretty_exceptions_enable, + pretty_exceptions_show_locals=self.pretty_exceptions_show_locals, + pretty_exceptions_short=self.pretty_exceptions_short, + ), + ) + raise e + + def _info_val_str(self, name: str) -> str: + val = getattr(self.info, name) + val_str = val.value if isinstance(val, DefaultPlaceholder) else val + assert isinstance(val_str, str) + return val_str + + +def get_group(typer_instance: Typer) -> TyperGroup: + group = get_group_from_info( + TyperInfo(typer_instance), + pretty_exceptions_short=typer_instance.pretty_exceptions_short, + rich_markup_mode=typer_instance.rich_markup_mode, + suggest_commands=typer_instance.suggest_commands, + ) + return group + + +def get_command(typer_instance: Typer) -> click.Command: + if typer_instance._add_completion: + click_install_param, click_show_param = get_install_completion_arguments() + if ( + typer_instance.registered_callback + or typer_instance.info.callback + or typer_instance.registered_groups + or len(typer_instance.registered_commands) > 1 + ): + # Create a Group + click_command: click.Command = get_group(typer_instance) + if typer_instance._add_completion: + click_command.params.append(click_install_param) + click_command.params.append(click_show_param) + return click_command + elif len(typer_instance.registered_commands) == 1: + # Create a single Command + single_command = typer_instance.registered_commands[0] + + if not single_command.context_settings and not isinstance( + typer_instance.info.context_settings, DefaultPlaceholder + ): + single_command.context_settings = typer_instance.info.context_settings + + click_command = get_command_from_info( + single_command, + pretty_exceptions_short=typer_instance.pretty_exceptions_short, + rich_markup_mode=typer_instance.rich_markup_mode, + ) + if typer_instance._add_completion: + click_command.params.append(click_install_param) + 
click_command.params.append(click_show_param) + return click_command + raise RuntimeError( + "Could not get a command for this Typer instance" + ) # pragma: no cover + + +def solve_typer_info_help(typer_info: TyperInfo) -> str: + # Priority 1: Explicit value was set in app.add_typer() + if not isinstance(typer_info.help, DefaultPlaceholder): + return inspect.cleandoc(typer_info.help or "") + # Priority 2: Explicit value was set in sub_app.callback() + if typer_info.typer_instance and typer_info.typer_instance.registered_callback: + callback_help = typer_info.typer_instance.registered_callback.help + if not isinstance(callback_help, DefaultPlaceholder): + return inspect.cleandoc(callback_help or "") + # Priority 3: Explicit value was set in sub_app = typer.Typer() + if typer_info.typer_instance and typer_info.typer_instance.info: + instance_help = typer_info.typer_instance.info.help + if not isinstance(instance_help, DefaultPlaceholder): + return inspect.cleandoc(instance_help or "") + # Priority 4: Implicit inference from callback docstring in app.add_typer() + if typer_info.callback: + doc = inspect.getdoc(typer_info.callback) + if doc: + return doc + # Priority 5: Implicit inference from callback docstring in @app.callback() + if typer_info.typer_instance and typer_info.typer_instance.registered_callback: + callback = typer_info.typer_instance.registered_callback.callback + if not isinstance(callback, DefaultPlaceholder): + doc = inspect.getdoc(callback or "") + if doc: + return doc + # Priority 6: Implicit inference from callback docstring in typer.Typer() + if typer_info.typer_instance and typer_info.typer_instance.info: + instance_callback = typer_info.typer_instance.info.callback + if not isinstance(instance_callback, DefaultPlaceholder): + doc = inspect.getdoc(instance_callback) + if doc: + return doc + # Value not set, use the default + return typer_info.help.value + + +def solve_typer_info_defaults(typer_info: TyperInfo) -> TyperInfo: + values: dict[str, 
Any] = {} + for name, value in typer_info.__dict__.items(): + # Priority 1: Value was set in app.add_typer() + if not isinstance(value, DefaultPlaceholder): + values[name] = value + continue + # Priority 2: Value was set in @subapp.callback() + try: + callback_value = getattr( + typer_info.typer_instance.registered_callback, # type: ignore + name, + ) + if not isinstance(callback_value, DefaultPlaceholder): + values[name] = callback_value + continue + except AttributeError: + pass + # Priority 3: Value set in subapp = typer.Typer() + try: + instance_value = getattr( + typer_info.typer_instance.info, # type: ignore + name, + ) + if not isinstance(instance_value, DefaultPlaceholder): + values[name] = instance_value + continue + except AttributeError: + pass + # Value not set, use the default + values[name] = value.value + values["help"] = solve_typer_info_help(typer_info) + return TyperInfo(**values) + + +def get_group_from_info( + group_info: TyperInfo, + *, + pretty_exceptions_short: bool, + suggest_commands: bool, + rich_markup_mode: MarkupMode, +) -> TyperGroup: + assert group_info.typer_instance, ( + "A Typer instance is needed to generate a Click Group" + ) + commands: dict[str, click.Command] = {} + for command_info in group_info.typer_instance.registered_commands: + command = get_command_from_info( + command_info=command_info, + pretty_exceptions_short=pretty_exceptions_short, + rich_markup_mode=rich_markup_mode, + ) + if command.name: + commands[command.name] = command + for sub_group_info in group_info.typer_instance.registered_groups: + sub_group = get_group_from_info( + sub_group_info, + pretty_exceptions_short=pretty_exceptions_short, + rich_markup_mode=rich_markup_mode, + suggest_commands=suggest_commands, + ) + if sub_group.name: + commands[sub_group.name] = sub_group + else: + if sub_group.callback: + import warnings + + warnings.warn( + "The 'callback' parameter is not supported by Typer when using `add_typer` without a name", + stacklevel=5, + ) + 
for sub_command_name, sub_command in sub_group.commands.items(): + commands[sub_command_name] = sub_command + solved_info = solve_typer_info_defaults(group_info) + ( + params, + convertors, + context_param_name, + ) = get_params_convertors_ctx_param_name_from_function(solved_info.callback) + cls = solved_info.cls or TyperGroup + assert issubclass(cls, TyperGroup), f"{cls} should be a subclass of {TyperGroup}" + group = cls( + name=solved_info.name or "", + commands=commands, + invoke_without_command=solved_info.invoke_without_command, + no_args_is_help=solved_info.no_args_is_help, + subcommand_metavar=solved_info.subcommand_metavar, + chain=solved_info.chain, + result_callback=solved_info.result_callback, + context_settings=solved_info.context_settings, + callback=get_callback( + callback=solved_info.callback, + params=params, + convertors=convertors, + context_param_name=context_param_name, + pretty_exceptions_short=pretty_exceptions_short, + ), + params=params, + help=solved_info.help, + epilog=solved_info.epilog, + short_help=solved_info.short_help, + options_metavar=solved_info.options_metavar, + add_help_option=solved_info.add_help_option, + hidden=solved_info.hidden, + deprecated=solved_info.deprecated, + rich_markup_mode=rich_markup_mode, + # Rich settings + rich_help_panel=solved_info.rich_help_panel, + suggest_commands=suggest_commands, + ) + return group + + +def get_command_name(name: str) -> str: + return name.lower().replace("_", "-") + + +def get_params_convertors_ctx_param_name_from_function( + callback: Callable[..., Any] | None, +) -> tuple[list[click.Argument | click.Option], dict[str, Any], str | None]: + params = [] + convertors = {} + context_param_name = None + if callback: + parameters = get_params_from_function(callback) + for param_name, param in parameters.items(): + if lenient_issubclass(param.annotation, click.Context): + context_param_name = param_name + continue + click_param, convertor = get_click_param(param) + if convertor: + 
convertors[param_name] = convertor + params.append(click_param) + return params, convertors, context_param_name + + +def get_command_from_info( + command_info: CommandInfo, + *, + pretty_exceptions_short: bool, + rich_markup_mode: MarkupMode, +) -> click.Command: + assert command_info.callback, "A command must have a callback function" + name = command_info.name or get_command_name(command_info.callback.__name__) # ty:ignore[unresolved-attribute] + use_help = command_info.help + if use_help is None: + use_help = inspect.getdoc(command_info.callback) + else: + use_help = inspect.cleandoc(use_help) + ( + params, + convertors, + context_param_name, + ) = get_params_convertors_ctx_param_name_from_function(command_info.callback) + cls = command_info.cls or TyperCommand + command = cls( + name=name, + context_settings=command_info.context_settings, + callback=get_callback( + callback=command_info.callback, + params=params, + convertors=convertors, + context_param_name=context_param_name, + pretty_exceptions_short=pretty_exceptions_short, + ), + params=params, # type: ignore + help=use_help, + epilog=command_info.epilog, + short_help=command_info.short_help, + options_metavar=command_info.options_metavar, + add_help_option=command_info.add_help_option, + no_args_is_help=command_info.no_args_is_help, + hidden=command_info.hidden, + deprecated=command_info.deprecated, + rich_markup_mode=rich_markup_mode, + # Rich settings + rich_help_panel=command_info.rich_help_panel, + ) + return command + + +def determine_type_convertor(type_: Any) -> Callable[[Any], Any] | None: + convertor: Callable[[Any], Any] | None = None + if lenient_issubclass(type_, Path): + convertor = param_path_convertor + if lenient_issubclass(type_, Enum): + convertor = generate_enum_convertor(type_) + return convertor + + +def param_path_convertor(value: str | None = None) -> Path | None: + if value is not None: + # allow returning any subclass of Path created by an annotated parser without converting + # 
it back to a Path + return value if isinstance(value, Path) else Path(value) + return None + + +def generate_enum_convertor(enum: type[Enum]) -> Callable[[Any], Any]: + val_map = {str(val.value): val for val in enum} + + def convertor(value: Any) -> Any: + if value is not None: + val = str(value) + if val in val_map: + key = val_map[val] + return enum(key) + + return convertor + + +def generate_list_convertor( + convertor: Callable[[Any], Any] | None, default_value: Any | None +) -> Callable[[Sequence[Any] | None], list[Any] | None]: + def internal_convertor(value: Sequence[Any] | None) -> list[Any] | None: + if (value is None) or (default_value is None and len(value) == 0): + return None + return [convertor(v) if convertor else v for v in value] + + return internal_convertor + + +def generate_tuple_convertor( + types: Sequence[Any], +) -> Callable[[tuple[Any, ...] | None], tuple[Any, ...] | None]: + convertors = [determine_type_convertor(type_) for type_ in types] + + def internal_convertor( + param_args: tuple[Any, ...] | None, + ) -> tuple[Any, ...] 
| None: + if param_args is None: + return None + return tuple( + convertor(arg) if convertor else arg + for (convertor, arg) in zip(convertors, param_args, strict=False) + ) + + return internal_convertor + + +def get_callback( + *, + callback: Callable[..., Any] | None = None, + params: Sequence[click.Parameter] = [], + convertors: dict[str, Callable[[str], Any]] | None = None, + context_param_name: str | None = None, + pretty_exceptions_short: bool, +) -> Callable[..., Any] | None: + use_convertors = convertors or {} + if not callback: + return None + parameters = get_params_from_function(callback) + use_params: dict[str, Any] = {} + for param_name in parameters: + use_params[param_name] = None + for param in params: + if param.name: + use_params[param.name] = param.default + + def wrapper(**kwargs: Any) -> Any: + _rich_traceback_guard = pretty_exceptions_short # noqa: F841 + for k, v in kwargs.items(): + if k in use_convertors: + use_params[k] = use_convertors[k](v) + else: + use_params[k] = v + if context_param_name: + use_params[context_param_name] = click.get_current_context() + return callback(**use_params) + + update_wrapper(wrapper, callback) + return wrapper + + +def get_click_type( + *, annotation: Any, parameter_info: ParameterInfo +) -> click.ParamType: + if parameter_info.click_type is not None: + return parameter_info.click_type + + elif parameter_info.parser is not None: + return click.types.FuncParamType(parameter_info.parser) + + elif annotation is str: + return click.STRING + elif annotation is int: + if parameter_info.min is not None or parameter_info.max is not None: + min_ = None + max_ = None + if parameter_info.min is not None: + min_ = int(parameter_info.min) + if parameter_info.max is not None: + max_ = int(parameter_info.max) + return click.IntRange(min=min_, max=max_, clamp=parameter_info.clamp) + else: + return click.INT + elif annotation is float: + if parameter_info.min is not None or parameter_info.max is not None: + return 
click.FloatRange( + min=parameter_info.min, + max=parameter_info.max, + clamp=parameter_info.clamp, + ) + else: + return click.FLOAT + elif annotation is bool: + return click.BOOL + elif annotation == UUID: + return click.UUID + elif annotation == datetime: + return click.DateTime(formats=parameter_info.formats) + elif ( + annotation == Path + or parameter_info.allow_dash + or parameter_info.path_type + or parameter_info.resolve_path + ): + return TyperPath( + exists=parameter_info.exists, + file_okay=parameter_info.file_okay, + dir_okay=parameter_info.dir_okay, + writable=parameter_info.writable, + readable=parameter_info.readable, + resolve_path=parameter_info.resolve_path, + allow_dash=parameter_info.allow_dash, + path_type=parameter_info.path_type, + ) + elif lenient_issubclass(annotation, FileTextWrite): + return click.File( + mode=parameter_info.mode or "w", + encoding=parameter_info.encoding, + errors=parameter_info.errors, + lazy=parameter_info.lazy, + atomic=parameter_info.atomic, + ) + elif lenient_issubclass(annotation, FileText): + return click.File( + mode=parameter_info.mode or "r", + encoding=parameter_info.encoding, + errors=parameter_info.errors, + lazy=parameter_info.lazy, + atomic=parameter_info.atomic, + ) + elif lenient_issubclass(annotation, FileBinaryRead): + return click.File( + mode=parameter_info.mode or "rb", + encoding=parameter_info.encoding, + errors=parameter_info.errors, + lazy=parameter_info.lazy, + atomic=parameter_info.atomic, + ) + elif lenient_issubclass(annotation, FileBinaryWrite): + return click.File( + mode=parameter_info.mode or "wb", + encoding=parameter_info.encoding, + errors=parameter_info.errors, + lazy=parameter_info.lazy, + atomic=parameter_info.atomic, + ) + elif lenient_issubclass(annotation, Enum): + # The custom TyperChoice is only needed for Click < 8.2.0, to parse the + # command line values matching them to the enum values. Click 8.2.0 added + # support for enum values but reading enum names. 
+ # Passing here the list of enum values (instead of just the enum) accounts for + # Click < 8.2.0. + return TyperChoice( + [item.value for item in annotation], + case_sensitive=parameter_info.case_sensitive, + ) + elif is_literal_type(annotation): + return click.Choice( + literal_values(annotation), + case_sensitive=parameter_info.case_sensitive, + ) + raise RuntimeError(f"Type not yet supported: {annotation}") # pragma: no cover + + +def lenient_issubclass(cls: Any, class_or_tuple: AnyType | tuple[AnyType, ...]) -> bool: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) + + +def get_click_param( + param: ParamMeta, +) -> tuple[click.Argument | click.Option, Any]: + # First, find out what will be: + # * ParamInfo (ArgumentInfo or OptionInfo) + # * default_value + # * required + default_value = None + required = False + if isinstance(param.default, ParameterInfo): + parameter_info = param.default + if parameter_info.default == Required: + required = True + else: + default_value = parameter_info.default + elif param.default == Required or param.default is param.empty: + required = True + parameter_info = ArgumentInfo() + else: + default_value = param.default + parameter_info = OptionInfo() + annotation: Any + if param.annotation is not param.empty: + annotation = param.annotation + else: + annotation = str + main_type = annotation + is_list = False + is_tuple = False + parameter_type: Any = None + is_flag = None + origin = get_origin(main_type) + + if origin is not None: + # Handle SomeType | None and Optional[SomeType] + if is_union(origin): + types = [] + for type_ in get_args(main_type): + if type_ is NoneType: + continue + types.append(type_) + assert len(types) == 1, "Typer Currently doesn't support Union types" + main_type = types[0] + origin = get_origin(main_type) + # Handle Tuples and Lists + if lenient_issubclass(origin, list): + main_type = get_args(main_type)[0] + assert not get_origin(main_type), ( + "List types with complex sub-types 
are not currently supported" + ) + is_list = True + elif lenient_issubclass(origin, tuple): + types = [] + for type_ in get_args(main_type): + assert not get_origin(type_), ( + "Tuple types with complex sub-types are not currently supported" + ) + types.append( + get_click_type(annotation=type_, parameter_info=parameter_info) + ) + parameter_type = tuple(types) + is_tuple = True + if parameter_type is None: + parameter_type = get_click_type( + annotation=main_type, parameter_info=parameter_info + ) + convertor = determine_type_convertor(main_type) + if is_list: + convertor = generate_list_convertor( + convertor=convertor, default_value=default_value + ) + if is_tuple: + convertor = generate_tuple_convertor(get_args(main_type)) + if isinstance(parameter_info, OptionInfo): + if main_type is bool: + is_flag = True + # Click doesn't accept a flag of type bool, only None, and then it sets it + # to bool internally + parameter_type = None + default_option_name = get_command_name(param.name) + if is_flag: + default_option_declaration = ( + f"--{default_option_name}/--no-{default_option_name}" + ) + else: + default_option_declaration = f"--{default_option_name}" + param_decls = [param.name] + if parameter_info.param_decls: + param_decls.extend(parameter_info.param_decls) + else: + param_decls.append(default_option_declaration) + return ( + TyperOption( + # Option + param_decls=param_decls, + show_default=parameter_info.show_default, + prompt=parameter_info.prompt, + confirmation_prompt=parameter_info.confirmation_prompt, + prompt_required=parameter_info.prompt_required, + hide_input=parameter_info.hide_input, + is_flag=is_flag, + multiple=is_list, + count=parameter_info.count, + allow_from_autoenv=parameter_info.allow_from_autoenv, + type=parameter_type, + help=parameter_info.help, + hidden=parameter_info.hidden, + show_choices=parameter_info.show_choices, + show_envvar=parameter_info.show_envvar, + # Parameter + required=required, + default=default_value, + 
callback=get_param_callback( + callback=parameter_info.callback, convertor=convertor + ), + metavar=parameter_info.metavar, + expose_value=parameter_info.expose_value, + is_eager=parameter_info.is_eager, + envvar=parameter_info.envvar, + shell_complete=parameter_info.shell_complete, + autocompletion=get_param_completion(parameter_info.autocompletion), + # Rich settings + rich_help_panel=parameter_info.rich_help_panel, + ), + convertor, + ) + elif isinstance(parameter_info, ArgumentInfo): + param_decls = [param.name] + nargs = None + if is_list: + nargs = -1 + return ( + TyperArgument( + # Argument + param_decls=param_decls, + type=parameter_type, + required=required, + nargs=nargs, + # TyperArgument + show_default=parameter_info.show_default, + show_choices=parameter_info.show_choices, + show_envvar=parameter_info.show_envvar, + help=parameter_info.help, + hidden=parameter_info.hidden, + # Parameter + default=default_value, + callback=get_param_callback( + callback=parameter_info.callback, convertor=convertor + ), + metavar=parameter_info.metavar, + expose_value=parameter_info.expose_value, + is_eager=parameter_info.is_eager, + envvar=parameter_info.envvar, + shell_complete=parameter_info.shell_complete, + autocompletion=get_param_completion(parameter_info.autocompletion), + # Rich settings + rich_help_panel=parameter_info.rich_help_panel, + ), + convertor, + ) + raise AssertionError("A click.Parameter should be returned") # pragma: no cover + + +def get_param_callback( + *, + callback: Callable[..., Any] | None = None, + convertor: Callable[..., Any] | None = None, +) -> Callable[..., Any] | None: + if not callback: + return None + parameters = get_params_from_function(callback) + ctx_name = None + click_param_name = None + value_name = None + untyped_names: list[str] = [] + for param_name, param_sig in parameters.items(): + if lenient_issubclass(param_sig.annotation, click.Context): + ctx_name = param_name + elif lenient_issubclass(param_sig.annotation, 
click.Parameter): + click_param_name = param_name + else: + untyped_names.append(param_name) + # Extract value param name first + if untyped_names: + value_name = untyped_names.pop() + # If context and Click param were not typed (old/Click callback style) extract them + if untyped_names: + if ctx_name is None: + ctx_name = untyped_names.pop(0) + if click_param_name is None: + if untyped_names: + click_param_name = untyped_names.pop(0) + if untyped_names: + raise click.ClickException( + "Too many CLI parameter callback function parameters" + ) + + def wrapper(ctx: click.Context, param: click.Parameter, value: Any) -> Any: + use_params: dict[str, Any] = {} + if ctx_name: + use_params[ctx_name] = ctx + if click_param_name: + use_params[click_param_name] = param + if value_name: + if convertor: + use_value = convertor(value) + else: + use_value = value + use_params[value_name] = use_value + return callback(**use_params) + + update_wrapper(wrapper, callback) + return wrapper + + +def get_param_completion( + callback: Callable[..., Any] | None = None, +) -> Callable[..., Any] | None: + if not callback: + return None + parameters = get_params_from_function(callback) + ctx_name = None + args_name = None + incomplete_name = None + unassigned_params = list(parameters.values()) + for param_sig in unassigned_params[:]: + origin = get_origin(param_sig.annotation) + if lenient_issubclass(param_sig.annotation, click.Context): + ctx_name = param_sig.name + unassigned_params.remove(param_sig) + elif lenient_issubclass(origin, list): + args_name = param_sig.name + unassigned_params.remove(param_sig) + elif lenient_issubclass(param_sig.annotation, str): + incomplete_name = param_sig.name + unassigned_params.remove(param_sig) + # If there are still unassigned parameters (not typed), extract by name + for param_sig in unassigned_params[:]: + if ctx_name is None and param_sig.name == "ctx": + ctx_name = param_sig.name + unassigned_params.remove(param_sig) + elif args_name is None and 
param_sig.name == "args": + args_name = param_sig.name + unassigned_params.remove(param_sig) + elif incomplete_name is None and param_sig.name == "incomplete": + incomplete_name = param_sig.name + unassigned_params.remove(param_sig) + # Extract value param name first + if unassigned_params: + show_params = " ".join([param.name for param in unassigned_params]) + raise click.ClickException( + f"Invalid autocompletion callback parameters: {show_params}" + ) + + def wrapper(ctx: click.Context, args: list[str], incomplete: str | None) -> Any: + use_params: dict[str, Any] = {} + if ctx_name: + use_params[ctx_name] = ctx + if args_name: + use_params[args_name] = args + if incomplete_name: + use_params[incomplete_name] = incomplete + return callback(**use_params) + + update_wrapper(wrapper, callback) + return wrapper + + +def run( + function: Annotated[ + Callable[..., Any], + Doc( + """ + The function that should power this CLI application. + """ + ), + ], +) -> None: + """ + This function converts a given function to a CLI application with `Typer()` and executes it. + + ## Example + + ```python + import typer + + def main(name: str): + print(f"Hello {name}") + + if __name__ == "__main__": + typer.run(main) + ``` + """ + app = Typer(add_completion=False) + app.command()(function) + app() + + +def _is_macos() -> bool: + return platform.system() == "Darwin" + + +def _is_linux_or_bsd() -> bool: + if platform.system() == "Linux": + return True + + return "BSD" in platform.system() + + +def launch( + url: Annotated[ + str, + Doc( + """ + URL or filename of the thing to launch. + """ + ), + ], + wait: Annotated[ + bool, + Doc( + """ + Wait for the program to exit before returning. This only works if the launched program blocks. + In particular, `xdg-open` on Linux does not block. 
+ """ + ), + ] = False, + locate: Annotated[ + bool, + Doc( + """ + If this is set to `True`, then instead of launching the application associated with the URL, it will attempt to + launch a file manager with the file located. This might have weird effects if the URL does not point to the filesystem. + """ + ), + ] = False, +) -> int: + """ + This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, `0` indicates + success. + + This function handles url in different operating systems separately: + - On macOS (Darwin), it uses the `open` command. + - On Linux and BSD, it uses `xdg-open` if available. + - On Windows (and other OSes), it uses the standard webbrowser module. + + The function avoids, when possible, using the webbrowser module on Linux and macOS + to prevent spammy terminal messages from some browsers (e.g., Chrome). 
+ + ## Examples + ```python + import typer + + typer.launch("https://typer.tiangolo.com/") + ``` + + ```python + import typer + + typer.launch("/my/downloaded/file", locate=True) + ``` + """ + + if url.startswith("http://") or url.startswith("https://"): + if _is_macos(): + return subprocess.Popen( + ["open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT + ).wait() + + has_xdg_open = _is_linux_or_bsd() and shutil.which("xdg-open") is not None + + if has_xdg_open: + return subprocess.Popen( + ["xdg-open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT + ).wait() + + import webbrowser + + webbrowser.open(url) + + return 0 + + else: + return click.launch(url) diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py new file mode 100644 index 0000000000000000000000000000000000000000..3285a96a2433176a22c11c2402116e5f7562218e --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/models.py @@ -0,0 +1,651 @@ +import inspect +import io +from collections.abc import Callable, Sequence +from typing import ( + TYPE_CHECKING, + Any, + Optional, + TypeVar, +) + +import click +import click.shell_completion + +if TYPE_CHECKING: # pragma: no cover + from .core import TyperCommand, TyperGroup + from .main import Typer + + +NoneType = type(None) + +AnyType = type[Any] + +Required = ... + + +class Context(click.Context): + """ + The [`Context`](https://click.palletsprojects.com/en/stable/api/#click.Context) has some additional data about the current execution of your program. + When declaring it in a [callback](https://typer.tiangolo.com/tutorial/options/callback-and-context/) function, + you can access this additional information. + """ + + pass + + +class FileText(io.TextIOWrapper): + """ + Gives you a file-like object for reading text, and you will get a `str` data from it. + The default mode of this class is `mode="r"`. 
+ + **Example** + + ```python + from typing import Annotated + + import typer + + app = typer.Typer() + + @app.command() + def main(config: Annotated[typer.FileText, typer.Option()]): + for line in config: + print(f"Config line: {line}") + + if __name__ == "__main__": + app() + ``` + """ + + pass + + +class FileTextWrite(FileText): + """ + You can use this class for writing text. Alternatively, you can use `FileText` with `mode="w"`. + The default mode of this class is `mode="w"`. + + **Example** + + ```python + from typing import Annotated + + import typer + + app = typer.Typer() + + @app.command() + def main(config: Annotated[typer.FileTextWrite, typer.Option()]): + config.write("Some config written by the app") + print("Config written") + + if __name__ == "__main__": + app() + ``` + """ + + pass + + +class FileBinaryRead(io.BufferedReader): + """ + You can use this class to read binary data, receiving `bytes`. + The default mode of this class is `mode="rb"`. + It is useful for reading binary files like images: + + **Example** + + ```python + from typing import Annotated + + import typer + + app = typer.Typer() + + @app.command() + def main(file: Annotated[typer.FileBinaryRead, typer.Option()]): + processed_total = 0 + for bytes_chunk in file: + # Process the bytes in bytes_chunk + processed_total += len(bytes_chunk) + print(f"Processed bytes total: {processed_total}") + + if __name__ == "__main__": + app() + ``` + """ + + pass + + +class FileBinaryWrite(io.BufferedWriter): + """ + You can use this class to write binary data: you pass `bytes` to it instead of strings. + The default mode of this class is `mode="wb"`. 
+ It is useful for writing binary files like images: + + **Example** + + ```python + from typing import Annotated + + import typer + + app = typer.Typer() + + @app.command() + def main(file: Annotated[typer.FileBinaryWrite, typer.Option()]): + first_line_str = "some settings\\n" + # You cannot write str directly to a binary file; encode it first + first_line_bytes = first_line_str.encode("utf-8") + # Then you can write the bytes + file.write(first_line_bytes) + # This is already bytes, it starts with b" + second_line = b"la cig\xc3\xbce\xc3\xb1a trae al ni\xc3\xb1o" + file.write(second_line) + print("Binary file written") + + if __name__ == "__main__": + app() + ``` + """ + + pass + + +class CallbackParam(click.Parameter): + """ + In a callback function, you can declare a function parameter with type `CallbackParam` + to access the specific Click [`Parameter`](https://click.palletsprojects.com/en/stable/api/#click.Parameter) object. + """ + + pass + + +class DefaultPlaceholder: + """ + You shouldn't use this class directly. + + It's used internally to recognize when a default value has been overwritten, even + if the new value is `None`. + """ + + def __init__(self, value: Any): + self.value = value + + def __bool__(self) -> bool: + return bool(self.value) + + +DefaultType = TypeVar("DefaultType") + +CommandFunctionType = TypeVar("CommandFunctionType", bound=Callable[..., Any]) + + +def Default(value: DefaultType) -> DefaultType: + """ + You shouldn't use this function directly. + + It's used internally to recognize when a default value has been overwritten, even + if the new value is `None`. 
+ """ + return DefaultPlaceholder(value) # type: ignore + + +class CommandInfo: + def __init__( + self, + name: str | None = None, + *, + cls: type["TyperCommand"] | None = None, + context_settings: dict[Any, Any] | None = None, + callback: Callable[..., Any] | None = None, + help: str | None = None, + epilog: str | None = None, + short_help: str | None = None, + options_metavar: str = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + # Rich settings + rich_help_panel: str | None = None, + ): + self.name = name + self.cls = cls + self.context_settings = context_settings + self.callback = callback + self.help = help + self.epilog = epilog + self.short_help = short_help + self.options_metavar = options_metavar + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + # Rich settings + self.rich_help_panel = rich_help_panel + + +class TyperInfo: + def __init__( + self, + typer_instance: Optional["Typer"] = Default(None), + *, + name: str | None = Default(None), + cls: type["TyperGroup"] | None = Default(None), + invoke_without_command: bool = Default(False), + no_args_is_help: bool = Default(False), + subcommand_metavar: str | None = Default(None), + chain: bool = Default(False), + result_callback: Callable[..., Any] | None = Default(None), + # Command + context_settings: dict[Any, Any] | None = Default(None), + callback: Callable[..., Any] | None = Default(None), + help: str | None = Default(None), + epilog: str | None = Default(None), + short_help: str | None = Default(None), + options_metavar: str = Default("[OPTIONS]"), + add_help_option: bool = Default(True), + hidden: bool = Default(False), + deprecated: bool = Default(False), + # Rich settings + rich_help_panel: str | None = Default(None), + ): + self.typer_instance = typer_instance + self.name = name + self.cls = cls + self.invoke_without_command = 
invoke_without_command + self.no_args_is_help = no_args_is_help + self.subcommand_metavar = subcommand_metavar + self.chain = chain + self.result_callback = result_callback + self.context_settings = context_settings + self.callback = callback + self.help = help + self.epilog = epilog + self.short_help = short_help + self.options_metavar = options_metavar + self.add_help_option = add_help_option + self.hidden = hidden + self.deprecated = deprecated + self.rich_help_panel = rich_help_panel + + +class ParameterInfo: + def __init__( + self, + *, + default: Any | None = None, + param_decls: Sequence[str] | None = None, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + parser: Callable[[str], Any] | None = None, + click_type: click.ParamType | None = None, + # TyperArgument + show_default: bool | str = True, + show_choices: bool = True, + show_envvar: bool = True, + help: str | None = None, + hidden: bool = False, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: 
bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, + ): + # Check if user has provided multiple custom parsers + if parser and click_type: + raise ValueError( + "Multiple custom type parsers provided. " + "`parser` and `click_type` may not both be provided." + ) + + self.default = default + self.param_decls = param_decls + self.callback = callback + self.metavar = metavar + self.expose_value = expose_value + self.is_eager = is_eager + self.envvar = envvar + self.shell_complete = shell_complete + self.autocompletion = autocompletion + self.default_factory = default_factory + # Custom type + self.parser = parser + self.click_type = click_type + # TyperArgument + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + self.help = help + self.hidden = hidden + # Choice + self.case_sensitive = case_sensitive + # Numbers + self.min = min + self.max = max + self.clamp = clamp + # DateTime + self.formats = formats + # File + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + # Path + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.writable = writable + self.readable = readable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.path_type = path_type + # Rich settings + self.rich_help_panel = rich_help_panel + + +class OptionInfo(ParameterInfo): + def __init__( + self, + *, + # ParameterInfo + default: Any | None = None, + param_decls: Sequence[str] | None = None, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, 
click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + parser: Callable[[str], Any] | None = None, + click_type: click.ParamType | None = None, + # Option + show_default: bool | str = True, + prompt: bool | str = False, + confirmation_prompt: bool = False, + prompt_required: bool = True, + hide_input: bool = False, + # TODO: remove is_flag and flag_value in a future release + is_flag: bool | None = None, + flag_value: Any | None = None, + count: bool = False, + allow_from_autoenv: bool = True, + help: str | None = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = True, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, + ): + super().__init__( + default=default, + param_decls=param_decls, + callback=callback, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + shell_complete=shell_complete, + autocompletion=autocompletion, + default_factory=default_factory, + # Custom type + parser=parser, + click_type=click_type, + # TyperArgument + show_default=show_default, + show_choices=show_choices, + show_envvar=show_envvar, + help=help, + hidden=hidden, + # Choice + case_sensitive=case_sensitive, + # Numbers + min=min, + max=max, + clamp=clamp, + # 
DateTime + formats=formats, + # File + mode=mode, + encoding=encoding, + errors=errors, + lazy=lazy, + atomic=atomic, + # Path + exists=exists, + file_okay=file_okay, + dir_okay=dir_okay, + writable=writable, + readable=readable, + resolve_path=resolve_path, + allow_dash=allow_dash, + path_type=path_type, + # Rich settings + rich_help_panel=rich_help_panel, + ) + if is_flag is not None or flag_value is not None: + import warnings + + warnings.warn( + "The 'is_flag' and 'flag_value' parameters are not supported by Typer " + "and will be removed entirely in a future release.", + DeprecationWarning, + stacklevel=2, + ) + self.prompt = prompt + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.count = count + self.allow_from_autoenv = allow_from_autoenv + + +class ArgumentInfo(ParameterInfo): + def __init__( + self, + *, + # ParameterInfo + default: Any | None = None, + param_decls: Sequence[str] | None = None, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + parser: Callable[[str], Any] | None = None, + click_type: click.ParamType | None = None, + # TyperArgument + show_default: bool | str = True, + show_choices: bool = True, + show_envvar: bool = True, + help: str | None = None, + hidden: bool = False, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # 
DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, + ): + super().__init__( + default=default, + param_decls=param_decls, + callback=callback, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + shell_complete=shell_complete, + autocompletion=autocompletion, + default_factory=default_factory, + # Custom type + parser=parser, + click_type=click_type, + # TyperArgument + show_default=show_default, + show_choices=show_choices, + show_envvar=show_envvar, + help=help, + hidden=hidden, + # Choice + case_sensitive=case_sensitive, + # Numbers + min=min, + max=max, + clamp=clamp, + # DateTime + formats=formats, + # File + mode=mode, + encoding=encoding, + errors=errors, + lazy=lazy, + atomic=atomic, + # Path + exists=exists, + file_okay=file_okay, + dir_okay=dir_okay, + writable=writable, + readable=readable, + resolve_path=resolve_path, + allow_dash=allow_dash, + path_type=path_type, + # Rich settings + rich_help_panel=rich_help_panel, + ) + + +class ParamMeta: + empty = inspect.Parameter.empty + + def __init__( + self, + *, + name: str, + default: Any = inspect.Parameter.empty, + annotation: Any = inspect.Parameter.empty, + ) -> None: + self.name = name + self.default = default + self.annotation = annotation + + +class DeveloperExceptionConfig: + def __init__( + self, + *, + pretty_exceptions_enable: bool = True, + pretty_exceptions_show_locals: bool = True, + pretty_exceptions_short: bool = True, + ) -> None: + self.pretty_exceptions_enable = pretty_exceptions_enable + 
self.pretty_exceptions_show_locals = pretty_exceptions_show_locals + self.pretty_exceptions_short = pretty_exceptions_short + + +class TyperPath(click.Path): + # Overwrite Click's behaviour to be compatible with Typer's autocompletion system + def shell_complete( + self, ctx: click.Context, param: click.Parameter, incomplete: str + ) -> list[click.shell_completion.CompletionItem]: + """Return an empty list so that the autocompletion functionality + will work properly from the commandline. + """ + return [] diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py new file mode 100644 index 0000000000000000000000000000000000000000..b325b273c43860f79f598c11e971f60f2c95676f --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/params.py @@ -0,0 +1,1831 @@ +from collections.abc import Callable +from typing import TYPE_CHECKING, Annotated, Any, overload + +import click +from annotated_doc import Doc + +from .models import ArgumentInfo, OptionInfo + +if TYPE_CHECKING: # pragma: no cover + import click.shell_completion + + +# Overload for Option created with custom type 'parser' +@overload +def Option( + # Parameter + default: Any | None = ..., + *param_decls: str, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + parser: Callable[[str], Any] | None = None, + # Option + show_default: bool | str = True, + prompt: bool | str = False, + 
confirmation_prompt: bool = False, + prompt_required: bool = True, + hide_input: bool = False, + # TODO: remove is_flag and flag_value in a future release + is_flag: bool | None = None, + flag_value: Any | None = None, + count: bool = False, + allow_from_autoenv: bool = True, + help: str | None = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = True, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, +) -> Any: ... 
+ + +# Overload for Option created with custom type 'click_type' +@overload +def Option( + # Parameter + default: Any | None = ..., + *param_decls: str, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + click_type: click.ParamType | None = None, + # Option + show_default: bool | str = True, + prompt: bool | str = False, + confirmation_prompt: bool = False, + prompt_required: bool = True, + hide_input: bool = False, + # TODO: remove is_flag and flag_value in a future release + is_flag: bool | None = None, + flag_value: Any | None = None, + count: bool = False, + allow_from_autoenv: bool = True, + help: str | None = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = True, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, +) -> Any: ... 
+ + +def Option( + # Parameter + default: Annotated[ + Any | None, + Doc( + """ + Usually, [CLI options](https://typer.tiangolo.com/tutorial/options/) are optional and have a default value, passed on like this: + + **Example** + + ```python + @app.command() + def main(network: str = typer.Option("CNN")): + print(f"Training neural network of type: {network}") + ``` + + Note that this usage is deprecated, and we recommend to use `Annotated` instead: + ``` + @app.command() + def main(network: Annotated[str, typer.Option()] = "CNN"): + print(f"Hello {name}!") + ``` + + You can also use `...` ([Ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis)) as the "default" value to clarify that this is a required CLI option. + """ + ), + ] = ..., + *param_decls: Annotated[ + str, + Doc( + """ + Positional argument that defines how users can call this option on the command line. This may be one or multiple aliases, all strings. + If not defined, Typer will automatically use the function parameter as default name. + See [the tutorial about CLI Option Names](https://typer.tiangolo.com/tutorial/options/name/) for more details. + + **Example** + + ```python + @app.command() + def main(user_name: Annotated[str, typer.Option("--user", "-u", "-x")]): + print(f"Hello {user_name}") + ``` + """ + ), + ], + callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Add a callback to this CLI Option, to execute additional logic after its value was received from the terminal. + See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/options/callback-and-context/) for more details. 
+ + **Example** + + ```python + def name_callback(value: str): + if value != "Deadpool": + raise typer.BadParameter("Only Deadpool is allowed") + return value + + @app.command() + def main(name: Annotated[str, typer.Option(callback=name_callback)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + metavar: Annotated[ + str | None, + Doc( + """ + Customize the name displayed in the [help text](https://typer.tiangolo.com/tutorial/options/help/) to represent this CLI option. + Note that this doesn't influence the way the option must be called. + + **Example** + + ```python + @app.command() + def main(user: Annotated[str, typer.Option(metavar="User name")]): + print(f"Hello {user}") + ``` + """ + ), + ] = None, + expose_value: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + If this is `True` then the value is passed onwards to the command callback and stored on the context, otherwise it’s skipped. + """ + ), + ] = True, + is_eager: Annotated[ + bool, + Doc( + """ + Mark a CLI Option to be "eager", ensuring it gets processed before other CLI parameters. This could be relevant when there are other parameters with callbacks that could exit the program early. + For more information and an extended example, see the documentation [here](https://typer.tiangolo.com/tutorial/options/version/#fix-with-is_eager). + """ + ), + ] = False, + envvar: Annotated[ + str | list[str] | None, + Doc( + """ + Configure a CLI Option to read its value from an environment variable if it is not provided in the command line. + For more information, see the [documentation on Environment Variables](https://typer.tiangolo.com/tutorial/arguments/envvar/). 
+ + **Example** + + ```python + @app.command() + def main(user: Annotated[str, typer.Option(envvar="ME")]): + print(f"Hello {user}") + ``` + """ + ), + ] = None, + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Annotated[ + Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + It is however not fully functional, and will likely be removed in future versions. + """ + ), + ] = None, + autocompletion: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Provide a custom function that helps to autocomplete the values of this CLI Option. + See [the tutorial on parameter autocompletion](https://typer.tiangolo.com/tutorial/options-autocompletion) for more details. + + **Example** + + ```python + def complete(): + return ["Me", "Myself", "I"] + + @app.command() + def main(name: Annotated[str, typer.Option(autocompletion=complete)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + default_factory: Annotated[ + Callable[[], Any] | None, + Doc( + """ + Provide a custom function that dynamically generates a [default](https://typer.tiangolo.com/tutorial/arguments/default) for this CLI Option. 
+ + **Example** + + ```python + def get_name(): + return random.choice(["Me", "Myself", "I"]) + + @app.command() + def main(name: Annotated[str, typer.Option(default_factory=get_name)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + # Custom type + parser: Annotated[ + Callable[[str], Any] | None, + Doc( + """ + Use your own custom types in Typer applications by defining a `parser` function that parses input into your own types: + + **Example** + + ```python + class CustomClass: + def __init__(self, value: str): + self.value = value + + def __str__(self): + return f"" + + def my_parser(value: str): + return CustomClass(value * 2) + + @app.command() + def main(opt: Annotated[CustomClass, typer.Option(parser=my_parser)] = "Foo"): + print(f"--opt is {opt}") + ``` + """ + ), + ] = None, + click_type: Annotated[ + click.ParamType | None, + Doc( + """ + Define this parameter to use a [custom Click type](https://click.palletsprojects.com/en/stable/parameters/#implementing-custom-types) in your Typer applications. + + **Example** + + ```python + class MyClass: + def __init__(self, value: str): + self.value = value + + def __str__(self): + return f"" + + class MyParser(click.ParamType): + name = "MyClass" + + def convert(self, value, param, ctx): + return MyClass(value * 3) + + @app.command() + def main(opt: Annotated[MyClass, typer.Option(click_type=MyParser())] = "Foo"): + print(f"--opt is {opt}") + ``` + """ + ), + ] = None, + # Option + show_default: Annotated[ + bool | str, + Doc( + """ + When set to `False`, don't show the default value of this CLI Option in the [help text](https://typer.tiangolo.com/tutorial/options/help/). 
+ + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Option(show_default=False)] = "Rick"): + print(f"Hello {name}") + ``` + """ + ), + ] = True, + prompt: Annotated[ + bool | str, + Doc( + """ + When set to `True`, a prompt will appear to ask for the value of this CLI Option if it was not provided: + + **Example** + + ```python + @app.command() + def main(name: str, lastname: Annotated[str, typer.Option(prompt=True)]): + print(f"Hello {name} {lastname}") + ``` + """ + ), + ] = False, + confirmation_prompt: Annotated[ + bool, + Doc( + """ + When set to `True`, a user will need to type a prompted value twice (may be useful for passwords etc.). + + **Example** + + ```python + @app.command() + def main(project: Annotated[str, typer.Option(prompt=True, confirmation_prompt=True)]): + print(f"Deleting project {project}") + ``` + """ + ), + ] = False, + prompt_required: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + If this is `False` then a prompt is only shown if the option's flag is given without a value. + """ + ), + ] = True, + hide_input: Annotated[ + bool, + Doc( + """ + When you've configured a prompt, for instance for [querying a password](https://typer.tiangolo.com/tutorial/options/password/), + don't show anything on the screen while the user is typing the value. + + **Example** + + ```python + @app.command() + def login( + name: str, + password: Annotated[str, typer.Option(prompt=True, hide_input=True)], + ): + print(f"Hello {name}. Doing something very secure with password.") + ``` + """ + ), + ] = False, + # TODO: remove is_flag and flag_value in a future release + is_flag: Annotated[ + bool | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + It is however not fully functional, and will likely be removed in future versions. 
+ """ + ), + ] = None, + flag_value: Annotated[ + Any | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + It is however not fully functional, and will likely be removed in future versions. + """ + ), + ] = None, + count: Annotated[ + bool, + Doc( + """ + Make a CLI Option work as a [counter](https://typer.tiangolo.com/tutorial/parameter-types/number/#counter-cli-options). + The CLI option will have the `int` value representing the number of times the option was used on the command line. + + **Example** + + ```python + @app.command() + def main(verbose: Annotated[int, typer.Option("--verbose", "-v", count=True)] = 0): + print(f"Verbose level is {verbose}") + ``` + """ + ), + ] = False, + allow_from_autoenv: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + If this is enabled then the value of this parameter will be pulled from an environment variable in case a prefix is defined on the context. + """ + ), + ] = True, + help: Annotated[ + str | None, + Doc( + """ + Help text for this CLI Option. + See [the tutorial about CLI Options with help](https://typer.tiangolo.com/tutorial/options/help/) for more dedails. + + **Example** + + ```python + @app.command() + def greet(name: Annotated[str, typer.Option(help="Person to greet")] = "Deadpool"): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + hidden: Annotated[ + bool, + Doc( + """ + Hide this CLI Option from [help outputs](https://typer.tiangolo.com/tutorial/options/help). `False` by default. 
+ + **Example** + + ```python + @app.command() + def greet(name: Annotated[str, typer.Option(hidden=True)] = "Deadpool"): + print(f"Hello {name}") + ``` + """ + ), + ] = False, + show_choices: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + When set to `False`, this suppresses choices from being displayed inline when `prompt` is used. + """ + ), + ] = True, + show_envvar: Annotated[ + bool, + Doc( + """ + When an ["envvar"](https://typer.tiangolo.com/tutorial/arguments/envvar) is defined, prevent it from showing up in the help text: + + **Example** + + ```python + @app.command() + def main(user: Annotated[str, typer.Option(envvar="ME", show_envvar=False)]): + print(f"Hello {user}") + ``` + """ + ), + ] = True, + # Choice + case_sensitive: Annotated[ + bool, + Doc( + """ + For a CLI Option representing an [Enum (choice)](https://typer.tiangolo.com/tutorial/parameter-types/enum), + you can allow case-insensitive matching with this parameter: + + **Example** + + ```python + from enum import Enum + + class NeuralNetwork(str, Enum): + simple = "simple" + conv = "conv" + lstm = "lstm" + + @app.command() + def main( + network: Annotated[NeuralNetwork, typer.Option(case_sensitive=False)]): + print(f"Training neural network of type: {network.value}") + ``` + + With this setting, "LSTM" or "lstm" will both be valid values that will be resolved to `NeuralNetwork.lstm`. 
+ """ + ), + ] = True, + # Numbers + min: Annotated[ + int | float | None, + Doc( + """ + For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`), + you can define numeric validations with `min` and `max` values: + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Option(min=1, max=1000)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid. + """ + ), + ] = None, + max: Annotated[ + int | float | None, + Doc( + """ + For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`), + you can define numeric validations with `min` and `max` values: + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Option(min=1, max=1000)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid. + """ + ), + ] = None, + clamp: Annotated[ + bool, + Doc( + """ + For a CLI Option representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) and that is bounded by using `min` and/or `max`, + you can opt to use the closest minimum or maximum value instead of raising an error when the value is out of bounds. This is done by setting `clamp` to `True`. + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Option(min=1, max=1000, clamp=True)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input 3420 for `user_id`, this will internally be converted to `1000`. 
+ """ + ), + ] = False, + # DateTime + formats: Annotated[ + list[str] | None, + Doc( + """ + For a CLI Option representing a [DateTime object](https://typer.tiangolo.com/tutorial/parameter-types/datetime), + you can customize the formats that can be parsed automatically: + + **Example** + + ```python + from datetime import datetime + + @app.command() + def main( + birthday: Annotated[ + datetime, + typer.Option( + formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%m/%d/%Y"] + ), + ], + ): + print(f"Birthday defined at: {birthday}") + ``` + """ + ), + ] = None, + # File + mode: Annotated[ + str | None, + Doc( + """ + For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + you can customize the mode to open the file with. If unset, Typer will set a [sensible value by default](https://typer.tiangolo.com/tutorial/parameter-types/file/#advanced-mode). + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Option(mode="a")]): + config.write("This is a single line\\n") + print("Config line written") + ``` + """ + ), + ] = None, + encoding: Annotated[ + str | None, + Doc( + """ + Customize the encoding of this CLI Option represented by a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/). + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Option(encoding="utf-8")]): + config.write("All the text gets written\\n") + ``` + """ + ), + ] = None, + errors: Annotated[ + str | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + The error handling mode. + """ + ), + ] = "strict", + lazy: Annotated[ + bool | None, + Doc( + """ + For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + by default the file will not be created until you actually start writing to it. 
+ You can change this behaviour by setting this parameter. + By default, it's set to `True` for writing and to `False` for reading. + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Option(mode="a", lazy=False)]): + config.write("This is a single line\\n") + print("Config line written") + ``` + """ + ), + ] = None, + atomic: Annotated[ + bool, + Doc( + """ + For a CLI Option representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + you can ensure that all write instructions first go into a temporal file, and are only moved to the final destination after completing + by setting `atomic` to `True`. This can be useful for files with potential concurrent access. + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Option(mode="a", atomic=True)]): + config.write("All the text") + ``` + """ + ), + ] = False, + # Path + exists: Annotated[ + bool, + Doc( + """ + When set to `True` for a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/), + additional validation is performed to check that the file or directory exists. If not, the value will be invalid. + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Option(exists=True)]): + text = config.read_text() + print(f"Config file contents: {text}") + ``` + """ + ), + ] = False, + file_okay: Annotated[ + bool, + Doc( + """ + Determine whether or not a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/) + is allowed to refer to a file. When this is set to `False`, the application will raise a validation error when a path to a file is given. 
+ + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Option(exists=True, file_okay=False)]): + print(f"Directory listing: {[x.name for x in config.iterdir()]}") + ``` + """ + ), + ] = True, + dir_okay: Annotated[ + bool, + Doc( + """ + Determine whether or not a [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/) + is allowed to refer to a directory. When this is set to `False`, the application will raise a validation error when a path to a directory is given. + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(exists=True, dir_okay=False)]): + text = config.read_text() + print(f"Config file contents: {text}") + ``` + """ + ), + ] = True, + writable: Annotated[ + bool, + Doc( + """ + Whether or not to perform a writable check for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/). + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Option(writable=True)]): + config.write_text("All the text") + ``` + """ + ), + ] = False, + readable: Annotated[ + bool, + Doc( + """ + Whether or not to perform a readable check for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/). + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Option(readable=True)]): + config.read_text("All the text") + ``` + """ + ), + ] = True, + resolve_path: Annotated[ + bool, + Doc( + """ + Whether or not to fully resolve the path of this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/), + meaning that the path becomes absolute and symlinks are resolved. 
+ + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Option(resolve_path=True)]): + config.read_text("All the text") + ``` + """ + ), + ] = False, + allow_dash: Annotated[ + bool, + Doc( + """ + When set to `True`, a single dash for this [`Path` CLI Option](https://typer.tiangolo.com/tutorial/parameter-types/path/) + would be a valid value, indicating standard streams. This is a more advanced use-case. + """ + ), + ] = False, + path_type: Annotated[ + None | type[str] | type[bytes], + Doc( + """ + A string type that will be used to represent this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/). + The default is `None` which means the return value will be either bytes or unicode, depending on what makes most sense given the input data. + This is a more advanced use-case. + """ + ), + ] = None, + # Rich settings + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name where you want this CLI Option to be shown in the [help text](https://typer.tiangolo.com/tutorial/arguments/help). + + **Example** + + ```python + @app.command() + def main( + name: Annotated[str, typer.Argument(help="Who to greet")], + age: Annotated[str, typer.Option(help="Their age", rich_help_panel="Data")], + ): + print(f"Hello {name} of age {age}") + ``` + """ + ), + ] = None, +) -> Any: + """ + A [CLI Option](https://typer.tiangolo.com/tutorial/options) is a parameter to your command line application that is called with a single or double dash, something like `--verbose` or `-v`. + + Often, CLI Options are optional, meaning that users can omit them from the command. However, you can set them to be required by using `Annotated` + and omitting a default value. 
+ + ## Example + + ```python + @app.command() + def register( + user: Annotated[str, typer.Argument()], + age: Annotated[int, typer.Option(min=18)], + ): + print(f"User is {user}") + print(f"--age is {age}") + ``` + + Note how in this example, `--age` is a required CLI Option. + """ + return OptionInfo( + # Parameter + default=default, + param_decls=param_decls, + callback=callback, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + shell_complete=shell_complete, + autocompletion=autocompletion, + default_factory=default_factory, + # Custom type + parser=parser, + click_type=click_type, + # Option + show_default=show_default, + prompt=prompt, + confirmation_prompt=confirmation_prompt, + prompt_required=prompt_required, + hide_input=hide_input, + is_flag=is_flag, + flag_value=flag_value, + count=count, + allow_from_autoenv=allow_from_autoenv, + help=help, + hidden=hidden, + show_choices=show_choices, + show_envvar=show_envvar, + # Choice + case_sensitive=case_sensitive, + # Numbers + min=min, + max=max, + clamp=clamp, + # DateTime + formats=formats, + # File + mode=mode, + encoding=encoding, + errors=errors, + lazy=lazy, + atomic=atomic, + # Path + exists=exists, + file_okay=file_okay, + dir_okay=dir_okay, + writable=writable, + readable=readable, + resolve_path=resolve_path, + allow_dash=allow_dash, + path_type=path_type, + # Rich settings + rich_help_panel=rich_help_panel, + ) + + +# Overload for Argument created with custom type 'parser' +@overload +def Argument( + # Parameter + default: Any | None = ..., + *, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + 
list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + parser: Callable[[str], Any] | None = None, + # TyperArgument + show_default: bool | str = True, + show_choices: bool = True, + show_envvar: bool = True, + help: str | None = None, + hidden: bool = False, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, +) -> Any: ... 
+ + +# Overload for Argument created with custom type 'click_type' +@overload +def Argument( + # Parameter + default: Any | None = ..., + *, + callback: Callable[..., Any] | None = None, + metavar: str | None = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: str | list[str] | None = None, + # Note that shell_complete is not fully supported and will be removed in future versions + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None = None, + autocompletion: Callable[..., Any] | None = None, + default_factory: Callable[[], Any] | None = None, + # Custom type + click_type: click.ParamType | None = None, + # TyperArgument + show_default: bool | str = True, + show_choices: bool = True, + show_envvar: bool = True, + help: str | None = None, + hidden: bool = False, + # Choice + case_sensitive: bool = True, + # Numbers + min: int | float | None = None, + max: int | float | None = None, + clamp: bool = False, + # DateTime + formats: list[str] | None = None, + # File + mode: str | None = None, + encoding: str | None = None, + errors: str | None = "strict", + lazy: bool | None = None, + atomic: bool = False, + # Path + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: None | type[str] | type[bytes] = None, + # Rich settings + rich_help_panel: str | None = None, +) -> Any: ... + + +def Argument( + # Parameter + default: Annotated[ + Any | None, + Doc( + """ + By default, CLI arguments are required. 
However, by giving them a default value they become [optional](https://typer.tiangolo.com/tutorial/arguments/optional): + + **Example** + + ```python + @app.command() + def main(name: str = typer.Argument("World")): + print(f"Hello {name}!") + ``` + + Note that this usage is deprecated, and we recommend to use `Annotated` instead: + ```python + @app.command() + def main(name: Annotated[str, typer.Argument()] = "World"): + print(f"Hello {name}!") + ``` + """ + ), + ] = ..., + *, + callback: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Add a callback to this CLI Argument, to execute additional logic with the value received from the terminal. + See [the tutorial about callbacks](https://typer.tiangolo.com/tutorial/options/callback-and-context/) for more details. + + **Example** + + ```python + def name_callback(value: str): + if value != "Deadpool": + raise typer.BadParameter("Only Deadpool is allowed") + return value + + @app.command() + def main(name: Annotated[str, typer.Argument(callback=name_callback)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + metavar: Annotated[ + str | None, + Doc( + """ + Customize the name displayed in the help text to represent this CLI Argument. + By default, it will be the same name you declared, in uppercase. + See [the tutorial about CLI Arguments with Help](https://typer.tiangolo.com/tutorial/arguments/help/#custom-help-name-metavar) for more details. + + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument(metavar="✨username✨")]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + expose_value: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + If this is `True` then the value is passed onwards to the command callback and stored on the context, otherwise it’s skipped. 
+ """ + ), + ] = True, + is_eager: Annotated[ + bool, + Doc( + """ + Set an argument to "eager" to ensure it gets processed before other CLI parameters. This could be relevant when there are other parameters with callbacks that could exit the program early. + For more information and an extended example, see the documentation [here](https://typer.tiangolo.com/tutorial/options/version/#fix-with-is_eager). + """ + ), + ] = False, + envvar: Annotated[ + str | list[str] | None, + Doc( + """ + Configure an argument to read a value from an environment variable if it is not provided in the command line as a CLI argument. + For more information, see the [documentation on Environment Variables](https://typer.tiangolo.com/tutorial/arguments/envvar/). + + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument(envvar="ME")]): + print(f"Hello Mr. {name}") + ``` + """ + ), + ] = None, + # TODO: Remove shell_complete in a future version (after 0.16.0) + shell_complete: Annotated[ + Callable[ + [click.Context, click.Parameter, str], + list["click.shell_completion.CompletionItem"] | list[str], + ] + | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + It is however not fully functional, and will likely be removed in future versions. + """ + ), + ] = None, + autocompletion: Annotated[ + Callable[..., Any] | None, + Doc( + """ + Provide a custom function that helps to autocomplete the values of this CLI Argument. + See [the tutorial on parameter autocompletion](https://typer.tiangolo.com/tutorial/options-autocompletion) for more details. 
+ + **Example** + + ```python + def complete(): + return ["Me", "Myself", "I"] + + @app.command() + def main(name: Annotated[str, typer.Argument(autocompletion=complete)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + default_factory: Annotated[ + Callable[[], Any] | None, + Doc( + """ + Provide a custom function that dynamically generates a [default](https://typer.tiangolo.com/tutorial/arguments/default) for this CLI Argument. + + **Example** + + ```python + def get_name(): + return random.choice(["Me", "Myself", "I"]) + + @app.command() + def main(name: Annotated[str, typer.Argument(default_factory=get_name)]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + # Custom type + parser: Annotated[ + Callable[[str], Any] | None, + Doc( + """ + Use your own custom types in Typer applications by defining a `parser` function that parses input into your own types: + + **Example** + + ```python + class CustomClass: + def __init__(self, value: str): + self.value = value + + def __str__(self): + return f"" + + def my_parser(value: str): + return CustomClass(value * 2) + + @app.command() + def main(arg: Annotated[CustomClass, typer.Argument(parser=my_parser): + print(f"arg is {arg}") + ``` + """ + ), + ] = None, + click_type: Annotated[ + click.ParamType | None, + Doc( + """ + Define this parameter to use a [custom Click type](https://click.palletsprojects.com/en/stable/parameters/#implementing-custom-types) in your Typer applications. 
+ + **Example** + + ```python + class MyClass: + def __init__(self, value: str): + self.value = value + + def __str__(self): + return f"" + + class MyParser(click.ParamType): + name = "MyClass" + + def convert(self, value, param, ctx): + return MyClass(value * 3) + + @app.command() + def main(arg: Annotated[MyClass, typer.Argument(click_type=MyParser())]): + print(f"arg is {arg}") + ``` + """ + ), + ] = None, + # TyperArgument + show_default: Annotated[ + bool | str, + Doc( + """ + When set to `False`, don't show the default value of this CLI Argument in the [help text](https://typer.tiangolo.com/tutorial/arguments/help/). + + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument(show_default=False)] = "Rick"): + print(f"Hello {name}") + ``` + """ + ), + ] = True, + show_choices: Annotated[ + bool, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + When set to `False`, this suppresses choices from being displayed inline when `prompt` is used. + """ + ), + ] = True, + show_envvar: Annotated[ + bool, + Doc( + """ + When an ["envvar"](https://typer.tiangolo.com/tutorial/arguments/envvar) is defined, prevent it from showing up in the help text: + + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument(envvar="ME", show_envvar=False)]): + print(f"Hello Mr. {name}") + ``` + """ + ), + ] = True, + help: Annotated[ + str | None, + Doc( + """ + Help text for this CLI Argument. + See [the tutorial about CLI Arguments with help](https://typer.tiangolo.com/tutorial/arguments/help/) for more dedails. + + **Example** + + ```python + @app.command() + def greet(name: Annotated[str, typer.Argument(help="Person to greet")]): + print(f"Hello {name}") + ``` + """ + ), + ] = None, + hidden: Annotated[ + bool, + Doc( + """ + Hide this CLI Argument from [help outputs](https://typer.tiangolo.com/tutorial/arguments/help). 
`False` by default. + + **Example** + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument(hidden=True)] = "World"): + print(f"Hello {name}") + ``` + """ + ), + ] = False, + # Choice + case_sensitive: Annotated[ + bool, + Doc( + """ + For a CLI Argument representing an [Enum (choice)](https://typer.tiangolo.com/tutorial/parameter-types/enum), + you can allow case-insensitive matching with this parameter: + + **Example** + + ```python + from enum import Enum + + class NeuralNetwork(str, Enum): + simple = "simple" + conv = "conv" + lstm = "lstm" + + @app.command() + def main( + network: Annotated[NeuralNetwork, typer.Argument(case_sensitive=False)]): + print(f"Training neural network of type: {network.value}") + ``` + + With this setting, "LSTM" or "lstm" will both be valid values that will be resolved to `NeuralNetwork.lstm`. + """ + ), + ] = True, + # Numbers + min: Annotated[ + int | float | None, + Doc( + """ + For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`), + you can define numeric validations with `min` and `max` values: + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Argument(min=1, max=1000)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid. 
+ """ + ), + ] = None, + max: Annotated[ + int | float | None, + Doc( + """ + For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) (`int` or `float`), + you can define numeric validations with `min` and `max` values: + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Argument(min=1, max=1000)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input an invalid number, an error will be shown, explaining why the value is invalid. + """ + ), + ] = None, + clamp: Annotated[ + bool, + Doc( + """ + For a CLI Argument representing a [number](https://typer.tiangolo.com/tutorial/parameter-types/number/) and that is bounded by using `min` and/or `max`, + you can opt to use the closest minimum or maximum value instead of raising an error. This is done by setting `clamp` to `True`. + + **Example** + + ```python + @app.command() + def main( + user: Annotated[str, typer.Argument()], + user_id: Annotated[int, typer.Argument(min=1, max=1000, clamp=True)], + ): + print(f"ID for {user} is {user_id}") + ``` + + If the user attempts to input 3420 for `user_id`, this will internally be converted to `1000`. 
+ """ + ), + ] = False, + # DateTime + formats: Annotated[ + list[str] | None, + Doc( + """ + For a CLI Argument representing a [DateTime object](https://typer.tiangolo.com/tutorial/parameter-types/datetime), + you can customize the formats that can be parsed automatically: + + **Example** + + ```python + from datetime import datetime + + @app.command() + def main( + birthday: Annotated[ + datetime, + typer.Argument( + formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S", "%m/%d/%Y"] + ), + ], + ): + print(f"Birthday defined at: {birthday}") + ``` + """ + ), + ] = None, + # File + mode: Annotated[ + str | None, + Doc( + """ + For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + you can customize the mode to open the file with. If unset, Typer will set a [sensible value by default](https://typer.tiangolo.com/tutorial/parameter-types/file/#advanced-mode). + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Argument(mode="a")]): + config.write("This is a single line\\n") + print("Config line written") + ``` + """ + ), + ] = None, + encoding: Annotated[ + str | None, + Doc( + """ + Customize the encoding of this CLI Argument represented by a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/). + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Argument(encoding="utf-8")]): + config.write("All the text gets written\\n") + ``` + """ + ), + ] = None, + errors: Annotated[ + str | None, + Doc( + """ + **Note**: you probably shouldn't use this parameter, it is inherited from Click and supported for compatibility. + + --- + + The error handling mode. + """ + ), + ] = "strict", + lazy: Annotated[ + bool | None, + Doc( + """ + For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + by default the file will not be created until you actually start writing to it. 
+ You can change this behaviour by setting this parameter. + By default, it's set to `True` for writing and to `False` for reading. + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Argument(mode="a", lazy=False)]): + config.write("This is a single line\\n") + print("Config line written") + ``` + """ + ), + ] = None, + atomic: Annotated[ + bool, + Doc( + """ + For a CLI Argument representing a [File object](https://typer.tiangolo.com/tutorial/parameter-types/file/), + you can ensure that all write instructions first go into a temporal file, and are only moved to the final destination after completing + by setting `atomic` to `True`. This can be useful for files with potential concurrent access. + + **Example** + + ```python + @app.command() + def main(config: Annotated[typer.FileText, typer.Argument(mode="a", atomic=True)]): + config.write("All the text") + ``` + """ + ), + ] = False, + # Path + exists: Annotated[ + bool, + Doc( + """ + When set to `True` for a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/), + additional validation is performed to check that the file or directory exists. If not, the value will be invalid. + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(exists=True)]): + text = config.read_text() + print(f"Config file contents: {text}") + ``` + """ + ), + ] = False, + file_okay: Annotated[ + bool, + Doc( + """ + Determine whether or not a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/) + is allowed to refer to a file. When this is set to `False`, the application will raise a validation error when a path to a file is given. 
+ + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(exists=True, file_okay=False)]): + print(f"Directory listing: {[x.name for x in config.iterdir()]}") + ``` + """ + ), + ] = True, + dir_okay: Annotated[ + bool, + Doc( + """ + Determine whether or not a [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/) + is allowed to refer to a directory. When this is set to `False`, the application will raise a validation error when a path to a directory is given. + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(exists=True, dir_okay=False)]): + text = config.read_text() + print(f"Config file contents: {text}") + ``` + """ + ), + ] = True, + writable: Annotated[ + bool, + Doc( + """ + Whether or not to perform a writable check for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/). + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(writable=True)]): + config.write_text("All the text") + ``` + """ + ), + ] = False, + readable: Annotated[ + bool, + Doc( + """ + Whether or not to perform a readable check for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/). + + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(readable=True)]): + config.read_text("All the text") + ``` + """ + ), + ] = True, + resolve_path: Annotated[ + bool, + Doc( + """ + Whether or not to fully resolve the path of this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/), + meaning that the path becomes absolute and symlinks are resolved. 
+ + **Example** + + ```python + from pathlib import Path + + @app.command() + def main(config: Annotated[Path, typer.Argument(resolve_path=True)]): + config.read_text("All the text") + ``` + """ + ), + ] = False, + allow_dash: Annotated[ + bool, + Doc( + """ + When set to `True`, a single dash for this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/) + would be a valid value, indicating standard streams. This is a more advanced use-case. + """ + ), + ] = False, + path_type: Annotated[ + None | type[str] | type[bytes], + Doc( + """ + A string type that will be used to represent this [`Path` argument](https://typer.tiangolo.com/tutorial/parameter-types/path/). + The default is `None` which means the return value will be either bytes or unicode, depending on what makes most sense given the input data. + This is a more advanced use-case. + """ + ), + ] = None, + # Rich settings + rich_help_panel: Annotated[ + str | None, + Doc( + """ + Set the panel name where you want this CLI Argument to be shown in the [help text](https://typer.tiangolo.com/tutorial/arguments/help). + + **Example** + + ```python + @app.command() + def main( + name: Annotated[str, typer.Argument(help="Who to greet")], + age: Annotated[str, typer.Option(help="Their age", rich_help_panel="Data")], + ): + print(f"Hello {name} of age {age}") + ``` + """ + ), + ] = None, +) -> Any: + """ + A [CLI Argument](https://typer.tiangolo.com/tutorial/arguments) is a positional parameter to your command line application. + + Often, CLI Arguments are required, meaning that users have to specify them. However, you can set them to be optional by defining a default value: + + ## Example + + ```python + @app.command() + def main(name: Annotated[str, typer.Argument()] = "World"): + print(f"Hello {name}!") + ``` + + Note how in this example, if `name` is not specified on the command line, the application will still execute normally and print "Hello World!". 
+ """ + return ArgumentInfo( + # Parameter + default=default, + # Arguments can only have one param declaration + # it will be generated from the param name + param_decls=None, + callback=callback, + metavar=metavar, + expose_value=expose_value, + is_eager=is_eager, + envvar=envvar, + shell_complete=shell_complete, + autocompletion=autocompletion, + default_factory=default_factory, + # Custom type + parser=parser, + click_type=click_type, + # TyperArgument + show_default=show_default, + show_choices=show_choices, + show_envvar=show_envvar, + help=help, + hidden=hidden, + # Choice + case_sensitive=case_sensitive, + # Numbers + min=min, + max=max, + clamp=clamp, + # DateTime + formats=formats, + # File + mode=mode, + encoding=encoding, + errors=errors, + lazy=lazy, + atomic=atomic, + # Path + exists=exists, + file_okay=file_okay, + dir_okay=dir_okay, + writable=writable, + readable=readable, + resolve_path=resolve_path, + allow_dash=allow_dash, + path_type=path_type, + # Rich settings + rich_help_panel=rich_help_panel, + ) diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/py.typed b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d85043238c3d47a89cd6d5127fce198c0564f0b7 --- /dev/null +++ b/.cache/uv/archive-v0/TGF58suahDkG7kpEgFUZl/typer/rich_utils.py @@ -0,0 +1,753 @@ +# Extracted and modified from https://github.com/ewels/rich-click + +import inspect +import io +from collections import defaultdict +from collections.abc import Iterable +from gettext import gettext as _ +from os import getenv +from typing import Any, Literal + +import click +from rich import box +from rich.align import Align +from rich.columns import Columns +from 
rich.console import Console, RenderableType, group +from rich.emoji import Emoji +from rich.highlighter import RegexHighlighter +from rich.markdown import Markdown +from rich.markup import escape +from rich.padding import Padding +from rich.panel import Panel +from rich.table import Table +from rich.text import Text +from rich.theme import Theme +from rich.traceback import Traceback +from typer.models import DeveloperExceptionConfig + +# Default styles +STYLE_OPTION = "bold cyan" +STYLE_SWITCH = "bold green" +STYLE_NEGATIVE_OPTION = "bold magenta" +STYLE_NEGATIVE_SWITCH = "bold red" +STYLE_METAVAR = "bold yellow" +STYLE_METAVAR_SEPARATOR = "dim" +STYLE_USAGE = "yellow" +STYLE_USAGE_COMMAND = "bold" +STYLE_DEPRECATED = "red" +STYLE_DEPRECATED_COMMAND = "dim" +STYLE_HELPTEXT_FIRST_LINE = "" +STYLE_HELPTEXT = "dim" +STYLE_OPTION_HELP = "" +STYLE_OPTION_DEFAULT = "dim" +STYLE_OPTION_ENVVAR = "dim yellow" +STYLE_REQUIRED_SHORT = "red" +STYLE_REQUIRED_LONG = "dim red" +STYLE_OPTIONS_PANEL_BORDER = "dim" +ALIGN_OPTIONS_PANEL: Literal["left", "center", "right"] = "left" +STYLE_OPTIONS_TABLE_SHOW_LINES = False +STYLE_OPTIONS_TABLE_LEADING = 0 +STYLE_OPTIONS_TABLE_PAD_EDGE = False +STYLE_OPTIONS_TABLE_PADDING = (0, 1) +STYLE_OPTIONS_TABLE_BOX = "" +STYLE_OPTIONS_TABLE_ROW_STYLES = None +STYLE_OPTIONS_TABLE_BORDER_STYLE = None +STYLE_COMMANDS_PANEL_BORDER = "dim" +ALIGN_COMMANDS_PANEL: Literal["left", "center", "right"] = "left" +STYLE_COMMANDS_TABLE_SHOW_LINES = False +STYLE_COMMANDS_TABLE_LEADING = 0 +STYLE_COMMANDS_TABLE_PAD_EDGE = False +STYLE_COMMANDS_TABLE_PADDING = (0, 1) +STYLE_COMMANDS_TABLE_BOX = "" +STYLE_COMMANDS_TABLE_ROW_STYLES = None +STYLE_COMMANDS_TABLE_BORDER_STYLE = None +STYLE_COMMANDS_TABLE_FIRST_COLUMN = "bold cyan" +STYLE_ERRORS_PANEL_BORDER = "red" +ALIGN_ERRORS_PANEL: Literal["left", "center", "right"] = "left" +STYLE_ERRORS_SUGGESTION = "dim" +STYLE_ABORTED = "red" +_TERMINAL_WIDTH = getenv("TERMINAL_WIDTH") +MAX_WIDTH = int(_TERMINAL_WIDTH) if 
_TERMINAL_WIDTH else None +COLOR_SYSTEM: Literal["auto", "standard", "256", "truecolor", "windows"] | None = ( + "auto" # Set to None to disable colors +) +_TYPER_FORCE_DISABLE_TERMINAL = getenv("_TYPER_FORCE_DISABLE_TERMINAL") +FORCE_TERMINAL = ( + True + if getenv("GITHUB_ACTIONS") or getenv("FORCE_COLOR") or getenv("PY_COLORS") + else None +) +if _TYPER_FORCE_DISABLE_TERMINAL: + FORCE_TERMINAL = False + +# Fixed strings +DEPRECATED_STRING = _("(deprecated) ") +DEFAULT_STRING = _("[default: {}]") +ENVVAR_STRING = _("[env var: {}]") +REQUIRED_SHORT_STRING = "*" +REQUIRED_LONG_STRING = _("[required]") +RANGE_STRING = " [{}]" +ARGUMENTS_PANEL_TITLE = _("Arguments") +OPTIONS_PANEL_TITLE = _("Options") +COMMANDS_PANEL_TITLE = _("Commands") +ERRORS_PANEL_TITLE = _("Error") +ABORTED_TEXT = _("Aborted.") +RICH_HELP = _("Try [blue]'{command_path} {help_option}'[/] for help.") + +MARKUP_MODE_MARKDOWN = "markdown" +MARKUP_MODE_RICH = "rich" +_RICH_HELP_PANEL_NAME = "rich_help_panel" +ANSI_PREFIX = "\033[" + +MarkupModeStrict = Literal["markdown", "rich"] + + +# Rich regex highlighter +class OptionHighlighter(RegexHighlighter): + """Highlights our special options.""" + + highlights = [ + r"(^|\W)(?P\-\w+)(?![a-zA-Z0-9])", + r"(^|\W)(?P