diff --git a/.gitattributes b/.gitattributes index 6367dca5a6d03aedc63780ef4b0b332e86a188ca..6eca11decbd2295fd1f86dffb5944caf73b23ba9 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1279,3 +1279,4 @@ evalkit_tf437/lib/python3.10/site-packages/transformers/__pycache__/modeling_out infer_4_33_0/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.11.2 filter=lfs diff=lfs merge=lfs -text falcon/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text falcon/lib/python3.10/site-packages/sklearn/preprocessing/__pycache__/_data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text +falcon/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text diff --git a/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/RECORD b/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..45959b6cc5877bfe3c3566ae84e58e404b1a7146 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/RECORD @@ -0,0 +1,33 @@ +google/cloud/__pycache__/version.cpython-310.pyc,, +google/cloud/_helpers/__init__.py,sha256=7aFMb13R-zqSPwjYJvI7JL5kzDXJAA-KyWYGFuBLWeQ,18644 +google/cloud/_helpers/__pycache__/__init__.cpython-310.pyc,, +google/cloud/_helpers/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/_http/__init__.py,sha256=p3Fx59gfYYVnS1-8RxTIG6f8Z6NIDNjySQzR55JdvUg,16649 +google/cloud/_http/__pycache__/__init__.cpython-310.pyc,, +google/cloud/_http/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/_testing/__init__.py,sha256=ezhO0xNaEnpMaH0eB3Qi10TLgQFvk7GKoztxD0AVLW0,3397 +google/cloud/_testing/__pycache__/__init__.cpython-310.pyc,, +google/cloud/_testing/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/client/__init__.py,sha256=zTsn3FwWE7Ukh43G85VunfcHUlNKf-dcWjj-i1D4gxo,12266 +google/cloud/client/__pycache__/__init__.cpython-310.pyc,, +google/cloud/client/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/environment_vars/__init__.py,sha256=lCe2EyQjeJO2ImbrSR23Cj2XpwGKFFy1xCZuQo2DHEo,1318 +google/cloud/environment_vars/__pycache__/__init__.cpython-310.pyc,, +google/cloud/environment_vars/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/exceptions/__init__.py,sha256=pyCO5lyFO_99DTwUsJqolO0iPOoFDhKG_ebTIfsoI6M,2199 +google/cloud/exceptions/__pycache__/__init__.cpython-310.pyc,, +google/cloud/exceptions/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/obsolete/__init__.py,sha256=MeXpOiQ6BOYjkIeIEGfL59ytVy_NX1kcnbCFQcUUqsw,1479 +google/cloud/obsolete/__pycache__/__init__.cpython-310.pyc,, +google/cloud/obsolete/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/operation/__init__.py,sha256=SD9exHvTtJm6cvzn8FOuz3y59fp9drpk_hBb2Ok1l-A,9083 +google/cloud/operation/__pycache__/__init__.cpython-310.pyc,, +google/cloud/operation/py.typed,sha256=0XXSft34lxBaTiAPy-Gu8ejOOFXoAskbySm9m2Uq7UY,63 +google/cloud/version.py,sha256=Y_K22yBdaAEamPLwca5c8mBwgo-dAnUXHyMVGtpBa8E,597 +google_cloud_core-2.4.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +google_cloud_core-2.4.1.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 
+google_cloud_core-2.4.1.dist-info/METADATA,sha256=X7E3MwB0E0e4Os-klv14ubTpaUmKsbiutDdoumDX-hk,2706 +google_cloud_core-2.4.1.dist-info/RECORD,, +google_cloud_core-2.4.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +google_cloud_core-2.4.1.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110 +google_cloud_core-2.4.1.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7 diff --git a/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/REQUESTED b/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/top_level.txt b/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb429113e0f9a73019fd799e8052093fea7f0c8b --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/google_cloud_core-2.4.1.dist-info/top_level.txt @@ -0,0 +1 @@ +google diff --git a/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/LICENSE b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/METADATA b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..291f0a6ed951ea765ed494a07f4cd81b83ab9030 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/METADATA @@ -0,0 +1,36 @@ +Metadata-Version: 2.1 +Name: googleapis-common-protos +Version: 1.65.0 +Summary: Common protobufs used in Google APIs +Home-page: https://github.com/googleapis/python-api-common-protos +Author: Google LLC +Author-email: googleapis-packages@google.com +License: Apache-2.0 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: protobuf !=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0.dev0,>=3.20.2 +Provides-Extra: grpc +Requires-Dist: grpcio <2.0.0.dev0,>=1.44.0 ; extra == 'grpc' + +Google APIs common protos +------------------------- + +.. image:: https://img.shields.io/pypi/v/googleapis-common-protos.svg + :target: https://pypi.org/project/googleapis-common-protos/ + + +googleapis-common-protos contains the python classes generated from the common +protos in the `googleapis/googleapis <https://github.com/googleapis/googleapis>`_ repository.
diff --git a/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/REQUESTED b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/WHEEL b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..7601b88b4a3b444bfddeee3ddf1172c2629f4991 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (70.2.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/top_level.txt b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..cb429113e0f9a73019fd799e8052093fea7f0c8b --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/googleapis_common_protos-1.65.0.dist-info/top_level.txt @@ -0,0 +1 @@ +google diff --git a/evalkit_tf437/lib/python3.10/site-packages/mpl_toolkits/axisartist/tests/__pycache__/test_axis_artist.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/mpl_toolkits/axisartist/tests/__pycache__/test_axis_artist.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1077e2c5109efe766078890f8fc84e13f3b438f4 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/mpl_toolkits/axisartist/tests/__pycache__/test_axis_artist.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/LICENSE b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b9077766e9b9bdcae49ea5c8fced750ed13ec8f7 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018, Tzu-ping Chung + +Permission to use, copy, modify, and distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/METADATA b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..52118f1e5c83bd7ef39196a749651fc87d176812 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: shellingham +Version: 1.5.4 +Summary: Tool to Detect Surrounding Shell +Home-page: https://github.com/sarugaku/shellingham +Author: Tzu-ping Chung +Author-email: uranusjr@gmail.com +License: ISC License +Keywords: shell +Classifier: Development Status :: 3 - Alpha +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: ISC License (ISCL) +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE + +============================================= +Shellingham: Tool to Detect Surrounding Shell +============================================= + +.. image:: https://img.shields.io/pypi/v/shellingham.svg + :target: https://pypi.org/project/shellingham/ + +Shellingham detects what shell the current Python executable is running in. + + +Usage +===== + +.. code-block:: python + + >>> import shellingham + >>> shellingham.detect_shell() + ('bash', '/bin/bash') + +``detect_shell`` pokes around the process's running environment to determine +what shell it is run in. It returns a 2-tuple: + +* The shell name, always lowercased. +* The command used to run the shell. + +``ShellDetectionFailure`` is raised if ``detect_shell`` fails to detect the +surrounding shell. + + +Notes +===== + +* The shell name is always lowercased. +* On Windows, the shell name is the name of the executable, minus the file + extension. + + +Notes for Application Developers +================================ + +Remember, your application's user is not necessarily using a shell. +Shellingham raises ``ShellDetectionFailure`` if there is no shell to detect, +but *your application should almost never do this to your user*. + +A practical approach to this is to wrap ``detect_shell`` in a try block, and +provide a sane default on failure: + +.. code-block:: python + + try: + shell = shellingham.detect_shell() + except shellingham.ShellDetectionFailure: + shell = provide_default() + + +There are a few choices for you to choose from. + +* The POSIX standard mandates the environment variable ``SHELL`` to refer to + "the user's preferred command language interpreter". This is always available + (even if the user is not in an interactive session), and likely the correct + choice to launch an interactive sub-shell with. +* A command ``sh`` is almost guaranteed to exist, likely at ``/bin/sh``, since + several POSIX tools rely on it. This should be suitable if you want to run a + (possibly non-interactive) script. +* All versions of DOS and Windows have an environment variable ``COMSPEC``. + This can always be used to launch a usable command prompt (e.g.
``cmd.exe`` on + Windows). + +Here's a simple implementation to provide a default shell: + +.. code-block:: python + + import os + + def provide_default(): + if os.name == 'posix': + return os.environ['SHELL'] + elif os.name == 'nt': + return os.environ['COMSPEC'] + raise NotImplementedError(f'OS {os.name!r} support not available') diff --git a/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/WHEEL b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..c34f1162ef9a50c355df1261ef6194ffc1b39975 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/shellingham-1.5.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/__init__.py b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8b909c0b5ef03b1e1e76dfbf4288f61860575da7 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/__init__.py @@ -0,0 +1,36 @@ +from .sets import (Set, Interval, Union, FiniteSet, ProductSet, + Intersection, imageset, Complement, SymmetricDifference, + DisjointUnion) + +from .fancysets import ImageSet, Range, ComplexRegion +from .contains import Contains +from .conditionset import ConditionSet +from .ordinals import Ordinal, OmegaPower, ord0 +from .powerset import PowerSet +from ..core.singleton import S +from .handlers.comparison import _eval_is_eq # noqa:F401 +Complexes = S.Complexes +EmptySet = S.EmptySet +Integers = S.Integers +Naturals = S.Naturals +Naturals0 = S.Naturals0 +Rationals = S.Rationals +Reals = S.Reals +UniversalSet = S.UniversalSet + +__all__ = [ + 'Set', 'Interval', 'Union', 'EmptySet', 'FiniteSet', 'ProductSet', + 'Intersection', 'imageset', 'Complement', 'SymmetricDifference', 'DisjointUnion', + + 'ImageSet', 'Range', 'ComplexRegion', 'Complexes', + + 'Contains', + + 'ConditionSet', + + 'Ordinal', 'OmegaPower', 'ord0', + + 'PowerSet', + + 'Reals', 'Naturals', 'Naturals0', 'UniversalSet', 'Integers', 'Rationals', +] diff --git a/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/handlers/__pycache__/mul.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/handlers/__pycache__/mul.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe43c2d1b463f72e328901ebbb9e285c8ee90d13 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/handlers/__pycache__/mul.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_contains.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_contains.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2346b5c7e97a7616f7e23fbf5f769c99d2ef9298 Binary files /dev/null and b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_contains.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_fancysets.cpython-310.pyc b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_fancysets.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17b40567baf41d5a2ede4ab8cdf54864ce3e40b7 Binary files /dev/null and
b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/__pycache__/test_fancysets.cpython-310.pyc differ diff --git a/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/test_sets.py b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/test_sets.py new file mode 100644 index 0000000000000000000000000000000000000000..657ab19a90eb88ca48f266f7a5cf050504caed43 --- /dev/null +++ b/evalkit_tf437/lib/python3.10/site-packages/sympy/sets/tests/test_sets.py @@ -0,0 +1,1753 @@ +from sympy.concrete.summations import Sum +from sympy.core.add import Add +from sympy.core.containers import TupleKind +from sympy.core.function import Lambda +from sympy.core.kind import NumberKind, UndefinedKind +from sympy.core.numbers import (Float, I, Rational, nan, oo, pi, zoo) +from sympy.core.power import Pow +from sympy.core.singleton import S +from sympy.core.symbol import (Symbol, symbols) +from sympy.core.sympify import sympify +from sympy.functions.elementary.miscellaneous import (Max, Min, sqrt) +from sympy.functions.elementary.piecewise import Piecewise +from sympy.functions.elementary.trigonometric import (cos, sin) +from sympy.logic.boolalg import (false, true) +from sympy.matrices.kind import MatrixKind +from sympy.matrices.dense import Matrix +from sympy.polys.rootoftools import rootof +from sympy.sets.contains import Contains +from sympy.sets.fancysets import (ImageSet, Range) +from sympy.sets.sets import (Complement, DisjointUnion, FiniteSet, Intersection, Interval, ProductSet, Set, SymmetricDifference, Union, imageset, SetKind) +from mpmath import mpi + +from sympy.core.expr import unchanged +from sympy.core.relational import Eq, Ne, Le, Lt, LessThan +from sympy.logic import And, Or, Xor +from sympy.testing.pytest import raises, XFAIL, warns_deprecated_sympy +from sympy.utilities.iterables import cartes + +from sympy.abc import x, y, z, m, n + +EmptySet = S.EmptySet + +def test_imageset(): + ints = S.Integers + assert imageset(x, x - 1, S.Naturals) is S.Naturals0 + assert imageset(x, x + 1, S.Naturals0) is S.Naturals + assert imageset(x, abs(x), S.Naturals0) is S.Naturals0 + assert imageset(x, abs(x), S.Naturals) is S.Naturals + assert imageset(x, abs(x), S.Integers) is S.Naturals0 + # issue 16878a + r = symbols('r', real=True) + assert imageset(x, (x, x), S.Reals)._contains((1, r)) == None + assert imageset(x, (x, x), S.Reals)._contains((1, 2)) == False + assert (r, r) in imageset(x, (x, x), S.Reals) + assert 1 + I in imageset(x, x + I, S.Reals) + assert {1} not in imageset(x, (x,), S.Reals) + assert (1, 1) not in imageset(x, (x,), S.Reals) + raises(TypeError, lambda: imageset(x, ints)) + raises(ValueError, lambda: imageset(x, y, z, ints)) + raises(ValueError, lambda: imageset(Lambda(x, cos(x)), y)) + assert (1, 2) in imageset(Lambda((x, y), (x, y)), ints, ints) + raises(ValueError, lambda: imageset(Lambda(x, x), ints, ints)) + assert imageset(cos, ints) == ImageSet(Lambda(x, cos(x)), ints) + def f(x): + return cos(x) + assert imageset(f, ints) == imageset(x, cos(x), ints) + f = lambda x: cos(x) + assert imageset(f, ints) == ImageSet(Lambda(x, cos(x)), ints) + assert imageset(x, 1, ints) == FiniteSet(1) + assert imageset(x, y, ints) == {y} + assert imageset((x, y), (1, z), ints, S.Reals) == {(1, z)} + clash = Symbol('x', integer=true) + assert (str(imageset(lambda x: x + clash, Interval(-2, 1)).lamda.expr) + in ('x0 + x', 'x + x0')) + x1, x2 = symbols("x1, x2") + assert imageset(lambda x, y: + Add(x, y), Interval(1, 2), Interval(2, 3)).dummy_eq( + ImageSet(Lambda((x1, x2), x1 + 
x2), + Interval(1, 2), Interval(2, 3))) + + +def test_is_empty(): + for s in [S.Naturals, S.Naturals0, S.Integers, S.Rationals, S.Reals, + S.UniversalSet]: + assert s.is_empty is False + + assert S.EmptySet.is_empty is True + + +def test_is_finiteset(): + for s in [S.Naturals, S.Naturals0, S.Integers, S.Rationals, S.Reals, + S.UniversalSet]: + assert s.is_finite_set is False + + assert S.EmptySet.is_finite_set is True + + assert FiniteSet(1, 2).is_finite_set is True + assert Interval(1, 2).is_finite_set is False + assert Interval(x, y).is_finite_set is None + assert ProductSet(FiniteSet(1), FiniteSet(2)).is_finite_set is True + assert ProductSet(FiniteSet(1), Interval(1, 2)).is_finite_set is False + assert ProductSet(FiniteSet(1), Interval(x, y)).is_finite_set is None + assert Union(Interval(0, 1), Interval(2, 3)).is_finite_set is False + assert Union(FiniteSet(1), Interval(2, 3)).is_finite_set is False + assert Union(FiniteSet(1), FiniteSet(2)).is_finite_set is True + assert Union(FiniteSet(1), Interval(x, y)).is_finite_set is None + assert Intersection(Interval(x, y), FiniteSet(1)).is_finite_set is True + assert Intersection(Interval(x, y), Interval(1, 2)).is_finite_set is None + assert Intersection(FiniteSet(x), FiniteSet(y)).is_finite_set is True + assert Complement(FiniteSet(1), Interval(x, y)).is_finite_set is True + assert Complement(Interval(x, y), FiniteSet(1)).is_finite_set is None + assert Complement(Interval(1, 2), FiniteSet(x)).is_finite_set is False + assert DisjointUnion(Interval(-5, 3), FiniteSet(x, y)).is_finite_set is False + assert DisjointUnion(S.EmptySet, FiniteSet(x, y), S.EmptySet).is_finite_set is True + + +def test_deprecated_is_EmptySet(): + with warns_deprecated_sympy(): + S.EmptySet.is_EmptySet + + with warns_deprecated_sympy(): + FiniteSet(1).is_EmptySet + + +def test_interval_arguments(): + assert Interval(0, oo) == Interval(0, oo, False, True) + assert Interval(0, oo).right_open is true + assert Interval(-oo, 0) == Interval(-oo, 0, True, False) + assert Interval(-oo, 0).left_open is true + assert Interval(oo, -oo) == S.EmptySet + assert Interval(oo, oo) == S.EmptySet + assert Interval(-oo, -oo) == S.EmptySet + assert Interval(oo, x) == S.EmptySet + assert Interval(oo, oo) == S.EmptySet + assert Interval(x, -oo) == S.EmptySet + assert Interval(x, x) == {x} + + assert isinstance(Interval(1, 1), FiniteSet) + e = Sum(x, (x, 1, 3)) + assert isinstance(Interval(e, e), FiniteSet) + + assert Interval(1, 0) == S.EmptySet + assert Interval(1, 1).measure == 0 + + assert Interval(1, 1, False, True) == S.EmptySet + assert Interval(1, 1, True, False) == S.EmptySet + assert Interval(1, 1, True, True) == S.EmptySet + + + assert isinstance(Interval(0, Symbol('a')), Interval) + assert Interval(Symbol('a', positive=True), 0) == S.EmptySet + raises(ValueError, lambda: Interval(0, S.ImaginaryUnit)) + raises(ValueError, lambda: Interval(0, Symbol('z', extended_real=False))) + raises(ValueError, lambda: Interval(x, x + S.ImaginaryUnit)) + + raises(NotImplementedError, lambda: Interval(0, 1, And(x, y))) + raises(NotImplementedError, lambda: Interval(0, 1, False, And(x, y))) + raises(NotImplementedError, lambda: Interval(0, 1, z, And(x, y))) + + +def test_interval_symbolic_end_points(): + a = Symbol('a', real=True) + + assert Union(Interval(0, a), Interval(0, 3)).sup == Max(a, 3) + assert Union(Interval(a, 0), Interval(-3, 0)).inf == Min(-3, a) + + assert Interval(0, a).contains(1) == LessThan(1, a) + + +def test_interval_is_empty(): + x, y = symbols('x, y') + r = Symbol('r', 
real=True) + p = Symbol('p', positive=True) + n = Symbol('n', negative=True) + nn = Symbol('nn', nonnegative=True) + assert Interval(1, 2).is_empty == False + assert Interval(3, 3).is_empty == False # FiniteSet + assert Interval(r, r).is_empty == False # FiniteSet + assert Interval(r, r + nn).is_empty == False + assert Interval(x, x).is_empty == False + assert Interval(1, oo).is_empty == False + assert Interval(-oo, oo).is_empty == False + assert Interval(-oo, 1).is_empty == False + assert Interval(x, y).is_empty == None + assert Interval(r, oo).is_empty == False # real implies finite + assert Interval(n, 0).is_empty == False + assert Interval(n, 0, left_open=True).is_empty == False + assert Interval(p, 0).is_empty == True # EmptySet + assert Interval(nn, 0).is_empty == None + assert Interval(n, p).is_empty == False + assert Interval(0, p, left_open=True).is_empty == False + assert Interval(0, p, right_open=True).is_empty == False + assert Interval(0, nn, left_open=True).is_empty == None + assert Interval(0, nn, right_open=True).is_empty == None + + +def test_union(): + assert Union(Interval(1, 2), Interval(2, 3)) == Interval(1, 3) + assert Union(Interval(1, 2), Interval(2, 3, True)) == Interval(1, 3) + assert Union(Interval(1, 3), Interval(2, 4)) == Interval(1, 4) + assert Union(Interval(1, 2), Interval(1, 3)) == Interval(1, 3) + assert Union(Interval(1, 3), Interval(1, 2)) == Interval(1, 3) + assert Union(Interval(1, 3, False, True), Interval(1, 2)) == \ + Interval(1, 3, False, True) + assert Union(Interval(1, 3), Interval(1, 2, False, True)) == Interval(1, 3) + assert Union(Interval(1, 2, True), Interval(1, 3)) == Interval(1, 3) + assert Union(Interval(1, 2, True), Interval(1, 3, True)) == \ + Interval(1, 3, True) + assert Union(Interval(1, 2, True), Interval(1, 3, True, True)) == \ + Interval(1, 3, True, True) + assert Union(Interval(1, 2, True, True), Interval(1, 3, True)) == \ + Interval(1, 3, True) + assert Union(Interval(1, 3), Interval(2, 3)) == Interval(1, 3) + assert Union(Interval(1, 3, False, True), Interval(2, 3)) == \ + Interval(1, 3) + assert Union(Interval(1, 2, False, True), Interval(2, 3, True)) != \ + Interval(1, 3) + assert Union(Interval(1, 2), S.EmptySet) == Interval(1, 2) + assert Union(S.EmptySet) == S.EmptySet + + assert Union(Interval(0, 1), *[FiniteSet(1.0/n) for n in range(1, 10)]) == \ + Interval(0, 1) + # issue #18241: + x = Symbol('x') + assert Union(Interval(0, 1), FiniteSet(1, x)) == Union( + Interval(0, 1), FiniteSet(x)) + assert unchanged(Union, Interval(0, 1), FiniteSet(2, x)) + + assert Interval(1, 2).union(Interval(2, 3)) == \ + Interval(1, 2) + Interval(2, 3) + + assert Interval(1, 2).union(Interval(2, 3)) == Interval(1, 3) + + assert Union(Set()) == Set() + + assert FiniteSet(1) + FiniteSet(2) + FiniteSet(3) == FiniteSet(1, 2, 3) + assert FiniteSet('ham') + FiniteSet('eggs') == FiniteSet('ham', 'eggs') + assert FiniteSet(1, 2, 3) + S.EmptySet == FiniteSet(1, 2, 3) + + assert FiniteSet(1, 2, 3) & FiniteSet(2, 3, 4) == FiniteSet(2, 3) + assert FiniteSet(1, 2, 3) | FiniteSet(2, 3, 4) == FiniteSet(1, 2, 3, 4) + + assert FiniteSet(1, 2, 3) & S.EmptySet == S.EmptySet + assert FiniteSet(1, 2, 3) | S.EmptySet == FiniteSet(1, 2, 3) + + x = Symbol("x") + y = Symbol("y") + z = Symbol("z") + assert S.EmptySet | FiniteSet(x, FiniteSet(y, z)) == \ + FiniteSet(x, FiniteSet(y, z)) + + # Test that Intervals and FiniteSets play nicely + assert Interval(1, 3) + FiniteSet(2) == Interval(1, 3) + assert Interval(1, 3, True, True) + FiniteSet(3) == \ + Interval(1, 3, 
True, False) + X = Interval(1, 3) + FiniteSet(5) + Y = Interval(1, 2) + FiniteSet(3) + XandY = X.intersect(Y) + assert 2 in X and 3 in X and 3 in XandY + assert XandY.is_subset(X) and XandY.is_subset(Y) + + raises(TypeError, lambda: Union(1, 2, 3)) + + assert X.is_iterable is False + + # issue 7843 + assert Union(S.EmptySet, FiniteSet(-sqrt(-I), sqrt(-I))) == \ + FiniteSet(-sqrt(-I), sqrt(-I)) + + assert Union(S.Reals, S.Integers) == S.Reals + + +def test_union_iter(): + # Use Range because it is ordered + u = Union(Range(3), Range(5), Range(4), evaluate=False) + + # Round robin + assert list(u) == [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 4] + + +def test_union_is_empty(): + assert (Interval(x, y) + FiniteSet(1)).is_empty == False + assert (Interval(x, y) + Interval(-x, y)).is_empty == None + + +def test_difference(): + assert Interval(1, 3) - Interval(1, 2) == Interval(2, 3, True) + assert Interval(1, 3) - Interval(2, 3) == Interval(1, 2, False, True) + assert Interval(1, 3, True) - Interval(2, 3) == Interval(1, 2, True, True) + assert Interval(1, 3, True) - Interval(2, 3, True) == \ + Interval(1, 2, True, False) + assert Interval(0, 2) - FiniteSet(1) == \ + Union(Interval(0, 1, False, True), Interval(1, 2, True, False)) + + # issue #18119 + assert S.Reals - FiniteSet(I) == S.Reals + assert S.Reals - FiniteSet(-I, I) == S.Reals + assert Interval(0, 10) - FiniteSet(-I, I) == Interval(0, 10) + assert Interval(0, 10) - FiniteSet(1, I) == Union( + Interval.Ropen(0, 1), Interval.Lopen(1, 10)) + assert S.Reals - FiniteSet(1, 2 + I, x, y**2) == Complement( + Union(Interval.open(-oo, 1), Interval.open(1, oo)), FiniteSet(x, y**2), + evaluate=False) + + assert FiniteSet(1, 2, 3) - FiniteSet(2) == FiniteSet(1, 3) + assert FiniteSet('ham', 'eggs') - FiniteSet('eggs') == FiniteSet('ham') + assert FiniteSet(1, 2, 3, 4) - Interval(2, 10, True, False) == \ + FiniteSet(1, 2) + assert FiniteSet(1, 2, 3, 4) - S.EmptySet == FiniteSet(1, 2, 3, 4) + assert Union(Interval(0, 2), FiniteSet(2, 3, 4)) - Interval(1, 3) == \ + Union(Interval(0, 1, False, True), FiniteSet(4)) + + assert -1 in S.Reals - S.Naturals + + +def test_Complement(): + A = FiniteSet(1, 3, 4) + B = FiniteSet(3, 4) + C = Interval(1, 3) + D = Interval(1, 2) + + assert Complement(A, B, evaluate=False).is_iterable is True + assert Complement(A, C, evaluate=False).is_iterable is True + assert Complement(C, D, evaluate=False).is_iterable is None + + assert FiniteSet(*Complement(A, B, evaluate=False)) == FiniteSet(1) + assert FiniteSet(*Complement(A, C, evaluate=False)) == FiniteSet(4) + raises(TypeError, lambda: FiniteSet(*Complement(C, A, evaluate=False))) + + assert Complement(Interval(1, 3), Interval(1, 2)) == Interval(2, 3, True) + assert Complement(FiniteSet(1, 3, 4), FiniteSet(3, 4)) == FiniteSet(1) + assert Complement(Union(Interval(0, 2), FiniteSet(2, 3, 4)), + Interval(1, 3)) == \ + Union(Interval(0, 1, False, True), FiniteSet(4)) + + assert 3 not in Complement(Interval(0, 5), Interval(1, 4), evaluate=False) + assert -1 in Complement(S.Reals, S.Naturals, evaluate=False) + assert 1 not in Complement(S.Reals, S.Naturals, evaluate=False) + + assert Complement(S.Integers, S.UniversalSet) == EmptySet + assert S.UniversalSet.complement(S.Integers) == EmptySet + + assert (0 not in S.Reals.intersect(S.Integers - FiniteSet(0))) + + assert S.EmptySet - S.Integers == S.EmptySet + + assert (S.Integers - FiniteSet(0)) - FiniteSet(1) == S.Integers - FiniteSet(0, 1) + + assert S.Reals - Union(S.Naturals, FiniteSet(pi)) == \ + Intersection(S.Reals - S.Naturals, 
S.Reals - FiniteSet(pi)) + # issue 12712 + assert Complement(FiniteSet(x, y, 2), Interval(-10, 10)) == \ + Complement(FiniteSet(x, y), Interval(-10, 10)) + + A = FiniteSet(*symbols('a:c')) + B = FiniteSet(*symbols('d:f')) + assert unchanged(Complement, ProductSet(A, A), B) + + A2 = ProductSet(A, A) + B3 = ProductSet(B, B, B) + assert A2 - B3 == A2 + assert B3 - A2 == B3 + + +def test_set_operations_nonsets(): + '''Tests that e.g. FiniteSet(1) * 2 raises TypeError''' + ops = [ + lambda a, b: a + b, + lambda a, b: a - b, + lambda a, b: a * b, + lambda a, b: a / b, + lambda a, b: a // b, + lambda a, b: a | b, + lambda a, b: a & b, + lambda a, b: a ^ b, + # FiniteSet(1) ** 2 gives a ProductSet + #lambda a, b: a ** b, + ] + Sx = FiniteSet(x) + Sy = FiniteSet(y) + sets = [ + {1}, + FiniteSet(1), + Interval(1, 2), + Union(Sx, Interval(1, 2)), + Intersection(Sx, Sy), + Complement(Sx, Sy), + ProductSet(Sx, Sy), + S.EmptySet, + ] + nums = [0, 1, 2, S(0), S(1), S(2)] + + for si in sets: + for ni in nums: + for op in ops: + raises(TypeError, lambda : op(si, ni)) + raises(TypeError, lambda : op(ni, si)) + raises(TypeError, lambda: si ** object()) + raises(TypeError, lambda: si ** {1}) + + +def test_complement(): + assert Complement({1, 2}, {1}) == {2} + assert Interval(0, 1).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, True), Interval(1, oo, True, True)) + assert Interval(0, 1, True, False).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, False), Interval(1, oo, True, True)) + assert Interval(0, 1, False, True).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, True), Interval(1, oo, False, True)) + assert Interval(0, 1, True, True).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, False), Interval(1, oo, False, True)) + + assert S.UniversalSet.complement(S.EmptySet) == S.EmptySet + assert S.UniversalSet.complement(S.Reals) == S.EmptySet + assert S.UniversalSet.complement(S.UniversalSet) == S.EmptySet + + assert S.EmptySet.complement(S.Reals) == S.Reals + + assert Union(Interval(0, 1), Interval(2, 3)).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, True), Interval(1, 2, True, True), + Interval(3, oo, True, True)) + + assert FiniteSet(0).complement(S.Reals) == \ + Union(Interval(-oo, 0, True, True), Interval(0, oo, True, True)) + + assert (FiniteSet(5) + Interval(S.NegativeInfinity, + 0)).complement(S.Reals) == \ + Interval(0, 5, True, True) + Interval(5, S.Infinity, True, True) + + assert FiniteSet(1, 2, 3).complement(S.Reals) == \ + Interval(S.NegativeInfinity, 1, True, True) + \ + Interval(1, 2, True, True) + Interval(2, 3, True, True) +\ + Interval(3, S.Infinity, True, True) + + assert FiniteSet(x).complement(S.Reals) == Complement(S.Reals, FiniteSet(x)) + + assert FiniteSet(0, x).complement(S.Reals) == Complement(Interval(-oo, 0, True, True) + + Interval(0, oo, True, True) + , FiniteSet(x), evaluate=False) + + square = Interval(0, 1) * Interval(0, 1) + notsquare = square.complement(S.Reals*S.Reals) + + assert all(pt in square for pt in [(0, 0), (.5, .5), (1, 0), (1, 1)]) + assert not any( + pt in notsquare for pt in [(0, 0), (.5, .5), (1, 0), (1, 1)]) + assert not any(pt in square for pt in [(-1, 0), (1.5, .5), (10, 10)]) + assert all(pt in notsquare for pt in [(-1, 0), (1.5, .5), (10, 10)]) + + +def test_intersect1(): + assert all(S.Integers.intersection(i) is i for i in + (S.Naturals, S.Naturals0)) + assert all(i.intersection(S.Integers) is i for i in + (S.Naturals, S.Naturals0)) + s = S.Naturals0 + assert S.Naturals.intersection(s) is S.Naturals + assert 
s.intersection(S.Naturals) is S.Naturals + x = Symbol('x') + assert Interval(0, 2).intersect(Interval(1, 2)) == Interval(1, 2) + assert Interval(0, 2).intersect(Interval(1, 2, True)) == \ + Interval(1, 2, True) + assert Interval(0, 2, True).intersect(Interval(1, 2)) == \ + Interval(1, 2, False, False) + assert Interval(0, 2, True, True).intersect(Interval(1, 2)) == \ + Interval(1, 2, False, True) + assert Interval(0, 2).intersect(Union(Interval(0, 1), Interval(2, 3))) == \ + Union(Interval(0, 1), Interval(2, 2)) + + assert FiniteSet(1, 2).intersect(FiniteSet(1, 2, 3)) == FiniteSet(1, 2) + assert FiniteSet(1, 2, x).intersect(FiniteSet(x)) == FiniteSet(x) + assert FiniteSet('ham', 'eggs').intersect(FiniteSet('ham')) == \ + FiniteSet('ham') + assert FiniteSet(1, 2, 3, 4, 5).intersect(S.EmptySet) == S.EmptySet + + assert Interval(0, 5).intersect(FiniteSet(1, 3)) == FiniteSet(1, 3) + assert Interval(0, 1, True, True).intersect(FiniteSet(1)) == S.EmptySet + + assert Union(Interval(0, 1), Interval(2, 3)).intersect(Interval(1, 2)) == \ + Union(Interval(1, 1), Interval(2, 2)) + assert Union(Interval(0, 1), Interval(2, 3)).intersect(Interval(0, 2)) == \ + Union(Interval(0, 1), Interval(2, 2)) + assert Union(Interval(0, 1), Interval(2, 3)).intersect(Interval(1, 2, True, True)) == \ + S.EmptySet + assert Union(Interval(0, 1), Interval(2, 3)).intersect(S.EmptySet) == \ + S.EmptySet + assert Union(Interval(0, 5), FiniteSet('ham')).intersect(FiniteSet(2, 3, 4, 5, 6)) == \ + Intersection(FiniteSet(2, 3, 4, 5, 6), Union(FiniteSet('ham'), Interval(0, 5))) + assert Intersection(FiniteSet(1, 2, 3), Interval(2, x), Interval(3, y)) == \ + Intersection(FiniteSet(3), Interval(2, x), Interval(3, y), evaluate=False) + assert Intersection(FiniteSet(1, 2), Interval(0, 3), Interval(x, y)) == \ + Intersection({1, 2}, Interval(x, y), evaluate=False) + assert Intersection(FiniteSet(1, 2, 4), Interval(0, 3), Interval(x, y)) == \ + Intersection({1, 2}, Interval(x, y), evaluate=False) + # XXX: Is the real=True necessary here? + # https://github.com/sympy/sympy/issues/17532 + m, n = symbols('m, n', real=True) + assert Intersection(FiniteSet(m), FiniteSet(m, n), Interval(m, m+1)) == \ + FiniteSet(m) + + # issue 8217 + assert Intersection(FiniteSet(x), FiniteSet(y)) == \ + Intersection(FiniteSet(x), FiniteSet(y), evaluate=False) + assert FiniteSet(x).intersect(S.Reals) == \ + Intersection(S.Reals, FiniteSet(x), evaluate=False) + + # tests for the intersection alias + assert Interval(0, 5).intersection(FiniteSet(1, 3)) == FiniteSet(1, 3) + assert Interval(0, 1, True, True).intersection(FiniteSet(1)) == S.EmptySet + + assert Union(Interval(0, 1), Interval(2, 3)).intersection(Interval(1, 2)) == \ + Union(Interval(1, 1), Interval(2, 2)) + + # canonical boundary selected + a = sqrt(2*sqrt(6) + 5) + b = sqrt(2) + sqrt(3) + assert Interval(a, 4).intersection(Interval(b, 5)) == Interval(b, 4) + assert Interval(1, a).intersection(Interval(0, b)) == Interval(1, b) + + +def test_intersection_interval_float(): + # intersection of Intervals with mixed Rational/Float boundaries should + # lead to Float boundaries in all cases regardless of which Interval is + # open or closed. 
+ typs = [ + (Interval, Interval, Interval), + (Interval, Interval.open, Interval.open), + (Interval, Interval.Lopen, Interval.Lopen), + (Interval, Interval.Ropen, Interval.Ropen), + (Interval.open, Interval.open, Interval.open), + (Interval.open, Interval.Lopen, Interval.open), + (Interval.open, Interval.Ropen, Interval.open), + (Interval.Lopen, Interval.Lopen, Interval.Lopen), + (Interval.Lopen, Interval.Ropen, Interval.open), + (Interval.Ropen, Interval.Ropen, Interval.Ropen), + ] + + as_float = lambda a1, a2: a2 if isinstance(a2, float) else a1 + + for t1, t2, t3 in typs: + for t1i, t2i in [(t1, t2), (t2, t1)]: + for a1, a2, b1, b2 in cartes([2, 2.0], [2, 2.0], [3, 3.0], [3, 3.0]): + I1 = t1(a1, b1) + I2 = t2(a2, b2) + I3 = t3(as_float(a1, a2), as_float(b1, b2)) + assert I1.intersect(I2) == I3 + + +def test_intersection(): + # iterable + i = Intersection(FiniteSet(1, 2, 3), Interval(2, 5), evaluate=False) + assert i.is_iterable + assert set(i) == {S(2), S(3)} + + # challenging intervals + x = Symbol('x', real=True) + i = Intersection(Interval(0, 3), Interval(x, 6)) + assert (5 in i) is False + raises(TypeError, lambda: 2 in i) + + # Singleton special cases + assert Intersection(Interval(0, 1), S.EmptySet) == S.EmptySet + assert Intersection(Interval(-oo, oo), Interval(-oo, x)) == Interval(-oo, x) + + # Products + line = Interval(0, 5) + i = Intersection(line**2, line**3, evaluate=False) + assert (2, 2) not in i + assert (2, 2, 2) not in i + raises(TypeError, lambda: list(i)) + + a = Intersection(Intersection(S.Integers, S.Naturals, evaluate=False), S.Reals, evaluate=False) + assert a._argset == frozenset([Intersection(S.Naturals, S.Integers, evaluate=False), S.Reals]) + + assert Intersection(S.Complexes, FiniteSet(S.ComplexInfinity)) == S.EmptySet + + # issue 12178 + assert Intersection() == S.UniversalSet + + # issue 16987 + assert Intersection({1}, {1}, {x}) == Intersection({1}, {x}) + + +def test_issue_9623(): + n = Symbol('n') + + a = S.Reals + b = Interval(0, oo) + c = FiniteSet(n) + + assert Intersection(a, b, c) == Intersection(b, c) + assert Intersection(Interval(1, 2), Interval(3, 4), FiniteSet(n)) == EmptySet + + +def test_is_disjoint(): + assert Interval(0, 2).is_disjoint(Interval(1, 2)) == False + assert Interval(0, 2).is_disjoint(Interval(3, 4)) == True + + +def test_ProductSet__len__(): + A = FiniteSet(1, 2) + B = FiniteSet(1, 2, 3) + assert ProductSet(A).__len__() == 2 + assert ProductSet(A).__len__() is not S(2) + assert ProductSet(A, B).__len__() == 6 + assert ProductSet(A, B).__len__() is not S(6) + + +def test_ProductSet(): + # ProductSet is always a set of Tuples + assert ProductSet(S.Reals) == S.Reals ** 1 + assert ProductSet(S.Reals, S.Reals) == S.Reals ** 2 + assert ProductSet(S.Reals, S.Reals, S.Reals) == S.Reals ** 3 + + assert ProductSet(S.Reals) != S.Reals + assert ProductSet(S.Reals, S.Reals) == S.Reals * S.Reals + assert ProductSet(S.Reals, S.Reals, S.Reals) != S.Reals * S.Reals * S.Reals + assert ProductSet(S.Reals, S.Reals, S.Reals) == (S.Reals * S.Reals * S.Reals).flatten() + + assert 1 not in ProductSet(S.Reals) + assert (1,) in ProductSet(S.Reals) + + assert 1 not in ProductSet(S.Reals, S.Reals) + assert (1, 2) in ProductSet(S.Reals, S.Reals) + assert (1, I) not in ProductSet(S.Reals, S.Reals) + + assert (1, 2, 3) in ProductSet(S.Reals, S.Reals, S.Reals) + assert (1, 2, 3) in S.Reals ** 3 + assert (1, 2, 3) not in S.Reals * S.Reals * S.Reals + assert ((1, 2), 3) in S.Reals * S.Reals * S.Reals + assert (1, (2, 3)) not in S.Reals * S.Reals * S.Reals + 
assert (1, (2, 3)) in S.Reals * (S.Reals * S.Reals) + + assert ProductSet() == FiniteSet(()) + assert ProductSet(S.Reals, S.EmptySet) == S.EmptySet + + # See GH-17458 + + for ni in range(5): + Rn = ProductSet(*(S.Reals,) * ni) + assert (1,) * ni in Rn + assert 1 not in Rn + + assert (S.Reals * S.Reals) * S.Reals != S.Reals * (S.Reals * S.Reals) + + S1 = S.Reals + S2 = S.Integers + x1 = pi + x2 = 3 + assert x1 in S1 + assert x2 in S2 + assert (x1, x2) in S1 * S2 + S3 = S1 * S2 + x3 = (x1, x2) + assert x3 in S3 + assert (x3, x3) in S3 * S3 + assert x3 + x3 not in S3 * S3 + + raises(ValueError, lambda: S.Reals**-1) + with warns_deprecated_sympy(): + ProductSet(FiniteSet(s) for s in range(2)) + raises(TypeError, lambda: ProductSet(None)) + + S1 = FiniteSet(1, 2) + S2 = FiniteSet(3, 4) + S3 = ProductSet(S1, S2) + assert (S3.as_relational(x, y) + == And(S1.as_relational(x), S2.as_relational(y)) + == And(Or(Eq(x, 1), Eq(x, 2)), Or(Eq(y, 3), Eq(y, 4)))) + raises(ValueError, lambda: S3.as_relational(x)) + raises(ValueError, lambda: S3.as_relational(x, 1)) + raises(ValueError, lambda: ProductSet(Interval(0, 1)).as_relational(x, y)) + + Z2 = ProductSet(S.Integers, S.Integers) + assert Z2.contains((1, 2)) is S.true + assert Z2.contains((1,)) is S.false + assert Z2.contains(x) == Contains(x, Z2, evaluate=False) + assert Z2.contains(x).subs(x, 1) is S.false + assert Z2.contains((x, 1)).subs(x, 2) is S.true + assert Z2.contains((x, y)) == Contains(x, S.Integers) & Contains(y, S.Integers) + assert unchanged(Contains, (x, y), Z2) + assert Contains((1, 2), Z2) is S.true + + +def test_ProductSet_of_single_arg_is_not_arg(): + assert unchanged(ProductSet, Interval(0, 1)) + assert unchanged(ProductSet, ProductSet(Interval(0, 1))) + + +def test_ProductSet_is_empty(): + assert ProductSet(S.Integers, S.Reals).is_empty == False + assert ProductSet(Interval(x, 1), S.Reals).is_empty == None + + +def test_interval_subs(): + a = Symbol('a', real=True) + + assert Interval(0, a).subs(a, 2) == Interval(0, 2) + assert Interval(a, 0).subs(a, 2) == S.EmptySet + + +def test_interval_to_mpi(): + assert Interval(0, 1).to_mpi() == mpi(0, 1) + assert Interval(0, 1, True, False).to_mpi() == mpi(0, 1) + assert type(Interval(0, 1).to_mpi()) == type(mpi(0, 1)) + + +def test_set_evalf(): + assert Interval(S(11)/64, S.Half).evalf() == Interval( + Float('0.171875'), Float('0.5')) + assert Interval(x, S.Half, right_open=True).evalf() == Interval( + x, Float('0.5'), right_open=True) + assert Interval(-oo, S.Half).evalf() == Interval(-oo, Float('0.5')) + assert FiniteSet(2, x).evalf() == FiniteSet(Float('2.0'), x) + + +def test_measure(): + a = Symbol('a', real=True) + + assert Interval(1, 3).measure == 2 + assert Interval(0, a).measure == a + assert Interval(1, a).measure == a - 1 + + assert Union(Interval(1, 2), Interval(3, 4)).measure == 2 + assert Union(Interval(1, 2), Interval(3, 4), FiniteSet(5, 6, 7)).measure \ + == 2 + + assert FiniteSet(1, 2, oo, a, -oo, -5).measure == 0 + + assert S.EmptySet.measure == 0 + + square = Interval(0, 10) * Interval(0, 10) + offsetsquare = Interval(5, 15) * Interval(5, 15) + band = Interval(-oo, oo) * Interval(2, 4) + + assert square.measure == offsetsquare.measure == 100 + assert (square + offsetsquare).measure == 175 # there is some overlap + assert (square - offsetsquare).measure == 75 + assert (square * FiniteSet(1, 2, 3)).measure == 0 + assert (square.intersect(band)).measure == 20 + assert (square + band).measure is oo + assert (band * FiniteSet(1, 2, 3)).measure is nan + + +def test_is_subset(): 
+ assert Interval(0, 1).is_subset(Interval(0, 2)) is True + assert Interval(0, 3).is_subset(Interval(0, 2)) is False + assert Interval(0, 1).is_subset(FiniteSet(0, 1)) is False + + assert FiniteSet(1, 2).is_subset(FiniteSet(1, 2, 3, 4)) + assert FiniteSet(4, 5).is_subset(FiniteSet(1, 2, 3, 4)) is False + assert FiniteSet(1).is_subset(Interval(0, 2)) + assert FiniteSet(1, 2).is_subset(Interval(0, 2, True, True)) is False + assert (Interval(1, 2) + FiniteSet(3)).is_subset( + Interval(0, 2, False, True) + FiniteSet(2, 3)) + + assert Interval(3, 4).is_subset(Union(Interval(0, 1), Interval(2, 5))) is True + assert Interval(3, 6).is_subset(Union(Interval(0, 1), Interval(2, 5))) is False + + assert FiniteSet(1, 2, 3, 4).is_subset(Interval(0, 5)) is True + assert S.EmptySet.is_subset(FiniteSet(1, 2, 3)) is True + + assert Interval(0, 1).is_subset(S.EmptySet) is False + assert S.EmptySet.is_subset(S.EmptySet) is True + + raises(ValueError, lambda: S.EmptySet.is_subset(1)) + + # tests for the issubset alias + assert FiniteSet(1, 2, 3, 4).issubset(Interval(0, 5)) is True + assert S.EmptySet.issubset(FiniteSet(1, 2, 3)) is True + + assert S.Naturals.is_subset(S.Integers) + assert S.Naturals0.is_subset(S.Integers) + + assert FiniteSet(x).is_subset(FiniteSet(y)) is None + assert FiniteSet(x).is_subset(FiniteSet(y).subs(y, x)) is True + assert FiniteSet(x).is_subset(FiniteSet(y).subs(y, x+1)) is False + + assert Interval(0, 1).is_subset(Interval(0, 1, left_open=True)) is False + assert Interval(-2, 3).is_subset(Union(Interval(-oo, -2), Interval(3, oo))) is False + + n = Symbol('n', integer=True) + assert Range(-3, 4, 1).is_subset(FiniteSet(-10, 10)) is False + assert Range(S(10)**100).is_subset(FiniteSet(0, 1, 2)) is False + assert Range(6, 0, -2).is_subset(FiniteSet(2, 4, 6)) is True + assert Range(1, oo).is_subset(FiniteSet(1, 2)) is False + assert Range(-oo, 1).is_subset(FiniteSet(1)) is False + assert Range(3).is_subset(FiniteSet(0, 1, n)) is None + assert Range(n, n + 2).is_subset(FiniteSet(n, n + 1)) is True + assert Range(5).is_subset(Interval(0, 4, right_open=True)) is False + #issue 19513 + assert imageset(Lambda(n, 1/n), S.Integers).is_subset(S.Reals) is None + +def test_is_proper_subset(): + assert Interval(0, 1).is_proper_subset(Interval(0, 2)) is True + assert Interval(0, 3).is_proper_subset(Interval(0, 2)) is False + assert S.EmptySet.is_proper_subset(FiniteSet(1, 2, 3)) is True + + raises(ValueError, lambda: Interval(0, 1).is_proper_subset(0)) + + +def test_is_superset(): + assert Interval(0, 1).is_superset(Interval(0, 2)) == False + assert Interval(0, 3).is_superset(Interval(0, 2)) + + assert FiniteSet(1, 2).is_superset(FiniteSet(1, 2, 3, 4)) == False + assert FiniteSet(4, 5).is_superset(FiniteSet(1, 2, 3, 4)) == False + assert FiniteSet(1).is_superset(Interval(0, 2)) == False + assert FiniteSet(1, 2).is_superset(Interval(0, 2, True, True)) == False + assert (Interval(1, 2) + FiniteSet(3)).is_superset( + Interval(0, 2, False, True) + FiniteSet(2, 3)) == False + + assert Interval(3, 4).is_superset(Union(Interval(0, 1), Interval(2, 5))) == False + + assert FiniteSet(1, 2, 3, 4).is_superset(Interval(0, 5)) == False + assert S.EmptySet.is_superset(FiniteSet(1, 2, 3)) == False + + assert Interval(0, 1).is_superset(S.EmptySet) == True + assert S.EmptySet.is_superset(S.EmptySet) == True + + raises(ValueError, lambda: S.EmptySet.is_superset(1)) + + # tests for the issuperset alias + assert Interval(0, 1).issuperset(S.EmptySet) == True + assert S.EmptySet.issuperset(S.EmptySet) == True + + +def 
test_is_proper_superset(): + assert Interval(0, 1).is_proper_superset(Interval(0, 2)) is False + assert Interval(0, 3).is_proper_superset(Interval(0, 2)) is True + assert FiniteSet(1, 2, 3).is_proper_superset(S.EmptySet) is True + + raises(ValueError, lambda: Interval(0, 1).is_proper_superset(0)) + + +def test_contains(): + assert Interval(0, 2).contains(1) is S.true + assert Interval(0, 2).contains(3) is S.false + assert Interval(0, 2, True, False).contains(0) is S.false + assert Interval(0, 2, True, False).contains(2) is S.true + assert Interval(0, 2, False, True).contains(0) is S.true + assert Interval(0, 2, False, True).contains(2) is S.false + assert Interval(0, 2, True, True).contains(0) is S.false + assert Interval(0, 2, True, True).contains(2) is S.false + + assert (Interval(0, 2) in Interval(0, 2)) is False + + assert FiniteSet(1, 2, 3).contains(2) is S.true + assert FiniteSet(1, 2, Symbol('x')).contains(Symbol('x')) is S.true + + assert FiniteSet(y)._contains(x) == Eq(y, x, evaluate=False) + raises(TypeError, lambda: x in FiniteSet(y)) + assert FiniteSet({x, y})._contains({x}) == Eq({x, y}, {x}, evaluate=False) + assert FiniteSet({x, y}).subs(y, x)._contains({x}) is S.true + assert FiniteSet({x, y}).subs(y, x+1)._contains({x}) is S.false + + # issue 8197 + from sympy.abc import a, b + assert FiniteSet(b).contains(-a) == Eq(b, -a) + assert FiniteSet(b).contains(a) == Eq(b, a) + assert FiniteSet(a).contains(1) == Eq(a, 1) + raises(TypeError, lambda: 1 in FiniteSet(a)) + + # issue 8209 + rad1 = Pow(Pow(2, Rational(1, 3)) - 1, Rational(1, 3)) + rad2 = Pow(Rational(1, 9), Rational(1, 3)) - Pow(Rational(2, 9), Rational(1, 3)) + Pow(Rational(4, 9), Rational(1, 3)) + s1 = FiniteSet(rad1) + s2 = FiniteSet(rad2) + assert s1 - s2 == S.EmptySet + + items = [1, 2, S.Infinity, S('ham'), -1.1] + fset = FiniteSet(*items) + assert all(item in fset for item in items) + assert all(fset.contains(item) is S.true for item in items) + + assert Union(Interval(0, 1), Interval(2, 5)).contains(3) is S.true + assert Union(Interval(0, 1), Interval(2, 5)).contains(6) is S.false + assert Union(Interval(0, 1), FiniteSet(2, 5)).contains(3) is S.false + + assert S.EmptySet.contains(1) is S.false + assert FiniteSet(rootof(x**3 + x - 1, 0)).contains(S.Infinity) is S.false + + assert rootof(x**5 + x**3 + 1, 0) in S.Reals + assert rootof(x**5 + x**3 + 1, 1) not in S.Reals + + # non-bool results + assert Union(Interval(1, 2), Interval(3, 4)).contains(x) == \ + Or(And(S.One <= x, x <= 2), And(S(3) <= x, x <= 4)) + assert Intersection(Interval(1, x), Interval(2, 3)).contains(y) == \ + And(y <= 3, y <= x, S.One <= y, S(2) <= y) + + assert S.Complexes.contains(S.ComplexInfinity) == S.false + + +def test_interval_symbolic(): + x = Symbol('x') + e = Interval(0, 1) + assert e.contains(x) == And(S.Zero <= x, x <= 1) + raises(TypeError, lambda: x in e) + e = Interval(0, 1, True, True) + assert e.contains(x) == And(S.Zero < x, x < 1) + c = Symbol('c', real=False) + assert Interval(x, x + 1).contains(c) == False + e = Symbol('e', extended_real=True) + assert Interval(-oo, oo).contains(e) == And( + S.NegativeInfinity < e, e < S.Infinity) + + +def test_union_contains(): + x = Symbol('x') + i1 = Interval(0, 1) + i2 = Interval(2, 3) + i3 = Union(i1, i2) + assert i3.as_relational(x) == Or(And(S.Zero <= x, x <= 1), And(S(2) <= x, x <= 3)) + raises(TypeError, lambda: x in i3) + e = i3.contains(x) + assert e == i3.as_relational(x) + assert e.subs(x, -0.5) is false + assert e.subs(x, 0.5) is true + assert e.subs(x, 1.5) is false +
assert e.subs(x, 2.5) is true + assert e.subs(x, 3.5) is false + + U = Interval(0, 2, True, True) + Interval(10, oo) + FiniteSet(-1, 2, 5, 6) + assert all(el not in U for el in [0, 4, -oo]) + assert all(el in U for el in [2, 5, 10]) + + +def test_is_number(): + assert Interval(0, 1).is_number is False + assert Set().is_number is False + + +def test_Interval_is_left_unbounded(): + assert Interval(3, 4).is_left_unbounded is False + assert Interval(-oo, 3).is_left_unbounded is True + assert Interval(Float("-inf"), 3).is_left_unbounded is True + + +def test_Interval_is_right_unbounded(): + assert Interval(3, 4).is_right_unbounded is False + assert Interval(3, oo).is_right_unbounded is True + assert Interval(3, Float("+inf")).is_right_unbounded is True + + +def test_Interval_as_relational(): + x = Symbol('x') + + assert Interval(-1, 2, False, False).as_relational(x) == \ + And(Le(-1, x), Le(x, 2)) + assert Interval(-1, 2, True, False).as_relational(x) == \ + And(Lt(-1, x), Le(x, 2)) + assert Interval(-1, 2, False, True).as_relational(x) == \ + And(Le(-1, x), Lt(x, 2)) + assert Interval(-1, 2, True, True).as_relational(x) == \ + And(Lt(-1, x), Lt(x, 2)) + + assert Interval(-oo, 2, right_open=False).as_relational(x) == And(Lt(-oo, x), Le(x, 2)) + assert Interval(-oo, 2, right_open=True).as_relational(x) == And(Lt(-oo, x), Lt(x, 2)) + + assert Interval(-2, oo, left_open=False).as_relational(x) == And(Le(-2, x), Lt(x, oo)) + assert Interval(-2, oo, left_open=True).as_relational(x) == And(Lt(-2, x), Lt(x, oo)) + + assert Interval(-oo, oo).as_relational(x) == And(Lt(-oo, x), Lt(x, oo)) + x = Symbol('x', real=True) + y = Symbol('y', real=True) + assert Interval(x, y).as_relational(x) == (x <= y) + assert Interval(y, x).as_relational(x) == (y <= x) + + +def test_Finite_as_relational(): + x = Symbol('x') + y = Symbol('y') + + assert FiniteSet(1, 2).as_relational(x) == Or(Eq(x, 1), Eq(x, 2)) + assert FiniteSet(y, -5).as_relational(x) == Or(Eq(x, y), Eq(x, -5)) + + +def test_Union_as_relational(): + x = Symbol('x') + assert (Interval(0, 1) + FiniteSet(2)).as_relational(x) == \ + Or(And(Le(0, x), Le(x, 1)), Eq(x, 2)) + assert (Interval(0, 1, True, True) + FiniteSet(1)).as_relational(x) == \ + And(Lt(0, x), Le(x, 1)) + assert Or(x < 0, x > 0).as_set().as_relational(x) == \ + And((x > -oo), (x < oo), Ne(x, 0)) + assert (Interval.Ropen(1, 3) + Interval.Lopen(3, 5) + ).as_relational(x) == And(Ne(x, 3), (x >= 1), (x <= 5)) + + +def test_Intersection_as_relational(): + x = Symbol('x') + assert (Intersection(Interval(0, 1), FiniteSet(2), + evaluate=False).as_relational(x) + == And(And(Le(0, x), Le(x, 1)), Eq(x, 2))) + + +def test_Complement_as_relational(): + x = Symbol('x') + expr = Complement(Interval(0, 1), FiniteSet(2), evaluate=False) + assert expr.as_relational(x) == \ + And(Le(0, x), Le(x, 1), Ne(x, 2)) + + +@XFAIL +def test_Complement_as_relational_fail(): + x = Symbol('x') + expr = Complement(Interval(0, 1), FiniteSet(2), evaluate=False) + # XXX This example fails because 0 <= x changes to x >= 0 + # during the evaluation.
+ assert expr.as_relational(x) == \ + (0 <= x) & (x <= 1) & Ne(x, 2) + + +def test_SymmetricDifference_as_relational(): + x = Symbol('x') + expr = SymmetricDifference(Interval(0, 1), FiniteSet(2), evaluate=False) + assert expr.as_relational(x) == Xor(Eq(x, 2), Le(0, x) & Le(x, 1)) + + +def test_EmptySet(): + assert S.EmptySet.as_relational(Symbol('x')) is S.false + assert S.EmptySet.intersect(S.UniversalSet) == S.EmptySet + assert S.EmptySet.boundary == S.EmptySet + + +def test_finite_basic(): + x = Symbol('x') + A = FiniteSet(1, 2, 3) + B = FiniteSet(3, 4, 5) + AorB = Union(A, B) + AandB = A.intersect(B) + assert A.is_subset(AorB) and B.is_subset(AorB) + assert AandB.is_subset(A) + assert AandB == FiniteSet(3) + + assert A.inf == 1 and A.sup == 3 + assert AorB.inf == 1 and AorB.sup == 5 + assert FiniteSet(x, 1, 5).sup == Max(x, 5) + assert FiniteSet(x, 1, 5).inf == Min(x, 1) + + # issue 7335 + assert FiniteSet(S.EmptySet) != S.EmptySet + assert FiniteSet(FiniteSet(1, 2, 3)) != FiniteSet(1, 2, 3) + assert FiniteSet((1, 2, 3)) != FiniteSet(1, 2, 3) + + # Ensure a variety of types can exist in a FiniteSet + assert FiniteSet((1, 2), A, -5, x, 'eggs', x**2) + + assert (A > B) is False + assert (A >= B) is False + assert (A < B) is False + assert (A <= B) is False + assert AorB > A and AorB > B + assert AorB >= A and AorB >= B + assert A >= A and A <= A + assert A >= AandB and B >= AandB + assert A > AandB and B > AandB + + +def test_product_basic(): + H, T = 'H', 'T' + unit_line = Interval(0, 1) + d6 = FiniteSet(1, 2, 3, 4, 5, 6) + d4 = FiniteSet(1, 2, 3, 4) + coin = FiniteSet(H, T) + + square = unit_line * unit_line + + assert (0, 0) in square + assert 0 not in square + assert (H, T) in coin ** 2 + assert (.5, .5, .5) in (square * unit_line).flatten() + assert ((.5, .5), .5) in square * unit_line + assert (H, 3, 3) in (coin * d6 * d6).flatten() + assert ((H, 3), 3) in coin * d6 * d6 + HH, TT = sympify(H), sympify(T) + assert set(coin**2) == {(HH, HH), (HH, TT), (TT, HH), (TT, TT)} + + assert (d4*d4).is_subset(d6*d6) + + assert square.complement(Interval(-oo, oo)*Interval(-oo, oo)) == Union( + (Interval(-oo, 0, True, True) + + Interval(1, oo, True, True))*Interval(-oo, oo), + Interval(-oo, oo)*(Interval(-oo, 0, True, True) + + Interval(1, oo, True, True))) + + assert (Interval(-5, 5)**3).is_subset(Interval(-10, 10)**3) + assert not (Interval(-10, 10)**3).is_subset(Interval(-5, 5)**3) + assert not (Interval(-5, 5)**2).is_subset(Interval(-10, 10)**3) + + assert (Interval(.2, .5)*FiniteSet(.5)).is_subset(square) # segment in square + + assert len(coin*coin*coin) == 8 + assert len(S.EmptySet*S.EmptySet) == 0 + assert len(S.EmptySet*coin) == 0 + raises(TypeError, lambda: len(coin*Interval(0, 2))) + + +def test_real(): + x = Symbol('x', real=True) + + I = Interval(0, 5) + J = Interval(10, 20) + A = FiniteSet(1, 2, 30, x, S.Pi) + B = FiniteSet(-4, 0) + C = FiniteSet(100) + D = FiniteSet('Ham', 'Eggs') + + assert all(s.is_subset(S.Reals) for s in [I, J, A, B, C]) + assert not D.is_subset(S.Reals) + assert all((a + b).is_subset(S.Reals) for a in [I, J, A, B, C] for b in [I, J, A, B, C]) + assert not any((a + D).is_subset(S.Reals) for a in [I, J, A, B, C, D]) + + assert not (I + A + D).is_subset(S.Reals) + + +def test_supinf(): + x = Symbol('x', real=True) + y = Symbol('y', real=True) + + assert (Interval(0, 1) + FiniteSet(2)).sup == 2 + assert (Interval(0, 1) + FiniteSet(2)).inf == 0 + assert (Interval(0, 1) + FiniteSet(x)).sup == Max(1, x) + assert (Interval(0, 1) + FiniteSet(x)).inf == Min(0, x) + 
assert FiniteSet(5, 1, x).sup == Max(5, x) + assert FiniteSet(5, 1, x).inf == Min(1, x) + assert FiniteSet(5, 1, x, y).sup == Max(5, x, y) + assert FiniteSet(5, 1, x, y).inf == Min(1, x, y) + assert FiniteSet(5, 1, x, y, S.Infinity, S.NegativeInfinity).sup == \ + S.Infinity + assert FiniteSet(5, 1, x, y, S.Infinity, S.NegativeInfinity).inf == \ + S.NegativeInfinity + assert FiniteSet('Ham', 'Eggs').sup == Max('Ham', 'Eggs') + + +def test_universalset(): + U = S.UniversalSet + x = Symbol('x') + assert U.as_relational(x) is S.true + assert U.union(Interval(2, 4)) == U + + assert U.intersect(Interval(2, 4)) == Interval(2, 4) + assert U.measure is S.Infinity + assert U.boundary == S.EmptySet + assert U.contains(0) is S.true + + +def test_Union_of_ProductSets_shares(): + line = Interval(0, 2) + points = FiniteSet(0, 1, 2) + assert Union(line * line, line * points) == line * line + + +def test_Interval_free_symbols(): + # issue 6211 + assert Interval(0, 1).free_symbols == set() + x = Symbol('x', real=True) + assert Interval(0, x).free_symbols == {x} + + +def test_image_interval(): + x = Symbol('x', real=True) + a = Symbol('a', real=True) + assert imageset(x, 2*x, Interval(-2, 1)) == Interval(-4, 2) + assert imageset(x, 2*x, Interval(-2, 1, True, False)) == \ + Interval(-4, 2, True, False) + assert imageset(x, x**2, Interval(-2, 1, True, False)) == \ + Interval(0, 4, False, True) + assert imageset(x, x**2, Interval(-2, 1)) == Interval(0, 4) + assert imageset(x, x**2, Interval(-2, 1, True, True)) == \ + Interval(0, 4, False, True) + assert imageset(x, (x - 2)**2, Interval(1, 3)) == Interval(0, 1) + assert imageset(x, 3*x**4 - 26*x**3 + 78*x**2 - 90*x, Interval(0, 4)) == \ + Interval(-35, 0) # Multiple Maxima + assert imageset(x, x + 1/x, Interval(-oo, oo)) == Interval(-oo, -2) \ + + Interval(2, oo) # Single Infinite discontinuity + assert imageset(x, 1/x + 1/(x-1)**2, Interval(0, 2, True, False)) == \ + Interval(Rational(3, 2), oo, False) # Multiple Infinite discontinuities + + # Test for Python lambda + assert imageset(lambda x: 2*x, Interval(-2, 1)) == Interval(-4, 2) + + assert imageset(Lambda(x, a*x), Interval(0, 1)) == \ + ImageSet(Lambda(x, a*x), Interval(0, 1)) + + assert imageset(Lambda(x, sin(cos(x))), Interval(0, 1)) == \ + ImageSet(Lambda(x, sin(cos(x))), Interval(0, 1)) + + +def test_image_piecewise(): + f = Piecewise((x, x <= -1), (1/x**2, x <= 5), (x**3, True)) + f1 = Piecewise((0, x <= 1), (1, x <= 2), (2, True)) + assert imageset(x, f, Interval(-5, 5)) == Union(Interval(-5, -1), Interval(Rational(1, 25), oo)) + assert imageset(x, f1, Interval(1, 2)) == FiniteSet(0, 1) + + +@XFAIL # See: https://github.com/sympy/sympy/pull/2723#discussion_r8659826 +def test_image_Intersection(): + x = Symbol('x', real=True) + y = Symbol('y', real=True) + assert imageset(x, x**2, Interval(-2, 0).intersect(Interval(x, y))) == \ + Interval(0, 4).intersect(Interval(Min(x**2, y**2), Max(x**2, y**2))) + + +def test_image_FiniteSet(): + x = Symbol('x', real=True) + assert imageset(x, 2*x, FiniteSet(1, 2, 3)) == FiniteSet(2, 4, 6) + + +def test_image_Union(): + x = Symbol('x', real=True) + assert imageset(x, x**2, Interval(-2, 0) + FiniteSet(1, 2, 3)) == \ + (Interval(0, 4) + FiniteSet(9)) + + +def test_image_EmptySet(): + x = Symbol('x', real=True) + assert imageset(x, 2*x, S.EmptySet) == S.EmptySet + + +def test_issue_5724_7680(): + assert I not in S.Reals # issue 7680 + assert Interval(-oo, oo).contains(I) is
S.false + + +def test_boundary(): + assert FiniteSet(1).boundary == FiniteSet(1) + assert all(Interval(0, 1, left_open, right_open).boundary == FiniteSet(0, 1) + for left_open in (true, false) for right_open in (true, false)) + + +def test_boundary_Union(): + assert (Interval(0, 1) + Interval(2, 3)).boundary == FiniteSet(0, 1, 2, 3) + assert ((Interval(0, 1, False, True) + + Interval(1, 2, True, False)).boundary == FiniteSet(0, 1, 2)) + + assert (Interval(0, 1) + FiniteSet(2)).boundary == FiniteSet(0, 1, 2) + assert Union(Interval(0, 10), Interval(5, 15), evaluate=False).boundary \ + == FiniteSet(0, 15) + + assert Union(Interval(0, 10), Interval(0, 1), evaluate=False).boundary \ + == FiniteSet(0, 10) + assert Union(Interval(0, 10, True, True), + Interval(10, 15, True, True), evaluate=False).boundary \ + == FiniteSet(0, 10, 15) + + +@XFAIL +def test_union_boundary_of_joining_sets(): + """ Testing the boundary of unions is a hard problem """ + assert Union(Interval(0, 10), Interval(10, 15), evaluate=False).boundary \ + == FiniteSet(0, 15) + + +def test_boundary_ProductSet(): + open_square = Interval(0, 1, True, True) ** 2 + assert open_square.boundary == (FiniteSet(0, 1) * Interval(0, 1) + + Interval(0, 1) * FiniteSet(0, 1)) + + second_square = Interval(1, 2, True, True) * Interval(0, 1, True, True) + assert (open_square + second_square).boundary == ( + FiniteSet(0, 1) * Interval(0, 1) + + FiniteSet(1, 2) * Interval(0, 1) + + Interval(0, 1) * FiniteSet(0, 1) + + Interval(1, 2) * FiniteSet(0, 1)) + + +def test_boundary_ProductSet_line(): + line_in_r2 = Interval(0, 1) * FiniteSet(0) + assert line_in_r2.boundary == line_in_r2 + + +def test_is_open(): + assert Interval(0, 1, False, False).is_open is False + assert Interval(0, 1, True, False).is_open is False + assert Interval(0, 1, True, True).is_open is True + assert FiniteSet(1, 2, 3).is_open is False + + +def test_is_closed(): + assert Interval(0, 1, False, False).is_closed is True + assert Interval(0, 1, True, False).is_closed is False + assert FiniteSet(1, 2, 3).is_closed is True + + +def test_closure(): + assert Interval(0, 1, False, True).closure == Interval(0, 1, False, False) + + +def test_interior(): + assert Interval(0, 1, False, True).interior == Interval(0, 1, True, True) + + +def test_issue_7841(): + raises(TypeError, lambda: x in S.Reals) + + +def test_Eq(): + assert Eq(Interval(0, 1), Interval(0, 1)) + assert Eq(Interval(0, 1), Interval(0, 2)) == False + + s1 = FiniteSet(0, 1) + s2 = FiniteSet(1, 2) + + assert Eq(s1, s1) + assert Eq(s1, s2) == False + + assert Eq(s1*s2, s1*s2) + assert Eq(s1*s2, s2*s1) == False + + assert unchanged(Eq, FiniteSet({x, y}), FiniteSet({x})) + assert Eq(FiniteSet({x, y}).subs(y, x), FiniteSet({x})) is S.true + assert Eq(FiniteSet({x, y}), FiniteSet({x})).subs(y, x) is S.true + assert Eq(FiniteSet({x, y}).subs(y, x+1), FiniteSet({x})) is S.false + assert Eq(FiniteSet({x, y}), FiniteSet({x})).subs(y, x+1) is S.false + + assert Eq(ProductSet({1}, {2}), Interval(1, 2)) is S.false + assert Eq(ProductSet({1}), ProductSet({1}, {2})) is S.false + + assert Eq(FiniteSet(()), FiniteSet(1)) is S.false + assert Eq(ProductSet(), FiniteSet(1)) is S.false + + i1 = Interval(0, 1) + i2 = Interval(x, y) + assert unchanged(Eq, ProductSet(i1, i1), ProductSet(i2, i2)) + + +def test_SymmetricDifference(): + A = FiniteSet(0, 1, 2, 3, 4, 5) + B = FiniteSet(2, 4, 6, 8, 10) + C = Interval(8, 10) + + assert SymmetricDifference(A, B, evaluate=False).is_iterable is True + assert SymmetricDifference(A, C, 
evaluate=False).is_iterable is None + assert FiniteSet(*SymmetricDifference(A, B, evaluate=False)) == \ + FiniteSet(0, 1, 3, 5, 6, 8, 10) + raises(TypeError, + lambda: FiniteSet(*SymmetricDifference(A, C, evaluate=False))) + + assert SymmetricDifference(FiniteSet(0, 1, 2, 3, 4, 5), \ + FiniteSet(2, 4, 6, 8, 10)) == FiniteSet(0, 1, 3, 5, 6, 8, 10) + assert SymmetricDifference(FiniteSet(2, 3, 4), FiniteSet(2, 3, 4, 5)) \ + == FiniteSet(5) + assert FiniteSet(1, 2, 3, 4, 5) ^ FiniteSet(1, 2, 5, 6) == \ + FiniteSet(3, 4, 6) + assert Set(S(1), S(2), S(3)) ^ Set(S(2), S(3), S(4)) == Union(Set(S(1), S(2), S(3)) - Set(S(2), S(3), S(4)), \ + Set(S(2), S(3), S(4)) - Set(S(1), S(2), S(3))) + assert Interval(0, 4) ^ Interval(2, 5) == Union(Interval(0, 4) - \ + Interval(2, 5), Interval(2, 5) - Interval(0, 4)) + + +def test_issue_9536(): + from sympy.functions.elementary.exponential import log + a = Symbol('a', real=True) + assert FiniteSet(log(a)).intersect(S.Reals) == Intersection(S.Reals, FiniteSet(log(a))) + + +def test_issue_9637(): + n = Symbol('n') + a = FiniteSet(n) + b = FiniteSet(2, n) + assert Complement(S.Reals, a) == Complement(S.Reals, a, evaluate=False) + assert Complement(Interval(1, 3), a) == Complement(Interval(1, 3), a, evaluate=False) + assert Complement(Interval(1, 3), b) == \ + Complement(Union(Interval(1, 2, False, True), Interval(2, 3, True, False)), a) + assert Complement(a, S.Reals) == Complement(a, S.Reals, evaluate=False) + assert Complement(a, Interval(1, 3)) == Complement(a, Interval(1, 3), evaluate=False) + + +def test_issue_9808(): + # See https://github.com/sympy/sympy/issues/16342 + assert Complement(FiniteSet(y), FiniteSet(1)) == Complement(FiniteSet(y), FiniteSet(1), evaluate=False) + assert Complement(FiniteSet(1, 2, x), FiniteSet(x, y, 2, 3)) == \ + Complement(FiniteSet(1), FiniteSet(y), evaluate=False) + + +def test_issue_9956(): + assert Union(Interval(-oo, oo), FiniteSet(1)) == Interval(-oo, oo) + assert Interval(-oo, oo).contains(1) is S.true + + +def test_issue_Symbol_inter(): + i = Interval(0, oo) + r = S.Reals + mat = Matrix([0, 0, 0]) + assert Intersection(r, i, FiniteSet(m), FiniteSet(m, n)) == \ + Intersection(i, FiniteSet(m)) + assert Intersection(FiniteSet(1, m, n), FiniteSet(m, n, 2), i) == \ + Intersection(i, FiniteSet(m, n)) + assert Intersection(FiniteSet(m, n, x), FiniteSet(m, z), r) == \ + Intersection(Intersection({m, z}, {m, n, x}), r) + assert Intersection(FiniteSet(m, n, 3), FiniteSet(m, n, x), r) == \ + Intersection(FiniteSet(3, m, n), FiniteSet(m, n, x), r, evaluate=False) + assert Intersection(FiniteSet(m, n, 3), FiniteSet(m, n, 2, 3), r) == \ + Intersection(FiniteSet(3, m, n), r) + assert Intersection(r, FiniteSet(mat, 2, n), FiniteSet(0, mat, n)) == \ + Intersection(r, FiniteSet(n)) + assert Intersection(FiniteSet(sin(x), cos(x)), FiniteSet(sin(x), cos(x), 1), r) == \ + Intersection(r, FiniteSet(sin(x), cos(x))) + assert Intersection(FiniteSet(x**2, 1, sin(x)), FiniteSet(x**2, 2, sin(x)), r) == \ + Intersection(r, FiniteSet(x**2, sin(x))) + + +def test_issue_11827(): + assert S.Naturals0**4 + + +def test_issue_10113(): + f = x**2/(x**2 - 4) + assert imageset(x, f, S.Reals) == Union(Interval(-oo, 0), Interval(1, oo, True, True)) + assert imageset(x, f, Interval(-2, 2)) == Interval(-oo, 0) + assert imageset(x, f, Interval(-2, 3)) == Union(Interval(-oo, 0), Interval(Rational(9, 5), oo)) + + +def test_issue_10248(): + raises( + TypeError, lambda: list(Intersection(S.Reals, FiniteSet(x))) + ) + A = Symbol('A', real=True) + assert
list(Intersection(S.Reals, FiniteSet(A))) == [A] + + +def test_issue_9447(): + a = Interval(0, 1) + Interval(2, 3) + assert Complement(S.UniversalSet, a) == Complement( + S.UniversalSet, Union(Interval(0, 1), Interval(2, 3)), evaluate=False) + assert Complement(S.Naturals, a) == Complement( + S.Naturals, Union(Interval(0, 1), Interval(2, 3)), evaluate=False) + + +def test_issue_10337(): + assert (FiniteSet(2) == 3) is False + assert (FiniteSet(2) != 3) is True + raises(TypeError, lambda: FiniteSet(2) < 3) + raises(TypeError, lambda: FiniteSet(2) <= 3) + raises(TypeError, lambda: FiniteSet(2) > 3) + raises(TypeError, lambda: FiniteSet(2) >= 3) + + +def test_issue_10326(): + bad = [ + EmptySet, + FiniteSet(1), + Interval(1, 2), + S.ComplexInfinity, + S.ImaginaryUnit, + S.Infinity, + S.NaN, + S.NegativeInfinity, + ] + interval = Interval(0, 5) + for i in bad: + assert i not in interval + + x = Symbol('x', real=True) + nr = Symbol('nr', extended_real=False) + assert x + 1 in Interval(x, x + 4) + assert nr not in Interval(x, x + 4) + assert Interval(1, 2) in FiniteSet(Interval(0, 5), Interval(1, 2)) + assert Interval(-oo, oo).contains(oo) is S.false + assert Interval(-oo, oo).contains(-oo) is S.false + + +def test_issue_2799(): + U = S.UniversalSet + a = Symbol('a', real=True) + inf_interval = Interval(a, oo) + R = S.Reals + + assert U + inf_interval == inf_interval + U + assert U + R == R + U + assert R + inf_interval == inf_interval + R + + +def test_issue_9706(): + assert Interval(-oo, 0).closure == Interval(-oo, 0, True, False) + assert Interval(0, oo).closure == Interval(0, oo, False, True) + assert Interval(-oo, oo).closure == Interval(-oo, oo) + + +def test_issue_8257(): + reals_plus_infinity = Union(Interval(-oo, oo), FiniteSet(oo)) + reals_plus_negativeinfinity = Union(Interval(-oo, oo), FiniteSet(-oo)) + assert Interval(-oo, oo) + FiniteSet(oo) == reals_plus_infinity + assert FiniteSet(oo) + Interval(-oo, oo) == reals_plus_infinity + assert Interval(-oo, oo) + FiniteSet(-oo) == reals_plus_negativeinfinity + assert FiniteSet(-oo) + Interval(-oo, oo) == reals_plus_negativeinfinity + + +def test_issue_10931(): + assert S.Integers - S.Integers == EmptySet + assert S.Integers - S.Reals == EmptySet + + +def test_issue_11174(): + soln = Intersection(Interval(-oo, oo), FiniteSet(-x), evaluate=False) + assert Intersection(FiniteSet(-x), S.Reals) == soln + + soln = Intersection(S.Reals, FiniteSet(x), evaluate=False) + assert Intersection(FiniteSet(x), S.Reals) == soln + + +def test_issue_18505(): + assert ImageSet(Lambda(n, sqrt(pi*n/2 - 1 + pi/2)), S.Integers).contains(0) == \ + Contains(0, ImageSet(Lambda(n, sqrt(pi*n/2 - 1 + pi/2)), S.Integers)) + + +def test_finite_set_intersection(): + # The following should not produce recursion errors + # Note: some of these are not completely correct. See + # https://github.com/sympy/sympy/issues/16342. 
+ assert Intersection(FiniteSet(-oo, x), FiniteSet(x)) == FiniteSet(x) + assert Intersection._handle_finite_sets([FiniteSet(-oo, x), FiniteSet(0, x)]) == FiniteSet(x) + + assert Intersection._handle_finite_sets([FiniteSet(-oo, x), FiniteSet(x)]) == FiniteSet(x) + assert Intersection._handle_finite_sets([FiniteSet(2, 3, x, y), FiniteSet(1, 2, x)]) == \ + Intersection._handle_finite_sets([FiniteSet(1, 2, x), FiniteSet(2, 3, x, y)]) == \ + Intersection(FiniteSet(1, 2, x), FiniteSet(2, 3, x, y)) == \ + Intersection(FiniteSet(1, 2, x), FiniteSet(2, x, y)) + + assert FiniteSet(1+x-y) & FiniteSet(1) == \ + FiniteSet(1) & FiniteSet(1+x-y) == \ + Intersection(FiniteSet(1+x-y), FiniteSet(1), evaluate=False) + + assert FiniteSet(1) & FiniteSet(x) == FiniteSet(x) & FiniteSet(1) == \ + Intersection(FiniteSet(1), FiniteSet(x), evaluate=False) + + assert FiniteSet({x}) & FiniteSet({x, y}) == \ + Intersection(FiniteSet({x}), FiniteSet({x, y}), evaluate=False) + + +def test_union_intersection_constructor(): + # The actual exception does not matter here, so long as these fail + sets = [FiniteSet(1), FiniteSet(2)] + raises(Exception, lambda: Union(sets)) + raises(Exception, lambda: Intersection(sets)) + raises(Exception, lambda: Union(tuple(sets))) + raises(Exception, lambda: Intersection(tuple(sets))) + raises(Exception, lambda: Union(i for i in sets)) + raises(Exception, lambda: Intersection(i for i in sets)) + + # Python sets are treated the same as FiniteSet + # The union of a single set (of sets) is the set (of sets) itself + assert Union(set(sets)) == FiniteSet(*sets) + assert Intersection(set(sets)) == FiniteSet(*sets) + + assert Union({1}, {2}) == FiniteSet(1, 2) + assert Intersection({1, 2}, {2, 3}) == FiniteSet(2) + + +def test_Union_contains(): + assert zoo not in Union( + Interval.open(-oo, 0), Interval.open(0, oo)) + + +@XFAIL +def test_issue_16878b(): + # in intersection_sets for (ImageSet, Set) there is no code + # that handles the base_set of S.Reals like there is + # for Integers + assert imageset(x, (x, x), S.Reals).is_subset(S.Reals**2) is True + +def test_DisjointUnion(): + assert DisjointUnion(FiniteSet(1, 2, 3), FiniteSet(1, 2, 3), FiniteSet(1, 2, 3)).rewrite(Union) == (FiniteSet(1, 2, 3) * FiniteSet(0, 1, 2)) + assert DisjointUnion(Interval(1, 3), Interval(2, 4)).rewrite(Union) == Union(Interval(1, 3) * FiniteSet(0), Interval(2, 4) * FiniteSet(1)) + assert DisjointUnion(Interval(0, 5), Interval(0, 5)).rewrite(Union) == Union(Interval(0, 5) * FiniteSet(0), Interval(0, 5) * FiniteSet(1)) + assert DisjointUnion(Interval(-1, 2), S.EmptySet, S.EmptySet).rewrite(Union) == Interval(-1, 2) * FiniteSet(0) + assert DisjointUnion(Interval(-1, 2)).rewrite(Union) == Interval(-1, 2) * FiniteSet(0) + assert DisjointUnion(S.EmptySet, Interval(-1, 2), S.EmptySet).rewrite(Union) == Interval(-1, 2) * FiniteSet(1) + assert DisjointUnion(Interval(-oo, oo)).rewrite(Union) == Interval(-oo, oo) * FiniteSet(0) + assert DisjointUnion(S.EmptySet).rewrite(Union) == S.EmptySet + assert DisjointUnion().rewrite(Union) == S.EmptySet + raises(TypeError, lambda: DisjointUnion(Symbol('n'))) + + x = Symbol("x") + y = Symbol("y") + z = Symbol("z") + assert DisjointUnion(FiniteSet(x), FiniteSet(y, z)).rewrite(Union) == (FiniteSet(x) * FiniteSet(0)) + (FiniteSet(y, z) * FiniteSet(1)) + +def test_DisjointUnion_is_empty(): + assert DisjointUnion(S.EmptySet).is_empty is True + assert DisjointUnion(S.EmptySet, S.EmptySet).is_empty is True + assert DisjointUnion(S.EmptySet, FiniteSet(1, 2, 3)).is_empty is False + +def 
test_DisjointUnion_is_iterable(): + assert DisjointUnion(S.Integers, S.Naturals, S.Rationals).is_iterable is True + assert DisjointUnion(S.EmptySet, S.Reals).is_iterable is False + assert DisjointUnion(FiniteSet(1, 2, 3), S.EmptySet, FiniteSet(x, y)).is_iterable is True + assert DisjointUnion(S.EmptySet, S.EmptySet).is_iterable is False + +def test_DisjointUnion_contains(): + assert (0, 0) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (0, 1) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (0, 2) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (1, 0) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (1, 1) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (1, 2) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (2, 0) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (2, 1) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (2, 2) in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (0, 1, 2) not in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (0, 0.5) not in DisjointUnion(FiniteSet(0.5)) + assert (0, 5) not in DisjointUnion(FiniteSet(0, 1, 2), FiniteSet(0, 1, 2), FiniteSet(0, 1, 2)) + assert (x, 0) in DisjointUnion(FiniteSet(x, y, z), S.EmptySet, FiniteSet(y)) + assert (y, 0) in DisjointUnion(FiniteSet(x, y, z), S.EmptySet, FiniteSet(y)) + assert (z, 0) in DisjointUnion(FiniteSet(x, y, z), S.EmptySet, FiniteSet(y)) + assert (y, 2) in DisjointUnion(FiniteSet(x, y, z), S.EmptySet, FiniteSet(y)) + assert (0.5, 0) in DisjointUnion(Interval(0, 1), Interval(0, 2)) + assert (0.5, 1) in DisjointUnion(Interval(0, 1), Interval(0, 2)) + assert (1.5, 0) not in DisjointUnion(Interval(0, 1), Interval(0, 2)) + assert (1.5, 1) in DisjointUnion(Interval(0, 1), Interval(0, 2)) + +def test_DisjointUnion_iter(): + D = DisjointUnion(FiniteSet(3, 5, 7, 9), FiniteSet(x, y, z)) + it = iter(D) + L1 = [(x, 1), (y, 1), (z, 1)] + L2 = [(3, 0), (5, 0), (7, 0), (9, 0)] + # iteration interleaves the two input sets, starting with the first, + # until all seven elements have been produced + for expected in [L2, L1, L2, L1, L2, L1, L2]: + nxt = next(it) + assert nxt in expected + expected.remove(nxt) + raises(StopIteration, lambda: next(it)) + + raises(ValueError, lambda: iter(DisjointUnion(Interval(0, 1), S.EmptySet))) + +def test_DisjointUnion_len(): + assert len(DisjointUnion(FiniteSet(3, 5, 7, 9), FiniteSet(x, y, z))) == 7 + assert len(DisjointUnion(S.EmptySet, S.EmptySet, FiniteSet(x, y, z), S.EmptySet)) == 3 + raises(ValueError, lambda: len(DisjointUnion(Interval(0, 1), S.EmptySet))) + +def test_SetKind_ProductSet(): + p = ProductSet(FiniteSet(Matrix([1, 2])), FiniteSet(Matrix([1, 2]))) + mk = MatrixKind(NumberKind) + k = SetKind(TupleKind(mk, mk)) + assert p.kind is k + assert ProductSet(Interval(1, 2), FiniteSet(Matrix([1, 2]))).kind is SetKind(TupleKind(NumberKind, mk)) + +def test_SetKind_Interval(): + assert Interval(1, 2).kind is SetKind(NumberKind) + +def test_SetKind_EmptySet_UniversalSet(): + assert S.UniversalSet.kind is SetKind(UndefinedKind) + assert EmptySet.kind is SetKind() + +def test_SetKind_FiniteSet():
+ assert FiniteSet(1, Matrix([1, 2])).kind is SetKind(UndefinedKind) + assert FiniteSet(1, 2).kind is SetKind(NumberKind) + +def test_SetKind_Unions(): + assert Union(FiniteSet(Matrix([1, 2])), Interval(1, 2)).kind is SetKind(UndefinedKind) + assert Union(Interval(1, 2), Interval(1, 7)).kind is SetKind(NumberKind) + +def test_SetKind_DisjointUnion(): + A = FiniteSet(1, 2, 3) + B = Interval(0, 5) + assert DisjointUnion(A, B).kind is SetKind(NumberKind) + +def test_SetKind_evaluate_False(): + U = lambda *args: Union(*args, evaluate=False) + assert U({1}, EmptySet).kind is SetKind(NumberKind) + assert U(Interval(1, 2), EmptySet).kind is SetKind(NumberKind) + assert U({1}, S.UniversalSet).kind is SetKind(UndefinedKind) + assert U(Interval(1, 2), Interval(4, 5), + FiniteSet(1)).kind is SetKind(NumberKind) + I = lambda *args: Intersection(*args, evaluate=False) + assert I({1}, S.UniversalSet).kind is SetKind(NumberKind) + assert I({1}, EmptySet).kind is SetKind() + C = lambda *args: Complement(*args, evaluate=False) + assert C(S.UniversalSet, {1, 2, 4, 5}).kind is SetKind(UndefinedKind) + assert C({1, 2, 3, 4, 5}, EmptySet).kind is SetKind(NumberKind) + assert C(EmptySet, {1, 2, 3, 4, 5}).kind is SetKind() + +def test_SetKind_ImageSet_Special(): + f = ImageSet(Lambda(n, n ** 2), Interval(1, 4)) + assert (f - FiniteSet(3)).kind is SetKind(NumberKind) + assert (f + Interval(16, 17)).kind is SetKind(NumberKind) + assert (f + FiniteSet(17)).kind is SetKind(NumberKind) + +def test_issue_20089(): + B = FiniteSet(FiniteSet(1, 2), FiniteSet(1)) + assert 1 not in B + assert 1.0 not in B + assert not Eq(1, FiniteSet(1, 2)) + assert FiniteSet(1) in B + A = FiniteSet(1, 2) + assert A in B + assert B.issubset(B) + assert not A.issubset(B) + assert 1 in A + C = FiniteSet(FiniteSet(1, 2), FiniteSet(1), 1, 2) + assert A.issubset(C) + assert B.issubset(C) + +def test_issue_19378(): + a = FiniteSet(1, 2) + b = ProductSet(a, a) + c = FiniteSet((1, 1), (1, 2), (2, 1), (2, 2)) + assert b.is_subset(c) is True + d = FiniteSet(1) + assert b.is_subset(d) is False + assert Eq(c, b).simplify() is S.true + assert Eq(a, c).simplify() is S.false + assert Eq({1}, {x}).simplify() == Eq({1}, {x}) + +def test_intersection_symbolic(): + n = Symbol('n') + # These should not throw an error + assert isinstance(Intersection(Range(n), Range(100)), Intersection) + assert isinstance(Intersection(Range(n), Interval(1, 100)), Intersection) + assert isinstance(Intersection(Range(100), Interval(1, n)), Intersection) + + +@XFAIL +def test_intersection_symbolic_failing(): + n = Symbol('n', integer=True, positive=True) + assert Intersection(Range(10, n), Range(4, 500, 5)) == Intersection( + Range(14, n), Range(14, 500, 5)) + assert Intersection(Interval(10, n), Range(4, 500, 5)) == Intersection( + Interval(14, n), Range(14, 500, 5)) + + +def test_issue_20379(): + # https://github.com/sympy/sympy/issues/20379 + x = pi - 3.14159265358979 + assert FiniteSet(x).evalf(2) == FiniteSet(Float('3.23108914886517e-15', 2)) + +def test_finiteset_simplify(): + S = FiniteSet(1, cos(1)**2 + sin(1)**2) + assert S.simplify() == {1} + +def test_issue_14336(): + # https://github.com/sympy/sympy/issues/14336 + U = S.Complexes + x = Symbol("x") + U -= U.intersect(Ne(x, 1).as_set()) + U -= U.intersect(S.true.as_set()) + +def test_issue_9855(): + # https://github.com/sympy/sympy/issues/9855 + x, y, z = symbols('x, y, z', real=True) + s1 = Interval(1, x) & Interval(y, 2) + s2 = Interval(1, 2) + assert s1.is_subset(s2) is None diff --git
a/falcon/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..437a2351ce3a86cc1d0408caaaf0ed1e0c18d593 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aae0f0b825e60b4e208c23a96717f1de76acea5b4be62a5c78d0dd99c5ae7ab3 +size 100312 diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..f79e4cb9aaf0b2d9e8ba78861e2071317b2384b3 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +conda \ No newline at end of file diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b49c3af060ce6326b8d91b9b73e76456ea0d9d2b --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/LICENSE @@ -0,0 +1,166 @@ +GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. 
+ + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. 
If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
+ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..e6e12ea51e18b270be6c5103c8cb1f6ef62f9e4a --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/RECORD @@ -0,0 +1,18 @@ +autocommand-2.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +autocommand-2.2.2.dist-info/LICENSE,sha256=reeNBJgtaZctREqOFKlPh6IzTdOFXMgDSOqOJAqg3y0,7634 +autocommand-2.2.2.dist-info/METADATA,sha256=OADZuR3O6iBlpu1ieTgzYul6w4uOVrk0P0BO5TGGAJk,15006 +autocommand-2.2.2.dist-info/RECORD,, +autocommand-2.2.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +autocommand-2.2.2.dist-info/top_level.txt,sha256=AzfhgKKS8EdAwWUTSF8mgeVQbXOY9kokHB6kSqwwqu0,12 +autocommand/__init__.py,sha256=zko5Rnvolvb-UXjCx_2ArPTGBWwUK5QY4LIQIKYR7As,1037 +autocommand/__pycache__/__init__.cpython-312.pyc,, +autocommand/__pycache__/autoasync.cpython-312.pyc,, +autocommand/__pycache__/autocommand.cpython-312.pyc,, +autocommand/__pycache__/automain.cpython-312.pyc,, +autocommand/__pycache__/autoparse.cpython-312.pyc,, +autocommand/__pycache__/errors.cpython-312.pyc,, +autocommand/autoasync.py,sha256=AMdyrxNS4pqWJfP_xuoOcImOHWD-qT7x06wmKN1Vp-U,5680 +autocommand/autocommand.py,sha256=hmkEmQ72HtL55gnURVjDOnsfYlGd5lLXbvT4KG496Qw,2505 +autocommand/automain.py,sha256=A2b8i754Mxc_DjU9WFr6vqYDWlhz0cn8miu8d8EsxV8,2076 +autocommand/autoparse.py,sha256=WVWmZJPcbzUKXP40raQw_0HD8qPJ2V9VG1eFFmmnFxw,11642 +autocommand/errors.py,sha256=7aa3roh9Herd6nIKpQHNWEslWE8oq7GiHYVUuRqORnA,886 diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..57e3d840d59a650ac5bccbad5baeec47d155f0ad --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..dda5158ff6d263927861b19ef5a5d183d2aa77ed --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand-2.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +autocommand diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/__init__.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9afffda22af9a7fca57dab8535afba67bb5df86a Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/__init__.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autocommand.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autocommand.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98bb9c4c5b183a91c3ec4a77b882122fd05fd7e6 Binary files 
/dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autocommand.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autoparse.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autoparse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..598967aa0c3925a648390502b0bf4e4c9ce52fa4 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autoparse.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/errors.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..2570607399a3ae13cb92db65a9171d955d3248c6 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/autocommand/errors.py @@ -0,0 +1,23 @@ +# Copyright 2014-2016 Nathan West +# +# This file is part of autocommand. +# +# autocommand is free software: you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# autocommand is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with autocommand. If not, see <https://www.gnu.org/licenses/>. + + +class AutocommandError(Exception): + '''Base class for autocommand exceptions''' + pass + +# Individual modules will define errors specific to that module.
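+
+# A minimal sketch (not part of autocommand; the class name is hypothetical)
+# of the module-specific pattern the comment above describes:
+#
+#     class ExampleSignatureError(AutocommandError, TypeError):
+#         '''Hypothetical module-specific error for an unusable signature'''
+#
+# Deriving from AutocommandError keeps a single catch-all base for the
+# package, while the extra builtin base (here TypeError) lets callers who
+# only know the builtin hierarchy still catch the error.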
diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6efcee169627498c5aa9a1d6c5732a10cebcac7b Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/__pycache__/_compat.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c44740eb671c07c49e42894f476080cd2fce727 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/__init__.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/py39.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/py39.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e322b7fa0993348e1c7708f28cc2c53e607fd80 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/importlib_metadata/compat/__pycache__/py39.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__init__.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3eec27f4c67f24db167393895a6eaf0a9aa16d35 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__init__.py @@ -0,0 +1,3986 @@ +""" +inflect: English language inflection + - correctly generate plurals, ordinals, indefinite articles + - convert numbers to words + +Copyright (C) 2010 Paul Dyson + +Based upon the Perl module +`Lingua::EN::Inflect <https://metacpan.org/pod/Lingua::EN::Inflect>`_.
+ +methods: + classical inflect + plural plural_noun plural_verb plural_adj singular_noun no num a an + compare compare_nouns compare_verbs compare_adjs + present_participle + ordinal + number_to_words + join + defnoun defverb defadj defa defan + +INFLECTIONS: + classical inflect + plural plural_noun plural_verb plural_adj singular_noun compare + no num a an present_participle + +PLURALS: + classical inflect + plural plural_noun plural_verb plural_adj singular_noun no num + compare compare_nouns compare_verbs compare_adjs + +COMPARISONS: + classical + compare compare_nouns compare_verbs compare_adjs + +ARTICLES: + classical inflect num a an + +NUMERICAL: + ordinal number_to_words + +USER_DEFINED: + defnoun defverb defadj defa defan + +Exceptions: + UnknownClassicalModeError + BadNumValueError + BadChunkingOptionError + NumOutOfRangeError + BadUserDefinedPatternError + BadRcFileError + BadGenderError + +""" + +from __future__ import annotations + +import ast +import collections +import contextlib +import functools +import itertools +import re +from numbers import Number +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Literal, + Match, + Optional, + Sequence, + Tuple, + Union, + cast, +) + +from more_itertools import windowed_complete +from typeguard import typechecked + +from .compat.py38 import Annotated + + +class UnknownClassicalModeError(Exception): + pass + + +class BadNumValueError(Exception): + pass + + +class BadChunkingOptionError(Exception): + pass + + +class NumOutOfRangeError(Exception): + pass + + +class BadUserDefinedPatternError(Exception): + pass + + +class BadRcFileError(Exception): + pass + + +class BadGenderError(Exception): + pass + + +def enclose(s: str) -> str: + return f"(?:{s})" + + +def joinstem(cutpoint: Optional[int] = 0, words: Optional[Iterable[str]] = None) -> str: + """ + Join stem of each word in words into a string for regex. + + Each word is truncated at cutpoint. + + Cutpoint is usually negative indicating the number of letters to remove + from the end of each word. + + >>> joinstem(-2, ["ephemeris", "iris", ".*itis"]) + '(?:ephemer|ir|.*it)' + + >>> joinstem(None, ["ephemeris"]) + '(?:ephemeris)' + + >>> joinstem(5, None) + '(?:)' + """ + return enclose("|".join(w[:cutpoint] for w in words or [])) + + +def bysize(words: Iterable[str]) -> Dict[int, set]: + """ + From a list of words, return a dict of sets sorted by word length. 
+ + >>> words = ['ant', 'cat', 'dog', 'pig', 'frog', 'goat', 'horse', 'elephant'] + >>> ret = bysize(words) + >>> sorted(ret[3]) + ['ant', 'cat', 'dog', 'pig'] + >>> ret[5] + {'horse'} + """ + res: Dict[int, set] = collections.defaultdict(set) + for w in words: + res[len(w)].add(w) + return res + + +def make_pl_si_lists( + lst: Iterable[str], + plending: str, + siendingsize: Optional[int], + dojoinstem: bool = True, +): + """ + given a list of singular words: lst + + an ending to append to make the plural: plending + + the number of characters to remove from the singular + before appending plending: siendingsize + + a flag whether to create a joinstem: dojoinstem + + return: + a list of pluralised words: si_list (called si because this is what you need to + look for to make the singular) + + the pluralised words as a dict of sets sorted by word length: si_bysize + the singular words as a dict of sets sorted by word length: pl_bysize + if dojoinstem is True: a regular expression that matches any of the stems: stem + """ + if siendingsize is not None: + siendingsize = -siendingsize + si_list = [w[:siendingsize] + plending for w in lst] + pl_bysize = bysize(lst) + si_bysize = bysize(si_list) + if dojoinstem: + stem = joinstem(siendingsize, lst) + return si_list, si_bysize, pl_bysize, stem + else: + return si_list, si_bysize, pl_bysize + + +# 1. PLURALS + +pl_sb_irregular_s = { + "corpus": "corpuses|corpora", + "opus": "opuses|opera", + "genus": "genera", + "mythos": "mythoi", + "penis": "penises|penes", + "testis": "testes", + "atlas": "atlases|atlantes", + "yes": "yeses", +} + +pl_sb_irregular = { + "child": "children", + "chili": "chilis|chilies", + "brother": "brothers|brethren", + "infinity": "infinities|infinity", + "loaf": "loaves", + "lore": "lores|lore", + "hoof": "hoofs|hooves", + "beef": "beefs|beeves", + "thief": "thiefs|thieves", + "money": "monies", + "mongoose": "mongooses", + "ox": "oxen", + "cow": "cows|kine", + "graffito": "graffiti", + "octopus": "octopuses|octopodes", + "genie": "genies|genii", + "ganglion": "ganglions|ganglia", + "trilby": "trilbys", + "turf": "turfs|turves", + "numen": "numina", + "atman": "atmas", + "occiput": "occiputs|occipita", + "sabretooth": "sabretooths", + "sabertooth": "sabertooths", + "lowlife": "lowlifes", + "flatfoot": "flatfoots", + "tenderfoot": "tenderfoots", + "romany": "romanies", + "jerry": "jerries", + "mary": "maries", + "talouse": "talouses", + "rom": "roma", + "carmen": "carmina", +} + +pl_sb_irregular.update(pl_sb_irregular_s) +# pl_sb_irregular_keys = enclose('|'.join(pl_sb_irregular.keys())) + +pl_sb_irregular_caps = { + "Romany": "Romanies", + "Jerry": "Jerrys", + "Mary": "Marys", + "Rom": "Roma", +} + +pl_sb_irregular_compound = {"prima donna": "prima donnas|prime donne"} + +si_sb_irregular = {v: k for (k, v) in pl_sb_irregular.items()} +for k in list(si_sb_irregular): + if "|" in k: + k1, k2 = k.split("|") + si_sb_irregular[k1] = si_sb_irregular[k2] = si_sb_irregular[k] + del si_sb_irregular[k] +si_sb_irregular_caps = {v: k for (k, v) in pl_sb_irregular_caps.items()} +si_sb_irregular_compound = {v: k for (k, v) in pl_sb_irregular_compound.items()} +for k in list(si_sb_irregular_compound): + if "|" in k: + k1, k2 = k.split("|") + si_sb_irregular_compound[k1] = si_sb_irregular_compound[k2] = ( + si_sb_irregular_compound[k] + ) + del si_sb_irregular_compound[k] + +# si_sb_irregular_keys = enclose('|'.join(si_sb_irregular.keys())) + +# Z's that don't double + +pl_sb_z_zes_list = ("quartz", "topaz") +pl_sb_z_zes_bysize = 
bysize(pl_sb_z_zes_list)
+
+pl_sb_ze_zes_list = ("snooze",)
+pl_sb_ze_zes_bysize = bysize(pl_sb_ze_zes_list)
+
+
+# CLASSICAL "..is" -> "..ides"
+
+pl_sb_C_is_ides_complete = [
+    # GENERAL WORDS...
+    "ephemeris",
+    "iris",
+    "clitoris",
+    "chrysalis",
+    "epididymis",
+]
+
+pl_sb_C_is_ides_endings = [
+    # INFLAMMATIONS...
+    "itis"
+]
+
+pl_sb_C_is_ides = joinstem(
+    -2, pl_sb_C_is_ides_complete + [f".*{w}" for w in pl_sb_C_is_ides_endings]
+)
+
+pl_sb_C_is_ides_list = pl_sb_C_is_ides_complete + pl_sb_C_is_ides_endings
+
+(
+    si_sb_C_is_ides_list,
+    si_sb_C_is_ides_bysize,
+    pl_sb_C_is_ides_bysize,
+) = make_pl_si_lists(pl_sb_C_is_ides_list, "ides", 2, dojoinstem=False)
+
+
+# CLASSICAL "..a" -> "..ata"
+
+pl_sb_C_a_ata_list = (
+    "anathema",
+    "bema",
+    "carcinoma",
+    "charisma",
+    "diploma",
+    "dogma",
+    "drama",
+    "edema",
+    "enema",
+    "enigma",
+    "lemma",
+    "lymphoma",
+    "magma",
+    "melisma",
+    "miasma",
+    "oedema",
+    "sarcoma",
+    "schema",
+    "soma",
+    "stigma",
+    "stoma",
+    "trauma",
+    "gumma",
+    "pragma",
+)
+
+(
+    si_sb_C_a_ata_list,
+    si_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata_bysize,
+    pl_sb_C_a_ata,
+) = make_pl_si_lists(pl_sb_C_a_ata_list, "ata", 1)
+
+# UNCONDITIONAL "..a" -> "..ae"
+
+pl_sb_U_a_ae_list = (
+    "alumna",
+    "alga",
+    "vertebra",
+    "persona",
+    "vita",
+)
+(
+    si_sb_U_a_ae_list,
+    si_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae_bysize,
+    pl_sb_U_a_ae,
+) = make_pl_si_lists(pl_sb_U_a_ae_list, "e", None)
+
+# CLASSICAL "..a" -> "..ae"
+
+pl_sb_C_a_ae_list = (
+    "amoeba",
+    "antenna",
+    "formula",
+    "hyperbola",
+    "medusa",
+    "nebula",
+    "parabola",
+    "abscissa",
+    "hydra",
+    "nova",
+    "lacuna",
+    "aurora",
+    "umbra",
+    "flora",
+    "fauna",
+)
+(
+    si_sb_C_a_ae_list,
+    si_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae_bysize,
+    pl_sb_C_a_ae,
+) = make_pl_si_lists(pl_sb_C_a_ae_list, "e", None)
+
+
+# CLASSICAL "..en" -> "..ina"
+
+pl_sb_C_en_ina_list = ("stamen", "foramen", "lumen")
+
+(
+    si_sb_C_en_ina_list,
+    si_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina_bysize,
+    pl_sb_C_en_ina,
+) = make_pl_si_lists(pl_sb_C_en_ina_list, "ina", 2)
+
+
+# UNCONDITIONAL "..um" -> "..a"
+
+pl_sb_U_um_a_list = (
+    "bacterium",
+    "agendum",
+    "desideratum",
+    "erratum",
+    "stratum",
+    "datum",
+    "ovum",
+    "extremum",
+    "candelabrum",
+)
+(
+    si_sb_U_um_a_list,
+    si_sb_U_um_a_bysize,
+    pl_sb_U_um_a_bysize,
+    pl_sb_U_um_a,
+) = make_pl_si_lists(pl_sb_U_um_a_list, "a", 2)
+
+# CLASSICAL "..um" -> "..a"
+
+pl_sb_C_um_a_list = (
+    "maximum",
+    "minimum",
+    "momentum",
+    "optimum",
+    "quantum",
+    "cranium",
+    "curriculum",
+    "dictum",
+    "phylum",
+    "aquarium",
+    "compendium",
+    "emporium",
+    "encomium",
+    "gymnasium",
+    "honorarium",
+    "interregnum",
+    "lustrum",
+    "memorandum",
+    "millennium",
+    "rostrum",
+    "spectrum",
+    "speculum",
+    "stadium",
+    "trapezium",
+    "ultimatum",
+    "medium",
+    "vacuum",
+    "velum",
+    "consortium",
+    "arboretum",
+)
+
+(
+    si_sb_C_um_a_list,
+    si_sb_C_um_a_bysize,
+    pl_sb_C_um_a_bysize,
+    pl_sb_C_um_a,
+) = make_pl_si_lists(pl_sb_C_um_a_list, "a", 2)
+
+
+# UNCONDITIONAL "..us" -> "i"
+
+pl_sb_U_us_i_list = (
+    "alumnus",
+    "alveolus",
+    "bacillus",
+    "bronchus",
+    "locus",
+    "nucleus",
+    "stimulus",
+    "meniscus",
+    "sarcophagus",
+)
+(
+    si_sb_U_us_i_list,
+    si_sb_U_us_i_bysize,
+    pl_sb_U_us_i_bysize,
+    pl_sb_U_us_i,
+) = make_pl_si_lists(pl_sb_U_us_i_list, "i", 2)
+
+# CLASSICAL "..us" -> "..i"
+
+pl_sb_C_us_i_list = (
+    "focus",
+    "radius",
+    "genius",
+    "incubus",
+    "succubus",
+    "nimbus",
+    "fungus",
+    "nucleolus",
+    "stylus",
+    "torus",
+    "umbilicus",
+    "uterus",
+    "hippopotamus",
+    "cactus",
+)
+
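+# --- Illustrative sketch (not part of the original module) -------------------
+# What the helper functions above produce for a classical "..us" -> "..i"
+# list such as pl_sb_C_us_i_list just defined.  These values are what
+# make_pl_si_lists(), joinstem() and bysize() return for a two-word sample:
+#
+#     >>> si, si_by, pl_by, stem = make_pl_si_lists(("focus", "torus"), "i", 2)
+#     >>> si
+#     ['foci', 'tori']
+#     >>> sorted(pl_by[5])
+#     ['focus', 'torus']
+#     >>> stem
+#     '(?:foc|tor)'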
+( + si_sb_C_us_i_list, + si_sb_C_us_i_bysize, + pl_sb_C_us_i_bysize, + pl_sb_C_us_i, +) = make_pl_si_lists(pl_sb_C_us_i_list, "i", 2) + + +# CLASSICAL "..us" -> "..us" (ASSIMILATED 4TH DECLENSION LATIN NOUNS) + +pl_sb_C_us_us = ( + "status", + "apparatus", + "prospectus", + "sinus", + "hiatus", + "impetus", + "plexus", +) +pl_sb_C_us_us_bysize = bysize(pl_sb_C_us_us) + +# UNCONDITIONAL "..on" -> "a" + +pl_sb_U_on_a_list = ( + "criterion", + "perihelion", + "aphelion", + "phenomenon", + "prolegomenon", + "noumenon", + "organon", + "asyndeton", + "hyperbaton", +) +( + si_sb_U_on_a_list, + si_sb_U_on_a_bysize, + pl_sb_U_on_a_bysize, + pl_sb_U_on_a, +) = make_pl_si_lists(pl_sb_U_on_a_list, "a", 2) + +# CLASSICAL "..on" -> "..a" + +pl_sb_C_on_a_list = ("oxymoron",) + +( + si_sb_C_on_a_list, + si_sb_C_on_a_bysize, + pl_sb_C_on_a_bysize, + pl_sb_C_on_a, +) = make_pl_si_lists(pl_sb_C_on_a_list, "a", 2) + + +# CLASSICAL "..o" -> "..i" (BUT NORMALLY -> "..os") + +pl_sb_C_o_i = [ + "solo", + "soprano", + "basso", + "alto", + "contralto", + "tempo", + "piano", + "virtuoso", +] # list not tuple so can concat for pl_sb_U_o_os + +pl_sb_C_o_i_bysize = bysize(pl_sb_C_o_i) +si_sb_C_o_i_bysize = bysize([f"{w[:-1]}i" for w in pl_sb_C_o_i]) + +pl_sb_C_o_i_stems = joinstem(-1, pl_sb_C_o_i) + +# ALWAYS "..o" -> "..os" + +pl_sb_U_o_os_complete = {"ado", "ISO", "NATO", "NCO", "NGO", "oto"} +si_sb_U_o_os_complete = {f"{w}s" for w in pl_sb_U_o_os_complete} + + +pl_sb_U_o_os_endings = [ + "aficionado", + "aggro", + "albino", + "allegro", + "ammo", + "Antananarivo", + "archipelago", + "armadillo", + "auto", + "avocado", + "Bamako", + "Barquisimeto", + "bimbo", + "bingo", + "Biro", + "bolero", + "Bolzano", + "bongo", + "Boto", + "burro", + "Cairo", + "canto", + "cappuccino", + "casino", + "cello", + "Chicago", + "Chimango", + "cilantro", + "cochito", + "coco", + "Colombo", + "Colorado", + "commando", + "concertino", + "contango", + "credo", + "crescendo", + "cyano", + "demo", + "ditto", + "Draco", + "dynamo", + "embryo", + "Esperanto", + "espresso", + "euro", + "falsetto", + "Faro", + "fiasco", + "Filipino", + "flamenco", + "furioso", + "generalissimo", + "Gestapo", + "ghetto", + "gigolo", + "gizmo", + "Greensboro", + "gringo", + "Guaiabero", + "guano", + "gumbo", + "gyro", + "hairdo", + "hippo", + "Idaho", + "impetigo", + "inferno", + "info", + "intermezzo", + "intertrigo", + "Iquico", + "jumbo", + "junto", + "Kakapo", + "kilo", + "Kinkimavo", + "Kokako", + "Kosovo", + "Lesotho", + "libero", + "libido", + "libretto", + "lido", + "Lilo", + "limbo", + "limo", + "lineno", + "lingo", + "lino", + "livedo", + "loco", + "logo", + "lumbago", + "macho", + "macro", + "mafioso", + "magneto", + "magnifico", + "Majuro", + "Malabo", + "manifesto", + "Maputo", + "Maracaibo", + "medico", + "memo", + "metro", + "Mexico", + "micro", + "Milano", + "Monaco", + "mono", + "Montenegro", + "Morocco", + "Muqdisho", + "myo", + "neutrino", + "Ningbo", + "octavo", + "oregano", + "Orinoco", + "Orlando", + "Oslo", + "panto", + "Paramaribo", + "Pardusco", + "pedalo", + "photo", + "pimento", + "pinto", + "pleco", + "Pluto", + "pogo", + "polo", + "poncho", + "Porto-Novo", + "Porto", + "pro", + "psycho", + "pueblo", + "quarto", + "Quito", + "repo", + "rhino", + "risotto", + "rococo", + "rondo", + "Sacramento", + "saddo", + "sago", + "salvo", + "Santiago", + "Sapporo", + "Sarajevo", + "scherzando", + "scherzo", + "silo", + "sirocco", + "sombrero", + "staccato", + "sterno", + "stucco", + "stylo", + "sumo", + "Taiko", + "techno", + "terrazzo", + 
"testudo", + "timpano", + "tiro", + "tobacco", + "Togo", + "Tokyo", + "torero", + "Torino", + "Toronto", + "torso", + "tremolo", + "typo", + "tyro", + "ufo", + "UNESCO", + "vaquero", + "vermicello", + "verso", + "vibrato", + "violoncello", + "Virgo", + "weirdo", + "WHO", + "WTO", + "Yamoussoukro", + "yo-yo", + "zero", + "Zibo", +] + pl_sb_C_o_i + +pl_sb_U_o_os_bysize = bysize(pl_sb_U_o_os_endings) +si_sb_U_o_os_bysize = bysize([f"{w}s" for w in pl_sb_U_o_os_endings]) + + +# UNCONDITIONAL "..ch" -> "..chs" + +pl_sb_U_ch_chs_list = ("czech", "eunuch", "stomach") + +( + si_sb_U_ch_chs_list, + si_sb_U_ch_chs_bysize, + pl_sb_U_ch_chs_bysize, + pl_sb_U_ch_chs, +) = make_pl_si_lists(pl_sb_U_ch_chs_list, "s", None) + + +# UNCONDITIONAL "..[ei]x" -> "..ices" + +pl_sb_U_ex_ices_list = ("codex", "murex", "silex") +( + si_sb_U_ex_ices_list, + si_sb_U_ex_ices_bysize, + pl_sb_U_ex_ices_bysize, + pl_sb_U_ex_ices, +) = make_pl_si_lists(pl_sb_U_ex_ices_list, "ices", 2) + +pl_sb_U_ix_ices_list = ("radix", "helix") +( + si_sb_U_ix_ices_list, + si_sb_U_ix_ices_bysize, + pl_sb_U_ix_ices_bysize, + pl_sb_U_ix_ices, +) = make_pl_si_lists(pl_sb_U_ix_ices_list, "ices", 2) + +# CLASSICAL "..[ei]x" -> "..ices" + +pl_sb_C_ex_ices_list = ( + "vortex", + "vertex", + "cortex", + "latex", + "pontifex", + "apex", + "index", + "simplex", +) + +( + si_sb_C_ex_ices_list, + si_sb_C_ex_ices_bysize, + pl_sb_C_ex_ices_bysize, + pl_sb_C_ex_ices, +) = make_pl_si_lists(pl_sb_C_ex_ices_list, "ices", 2) + + +pl_sb_C_ix_ices_list = ("appendix",) + +( + si_sb_C_ix_ices_list, + si_sb_C_ix_ices_bysize, + pl_sb_C_ix_ices_bysize, + pl_sb_C_ix_ices, +) = make_pl_si_lists(pl_sb_C_ix_ices_list, "ices", 2) + + +# ARABIC: ".." -> "..i" + +pl_sb_C_i_list = ("afrit", "afreet", "efreet") + +(si_sb_C_i_list, si_sb_C_i_bysize, pl_sb_C_i_bysize, pl_sb_C_i) = make_pl_si_lists( + pl_sb_C_i_list, "i", None +) + + +# HEBREW: ".." -> "..im" + +pl_sb_C_im_list = ("goy", "seraph", "cherub") + +(si_sb_C_im_list, si_sb_C_im_bysize, pl_sb_C_im_bysize, pl_sb_C_im) = make_pl_si_lists( + pl_sb_C_im_list, "im", None +) + + +# UNCONDITIONAL "..man" -> "..mans" + +pl_sb_U_man_mans_list = """ + ataman caiman cayman ceriman + desman dolman farman harman hetman + human leman ottoman shaman talisman +""".split() +pl_sb_U_man_mans_caps_list = """ + Alabaman Bahaman Burman German + Hiroshiman Liman Nakayaman Norman Oklahoman + Panaman Roman Selman Sonaman Tacoman Yakiman + Yokohaman Yuman +""".split() + +( + si_sb_U_man_mans_list, + si_sb_U_man_mans_bysize, + pl_sb_U_man_mans_bysize, +) = make_pl_si_lists(pl_sb_U_man_mans_list, "s", None, dojoinstem=False) +( + si_sb_U_man_mans_caps_list, + si_sb_U_man_mans_caps_bysize, + pl_sb_U_man_mans_caps_bysize, +) = make_pl_si_lists(pl_sb_U_man_mans_caps_list, "s", None, dojoinstem=False) + +# UNCONDITIONAL "..louse" -> "..lice" +pl_sb_U_louse_lice_list = ("booklouse", "grapelouse", "louse", "woodlouse") + +( + si_sb_U_louse_lice_list, + si_sb_U_louse_lice_bysize, + pl_sb_U_louse_lice_bysize, +) = make_pl_si_lists(pl_sb_U_louse_lice_list, "lice", 5, dojoinstem=False) + +pl_sb_uninflected_s_complete = [ + # PAIRS OR GROUPS SUBSUMED TO A SINGULAR... + "breeches", + "britches", + "pajamas", + "pyjamas", + "clippers", + "gallows", + "hijinks", + "headquarters", + "pliers", + "scissors", + "testes", + "herpes", + "pincers", + "shears", + "proceedings", + "trousers", + # UNASSIMILATED LATIN 4th DECLENSION + "cantus", + "coitus", + "nexus", + # RECENT IMPORTS... 
+ "contretemps", + "corps", + "debris", + "siemens", + # DISEASES + "mumps", + # MISCELLANEOUS OTHERS... + "diabetes", + "jackanapes", + "series", + "species", + "subspecies", + "rabies", + "chassis", + "innings", + "news", + "mews", + "haggis", +] + +pl_sb_uninflected_s_endings = [ + # RECENT IMPORTS... + "ois", + # DISEASES + "measles", +] + +pl_sb_uninflected_s = pl_sb_uninflected_s_complete + [ + f".*{w}" for w in pl_sb_uninflected_s_endings +] + +pl_sb_uninflected_herd = ( + # DON'T INFLECT IN CLASSICAL MODE, OTHERWISE NORMAL INFLECTION + "wildebeest", + "swine", + "eland", + "bison", + "buffalo", + "cattle", + "elk", + "rhinoceros", + "zucchini", + "caribou", + "dace", + "grouse", + "guinea fowl", + "guinea-fowl", + "haddock", + "hake", + "halibut", + "herring", + "mackerel", + "pickerel", + "pike", + "roe", + "seed", + "shad", + "snipe", + "teal", + "turbot", + "water fowl", + "water-fowl", +) + +pl_sb_uninflected_complete = [ + # SOME FISH AND HERD ANIMALS + "tuna", + "salmon", + "mackerel", + "trout", + "bream", + "sea-bass", + "sea bass", + "carp", + "cod", + "flounder", + "whiting", + "moose", + # OTHER ODDITIES + "graffiti", + "djinn", + "samuri", + "offspring", + "pence", + "quid", + "hertz", +] + pl_sb_uninflected_s_complete +# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE) + +pl_sb_uninflected_caps = [ + # ALL NATIONALS ENDING IN -ese + "Portuguese", + "Amoyese", + "Borghese", + "Congoese", + "Faroese", + "Foochowese", + "Genevese", + "Genoese", + "Gilbertese", + "Hottentotese", + "Kiplingese", + "Kongoese", + "Lucchese", + "Maltese", + "Nankingese", + "Niasese", + "Pekingese", + "Piedmontese", + "Pistoiese", + "Sarawakese", + "Shavese", + "Vermontese", + "Wenchowese", + "Yengeese", +] + + +pl_sb_uninflected_endings = [ + # UNCOUNTABLE NOUNS + "butter", + "cash", + "furniture", + "information", + # SOME FISH AND HERD ANIMALS + "fish", + "deer", + "sheep", + # ALL NATIONALS ENDING IN -ese + "nese", + "rese", + "lese", + "mese", + # DISEASES + "pox", + # OTHER ODDITIES + "craft", +] + pl_sb_uninflected_s_endings +# SOME WORDS ENDING IN ...s (OFTEN PAIRS TAKEN AS A WHOLE) + + +pl_sb_uninflected_bysize = bysize(pl_sb_uninflected_endings) + + +# SINGULAR WORDS ENDING IN ...s (ALL INFLECT WITH ...es) + +pl_sb_singular_s_complete = [ + "acropolis", + "aegis", + "alias", + "asbestos", + "bathos", + "bias", + "bronchitis", + "bursitis", + "caddis", + "cannabis", + "canvas", + "chaos", + "cosmos", + "dais", + "digitalis", + "epidermis", + "ethos", + "eyas", + "gas", + "glottis", + "hubris", + "ibis", + "lens", + "mantis", + "marquis", + "metropolis", + "pathos", + "pelvis", + "polis", + "rhinoceros", + "sassafras", + "trellis", +] + pl_sb_C_is_ides_complete + + +pl_sb_singular_s_endings = ["ss", "us"] + pl_sb_C_is_ides_endings + +pl_sb_singular_s_bysize = bysize(pl_sb_singular_s_endings) + +si_sb_singular_s_complete = [f"{w}es" for w in pl_sb_singular_s_complete] +si_sb_singular_s_endings = [f"{w}es" for w in pl_sb_singular_s_endings] +si_sb_singular_s_bysize = bysize(si_sb_singular_s_endings) + +pl_sb_singular_s_es = ["[A-Z].*es"] + +pl_sb_singular_s = enclose( + "|".join( + pl_sb_singular_s_complete + + [f".*{w}" for w in pl_sb_singular_s_endings] + + pl_sb_singular_s_es + ) +) + + +# PLURALS ENDING IN uses -> use + + +si_sb_ois_oi_case = ("Bolshois", "Hanois") + +si_sb_uses_use_case = ("Betelgeuses", "Duses", "Meuses", "Syracuses", "Toulouses") + +si_sb_uses_use = ( + "abuses", + "applauses", + "blouses", + "carouses", + "causes", + "chartreuses", + "clauses", + 
"contuses", + "douses", + "excuses", + "fuses", + "grouses", + "hypotenuses", + "masseuses", + "menopauses", + "misuses", + "muses", + "overuses", + "pauses", + "peruses", + "profuses", + "recluses", + "reuses", + "ruses", + "souses", + "spouses", + "suffuses", + "transfuses", + "uses", +) + +si_sb_ies_ie_case = ( + "Addies", + "Aggies", + "Allies", + "Amies", + "Angies", + "Annies", + "Annmaries", + "Archies", + "Arties", + "Aussies", + "Barbies", + "Barries", + "Basies", + "Bennies", + "Bernies", + "Berties", + "Bessies", + "Betties", + "Billies", + "Blondies", + "Bobbies", + "Bonnies", + "Bowies", + "Brandies", + "Bries", + "Brownies", + "Callies", + "Carnegies", + "Carries", + "Cassies", + "Charlies", + "Cheries", + "Christies", + "Connies", + "Curies", + "Dannies", + "Debbies", + "Dixies", + "Dollies", + "Donnies", + "Drambuies", + "Eddies", + "Effies", + "Ellies", + "Elsies", + "Eries", + "Ernies", + "Essies", + "Eugenies", + "Fannies", + "Flossies", + "Frankies", + "Freddies", + "Gillespies", + "Goldies", + "Gracies", + "Guthries", + "Hallies", + "Hatties", + "Hetties", + "Hollies", + "Jackies", + "Jamies", + "Janies", + "Jannies", + "Jeanies", + "Jeannies", + "Jennies", + "Jessies", + "Jimmies", + "Jodies", + "Johnies", + "Johnnies", + "Josies", + "Julies", + "Kalgoorlies", + "Kathies", + "Katies", + "Kellies", + "Kewpies", + "Kristies", + "Laramies", + "Lassies", + "Lauries", + "Leslies", + "Lessies", + "Lillies", + "Lizzies", + "Lonnies", + "Lories", + "Lorries", + "Lotties", + "Louies", + "Mackenzies", + "Maggies", + "Maisies", + "Mamies", + "Marcies", + "Margies", + "Maries", + "Marjories", + "Matties", + "McKenzies", + "Melanies", + "Mickies", + "Millies", + "Minnies", + "Mollies", + "Mounties", + "Nannies", + "Natalies", + "Nellies", + "Netties", + "Ollies", + "Ozzies", + "Pearlies", + "Pottawatomies", + "Reggies", + "Richies", + "Rickies", + "Robbies", + "Ronnies", + "Rosalies", + "Rosemaries", + "Rosies", + "Roxies", + "Rushdies", + "Ruthies", + "Sadies", + "Sallies", + "Sammies", + "Scotties", + "Selassies", + "Sherries", + "Sophies", + "Stacies", + "Stefanies", + "Stephanies", + "Stevies", + "Susies", + "Sylvies", + "Tammies", + "Terries", + "Tessies", + "Tommies", + "Tracies", + "Trekkies", + "Valaries", + "Valeries", + "Valkyries", + "Vickies", + "Virgies", + "Willies", + "Winnies", + "Wylies", + "Yorkies", +) + +si_sb_ies_ie = ( + "aeries", + "baggies", + "belies", + "biggies", + "birdies", + "bogies", + "bonnies", + "boogies", + "bookies", + "bourgeoisies", + "brownies", + "budgies", + "caddies", + "calories", + "camaraderies", + "cockamamies", + "collies", + "cookies", + "coolies", + "cooties", + "coteries", + "crappies", + "curies", + "cutesies", + "dogies", + "eyries", + "floozies", + "footsies", + "freebies", + "genies", + "goalies", + "groupies", + "hies", + "jalousies", + "junkies", + "kiddies", + "laddies", + "lassies", + "lies", + "lingeries", + "magpies", + "menageries", + "mommies", + "movies", + "neckties", + "newbies", + "nighties", + "oldies", + "organdies", + "overlies", + "pies", + "pinkies", + "pixies", + "potpies", + "prairies", + "quickies", + "reveries", + "rookies", + "rotisseries", + "softies", + "sorties", + "species", + "stymies", + "sweeties", + "ties", + "underlies", + "unties", + "veggies", + "vies", + "yuppies", + "zombies", +) + + +si_sb_oes_oe_case = ( + "Chloes", + "Crusoes", + "Defoes", + "Faeroes", + "Ivanhoes", + "Joes", + "McEnroes", + "Moes", + "Monroes", + "Noes", + "Poes", + "Roscoes", + "Tahoes", + "Tippecanoes", + "Zoes", +) + 
+si_sb_oes_oe = ( + "aloes", + "backhoes", + "canoes", + "does", + "floes", + "foes", + "hoes", + "mistletoes", + "oboes", + "pekoes", + "roes", + "sloes", + "throes", + "tiptoes", + "toes", + "woes", +) + +si_sb_z_zes = ("quartzes", "topazes") + +si_sb_zzes_zz = ("buzzes", "fizzes", "frizzes", "razzes") + +si_sb_ches_che_case = ( + "Andromaches", + "Apaches", + "Blanches", + "Comanches", + "Nietzsches", + "Porsches", + "Roches", +) + +si_sb_ches_che = ( + "aches", + "avalanches", + "backaches", + "bellyaches", + "caches", + "cloches", + "creches", + "douches", + "earaches", + "fiches", + "headaches", + "heartaches", + "microfiches", + "niches", + "pastiches", + "psyches", + "quiches", + "stomachaches", + "toothaches", + "tranches", +) + +si_sb_xes_xe = ("annexes", "axes", "deluxes", "pickaxes") + +si_sb_sses_sse_case = ("Hesses", "Jesses", "Larousses", "Matisses") +si_sb_sses_sse = ( + "bouillabaisses", + "crevasses", + "demitasses", + "impasses", + "mousses", + "posses", +) + +si_sb_ves_ve_case = ( + # *[nwl]ives -> [nwl]live + "Clives", + "Palmolives", +) +si_sb_ves_ve = ( + # *[^d]eaves -> eave + "interweaves", + "weaves", + # *[nwl]ives -> [nwl]live + "olives", + # *[eoa]lves -> [eoa]lve + "bivalves", + "dissolves", + "resolves", + "salves", + "twelves", + "valves", +) + + +plverb_special_s = enclose( + "|".join( + [pl_sb_singular_s] + + pl_sb_uninflected_s + + list(pl_sb_irregular_s) + + ["(.*[csx])is", "(.*)ceps", "[A-Z].*s"] + ) +) + +_pl_sb_postfix_adj_defn = ( + ("general", enclose(r"(?!major|lieutenant|brigadier|adjutant|.*star)\S+")), + ("martial", enclose("court")), + ("force", enclose("pound")), +) + +pl_sb_postfix_adj: Iterable[str] = ( + enclose(val + f"(?=(?:-|\\s+){key})") for key, val in _pl_sb_postfix_adj_defn +) + +pl_sb_postfix_adj_stems = f"({'|'.join(pl_sb_postfix_adj)})(.*)" + + +# PLURAL WORDS ENDING IS es GO TO SINGULAR is + +si_sb_es_is = ( + "amanuenses", + "amniocenteses", + "analyses", + "antitheses", + "apotheoses", + "arterioscleroses", + "atheroscleroses", + "axes", + # 'bases', # bases -> basis + "catalyses", + "catharses", + "chasses", + "cirrhoses", + "cocces", + "crises", + "diagnoses", + "dialyses", + "diereses", + "electrolyses", + "emphases", + "exegeses", + "geneses", + "halitoses", + "hydrolyses", + "hypnoses", + "hypotheses", + "hystereses", + "metamorphoses", + "metastases", + "misdiagnoses", + "mitoses", + "mononucleoses", + "narcoses", + "necroses", + "nemeses", + "neuroses", + "oases", + "osmoses", + "osteoporoses", + "paralyses", + "parentheses", + "parthenogeneses", + "periphrases", + "photosyntheses", + "probosces", + "prognoses", + "prophylaxes", + "prostheses", + "preces", + "psoriases", + "psychoanalyses", + "psychokineses", + "psychoses", + "scleroses", + "scolioses", + "sepses", + "silicoses", + "symbioses", + "synopses", + "syntheses", + "taxes", + "telekineses", + "theses", + "thromboses", + "tuberculoses", + "urinalyses", +) + +pl_prep_list = """ + about above across after among around at athwart before behind + below beneath beside besides between betwixt beyond but by + during except for from in into near of off on onto out over + since till to under until unto upon with""".split() + +pl_prep_list_da = pl_prep_list + ["de", "du", "da"] + +pl_prep_bysize = bysize(pl_prep_list_da) + +pl_prep = enclose("|".join(pl_prep_list_da)) + +pl_sb_prep_dual_compound = rf"(.*?)((?:-|\s+)(?:{pl_prep})(?:-|\s+))a(?:-|\s+)(.*)" + + +singular_pronoun_genders = { + "neuter", + "feminine", + "masculine", + "gender-neutral", + "feminine or 
masculine", + "masculine or feminine", +} + +pl_pron_nom = { + # NOMINATIVE REFLEXIVE + "i": "we", + "myself": "ourselves", + "you": "you", + "yourself": "yourselves", + "she": "they", + "herself": "themselves", + "he": "they", + "himself": "themselves", + "it": "they", + "itself": "themselves", + "they": "they", + "themself": "themselves", + # POSSESSIVE + "mine": "ours", + "yours": "yours", + "hers": "theirs", + "his": "theirs", + "its": "theirs", + "theirs": "theirs", +} + +si_pron: Dict[str, Dict[str, Union[str, Dict[str, str]]]] = { + "nom": {v: k for (k, v) in pl_pron_nom.items()} +} +si_pron["nom"]["we"] = "I" + + +pl_pron_acc = { + # ACCUSATIVE REFLEXIVE + "me": "us", + "myself": "ourselves", + "you": "you", + "yourself": "yourselves", + "her": "them", + "herself": "themselves", + "him": "them", + "himself": "themselves", + "it": "them", + "itself": "themselves", + "them": "them", + "themself": "themselves", +} + +pl_pron_acc_keys = enclose("|".join(pl_pron_acc)) +pl_pron_acc_keys_bysize = bysize(pl_pron_acc) + +si_pron["acc"] = {v: k for (k, v) in pl_pron_acc.items()} + +for _thecase, _plur, _gend, _sing in ( + ("nom", "they", "neuter", "it"), + ("nom", "they", "feminine", "she"), + ("nom", "they", "masculine", "he"), + ("nom", "they", "gender-neutral", "they"), + ("nom", "they", "feminine or masculine", "she or he"), + ("nom", "they", "masculine or feminine", "he or she"), + ("nom", "themselves", "neuter", "itself"), + ("nom", "themselves", "feminine", "herself"), + ("nom", "themselves", "masculine", "himself"), + ("nom", "themselves", "gender-neutral", "themself"), + ("nom", "themselves", "feminine or masculine", "herself or himself"), + ("nom", "themselves", "masculine or feminine", "himself or herself"), + ("nom", "theirs", "neuter", "its"), + ("nom", "theirs", "feminine", "hers"), + ("nom", "theirs", "masculine", "his"), + ("nom", "theirs", "gender-neutral", "theirs"), + ("nom", "theirs", "feminine or masculine", "hers or his"), + ("nom", "theirs", "masculine or feminine", "his or hers"), + ("acc", "them", "neuter", "it"), + ("acc", "them", "feminine", "her"), + ("acc", "them", "masculine", "him"), + ("acc", "them", "gender-neutral", "them"), + ("acc", "them", "feminine or masculine", "her or him"), + ("acc", "them", "masculine or feminine", "him or her"), + ("acc", "themselves", "neuter", "itself"), + ("acc", "themselves", "feminine", "herself"), + ("acc", "themselves", "masculine", "himself"), + ("acc", "themselves", "gender-neutral", "themself"), + ("acc", "themselves", "feminine or masculine", "herself or himself"), + ("acc", "themselves", "masculine or feminine", "himself or herself"), +): + try: + si_pron[_thecase][_plur][_gend] = _sing # type: ignore + except TypeError: + si_pron[_thecase][_plur] = {} + si_pron[_thecase][_plur][_gend] = _sing # type: ignore + + +si_pron_acc_keys = enclose("|".join(si_pron["acc"])) +si_pron_acc_keys_bysize = bysize(si_pron["acc"]) + + +def get_si_pron(thecase, word, gender) -> str: + try: + sing = si_pron[thecase][word] + except KeyError: + raise # not a pronoun + try: + return sing[gender] # has several types due to gender + except TypeError: + return cast(str, sing) # answer independent of gender + + +# These dictionaries group verbs by first, second and third person +# conjugations. 
+
+plverb_irregular_pres = {
+    "am": "are",
+    "are": "are",
+    "is": "are",
+    "was": "were",
+    "were": "were",
+    "have": "have",
+    "has": "have",
+    "do": "do",
+    "does": "do",
+}
+
+plverb_ambiguous_pres = {
+    "act": "act",
+    "acts": "act",
+    "blame": "blame",
+    "blames": "blame",
+    "can": "can",
+    "must": "must",
+    "fly": "fly",
+    "flies": "fly",
+    "copy": "copy",
+    "copies": "copy",
+    "drink": "drink",
+    "drinks": "drink",
+    "fight": "fight",
+    "fights": "fight",
+    "fire": "fire",
+    "fires": "fire",
+    "like": "like",
+    "likes": "like",
+    "look": "look",
+    "looks": "look",
+    "make": "make",
+    "makes": "make",
+    "reach": "reach",
+    "reaches": "reach",
+    "run": "run",
+    "runs": "run",
+    "sink": "sink",
+    "sinks": "sink",
+    "sleep": "sleep",
+    "sleeps": "sleep",
+    "view": "view",
+    "views": "view",
+}
+
+plverb_ambiguous_pres_keys = re.compile(
+    rf"^({enclose('|'.join(plverb_ambiguous_pres))})((\s.*)?)$", re.IGNORECASE
+)
+
+
+plverb_irregular_non_pres = (
+    "did",
+    "had",
+    "ate",
+    "made",
+    "put",
+    "spent",
+    "fought",
+    "sank",
+    "gave",
+    "sought",
+    "shall",
+    "could",
+    "ought",
+    "should",
+)
+
+plverb_ambiguous_non_pres = re.compile(
+    r"^((?:thought|saw|bent|will|might|cut))((\s.*)?)$", re.IGNORECASE
+)
+
+# "..oes" -> "..oe" (the rest are "..oes" -> "o")
+
+pl_v_oes_oe = ("canoes", "floes", "oboes", "roes", "throes", "woes")
+pl_v_oes_oe_endings_size4 = ("hoes", "toes")
+pl_v_oes_oe_endings_size5 = ("shoes",)
+
+
+pl_count_zero = ("0", "no", "zero", "nil")
+
+
+pl_count_one = ("1", "a", "an", "one", "each", "every", "this", "that")
+
+pl_adj_special = {"a": "some", "an": "some", "this": "these", "that": "those"}
+
+pl_adj_special_keys = re.compile(
+    rf"^({enclose('|'.join(pl_adj_special))})$", re.IGNORECASE
+)
+
+pl_adj_poss = {
+    "my": "our",
+    "your": "your",
+    "its": "their",
+    "her": "their",
+    "his": "their",
+    "their": "their",
+}
+
+pl_adj_poss_keys = re.compile(rf"^({enclose('|'.join(pl_adj_poss))})$", re.IGNORECASE)
+
+
+# 2. INDEFINITE ARTICLES
+
+# THIS PATTERN MATCHES STRINGS OF CAPITALS STARTING WITH A "VOWEL-SOUND"
+# CONSONANT FOLLOWED BY ANOTHER CONSONANT, AND WHICH ARE NOT LIKELY
+# TO BE REAL WORDS (OH, ALL RIGHT THEN, IT'S JUST MAGIC!)
+
+A_abbrev = re.compile(
+    r"""
+^(?! FJO | [HLMNS]Y. | RY[EO] | SQU
+  | ( F[LR]? | [HL] | MN? | N | RH? | S[CHKLMNPTVW]? | X(YL)?) [AEIOU])
+[FHLMNRSX][A-Z]
+""",
+    re.VERBOSE,
+)
+
+# THIS PATTERN CODES THE BEGINNINGS OF ALL ENGLISH WORDS BEGINNING WITH A
+# 'y' FOLLOWED BY A CONSONANT. ANY OTHER Y-CONSONANT PREFIX THEREFORE
+# IMPLIES AN ABBREVIATION.
+
+A_y_cons = re.compile(r"^(y(b[lor]|cl[ea]|fere|gg|p[ios]|rou|tt))", re.IGNORECASE)
+
+# EXCEPTIONS TO EXCEPTIONS
+
+A_explicit_a = re.compile(r"^((?:unabomber|unanimous|US))", re.IGNORECASE)
+
+A_explicit_an = re.compile(
+    r"^((?:euler|hour(?!i)|heir|honest|hono[ur]|mpeg))", re.IGNORECASE
+)
+
+A_ordinal_an = re.compile(r"^([aefhilmnorsx]-?th)", re.IGNORECASE)
+
+A_ordinal_a = re.compile(r"^([bcdgjkpqtuvwyz]-?th)", re.IGNORECASE)
+
+
+# NUMERICAL INFLECTIONS
+
+nth = {
+    0: "th",
+    1: "st",
+    2: "nd",
+    3: "rd",
+    4: "th",
+    5: "th",
+    6: "th",
+    7: "th",
+    8: "th",
+    9: "th",
+    11: "th",
+    12: "th",
+    13: "th",
+}
+nth_suff = set(nth.values())
+
+ordinal = dict(
+    ty="tieth",
+    one="first",
+    two="second",
+    three="third",
+    five="fifth",
+    eight="eighth",
+    nine="ninth",
+    twelve="twelfth",
+)
+
+ordinal_suff = re.compile(rf"({'|'.join(ordinal)})\Z")
+
+
+# NUMBERS
+
+unit = ["", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
+teen = [
+    "ten",
+    "eleven",
+    "twelve",
+    "thirteen",
+    "fourteen",
+    "fifteen",
+    "sixteen",
+    "seventeen",
+    "eighteen",
+    "nineteen",
+]
+ten = [
+    "",
+    "",
+    "twenty",
+    "thirty",
+    "forty",
+    "fifty",
+    "sixty",
+    "seventy",
+    "eighty",
+    "ninety",
+]
+mill = [
+    " ",
+    " thousand",
+    " million",
+    " billion",
+    " trillion",
+    " quadrillion",
+    " quintillion",
+    " sextillion",
+    " septillion",
+    " octillion",
+    " nonillion",
+    " decillion",
+]
+
+
+# SUPPORT CLASSICAL PLURALIZATIONS
+
+def_classical = dict(
+    all=False, zero=False, herd=False, names=True, persons=False, ancient=False
+)
+
+all_classical = {k: True for k in def_classical}
+no_classical = {k: False for k in def_classical}
+
+
+# Maps strings to built-in constant types
+string_to_constant = {"True": True, "False": False, "None": None}
+
+
+# Pre-compiled regular expression objects
+DOLLAR_DIGITS = re.compile(r"\$(\d+)")
+FUNCTION_CALL = re.compile(r"((\w+)\([^)]*\)*)", re.IGNORECASE)
+PARTITION_WORD = re.compile(r"\A(\s*)(.+?)(\s*)\Z")
+PL_SB_POSTFIX_ADJ_STEMS_RE = re.compile(
+    rf"^(?:{pl_sb_postfix_adj_stems})$", re.IGNORECASE
+)
+PL_SB_PREP_DUAL_COMPOUND_RE = re.compile(
+    rf"^(?:{pl_sb_prep_dual_compound})$", re.IGNORECASE
+)
+DENOMINATOR = re.compile(r"(?P<denominator>.+)( (per|a) .+)")
+PLVERB_SPECIAL_S_RE = re.compile(rf"^({plverb_special_s})$")
+WHITESPACE = re.compile(r"\s")
+ENDS_WITH_S = re.compile(r"^(.*[^s])s$", re.IGNORECASE)
+ENDS_WITH_APOSTROPHE_S = re.compile(r"^(.*)'s?$")
+INDEFINITE_ARTICLE_TEST = re.compile(r"\A(\s*)(?:an?\s+)?(.+?)(\s*)\Z", re.IGNORECASE)
+SPECIAL_AN = re.compile(r"^[aefhilmnorsx]$", re.IGNORECASE)
+SPECIAL_A = re.compile(r"^[bcdgjkpqtuvwyz]$", re.IGNORECASE)
+SPECIAL_ABBREV_AN = re.compile(r"^[aefhilmnorsx][.-]", re.IGNORECASE)
+SPECIAL_ABBREV_A = re.compile(r"^[a-z][.-]", re.IGNORECASE)
+CONSONANTS = re.compile(r"^[^aeiouy]", re.IGNORECASE)
+ARTICLE_SPECIAL_EU = re.compile(r"^e[uw]", re.IGNORECASE)
+ARTICLE_SPECIAL_ONCE = re.compile(r"^onc?e\b", re.IGNORECASE)
+ARTICLE_SPECIAL_ONETIME = re.compile(r"^onetime\b", re.IGNORECASE)
+ARTICLE_SPECIAL_UNIT = re.compile(r"^uni([^nmd]|mo)", re.IGNORECASE)
+ARTICLE_SPECIAL_UBA = re.compile(r"^u[bcfghjkqrst][aeiou]", re.IGNORECASE)
+ARTICLE_SPECIAL_UKR = re.compile(r"^ukr", re.IGNORECASE)
+SPECIAL_CAPITALS = re.compile(r"^U[NK][AIEO]?")
+VOWELS = re.compile(r"^[aeiou]", re.IGNORECASE)
+
+DIGIT_GROUP = re.compile(r"(\d)")
+TWO_DIGITS = re.compile(r"(\d)(\d)")
+THREE_DIGITS = re.compile(r"(\d)(\d)(\d)")
+THREE_DIGITS_WORD = re.compile(r"(\d)(\d)(\d)(?=\D*\Z)")
+TWO_DIGITS_WORD = 
re.compile(r"(\d)(\d)(?=\D*\Z)") +ONE_DIGIT_WORD = re.compile(r"(\d)(?=\D*\Z)") + +FOUR_DIGIT_COMMA = re.compile(r"(\d)(\d{3}(?:,|\Z))") +NON_DIGIT = re.compile(r"\D") +WHITESPACES_COMMA = re.compile(r"\s+,") +COMMA_WORD = re.compile(r", (\S+)\s+\Z") +WHITESPACES = re.compile(r"\s+") + + +PRESENT_PARTICIPLE_REPLACEMENTS = ( + (re.compile(r"ie$"), r"y"), + ( + re.compile(r"ue$"), + r"u", + ), # TODO: isn't ue$ -> u encompassed in the following rule? + (re.compile(r"([auy])e$"), r"\g<1>"), + (re.compile(r"ski$"), r"ski"), + (re.compile(r"[^b]i$"), r""), + (re.compile(r"^(are|were)$"), r"be"), + (re.compile(r"^(had)$"), r"hav"), + (re.compile(r"^(hoe)$"), r"\g<1>"), + (re.compile(r"([^e])e$"), r"\g<1>"), + (re.compile(r"er$"), r"er"), + (re.compile(r"([^aeiou][aeiouy]([bdgmnprst]))$"), r"\g<1>\g<2>"), +) + +DIGIT = re.compile(r"\d") + + +class Words(str): + lowered: str + split_: List[str] + first: str + last: str + + def __init__(self, orig) -> None: + self.lowered = self.lower() + self.split_ = self.split() + self.first = self.split_[0] + self.last = self.split_[-1] + + +Falsish = Any # ideally, falsish would only validate on bool(value) is False + + +_STATIC_TYPE_CHECKING = TYPE_CHECKING +# ^-- Workaround for typeguard AST manipulation: +# https://github.com/agronholm/typeguard/issues/353#issuecomment-1556306554 + +if _STATIC_TYPE_CHECKING: # pragma: no cover + Word = Annotated[str, "String with at least 1 character"] +else: + + class _WordMeta(type): # Too dynamic to be supported by mypy... + def __instancecheck__(self, instance: Any) -> bool: + return isinstance(instance, str) and len(instance) >= 1 + + class Word(metaclass=_WordMeta): # type: ignore[no-redef] + """String with at least 1 character""" + + +class engine: + def __init__(self) -> None: + self.classical_dict = def_classical.copy() + self.persistent_count: Optional[int] = None + self.mill_count = 0 + self.pl_sb_user_defined: List[Optional[Word]] = [] + self.pl_v_user_defined: List[Optional[Word]] = [] + self.pl_adj_user_defined: List[Optional[Word]] = [] + self.si_sb_user_defined: List[Optional[Word]] = [] + self.A_a_user_defined: List[Optional[Word]] = [] + self.thegender = "neuter" + self.__number_args: Optional[Dict[str, str]] = None + + @property + def _number_args(self): + return cast(Dict[str, str], self.__number_args) + + @_number_args.setter + def _number_args(self, val): + self.__number_args = val + + @typechecked + def defnoun(self, singular: Optional[Word], plural: Optional[Word]) -> int: + """ + Set the noun plural of singular to plural. + + """ + self.checkpat(singular) + self.checkpatplural(plural) + self.pl_sb_user_defined.extend((singular, plural)) + self.si_sb_user_defined.extend((plural, singular)) + return 1 + + @typechecked + def defverb( + self, + s1: Optional[Word], + p1: Optional[Word], + s2: Optional[Word], + p2: Optional[Word], + s3: Optional[Word], + p3: Optional[Word], + ) -> int: + """ + Set the verb plurals for s1, s2 and s3 to p1, p2 and p3 respectively. + + Where 1, 2 and 3 represent the 1st, 2nd and 3rd person forms of the verb. + + """ + self.checkpat(s1) + self.checkpat(s2) + self.checkpat(s3) + self.checkpatplural(p1) + self.checkpatplural(p2) + self.checkpatplural(p3) + self.pl_v_user_defined.extend((s1, p1, s2, p2, s3, p3)) + return 1 + + @typechecked + def defadj(self, singular: Optional[Word], plural: Optional[Word]) -> int: + """ + Set the adjective plural of singular to plural. 
+ + """ + self.checkpat(singular) + self.checkpatplural(plural) + self.pl_adj_user_defined.extend((singular, plural)) + return 1 + + @typechecked + def defa(self, pattern: Optional[Word]) -> int: + """ + Define the indefinite article as 'a' for words matching pattern. + + """ + self.checkpat(pattern) + self.A_a_user_defined.extend((pattern, "a")) + return 1 + + @typechecked + def defan(self, pattern: Optional[Word]) -> int: + """ + Define the indefinite article as 'an' for words matching pattern. + + """ + self.checkpat(pattern) + self.A_a_user_defined.extend((pattern, "an")) + return 1 + + def checkpat(self, pattern: Optional[Word]) -> None: + """ + check for errors in a regex pattern + """ + if pattern is None: + return + try: + re.match(pattern, "") + except re.error as err: + raise BadUserDefinedPatternError(pattern) from err + + def checkpatplural(self, pattern: Optional[Word]) -> None: + """ + check for errors in a regex replace pattern + """ + return + + @typechecked + def ud_match(self, word: Word, wordlist: Sequence[Optional[Word]]) -> Optional[str]: + for i in range(len(wordlist) - 2, -2, -2): # backwards through even elements + mo = re.search(rf"^{wordlist[i]}$", word, re.IGNORECASE) + if mo: + if wordlist[i + 1] is None: + return None + pl = DOLLAR_DIGITS.sub( + r"\\1", cast(Word, wordlist[i + 1]) + ) # change $n to \n for expand + return mo.expand(pl) + return None + + def classical(self, **kwargs) -> None: + """ + turn classical mode on and off for various categories + + turn on all classical modes: + classical() + classical(all=True) + + turn on or off specific claassical modes: + e.g. + classical(herd=True) + classical(names=False) + + By default all classical modes are off except names. + + unknown value in args or key in kwargs raises + exception: UnknownClasicalModeError + + """ + if not kwargs: + self.classical_dict = all_classical.copy() + return + if "all" in kwargs: + if kwargs["all"]: + self.classical_dict = all_classical.copy() + else: + self.classical_dict = no_classical.copy() + + for k, v in kwargs.items(): + if k in def_classical: + self.classical_dict[k] = v + else: + raise UnknownClassicalModeError + + def num( + self, count: Optional[int] = None, show: Optional[int] = None + ) -> str: # (;$count,$show) + """ + Set the number to be used in other method calls. + + Returns count. + + Set show to False to return '' instead. + + """ + if count is not None: + try: + self.persistent_count = int(count) + except ValueError as err: + raise BadNumValueError from err + if (show is None) or show: + return str(count) + else: + self.persistent_count = None + return "" + + def gender(self, gender: str) -> None: + """ + set the gender for the singular of plural pronouns + + can be one of: + 'neuter' ('they' -> 'it') + 'feminine' ('they' -> 'she') + 'masculine' ('they' -> 'he') + 'gender-neutral' ('they' -> 'they') + 'feminine or masculine' ('they' -> 'she or he') + 'masculine or feminine' ('they' -> 'he or she') + """ + if gender in singular_pronoun_genders: + self.thegender = gender + else: + raise BadGenderError + + def _get_value_from_ast(self, obj): + """ + Return the value of the ast object. + """ + if isinstance(obj, ast.Num): + return obj.n + elif isinstance(obj, ast.Str): + return obj.s + elif isinstance(obj, ast.List): + return [self._get_value_from_ast(e) for e in obj.elts] + elif isinstance(obj, ast.Tuple): + return tuple([self._get_value_from_ast(e) for e in obj.elts]) + + # None, True and False are NameConstants in Py3.4 and above. 
+ elif isinstance(obj, ast.NameConstant): + return obj.value + + # Probably passed a variable name. + # Or passed a single word without wrapping it in quotes as an argument + # ex: p.inflect("I plural(see)") instead of p.inflect("I plural('see')") + raise NameError(f"name '{obj.id}' is not defined") + + def _string_to_substitute( + self, mo: Match, methods_dict: Dict[str, Callable] + ) -> str: + """ + Return the string to be substituted for the match. + """ + matched_text, f_name = mo.groups() + # matched_text is the complete match string. e.g. plural_noun(cat) + # f_name is the function name. e.g. plural_noun + + # Return matched_text if function name is not in methods_dict + if f_name not in methods_dict: + return matched_text + + # Parse the matched text + a_tree = ast.parse(matched_text) + + # get the args and kwargs from ast objects + args_list = [ + self._get_value_from_ast(a) + for a in a_tree.body[0].value.args # type: ignore[attr-defined] + ] + kwargs_list = { + kw.arg: self._get_value_from_ast(kw.value) + for kw in a_tree.body[0].value.keywords # type: ignore[attr-defined] + } + + # Call the corresponding function + return methods_dict[f_name](*args_list, **kwargs_list) + + # 0. PERFORM GENERAL INFLECTIONS IN A STRING + + @typechecked + def inflect(self, text: Word) -> str: + """ + Perform inflections in a string. + + e.g. inflect('The plural of cat is plural(cat)') returns + 'The plural of cat is cats' + + can use plural, plural_noun, plural_verb, plural_adj, + singular_noun, a, an, no, ordinal, number_to_words, + and prespart + + """ + save_persistent_count = self.persistent_count + + # Dictionary of allowed methods + methods_dict: Dict[str, Callable] = { + "plural": self.plural, + "plural_adj": self.plural_adj, + "plural_noun": self.plural_noun, + "plural_verb": self.plural_verb, + "singular_noun": self.singular_noun, + "a": self.a, + "an": self.a, + "no": self.no, + "ordinal": self.ordinal, + "number_to_words": self.number_to_words, + "present_participle": self.present_participle, + "num": self.num, + } + + # Regular expression to find Python's function call syntax + output = FUNCTION_CALL.sub( + lambda mo: self._string_to_substitute(mo, methods_dict), text + ) + self.persistent_count = save_persistent_count + return output + + # ## PLURAL SUBROUTINES + + def postprocess(self, orig: str, inflected) -> str: + inflected = str(inflected) + if "|" in inflected: + word_options = inflected.split("|") + # When two parts of a noun need to be pluralized + if len(word_options[0].split(" ")) == len(word_options[1].split(" ")): + result = inflected.split("|")[self.classical_dict["all"]].split(" ") + # When only the last part of the noun needs to be pluralized + else: + result = inflected.split(" ") + for index, word in enumerate(result): + if "|" in word: + result[index] = word.split("|")[self.classical_dict["all"]] + else: + result = inflected.split(" ") + + # Try to fix word wise capitalization + for index, word in enumerate(orig.split(" ")): + if word == "I": + # Is this the only word for exceptions like this + # Where the original is fully capitalized + # without 'meaning' capitalization? 
+ # Also this fails to handle a capitalizaion in context + continue + if word.capitalize() == word: + result[index] = result[index].capitalize() + if word == word.upper(): + result[index] = result[index].upper() + return " ".join(result) + + def partition_word(self, text: str) -> Tuple[str, str, str]: + mo = PARTITION_WORD.search(text) + if mo: + return mo.group(1), mo.group(2), mo.group(3) + else: + return "", "", "" + + @typechecked + def plural(self, text: Word, count: Optional[Union[str, int, Any]] = None) -> str: + """ + Return the plural of text. + + If count supplied, then return text if count is one of: + 1, a, an, one, each, every, this, that + + otherwise return the plural. + + Whitespace at the start and end is preserved. + + """ + pre, word, post = self.partition_word(text) + if not word: + return text + plural = self.postprocess( + word, + self._pl_special_adjective(word, count) + or self._pl_special_verb(word, count) + or self._plnoun(word, count), + ) + return f"{pre}{plural}{post}" + + @typechecked + def plural_noun( + self, text: Word, count: Optional[Union[str, int, Any]] = None + ) -> str: + """ + Return the plural of text, where text is a noun. + + If count supplied, then return text if count is one of: + 1, a, an, one, each, every, this, that + + otherwise return the plural. + + Whitespace at the start and end is preserved. + + """ + pre, word, post = self.partition_word(text) + if not word: + return text + plural = self.postprocess(word, self._plnoun(word, count)) + return f"{pre}{plural}{post}" + + @typechecked + def plural_verb( + self, text: Word, count: Optional[Union[str, int, Any]] = None + ) -> str: + """ + Return the plural of text, where text is a verb. + + If count supplied, then return text if count is one of: + 1, a, an, one, each, every, this, that + + otherwise return the plural. + + Whitespace at the start and end is preserved. + + """ + pre, word, post = self.partition_word(text) + if not word: + return text + plural = self.postprocess( + word, + self._pl_special_verb(word, count) or self._pl_general_verb(word, count), + ) + return f"{pre}{plural}{post}" + + @typechecked + def plural_adj( + self, text: Word, count: Optional[Union[str, int, Any]] = None + ) -> str: + """ + Return the plural of text, where text is an adjective. + + If count supplied, then return text if count is one of: + 1, a, an, one, each, every, this, that + + otherwise return the plural. + + Whitespace at the start and end is preserved. + + """ + pre, word, post = self.partition_word(text) + if not word: + return text + plural = self.postprocess(word, self._pl_special_adjective(word, count) or word) + return f"{pre}{plural}{post}" + + @typechecked + def compare(self, word1: Word, word2: Word) -> Union[str, bool]: + """ + compare word1 and word2 for equality regardless of plurality + + return values: + eq - the strings are equal + p:s - word1 is the plural of word2 + s:p - word2 is the plural of word1 + p:p - word1 and word2 are two different plural forms of the one word + False - otherwise + + >>> compare = engine().compare + >>> compare("egg", "eggs") + 's:p' + >>> compare('egg', 'egg') + 'eq' + + Words should not be empty. + + >>> compare('egg', '') + Traceback (most recent call last): + ... 
+ typeguard.TypeCheckError:...is not an instance of inflect.Word + """ + norms = self.plural_noun, self.plural_verb, self.plural_adj + results = (self._plequal(word1, word2, norm) for norm in norms) + return next(filter(None, results), False) + + @typechecked + def compare_nouns(self, word1: Word, word2: Word) -> Union[str, bool]: + """ + compare word1 and word2 for equality regardless of plurality + word1 and word2 are to be treated as nouns + + return values: + eq - the strings are equal + p:s - word1 is the plural of word2 + s:p - word2 is the plural of word1 + p:p - word1 and word2 are two different plural forms of the one word + False - otherwise + + """ + return self._plequal(word1, word2, self.plural_noun) + + @typechecked + def compare_verbs(self, word1: Word, word2: Word) -> Union[str, bool]: + """ + compare word1 and word2 for equality regardless of plurality + word1 and word2 are to be treated as verbs + + return values: + eq - the strings are equal + p:s - word1 is the plural of word2 + s:p - word2 is the plural of word1 + p:p - word1 and word2 are two different plural forms of the one word + False - otherwise + + """ + return self._plequal(word1, word2, self.plural_verb) + + @typechecked + def compare_adjs(self, word1: Word, word2: Word) -> Union[str, bool]: + """ + compare word1 and word2 for equality regardless of plurality + word1 and word2 are to be treated as adjectives + + return values: + eq - the strings are equal + p:s - word1 is the plural of word2 + s:p - word2 is the plural of word1 + p:p - word1 and word2 are two different plural forms of the one word + False - otherwise + + """ + return self._plequal(word1, word2, self.plural_adj) + + @typechecked + def singular_noun( + self, + text: Word, + count: Optional[Union[int, str, Any]] = None, + gender: Optional[str] = None, + ) -> Union[str, Literal[False]]: + """ + Return the singular of text, where text is a plural noun. + + If count supplied, then return the singular if count is one of: + 1, a, an, one, each, every, this, that or if count is None + + otherwise return text unchanged. + + Whitespace at the start and end is preserved. + + >>> p = engine() + >>> p.singular_noun('horses') + 'horse' + >>> p.singular_noun('knights') + 'knight' + + Returns False when a singular noun is passed. 
+ + >>> p.singular_noun('horse') + False + >>> p.singular_noun('knight') + False + >>> p.singular_noun('soldier') + False + + """ + pre, word, post = self.partition_word(text) + if not word: + return text + sing = self._sinoun(word, count=count, gender=gender) + if sing is not False: + plural = self.postprocess(word, sing) + return f"{pre}{plural}{post}" + return False + + def _plequal(self, word1: str, word2: str, pl) -> Union[str, bool]: # noqa: C901 + classval = self.classical_dict.copy() + self.classical_dict = all_classical.copy() + if word1 == word2: + return "eq" + if word1 == pl(word2): + return "p:s" + if pl(word1) == word2: + return "s:p" + self.classical_dict = no_classical.copy() + if word1 == pl(word2): + return "p:s" + if pl(word1) == word2: + return "s:p" + self.classical_dict = classval.copy() + + if pl == self.plural or pl == self.plural_noun: + if self._pl_check_plurals_N(word1, word2): + return "p:p" + if self._pl_check_plurals_N(word2, word1): + return "p:p" + if pl == self.plural or pl == self.plural_adj: + if self._pl_check_plurals_adj(word1, word2): + return "p:p" + return False + + def _pl_reg_plurals(self, pair: str, stems: str, end1: str, end2: str) -> bool: + pattern = rf"({stems})({end1}\|\1{end2}|{end2}\|\1{end1})" + return bool(re.search(pattern, pair)) + + def _pl_check_plurals_N(self, word1: str, word2: str) -> bool: + stem_endings = ( + (pl_sb_C_a_ata, "as", "ata"), + (pl_sb_C_is_ides, "is", "ides"), + (pl_sb_C_a_ae, "s", "e"), + (pl_sb_C_en_ina, "ens", "ina"), + (pl_sb_C_um_a, "ums", "a"), + (pl_sb_C_us_i, "uses", "i"), + (pl_sb_C_on_a, "ons", "a"), + (pl_sb_C_o_i_stems, "os", "i"), + (pl_sb_C_ex_ices, "exes", "ices"), + (pl_sb_C_ix_ices, "ixes", "ices"), + (pl_sb_C_i, "s", "i"), + (pl_sb_C_im, "s", "im"), + (".*eau", "s", "x"), + (".*ieu", "s", "x"), + (".*tri", "xes", "ces"), + (".{2,}[yia]n", "xes", "ges"), + ) + + words = map(Words, (word1, word2)) + pair = "|".join(word.last for word in words) + + return ( + pair in pl_sb_irregular_s.values() + or pair in pl_sb_irregular.values() + or pair in pl_sb_irregular_caps.values() + or any( + self._pl_reg_plurals(pair, stems, end1, end2) + for stems, end1, end2 in stem_endings + ) + ) + + def _pl_check_plurals_adj(self, word1: str, word2: str) -> bool: + word1a = word1[: word1.rfind("'")] if word1.endswith(("'s", "'")) else "" + word2a = word2[: word2.rfind("'")] if word2.endswith(("'s", "'")) else "" + + return ( + bool(word1a) + and bool(word2a) + and ( + self._pl_check_plurals_N(word1a, word2a) + or self._pl_check_plurals_N(word2a, word1a) + ) + ) + + def get_count(self, count: Optional[Union[str, int]] = None) -> Union[str, int]: + if count is None and self.persistent_count is not None: + count = self.persistent_count + + if count is not None: + count = ( + 1 + if ( + (str(count) in pl_count_one) + or ( + self.classical_dict["zero"] + and str(count).lower() in pl_count_zero + ) + ) + else 2 + ) + else: + count = "" + return count + + # @profile + def _plnoun( # noqa: C901 + self, word: str, count: Optional[Union[str, int]] = None + ) -> str: + count = self.get_count(count) + + # DEFAULT TO PLURAL + + if count == 1: + return word + + # HANDLE USER-DEFINED NOUNS + + value = self.ud_match(word, self.pl_sb_user_defined) + if value is not None: + return value + + # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS + + if word == "": + return word + + word = Words(word) + + if word.last.lower() in pl_sb_uninflected_complete: + if len(word.split_) >= 3: + return self._handle_long_compounds(word, count=2) 
or word + return word + + if word in pl_sb_uninflected_caps: + return word + + for k, v in pl_sb_uninflected_bysize.items(): + if word.lowered[-k:] in v: + return word + + if self.classical_dict["herd"] and word.last.lower() in pl_sb_uninflected_herd: + return word + + # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.) + + mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word) + if mo and mo.group(2) != "": + return f"{self._plnoun(mo.group(1), 2)}{mo.group(2)}" + + if " a " in word.lowered or "-a-" in word.lowered: + mo = PL_SB_PREP_DUAL_COMPOUND_RE.search(word) + if mo and mo.group(2) != "" and mo.group(3) != "": + return ( + f"{self._plnoun(mo.group(1), 2)}" + f"{mo.group(2)}" + f"{self._plnoun(mo.group(3))}" + ) + + if len(word.split_) >= 3: + handled_words = self._handle_long_compounds(word, count=2) + if handled_words is not None: + return handled_words + + # only pluralize denominators in units + mo = DENOMINATOR.search(word.lowered) + if mo: + index = len(mo.group("denominator")) + return f"{self._plnoun(word[:index])}{word[index:]}" + + # handle units given in degrees (only accept if + # there is no more than one word following) + # degree Celsius => degrees Celsius but degree + # fahrenheit hour => degree fahrenheit hours + if len(word.split_) >= 2 and word.split_[-2] == "degree": + return " ".join([self._plnoun(word.first)] + word.split_[1:]) + + with contextlib.suppress(ValueError): + return self._handle_prepositional_phrase( + word.lowered, + functools.partial(self._plnoun, count=2), + '-', + ) + + # HANDLE PRONOUNS + + for k, v in pl_pron_acc_keys_bysize.items(): + if word.lowered[-k:] in v: # ends with accusative pronoun + for pk, pv in pl_prep_bysize.items(): + if word.lowered[:pk] in pv: # starts with a prep + if word.lowered.split() == [ + word.lowered[:pk], + word.lowered[-k:], + ]: + # only whitespace in between + return word.lowered[:-k] + pl_pron_acc[word.lowered[-k:]] + + try: + return pl_pron_nom[word.lowered] + except KeyError: + pass + + try: + return pl_pron_acc[word.lowered] + except KeyError: + pass + + # HANDLE ISOLATED IRREGULAR PLURALS + + if word.last in pl_sb_irregular_caps: + llen = len(word.last) + return f"{word[:-llen]}{pl_sb_irregular_caps[word.last]}" + + lowered_last = word.last.lower() + if lowered_last in pl_sb_irregular: + llen = len(lowered_last) + return f"{word[:-llen]}{pl_sb_irregular[lowered_last]}" + + dash_split = word.lowered.split('-') + if (" ".join(dash_split[-2:])).lower() in pl_sb_irregular_compound: + llen = len( + " ".join(dash_split[-2:]) + ) # TODO: what if 2 spaces between these words? 
+ return ( + f"{word[:-llen]}" + f"{pl_sb_irregular_compound[(' '.join(dash_split[-2:])).lower()]}" + ) + + if word.lowered[-3:] == "quy": + return f"{word[:-1]}ies" + + if word.lowered[-6:] == "person": + if self.classical_dict["persons"]: + return f"{word}s" + else: + return f"{word[:-4]}ople" + + # HANDLE FAMILIES OF IRREGULAR PLURALS + + if word.lowered[-3:] == "man": + for k, v in pl_sb_U_man_mans_bysize.items(): + if word.lowered[-k:] in v: + return f"{word}s" + for k, v in pl_sb_U_man_mans_caps_bysize.items(): + if word[-k:] in v: + return f"{word}s" + return f"{word[:-3]}men" + if word.lowered[-5:] == "mouse": + return f"{word[:-5]}mice" + if word.lowered[-5:] == "louse": + v = pl_sb_U_louse_lice_bysize.get(len(word)) + if v and word.lowered in v: + return f"{word[:-5]}lice" + return f"{word}s" + if word.lowered[-5:] == "goose": + return f"{word[:-5]}geese" + if word.lowered[-5:] == "tooth": + return f"{word[:-5]}teeth" + if word.lowered[-4:] == "foot": + return f"{word[:-4]}feet" + if word.lowered[-4:] == "taco": + return f"{word[:-5]}tacos" + + if word.lowered == "die": + return "dice" + + # HANDLE UNASSIMILATED IMPORTS + + if word.lowered[-4:] == "ceps": + return word + if word.lowered[-4:] == "zoon": + return f"{word[:-2]}a" + if word.lowered[-3:] in ("cis", "sis", "xis"): + return f"{word[:-2]}es" + + for lastlet, d, numend, post in ( + ("h", pl_sb_U_ch_chs_bysize, None, "s"), + ("x", pl_sb_U_ex_ices_bysize, -2, "ices"), + ("x", pl_sb_U_ix_ices_bysize, -2, "ices"), + ("m", pl_sb_U_um_a_bysize, -2, "a"), + ("s", pl_sb_U_us_i_bysize, -2, "i"), + ("n", pl_sb_U_on_a_bysize, -2, "a"), + ("a", pl_sb_U_a_ae_bysize, None, "e"), + ): + if word.lowered[-1] == lastlet: # this test to add speed + for k, v in d.items(): + if word.lowered[-k:] in v: + return word[:numend] + post + + # HANDLE INCOMPLETELY ASSIMILATED IMPORTS + + if self.classical_dict["ancient"]: + if word.lowered[-4:] == "trix": + return f"{word[:-1]}ces" + if word.lowered[-3:] in ("eau", "ieu"): + return f"{word}x" + if word.lowered[-3:] in ("ynx", "inx", "anx") and len(word) > 4: + return f"{word[:-1]}ges" + + for lastlet, d, numend, post in ( + ("n", pl_sb_C_en_ina_bysize, -2, "ina"), + ("x", pl_sb_C_ex_ices_bysize, -2, "ices"), + ("x", pl_sb_C_ix_ices_bysize, -2, "ices"), + ("m", pl_sb_C_um_a_bysize, -2, "a"), + ("s", pl_sb_C_us_i_bysize, -2, "i"), + ("s", pl_sb_C_us_us_bysize, None, ""), + ("a", pl_sb_C_a_ae_bysize, None, "e"), + ("a", pl_sb_C_a_ata_bysize, None, "ta"), + ("s", pl_sb_C_is_ides_bysize, -1, "des"), + ("o", pl_sb_C_o_i_bysize, -1, "i"), + ("n", pl_sb_C_on_a_bysize, -2, "a"), + ): + if word.lowered[-1] == lastlet: # this test to add speed + for k, v in d.items(): + if word.lowered[-k:] in v: + return word[:numend] + post + + for d, numend, post in ( + (pl_sb_C_i_bysize, None, "i"), + (pl_sb_C_im_bysize, None, "im"), + ): + for k, v in d.items(): + if word.lowered[-k:] in v: + return word[:numend] + post + + # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SILIBANTS + + if lowered_last in pl_sb_singular_s_complete: + return f"{word}es" + + for k, v in pl_sb_singular_s_bysize.items(): + if word.lowered[-k:] in v: + return f"{word}es" + + if word.lowered[-2:] == "es" and word[0] == word[0].upper(): + return f"{word}es" + + if word.lowered[-1] == "z": + for k, v in pl_sb_z_zes_bysize.items(): + if word.lowered[-k:] in v: + return f"{word}es" + + if word.lowered[-2:-1] != "z": + return f"{word}zes" + + if word.lowered[-2:] == "ze": + for k, v in pl_sb_ze_zes_bysize.items(): + if word.lowered[-k:] in v: + return 
f"{word}s" + + if word.lowered[-2:] in ("ch", "sh", "zz", "ss") or word.lowered[-1] == "x": + return f"{word}es" + + # HANDLE ...f -> ...ves + + if word.lowered[-3:] in ("elf", "alf", "olf"): + return f"{word[:-1]}ves" + if word.lowered[-3:] == "eaf" and word.lowered[-4:-3] != "d": + return f"{word[:-1]}ves" + if word.lowered[-4:] in ("nife", "life", "wife"): + return f"{word[:-2]}ves" + if word.lowered[-3:] == "arf": + return f"{word[:-1]}ves" + + # HANDLE ...y + + if word.lowered[-1] == "y": + if word.lowered[-2:-1] in "aeiou" or len(word) == 1: + return f"{word}s" + + if self.classical_dict["names"]: + if word.lowered[-1] == "y" and word[0] == word[0].upper(): + return f"{word}s" + + return f"{word[:-1]}ies" + + # HANDLE ...o + + if lowered_last in pl_sb_U_o_os_complete: + return f"{word}s" + + for k, v in pl_sb_U_o_os_bysize.items(): + if word.lowered[-k:] in v: + return f"{word}s" + + if word.lowered[-2:] in ("ao", "eo", "io", "oo", "uo"): + return f"{word}s" + + if word.lowered[-1] == "o": + return f"{word}es" + + # OTHERWISE JUST ADD ...s + + return f"{word}s" + + @classmethod + def _handle_prepositional_phrase(cls, phrase, transform, sep): + """ + Given a word or phrase possibly separated by sep, parse out + the prepositional phrase and apply the transform to the word + preceding the prepositional phrase. + + Raise ValueError if the pivot is not found or if at least two + separators are not found. + + >>> engine._handle_prepositional_phrase("man-of-war", str.upper, '-') + 'MAN-of-war' + >>> engine._handle_prepositional_phrase("man of war", str.upper, ' ') + 'MAN of war' + """ + parts = phrase.split(sep) + if len(parts) < 3: + raise ValueError("Cannot handle words with fewer than two separators") + + pivot = cls._find_pivot(parts, pl_prep_list_da) + + transformed = transform(parts[pivot - 1]) or parts[pivot - 1] + return " ".join( + parts[: pivot - 1] + [sep.join([transformed, parts[pivot], ''])] + ) + " ".join(parts[(pivot + 1) :]) + + def _handle_long_compounds(self, word: Words, count: int) -> Union[str, None]: + """ + Handles the plural and singular for compound `Words` that + have three or more words, based on the given count. 
+ + >>> engine()._handle_long_compounds(Words("pair of scissors"), 2) + 'pairs of scissors' + >>> engine()._handle_long_compounds(Words("men beyond hills"), 1) + 'man beyond hills' + """ + inflection = self._sinoun if count == 1 else self._plnoun + solutions = ( # type: ignore + " ".join( + itertools.chain( + leader, + [inflection(cand, count), prep], # type: ignore + trailer, + ) + ) + for leader, (cand, prep), trailer in windowed_complete(word.split_, 2) + if prep in pl_prep_list_da # type: ignore + ) + return next(solutions, None) + + @staticmethod + def _find_pivot(words, candidates): + pivots = ( + index for index in range(1, len(words) - 1) if words[index] in candidates + ) + try: + return next(pivots) + except StopIteration: + raise ValueError("No pivot found") from None + + def _pl_special_verb( # noqa: C901 + self, word: str, count: Optional[Union[str, int]] = None + ) -> Union[str, bool]: + if self.classical_dict["zero"] and str(count).lower() in pl_count_zero: + return False + count = self.get_count(count) + + if count == 1: + return word + + # HANDLE USER-DEFINED VERBS + + value = self.ud_match(word, self.pl_v_user_defined) + if value is not None: + return value + + # HANDLE IRREGULAR PRESENT TENSE (SIMPLE AND COMPOUND) + + try: + words = Words(word) + except IndexError: + return False # word is '' + + if words.first in plverb_irregular_pres: + return f"{plverb_irregular_pres[words.first]}{words[len(words.first) :]}" + + # HANDLE IRREGULAR FUTURE, PRETERITE AND PERFECT TENSES + + if words.first in plverb_irregular_non_pres: + return word + + # HANDLE PRESENT NEGATIONS (SIMPLE AND COMPOUND) + + if words.first.endswith("n't") and words.first[:-3] in plverb_irregular_pres: + return ( + f"{plverb_irregular_pres[words.first[:-3]]}n't" + f"{words[len(words.first) :]}" + ) + + if words.first.endswith("n't"): + return word + + # HANDLE SPECIAL CASES + + mo = PLVERB_SPECIAL_S_RE.search(word) + if mo: + return False + if WHITESPACE.search(word): + return False + + if words.lowered == "quizzes": + return "quiz" + + # HANDLE STANDARD 3RD PERSON (CHOP THE ...(e)s OFF SINGLE WORDS) + + if ( + words.lowered[-4:] in ("ches", "shes", "zzes", "sses") + or words.lowered[-3:] == "xes" + ): + return words[:-2] + + if words.lowered[-3:] == "ies" and len(words) > 3: + return words.lowered[:-3] + "y" + + if ( + words.last.lower() in pl_v_oes_oe + or words.lowered[-4:] in pl_v_oes_oe_endings_size4 + or words.lowered[-5:] in pl_v_oes_oe_endings_size5 + ): + return words[:-1] + + if words.lowered.endswith("oes") and len(words) > 3: + return words.lowered[:-2] + + mo = ENDS_WITH_S.search(words) + if mo: + return mo.group(1) + + # OTHERWISE, A REGULAR VERB (HANDLE ELSEWHERE) + + return False + + def _pl_general_verb( + self, word: str, count: Optional[Union[str, int]] = None + ) -> str: + count = self.get_count(count) + + if count == 1: + return word + + # HANDLE AMBIGUOUS PRESENT TENSES (SIMPLE AND COMPOUND) + + mo = plverb_ambiguous_pres_keys.search(word) + if mo: + return f"{plverb_ambiguous_pres[mo.group(1).lower()]}{mo.group(2)}" + + # HANDLE AMBIGUOUS PRETERITE AND PERFECT TENSES + + mo = plverb_ambiguous_non_pres.search(word) + if mo: + return word + + # OTHERWISE, 1st OR 2ND PERSON IS UNINFLECTED + + return word + + def _pl_special_adjective( + self, word: str, count: Optional[Union[str, int]] = None + ) -> Union[str, bool]: + count = self.get_count(count) + + if count == 1: + return word + + # HANDLE USER-DEFINED ADJECTIVES + + value = self.ud_match(word, self.pl_adj_user_defined) + if value is 
not None: + return value + + # HANDLE KNOWN CASES + + mo = pl_adj_special_keys.search(word) + if mo: + return pl_adj_special[mo.group(1).lower()] + + # HANDLE POSSESSIVES + + mo = pl_adj_poss_keys.search(word) + if mo: + return pl_adj_poss[mo.group(1).lower()] + + mo = ENDS_WITH_APOSTROPHE_S.search(word) + if mo: + pl = self.plural_noun(mo.group(1)) + trailing_s = "" if pl[-1] == "s" else "s" + return f"{pl}'{trailing_s}" + + # OTHERWISE, NO IDEA + + return False + + # @profile + def _sinoun( # noqa: C901 + self, + word: str, + count: Optional[Union[str, int]] = None, + gender: Optional[str] = None, + ) -> Union[str, bool]: + count = self.get_count(count) + + # DEFAULT TO PLURAL + + if count == 2: + return word + + # SET THE GENDER + + try: + if gender is None: + gender = self.thegender + elif gender not in singular_pronoun_genders: + raise BadGenderError + except (TypeError, IndexError) as err: + raise BadGenderError from err + + # HANDLE USER-DEFINED NOUNS + + value = self.ud_match(word, self.si_sb_user_defined) + if value is not None: + return value + + # HANDLE EMPTY WORD, SINGULAR COUNT AND UNINFLECTED PLURALS + + if word == "": + return word + + if word in si_sb_ois_oi_case: + return word[:-1] + + words = Words(word) + + if words.last.lower() in pl_sb_uninflected_complete: + if len(words.split_) >= 3: + return self._handle_long_compounds(words, count=1) or word + return word + + if word in pl_sb_uninflected_caps: + return word + + for k, v in pl_sb_uninflected_bysize.items(): + if words.lowered[-k:] in v: + return word + + if self.classical_dict["herd"] and words.last.lower() in pl_sb_uninflected_herd: + return word + + if words.last.lower() in pl_sb_C_us_us: + return word if self.classical_dict["ancient"] else False + + # HANDLE COMPOUNDS ("Governor General", "mother-in-law", "aide-de-camp", ETC.) + + mo = PL_SB_POSTFIX_ADJ_STEMS_RE.search(word) + if mo and mo.group(2) != "": + return f"{self._sinoun(mo.group(1), 1, gender=gender)}{mo.group(2)}" + + with contextlib.suppress(ValueError): + return self._handle_prepositional_phrase( + words.lowered, + functools.partial(self._sinoun, count=1, gender=gender), + ' ', + ) + + with contextlib.suppress(ValueError): + return self._handle_prepositional_phrase( + words.lowered, + functools.partial(self._sinoun, count=1, gender=gender), + '-', + ) + + # HANDLE PRONOUNS + + for k, v in si_pron_acc_keys_bysize.items(): + if words.lowered[-k:] in v: # ends with accusative pronoun + for pk, pv in pl_prep_bysize.items(): + if words.lowered[:pk] in pv: # starts with a prep + if words.lowered.split() == [ + words.lowered[:pk], + words.lowered[-k:], + ]: + # only whitespace in between + return words.lowered[:-k] + get_si_pron( + "acc", words.lowered[-k:], gender + ) + + try: + return get_si_pron("nom", words.lowered, gender) + except KeyError: + pass + + try: + return get_si_pron("acc", words.lowered, gender) + except KeyError: + pass + + # HANDLE ISOLATED IRREGULAR PLURALS + + if words.last in si_sb_irregular_caps: + llen = len(words.last) + return f"{word[:-llen]}{si_sb_irregular_caps[words.last]}" + + if words.last.lower() in si_sb_irregular: + llen = len(words.last.lower()) + return f"{word[:-llen]}{si_sb_irregular[words.last.lower()]}" + + dash_split = words.lowered.split("-") + if (" ".join(dash_split[-2:])).lower() in si_sb_irregular_compound: + llen = len( + " ".join(dash_split[-2:]) + ) # TODO: what if 2 spaces between these words? 
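+            # (Editor's sketch, not part of the vendored source.)  Here `llen`
+            # is the length of the last two dash-separated words joined by a
+            # single space, i.e. the length of the irregular tail about to be
+            # swapped for its singular form.  End-to-end, via the public API:
+            #
+            #   import inflect
+            #   p = inflect.engine()
+            #   p.singular_noun("geese")  # -> 'goose'  (family of irregulars)
+            #   p.singular_noun("dice")   # -> 'die'    (isolated irregular)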
+            return "{}{}".format(
+                word[:-llen],
+                si_sb_irregular_compound[(" ".join(dash_split[-2:])).lower()],
+            )
+
+        if words.lowered[-5:] == "quies":
+            return word[:-3] + "y"
+
+        if words.lowered[-7:] == "persons":
+            return word[:-1]
+        if words.lowered[-6:] == "people":
+            return word[:-4] + "rson"
+
+        # HANDLE FAMILIES OF IRREGULAR PLURALS
+
+        if words.lowered[-4:] == "mans":
+            for k, v in si_sb_U_man_mans_bysize.items():
+                if words.lowered[-k:] in v:
+                    return word[:-1]
+            for k, v in si_sb_U_man_mans_caps_bysize.items():
+                if word[-k:] in v:
+                    return word[:-1]
+        if words.lowered[-3:] == "men":
+            return word[:-3] + "man"
+        if words.lowered[-4:] == "mice":
+            return word[:-4] + "mouse"
+        if words.lowered[-4:] == "lice":
+            v = si_sb_U_louse_lice_bysize.get(len(word))
+            if v and words.lowered in v:
+                return word[:-4] + "louse"
+        if words.lowered[-5:] == "geese":
+            return word[:-5] + "goose"
+        if words.lowered[-5:] == "teeth":
+            return word[:-5] + "tooth"
+        if words.lowered[-4:] == "feet":
+            return word[:-4] + "foot"
+
+        if words.lowered == "dice":
+            return "die"
+
+        # HANDLE UNASSIMILATED IMPORTS
+
+        if words.lowered[-4:] == "ceps":
+            return word
+        if words.lowered[-3:] == "zoa":
+            return word[:-1] + "on"
+
+        for lastlet, d, unass_numend, post in (
+            ("s", si_sb_U_ch_chs_bysize, -1, ""),
+            ("s", si_sb_U_ex_ices_bysize, -4, "ex"),
+            ("s", si_sb_U_ix_ices_bysize, -4, "ix"),
+            ("a", si_sb_U_um_a_bysize, -1, "um"),
+            ("i", si_sb_U_us_i_bysize, -1, "us"),
+            ("a", si_sb_U_on_a_bysize, -1, "on"),
+            ("e", si_sb_U_a_ae_bysize, -1, ""),
+        ):
+            if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                for k, v in d.items():
+                    if words.lowered[-k:] in v:
+                        return word[:unass_numend] + post
+
+        # HANDLE INCOMPLETELY ASSIMILATED IMPORTS
+
+        if self.classical_dict["ancient"]:
+            if words.lowered[-6:] == "trices":
+                return word[:-3] + "x"
+            if words.lowered[-4:] in ("eaux", "ieux"):
+                return word[:-1]
+            if words.lowered[-5:] in ("ynges", "inges", "anges") and len(word) > 6:
+                return word[:-3] + "x"
+
+            for lastlet, d, class_numend, post in (
+                ("a", si_sb_C_en_ina_bysize, -3, "en"),
+                ("s", si_sb_C_ex_ices_bysize, -4, "ex"),
+                ("s", si_sb_C_ix_ices_bysize, -4, "ix"),
+                ("a", si_sb_C_um_a_bysize, -1, "um"),
+                ("i", si_sb_C_us_i_bysize, -1, "us"),
+                ("s", pl_sb_C_us_us_bysize, None, ""),
+                ("e", si_sb_C_a_ae_bysize, -1, ""),
+                ("a", si_sb_C_a_ata_bysize, -2, ""),
+                ("s", si_sb_C_is_ides_bysize, -3, "s"),
+                ("i", si_sb_C_o_i_bysize, -1, "o"),
+                ("a", si_sb_C_on_a_bysize, -1, "on"),
+                ("m", si_sb_C_im_bysize, -2, ""),
+                ("i", si_sb_C_i_bysize, -1, ""),
+            ):
+                if words.lowered[-1] == lastlet:  # check the last letter first, for speed
+                    for k, v in d.items():
+                        if words.lowered[-k:] in v:
+                            return word[:class_numend] + post
+
+        # HANDLE PLURALS ENDING IN uses -> use
+
+        if (
+            words.lowered[-6:] == "houses"
+            or word in si_sb_uses_use_case
+            or words.last.lower() in si_sb_uses_use
+        ):
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN ies -> ie
+
+        if word in si_sb_ies_ie_case or words.last.lower() in si_sb_ies_ie:
+            return word[:-1]
+
+        # HANDLE PLURALS ENDING IN oes -> oe
+
+        if (
+            words.lowered[-5:] == "shoes"
+            or word in si_sb_oes_oe_case
+            or words.last.lower() in si_sb_oes_oe
+        ):
+            return word[:-1]
+
+        # HANDLE SINGULAR NOUNS ENDING IN ...s OR OTHER SIBILANTS
+
+        if word in si_sb_sses_sse_case or words.last.lower() in si_sb_sses_sse:
+            return word[:-1]
+
+        if words.last.lower() in si_sb_singular_s_complete:
+            return word[:-2]
+
+        for k, v in si_sb_singular_s_bysize.items():
+            if words.lowered[-k:] in v:
+                return word[:-2]
+
+        if
words.lowered[-4:] == "eses" and word[0] == word[0].upper(): + return word[:-2] + + if words.last.lower() in si_sb_z_zes: + return word[:-2] + + if words.last.lower() in si_sb_zzes_zz: + return word[:-2] + + if words.lowered[-4:] == "zzes": + return word[:-3] + + if word in si_sb_ches_che_case or words.last.lower() in si_sb_ches_che: + return word[:-1] + + if words.lowered[-4:] in ("ches", "shes"): + return word[:-2] + + if words.last.lower() in si_sb_xes_xe: + return word[:-1] + + if words.lowered[-3:] == "xes": + return word[:-2] + + # HANDLE ...f -> ...ves + + if word in si_sb_ves_ve_case or words.last.lower() in si_sb_ves_ve: + return word[:-1] + + if words.lowered[-3:] == "ves": + if words.lowered[-5:-3] in ("el", "al", "ol"): + return word[:-3] + "f" + if words.lowered[-5:-3] == "ea" and word[-6:-5] != "d": + return word[:-3] + "f" + if words.lowered[-5:-3] in ("ni", "li", "wi"): + return word[:-3] + "fe" + if words.lowered[-5:-3] == "ar": + return word[:-3] + "f" + + # HANDLE ...y + + if words.lowered[-2:] == "ys": + if len(words.lowered) > 2 and words.lowered[-3] in "aeiou": + return word[:-1] + + if self.classical_dict["names"]: + if words.lowered[-2:] == "ys" and word[0] == word[0].upper(): + return word[:-1] + + if words.lowered[-3:] == "ies": + return word[:-3] + "y" + + # HANDLE ...o + + if words.lowered[-2:] == "os": + if words.last.lower() in si_sb_U_o_os_complete: + return word[:-1] + + for k, v in si_sb_U_o_os_bysize.items(): + if words.lowered[-k:] in v: + return word[:-1] + + if words.lowered[-3:] in ("aos", "eos", "ios", "oos", "uos"): + return word[:-1] + + if words.lowered[-3:] == "oes": + return word[:-2] + + # UNASSIMILATED IMPORTS FINAL RULE + + if word in si_sb_es_is: + return word[:-2] + "is" + + # OTHERWISE JUST REMOVE ...s + + if words.lowered[-1] == "s": + return word[:-1] + + # COULD NOT FIND SINGULAR + + return False + + # ADJECTIVES + + @typechecked + def a(self, text: Word, count: Optional[Union[int, str, Any]] = 1) -> str: + """ + Return the appropriate indefinite article followed by text. + + The indefinite article is either 'a' or 'an'. + + If count is not one, then return count followed by text + instead of 'a' or 'an'. + + Whitespace at the start and end is preserved. + + """ + mo = INDEFINITE_ARTICLE_TEST.search(text) + if mo: + word = mo.group(2) + if not word: + return text + pre = mo.group(1) + post = mo.group(3) + result = self._indef_article(word, count) + return f"{pre}{result}{post}" + return "" + + an = a + + _indef_article_cases = ( + # HANDLE ORDINAL FORMS + (A_ordinal_a, "a"), + (A_ordinal_an, "an"), + # HANDLE SPECIAL CASES + (A_explicit_an, "an"), + (SPECIAL_AN, "an"), + (SPECIAL_A, "a"), + # HANDLE ABBREVIATIONS + (A_abbrev, "an"), + (SPECIAL_ABBREV_AN, "an"), + (SPECIAL_ABBREV_A, "a"), + # HANDLE CONSONANTS + (CONSONANTS, "a"), + # HANDLE SPECIAL VOWEL-FORMS + (ARTICLE_SPECIAL_EU, "a"), + (ARTICLE_SPECIAL_ONCE, "a"), + (ARTICLE_SPECIAL_ONETIME, "a"), + (ARTICLE_SPECIAL_UNIT, "a"), + (ARTICLE_SPECIAL_UBA, "a"), + (ARTICLE_SPECIAL_UKR, "a"), + (A_explicit_a, "a"), + # HANDLE SPECIAL CAPITALS + (SPECIAL_CAPITALS, "a"), + # HANDLE VOWELS + (VOWELS, "an"), + # HANDLE y... + # (BEFORE CERTAIN CONSONANTS IMPLIES (UNNATURALIZED) "i.." 
SOUND)
+        (A_y_cons, "an"),
+    )
+
+    def _indef_article(self, word: str, count: Union[int, str, Any]) -> str:
+        mycount = self.get_count(count)
+
+        if mycount != 1:
+            return f"{count} {word}"
+
+        # HANDLE USER-DEFINED VARIANTS
+
+        value = self.ud_match(word, self.A_a_user_defined)
+        if value is not None:
+            return f"{value} {word}"
+
+        matches = (
+            f'{article} {word}'
+            for regexen, article in self._indef_article_cases
+            if regexen.search(word)
+        )
+
+        # OTHERWISE, GUESS "a"
+        fallback = f'a {word}'
+        return next(matches, fallback)
+
+    # 2. TRANSLATE ZERO-QUANTIFIED $word TO "no plural($word)"
+
+    @typechecked
+    def no(self, text: Word, count: Optional[Union[int, str]] = None) -> str:
+        """
+        If count is 0, no, zero or nil, return 'no' followed by the plural
+        of text.
+
+        If count is one of:
+            1, a, an, one, each, every, this, that
+        return count followed by text.
+
+        Otherwise return count followed by the plural of text.
+
+        In the return value, count is always followed by a space.
+
+        Whitespace at the start and end is preserved.
+
+        """
+        if count is None and self.persistent_count is not None:
+            count = self.persistent_count
+
+        if count is None:
+            count = 0
+        mo = PARTITION_WORD.search(text)
+        if mo:
+            pre = mo.group(1)
+            word = mo.group(2)
+            post = mo.group(3)
+        else:
+            pre = ""
+            word = ""
+            post = ""
+
+        if str(count).lower() in pl_count_zero:
+            count = 'no'
+        return f"{pre}{count} {self.plural(word, count)}{post}"
+
+    # PARTICIPLES
+
+    @typechecked
+    def present_participle(self, word: Word) -> str:
+        """
+        Return the present participle for word.
+
+        word is the 3rd person singular verb.
+
+        """
+        plv = self.plural_verb(word, 2)
+        ans = plv
+
+        for regexen, repl in PRESENT_PARTICIPLE_REPLACEMENTS:
+            ans, num = regexen.subn(repl, plv)
+            if num:
+                return f"{ans}ing"
+        return f"{ans}ing"
+
+    # NUMERICAL INFLECTIONS
+
+    @typechecked
+    def ordinal(self, num: Union[Number, Word]) -> str:
+        """
+        Return the ordinal of num.
+
+        >>> ordinal = engine().ordinal
+        >>> ordinal(1)
+        '1st'
+        >>> ordinal('one')
+        'first'
+        """
+        if DIGIT.match(str(num)):
+            if isinstance(num, (float, int)) and int(num) == num:
+                n = int(num)
+            else:
+                if "."
in str(num): + try: + # numbers after decimal, + # so only need last one for ordinal + n = int(str(num)[-1]) + + except ValueError: # ends with '.', so need to use whole string + n = int(str(num)[:-1]) + else: + n = int(num) # type: ignore + try: + post = nth[n % 100] + except KeyError: + post = nth[n % 10] + return f"{num}{post}" + else: + return self._sub_ord(num) + + def millfn(self, ind: int = 0) -> str: + if ind > len(mill) - 1: + raise NumOutOfRangeError + return mill[ind] + + def unitfn(self, units: int, mindex: int = 0) -> str: + return f"{unit[units]}{self.millfn(mindex)}" + + def tenfn(self, tens, units, mindex=0) -> str: + if tens != 1: + tens_part = ten[tens] + if tens and units: + hyphen = "-" + else: + hyphen = "" + unit_part = unit[units] + mill_part = self.millfn(mindex) + return f"{tens_part}{hyphen}{unit_part}{mill_part}" + return f"{teen[units]}{mill[mindex]}" + + def hundfn(self, hundreds: int, tens: int, units: int, mindex: int) -> str: + if hundreds: + andword = f" {self._number_args['andword']} " if tens or units else "" + # use unit not unitfn as simpler + return ( + f"{unit[hundreds]} hundred{andword}" + f"{self.tenfn(tens, units)}{self.millfn(mindex)}, " + ) + if tens or units: + return f"{self.tenfn(tens, units)}{self.millfn(mindex)}, " + return "" + + def group1sub(self, mo: Match) -> str: + units = int(mo.group(1)) + if units == 1: + return f" {self._number_args['one']}, " + elif units: + return f"{unit[units]}, " + else: + return f" {self._number_args['zero']}, " + + def group1bsub(self, mo: Match) -> str: + units = int(mo.group(1)) + if units: + return f"{unit[units]}, " + else: + return f" {self._number_args['zero']}, " + + def group2sub(self, mo: Match) -> str: + tens = int(mo.group(1)) + units = int(mo.group(2)) + if tens: + return f"{self.tenfn(tens, units)}, " + if units: + return f" {self._number_args['zero']} {unit[units]}, " + return f" {self._number_args['zero']} {self._number_args['zero']}, " + + def group3sub(self, mo: Match) -> str: + hundreds = int(mo.group(1)) + tens = int(mo.group(2)) + units = int(mo.group(3)) + if hundreds == 1: + hunword = f" {self._number_args['one']}" + elif hundreds: + hunword = str(unit[hundreds]) + else: + hunword = f" {self._number_args['zero']}" + if tens: + tenword = self.tenfn(tens, units) + elif units: + tenword = f" {self._number_args['zero']} {unit[units]}" + else: + tenword = f" {self._number_args['zero']} {self._number_args['zero']}" + return f"{hunword} {tenword}, " + + def hundsub(self, mo: Match) -> str: + ret = self.hundfn( + int(mo.group(1)), int(mo.group(2)), int(mo.group(3)), self.mill_count + ) + self.mill_count += 1 + return ret + + def tensub(self, mo: Match) -> str: + return f"{self.tenfn(int(mo.group(1)), int(mo.group(2)), self.mill_count)}, " + + def unitsub(self, mo: Match) -> str: + return f"{self.unitfn(int(mo.group(1)), self.mill_count)}, " + + def enword(self, num: str, group: int) -> str: + # import pdb + # pdb.set_trace() + + if group == 1: + num = DIGIT_GROUP.sub(self.group1sub, num) + elif group == 2: + num = TWO_DIGITS.sub(self.group2sub, num) + num = DIGIT_GROUP.sub(self.group1bsub, num, 1) + elif group == 3: + num = THREE_DIGITS.sub(self.group3sub, num) + num = TWO_DIGITS.sub(self.group2sub, num, 1) + num = DIGIT_GROUP.sub(self.group1sub, num, 1) + elif int(num) == 0: + num = self._number_args["zero"] + elif int(num) == 1: + num = self._number_args["one"] + else: + num = num.lstrip().lstrip("0") + self.mill_count = 0 + # surely there's a better way to do the next bit + mo = 
THREE_DIGITS_WORD.search(num) + while mo: + num = THREE_DIGITS_WORD.sub(self.hundsub, num, 1) + mo = THREE_DIGITS_WORD.search(num) + num = TWO_DIGITS_WORD.sub(self.tensub, num, 1) + num = ONE_DIGIT_WORD.sub(self.unitsub, num, 1) + return num + + @staticmethod + def _sub_ord(val): + new = ordinal_suff.sub(lambda match: ordinal[match.group(1)], val) + return new + "th" * (new == val) + + @classmethod + def _chunk_num(cls, num, decimal, group): + if decimal: + max_split = -1 if group != 0 else 1 + chunks = num.split(".", max_split) + else: + chunks = [num] + return cls._remove_last_blank(chunks) + + @staticmethod + def _remove_last_blank(chunks): + """ + Remove the last item from chunks if it's a blank string. + + Return the resultant chunks and whether the last item was removed. + """ + removed = chunks[-1] == "" + result = chunks[:-1] if removed else chunks + return result, removed + + @staticmethod + def _get_sign(num): + return {'+': 'plus', '-': 'minus'}.get(num.lstrip()[0], '') + + @typechecked + def number_to_words( # noqa: C901 + self, + num: Union[Number, Word], + wantlist: bool = False, + group: int = 0, + comma: Union[Falsish, str] = ",", + andword: str = "and", + zero: str = "zero", + one: str = "one", + decimal: Union[Falsish, str] = "point", + threshold: Optional[int] = None, + ) -> Union[str, List[str]]: + """ + Return a number in words. + + group = 1, 2 or 3 to group numbers before turning into words + comma: define comma + + andword: + word for 'and'. Can be set to ''. + e.g. "one hundred and one" vs "one hundred one" + + zero: word for '0' + one: word for '1' + decimal: word for decimal point + threshold: numbers above threshold not turned into words + + parameters not remembered from last call. Departure from Perl version. + """ + self._number_args = {"andword": andword, "zero": zero, "one": one} + num = str(num) + + # Handle "stylistic" conversions (up to a given threshold)... 
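+        # (Editor's sketch, not part of the vendored source.)  The documented
+        # parameters in action, assuming the public engine API:
+        #
+        #   import inflect
+        #   p = inflect.engine()
+        #   p.number_to_words(1234)
+        #   # -> 'one thousand, two hundred and thirty-four'
+        #   p.number_to_words(1234, andword='')
+        #   # -> 'one thousand, two hundred thirty-four'
+        #   p.number_to_words(1234, threshold=100)
+        #   # -> '1,234'  (above the threshold, digits are kept and comma-grouped)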
+ if threshold is not None and float(num) > threshold: + spnum = num.split(".", 1) + while comma: + (spnum[0], n) = FOUR_DIGIT_COMMA.subn(r"\1,\2", spnum[0]) + if n == 0: + break + try: + return f"{spnum[0]}.{spnum[1]}" + except IndexError: + return str(spnum[0]) + + if group < 0 or group > 3: + raise BadChunkingOptionError + + sign = self._get_sign(num) + + if num in nth_suff: + num = zero + + myord = num[-2:] in nth_suff + if myord: + num = num[:-2] + + chunks, finalpoint = self._chunk_num(num, decimal, group) + + loopstart = chunks[0] == "" + first: bool | None = not loopstart + + def _handle_chunk(chunk): + nonlocal first + + # remove all non numeric \D + chunk = NON_DIGIT.sub("", chunk) + if chunk == "": + chunk = "0" + + if group == 0 and not first: + chunk = self.enword(chunk, 1) + else: + chunk = self.enword(chunk, group) + + if chunk[-2:] == ", ": + chunk = chunk[:-2] + chunk = WHITESPACES_COMMA.sub(",", chunk) + + if group == 0 and first: + chunk = COMMA_WORD.sub(f" {andword} \\1", chunk) + chunk = WHITESPACES.sub(" ", chunk) + # chunk = re.sub(r"(\A\s|\s\Z)", self.blankfn, chunk) + chunk = chunk.strip() + if first: + first = None + return chunk + + chunks[loopstart:] = map(_handle_chunk, chunks[loopstart:]) + + numchunks = [] + if first != 0: + numchunks = chunks[0].split(f"{comma} ") + + if myord and numchunks: + numchunks[-1] = self._sub_ord(numchunks[-1]) + + for chunk in chunks[1:]: + numchunks.append(decimal) + numchunks.extend(chunk.split(f"{comma} ")) + + if finalpoint: + numchunks.append(decimal) + + if wantlist: + return [sign] * bool(sign) + numchunks + + signout = f"{sign} " if sign else "" + valout = ( + ', '.join(numchunks) + if group + else ''.join(self._render(numchunks, decimal, comma)) + ) + return signout + valout + + @staticmethod + def _render(chunks, decimal, comma): + first_item = chunks.pop(0) + yield first_item + first = decimal is None or not first_item.endswith(decimal) + for nc in chunks: + if nc == decimal: + first = False + elif first: + yield comma + yield f" {nc}" + + @typechecked + def join( + self, + words: Optional[Sequence[Word]], + sep: Optional[str] = None, + sep_spaced: bool = True, + final_sep: Optional[str] = None, + conj: str = "and", + conj_spaced: bool = True, + ) -> str: + """ + Join words into a list. + + e.g. join(['ant', 'bee', 'fly']) returns 'ant, bee, and fly' + + options: + conj: replacement for 'and' + sep: separator. default ',', unless ',' is in the list then ';' + final_sep: final separator. default ',', unless ',' is in the list then ';' + conj_spaced: boolean. 
Should conj have spaces around it + + """ + if not words: + return "" + if len(words) == 1: + return words[0] + + if conj_spaced: + if conj == "": + conj = " " + else: + conj = f" {conj} " + + if len(words) == 2: + return f"{words[0]}{conj}{words[1]}" + + if sep is None: + if "," in "".join(words): + sep = ";" + else: + sep = "," + if final_sep is None: + final_sep = sep + + final_sep = f"{final_sep}{conj}" + + if sep_spaced: + sep += " " + + return f"{sep.join(words[0:-1])}{final_sep}{words[-1]}" diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__pycache__/__init__.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee520659aaf06e82617c7cb63c3679678b757319 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/__pycache__/__init__.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/__init__.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..682891aff1218352188a55093d8017060917da6e Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/__init__.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/py38.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/py38.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0981a1f6069ebb094599470a2ee70c40391ce825 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/__pycache__/py38.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/py38.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/py38.py new file mode 100644 index 0000000000000000000000000000000000000000..a2d01bd98f4ae3d9236d0e6ec3b89faa9ca706ae --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/compat/py38.py @@ -0,0 +1,7 @@ +import sys + + +if sys.version_info > (3, 9): + from typing import Annotated +else: # pragma: no cover + from typing_extensions import Annotated # noqa: F401 diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/py.typed b/falcon/lib/python3.10/site-packages/setuptools/_vendor/inflect/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA b/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..fe6ca5ad880ff2a77c81a94ed84ee60c742e298f --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA @@ -0,0 +1,85 @@ +Metadata-Version: 2.1 +Name: jaraco.collections +Version: 5.1.0 +Summary: Collection objects similar to those in stdlib by jaraco +Author-email: "Jason R. 
Coombs" +Project-URL: Source, https://github.com/jaraco/jaraco.collections +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: jaraco.text +Provides-Extra: check +Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check' +Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check' +Provides-Extra: cover +Requires-Dist: pytest-cov ; extra == 'cover' +Provides-Extra: doc +Requires-Dist: sphinx >=3.5 ; extra == 'doc' +Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc' +Requires-Dist: rst.linker >=1.9 ; extra == 'doc' +Requires-Dist: furo ; extra == 'doc' +Requires-Dist: sphinx-lint ; extra == 'doc' +Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc' +Provides-Extra: enabler +Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler' +Provides-Extra: test +Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test' +Provides-Extra: type +Requires-Dist: pytest-mypy ; extra == 'type' + +.. image:: https://img.shields.io/pypi/v/jaraco.collections.svg + :target: https://pypi.org/project/jaraco.collections + +.. image:: https://img.shields.io/pypi/pyversions/jaraco.collections.svg + +.. image:: https://github.com/jaraco/jaraco.collections/actions/workflows/main.yml/badge.svg + :target: https://github.com/jaraco/jaraco.collections/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json + :target: https://github.com/astral-sh/ruff + :alt: Ruff + +.. image:: https://readthedocs.org/projects/jaracocollections/badge/?version=latest + :target: https://jaracocollections.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2024-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/jaraco.collections + :target: https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=readme + +Models and classes to supplement the stdlib 'collections' module. + +See the docs, linked above, for descriptions and usage examples. + +Highlights include: + +- RangeMap: A mapping that accepts a range of values for keys. +- Projection: A subset over an existing mapping. +- KeyTransformingDict: Generalized mapping with keys transformed by a function. +- FoldedCaseKeyedDict: A dict whose string keys are case-insensitive. +- BijectiveMap: A map where keys map to values and values back to their keys. +- ItemsAsAttributes: A mapping mix-in exposing items as attributes. +- IdentityOverrideMap: A map whose keys map by default to themselves unless overridden. +- FrozenDict: A hashable, immutable map. +- Enumeration: An object whose keys are enumerated. +- Everything: A container that contains all things. +- Least, Greatest: Objects that are always less than or greater than any other. +- pop_all: Return all items from the mutable sequence and remove them from that sequence. +- DictStack: A stack of dicts, great for sharing scopes. +- WeightedLookup: A specialized RangeMap for selecting an item by weights. + +For Enterprise +============== + +Available as part of the Tidelift Subscription. 
+ +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL b/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..50e1e84e4a3fa44387f2798f8f465963bc3fc406 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (73.0.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE new file mode 100644 index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD new file mode 100644 index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/METADATA b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..1479c8694bfbd583a896dbe9bd33cdb6d7e7371e --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.3 +Name: packaging +Version: 24.2 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications `_. + +This library provides utilities that implement the interoperability +specifications which have clearly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. 
_`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. + +.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e3c6feefa22927866e3fd5575379ea972b432aaf --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.10.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__init__.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4c6ec97ec6961bcf184b6e0b2437b9924db0b9de --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__init__.py @@ -0,0 +1,11 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "2.0.1" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from ._parser import TOMLDecodeError, load, loads + +# Pretend this exception was created here. +TOMLDecodeError.__module__ = __name__ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/__init__.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c62465759beac01bac21e03547ed9760c7421bc3 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/__init__.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_re.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_re.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..960ad899e5ed8107f679344e48737cf0de22d014 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_re.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_types.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84387a32cb046d83c2d1427fc66cf2ca2394d79d Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/__pycache__/_types.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_parser.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..f1bb0aa19a556725aa2ae2b8cea95489c99a9078 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_parser.py @@ -0,0 +1,691 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. 
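+
+# (Editor's sketch, not part of the vendored file.)  Typical use goes through
+# the public entry points re-exported from __init__.py above; note that load()
+# requires a binary file object, as enforced below:
+#
+#   import tomli
+#   with open("pyproject.toml", "rb") as f:
+#       config = tomli.load(f)
+#   doc = tomli.loads('x = 1\n[tbl]\ny = "z"')  # {'x': 1, 'tbl': {'y': 'z'}}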
+ +from __future__ import annotations + +from collections.abc import Iterable +import string +from types import MappingProxyType +from typing import Any, BinaryIO, NamedTuple + +from ._re import ( + RE_DATETIME, + RE_LOCALTIME, + RE_NUMBER, + match_to_datetime, + match_to_localtime, + match_to_number, +) +from ._types import Key, ParseFloat, Pos + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. They are +# currently handled as separate cases in the parser functions. +ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") +HEXDIGIT_CHARS = frozenset(string.hexdigits) + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(__fp: BinaryIO, *, parse_float: ParseFloat = float) -> dict[str, Any]: + """Parse TOML from a binary file object.""" + b = __fp.read() + try: + s = b.decode() + except AttributeError: + raise TypeError( + "File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`" + ) from None + return loads(s, parse_float=parse_float) + + +def loads(__s: str, *, parse_float: ParseFloat = float) -> dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = __s.replace("\r\n", "\n") + pos = 0 + out = Output(NestedDict(), Flags()) + header: Key = () + parse_float = make_safe_parse_float(parse_float) + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. + try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, out, header, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: str | None = src[pos + 1] + except IndexError: + second_char = None + out.flags.finalize_pending() + if second_char == "[": + pos, header = create_list_rule(src, pos, out) + else: + pos, header = create_dict_rule(src, pos, out) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. 
Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return out.data.dict + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. + EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: dict[str, dict] = {} + self._pending_flags: set[tuple[Key, int]] = set() + + def add_pending(self, key: Key, flag: int) -> None: + self._pending_flags.add((key, flag)) + + def finalize_pending(self) -> None: + for key, flag in self._pending_flags: + self.set(key, flag, recursive=False) + self._pending_flags.clear() + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + if not isinstance(list_, list): + raise KeyError("An object other than list found behind this key") + list_.append({}) + else: + cont[last_key] = [{}] + + +class Output(NamedTuple): + data: NestedDict + flags: Flags + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: frozenset[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + + if not error_on.isdisjoint(src[pos:new_pos]): + while src[pos] not in error_on: + pos += 1 + raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: str | None = src[pos] + except 
IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 1 # Skip "[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot declare {key} twice") + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.get_or_create_nest(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]", pos): + raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + return pos + 1, key + + +def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if out.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... + out.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + out.data.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + + if not src.startswith("]]", pos): + raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + return pos + 2, key + + +def key_value_rule( + src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat +) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = header + key_parent + + relative_path_cont_keys = (header + key[:i] for i in range(1, len(key))) + for cont_key in relative_path_cont_keys: + # Check that dotted key syntax does not redefine an existing table + if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): + raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + # Containers in the relative path can't be opened with the table syntax or + # dotted key/value syntax in following table sections. 
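+        # (For example, given `fruit.apple = 1` inside table [a], a later
+        # [a.fruit] header must be rejected; the pending flag added below
+        # enforces exactly that.)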
+ out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) + + if out.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + ) + + try: + nest = out.data.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, "Cannot overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + out.flags.set(header + key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key: Key = (key_part,) + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char != ".": + return pos, key + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key += (key_part,) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: + try: + char: str | None = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src.startswith("]", pos): + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src.startswith("}", pos): + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Cannot overwrite a value") from None + if key_stem in nest: + raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos + 1, 
nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( + src: str, pos: Pos, *, multiline: bool = False +) -> tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + try: + char = src[pos] + except IndexError: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, "Unescaped '\\' in a string") + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> tuple[Pos, str]: + pos += 3 + if src.startswith("\n", pos): + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. 
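+    # (For example, the basic string """x""""" ends in a five-quote sequence
+    # and parses as x"" -- the two surplus quotes belong to the content.)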
+ if not src.startswith(delim, pos): + return pos, result + pos += 1 + if not src.startswith(delim, pos): + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") from None + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src.startswith('"""', pos): + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f"Illegal character {char!r}") + pos += 1 + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> tuple[Pos, Any]: + try: + char: str | None = src[pos] + except IndexError: + char = None + + # IMPORTANT: order conditions based on speed of checking and likelihood + + # Basic strings + if char == '"': + if src.startswith('"""', pos): + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src.startswith("'''", pos): + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src.startswith("true", pos): + return pos + 4, True + if char == "f": + if src.startswith("false", pos): + return pos + 5, False + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError as e: + raise suffixed_err(src, pos, "Invalid date or datetime") from e + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of dates and times. 
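+    # (For example, if the number regex were tried first, the date 1979-05-27
+    # would match as the integer 1979, leaving "-05-27" behind as an invalid
+    # statement.)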
+    number_match = RE_NUMBER.match(src, pos)
+    if number_match:
+        return number_match.end(), match_to_number(number_match, parse_float)
+
+    # Special floats
+    first_three = src[pos : pos + 3]
+    if first_three in {"inf", "nan"}:
+        return pos + 3, parse_float(first_three)
+    first_four = src[pos : pos + 4]
+    if first_four in {"-inf", "+inf", "-nan", "+nan"}:
+        return pos + 4, parse_float(first_four)
+
+    raise suffixed_err(src, pos, "Invalid value")
+
+
+def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
+    """Return a `TOMLDecodeError` where error message is suffixed with
+    coordinates in source."""
+
+    def coord_repr(src: str, pos: Pos) -> str:
+        if pos >= len(src):
+            return "end of document"
+        line = src.count("\n", 0, pos) + 1
+        if line == 1:
+            column = pos + 1
+        else:
+            column = pos - src.rindex("\n", 0, pos)
+        return f"line {line}, column {column}"
+
+    return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})")
+
+
+def is_unicode_scalar_value(codepoint: int) -> bool:
+    return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
+
+
+def make_safe_parse_float(parse_float: ParseFloat) -> ParseFloat:
+    """A decorator to make `parse_float` safe.
+
+    `parse_float` must not return dicts or lists, because these types
+    would be mixed with parsed TOML tables and arrays, thus confusing
+    the parser. The returned decorated callable raises `ValueError`
+    instead of returning illegal types.
+    """
+    # The default `float` callable never returns illegal types. Optimize it.
+    if parse_float is float:  # type: ignore[comparison-overlap]
+        return float
+
+    def safe_parse_float(float_str: str) -> Any:
+        float_value = parse_float(float_str)
+        if isinstance(float_value, (dict, list)):
+            raise ValueError("parse_float must not return dicts or lists")
+        return float_value
+
+    return safe_parse_float
diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_re.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_re.py
new file mode 100644
index 0000000000000000000000000000000000000000..994bb7493fd92865e6ab87c277ba5741b44c31a9
--- /dev/null
+++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_re.py
@@ -0,0 +1,107 @@
+# SPDX-License-Identifier: MIT
+# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
+# Licensed to PSF under a Contributor Agreement.
+
+from __future__ import annotations
+
+from datetime import date, datetime, time, timedelta, timezone, tzinfo
+from functools import lru_cache
+import re
+from typing import Any
+
+from ._types import ParseFloat
+
+# E.g.
+# - 00:32:00.999999
+# - 00:32:00
+_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?"
+
+RE_NUMBER = re.compile(
+    r"""
+0
+(?:
+    x[0-9A-Fa-f](?:_?[0-9A-Fa-f])*   # hex
+    |
+    b[01](?:_?[01])*                 # bin
+    |
+    o[0-7](?:_?[0-7])*               # oct
+)
+|
+[+-]?(?:0|[1-9](?:_?[0-9])*)         # dec, integer part
+(?P<floatpart>
+    (?:\.[0-9](?:_?[0-9])*)?         # optional fractional part
+    (?:[eE][+-]?[0-9](?:_?[0-9])*)?  # optional exponent part
+)
+""",
+    flags=re.VERBOSE,
+)
+RE_LOCALTIME = re.compile(_TIME_RE_STR)
+RE_DATETIME = re.compile(
+    rf"""
+([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])  # date, e.g. 1988-10-27
+(?:
+    [Tt ]
+    {_TIME_RE_STR}
+    (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?  # optional time offset
+)?
+""",
+    flags=re.VERBOSE,
+)
+
+
+def match_to_datetime(match: re.Match) -> datetime | date:
+    """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
+
+    Raises ValueError if the match does not correspond to a valid date
+    or datetime.
+ """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_sign_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + if offset_sign_str: + tz: tzinfo | None = cached_tz( + offset_hour_str, offset_minute_str, offset_sign_str + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +@lru_cache(maxsize=None) +def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: + sign = 1 if sign_str == "+" else -1 + return timezone( + timedelta( + hours=sign * int(hour_str), + minutes=sign * int(minute_str), + ) + ) + + +def match_to_localtime(match: re.Match) -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str.ljust(6, "0")) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: re.Match, parse_float: ParseFloat) -> Any: + if match.group("floatpart"): + return parse_float(match.group()) + return int(match.group(), 0) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_types.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_types.py new file mode 100644 index 0000000000000000000000000000000000000000..d949412e03b29d70592c7721fe747e5085c2e280 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/_types.py @@ -0,0 +1,10 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2021 Taneli Hukkinen +# Licensed to PSF under a Contributor Agreement. + +from typing import Any, Callable, Tuple + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] 
+Pos = int diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/py.typed b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..7632ecf77545c5e5501cb3fc5719df0761104ca2 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/tomli/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__init__.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6781cad094b29db0959df4fb0c29c0fa9d480c0c --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__init__.py @@ -0,0 +1,48 @@ +import os +from typing import Any + +from ._checkers import TypeCheckerCallable as TypeCheckerCallable +from ._checkers import TypeCheckLookupCallback as TypeCheckLookupCallback +from ._checkers import check_type_internal as check_type_internal +from ._checkers import checker_lookup_functions as checker_lookup_functions +from ._checkers import load_plugins as load_plugins +from ._config import CollectionCheckStrategy as CollectionCheckStrategy +from ._config import ForwardRefPolicy as ForwardRefPolicy +from ._config import TypeCheckConfiguration as TypeCheckConfiguration +from ._decorators import typechecked as typechecked +from ._decorators import typeguard_ignore as typeguard_ignore +from ._exceptions import InstrumentationWarning as InstrumentationWarning +from ._exceptions import TypeCheckError as TypeCheckError +from ._exceptions import TypeCheckWarning as TypeCheckWarning +from ._exceptions import TypeHintWarning as TypeHintWarning +from ._functions import TypeCheckFailCallback as TypeCheckFailCallback +from ._functions import check_type as check_type +from ._functions import warn_on_error as warn_on_error +from ._importhook import ImportHookManager as ImportHookManager +from ._importhook import TypeguardFinder as TypeguardFinder +from ._importhook import install_import_hook as install_import_hook +from ._memo import TypeCheckMemo as TypeCheckMemo +from ._suppression import suppress_type_checks as suppress_type_checks +from ._utils import Unset as Unset + +# Re-export imports so they look like they live directly in this package +for value in list(locals().values()): + if getattr(value, "__module__", "").startswith(f"{__name__}."): + value.__module__ = __name__ + + +config: TypeCheckConfiguration + + +def __getattr__(name: str) -> Any: + if name == "config": + from ._config import global_config + + return global_config + + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") + + +# Automatically load checker lookup functions unless explicitly disabled +if "TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD" not in os.environ: + load_plugins() diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_checkers.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_checkers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b110fdba92a17140a3ae1c6a654b3b9868d816f Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_checkers.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_config.cpython-310.pyc 
b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fab59702c0bc440f823d2e65221ad33c7c97b382 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_config.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_decorators.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_decorators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48459ca37680af09dd6a78b5bca60238ec698755 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_decorators.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_exceptions.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7dee4ed5244893bbf6350d5387319330722b436 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_exceptions.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_functions.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_functions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48ecc1b48ce0303f53e2fb19fb1d21bcb93ee6ee Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_functions.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_importhook.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_importhook.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8521a6984a20bb8ed3c2454dedf39595c7dd3b6 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_importhook.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_memo.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_memo.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da1153a6b492cbc0213d850f1ab4aee4c168afbf Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_memo.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_pytest_plugin.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b28189470f23c2ca12f444ef271aca8d97e62f39 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_pytest_plugin.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_transformer.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30ea64f1433333003db18e9f1c3f1496da33113e Binary files /dev/null 
and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_transformer.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_union_transformer.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_union_transformer.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a514fd3f65182efea5680b5cc65305ac7f1f2a26 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_union_transformer.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_utils.cpython-310.pyc b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ecdc36fa1a66324c2e6f80a2d900326708ab83a8 Binary files /dev/null and b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_utils.cpython-310.pyc differ diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_checkers.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_checkers.py new file mode 100644 index 0000000000000000000000000000000000000000..67dd5ad4dcbda70353b06fc08db962d1a41ba266 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_checkers.py @@ -0,0 +1,993 @@ +from __future__ import annotations + +import collections.abc +import inspect +import sys +import types +import typing +import warnings +from enum import Enum +from inspect import Parameter, isclass, isfunction +from io import BufferedIOBase, IOBase, RawIOBase, TextIOBase +from textwrap import indent +from typing import ( + IO, + AbstractSet, + Any, + BinaryIO, + Callable, + Dict, + ForwardRef, + List, + Mapping, + MutableMapping, + NewType, + Optional, + Sequence, + Set, + TextIO, + Tuple, + Type, + TypeVar, + Union, +) +from unittest.mock import Mock +from weakref import WeakKeyDictionary + +try: + import typing_extensions +except ImportError: + typing_extensions = None # type: ignore[assignment] + +# Must use this because typing.is_typeddict does not recognize +# TypedDict from typing_extensions, and as of version 4.12.0 +# typing_extensions.TypedDict is different from typing.TypedDict +# on all versions. +from typing_extensions import is_typeddict + +from ._config import ForwardRefPolicy +from ._exceptions import TypeCheckError, TypeHintWarning +from ._memo import TypeCheckMemo +from ._utils import evaluate_forwardref, get_stacklevel, get_type_name, qualified_name + +if sys.version_info >= (3, 11): + from typing import ( + Annotated, + NotRequired, + TypeAlias, + get_args, + get_origin, + ) + + SubclassableAny = Any +else: + from typing_extensions import ( + Annotated, + NotRequired, + TypeAlias, + get_args, + get_origin, + ) + from typing_extensions import Any as SubclassableAny + +if sys.version_info >= (3, 10): + from importlib.metadata import entry_points + from typing import ParamSpec +else: + from importlib_metadata import entry_points + from typing_extensions import ParamSpec + +TypeCheckerCallable: TypeAlias = Callable[ + [Any, Any, Tuple[Any, ...], TypeCheckMemo], Any +] +TypeCheckLookupCallback: TypeAlias = Callable[ + [Any, Tuple[Any, ...], Tuple[Any, ...]], Optional[TypeCheckerCallable] +] + +checker_lookup_functions: list[TypeCheckLookupCallback] = [] +generic_alias_types: tuple[type, ...] 
= (type(List), type(List[Any])) +if sys.version_info >= (3, 9): + generic_alias_types += (types.GenericAlias,) + +protocol_check_cache: WeakKeyDictionary[ + type[Any], dict[type[Any], TypeCheckError | None] +] = WeakKeyDictionary() + +# Sentinel +_missing = object() + +# Lifted from mypy.sharedparse +BINARY_MAGIC_METHODS = { + "__add__", + "__and__", + "__cmp__", + "__divmod__", + "__div__", + "__eq__", + "__floordiv__", + "__ge__", + "__gt__", + "__iadd__", + "__iand__", + "__idiv__", + "__ifloordiv__", + "__ilshift__", + "__imatmul__", + "__imod__", + "__imul__", + "__ior__", + "__ipow__", + "__irshift__", + "__isub__", + "__itruediv__", + "__ixor__", + "__le__", + "__lshift__", + "__lt__", + "__matmul__", + "__mod__", + "__mul__", + "__ne__", + "__or__", + "__pow__", + "__radd__", + "__rand__", + "__rdiv__", + "__rfloordiv__", + "__rlshift__", + "__rmatmul__", + "__rmod__", + "__rmul__", + "__ror__", + "__rpow__", + "__rrshift__", + "__rshift__", + "__rsub__", + "__rtruediv__", + "__rxor__", + "__sub__", + "__truediv__", + "__xor__", +} + + +def check_callable( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not callable(value): + raise TypeCheckError("is not callable") + + if args: + try: + signature = inspect.signature(value) + except (TypeError, ValueError): + return + + argument_types = args[0] + if isinstance(argument_types, list) and not any( + type(item) is ParamSpec for item in argument_types + ): + # The callable must not have keyword-only arguments without defaults + unfulfilled_kwonlyargs = [ + param.name + for param in signature.parameters.values() + if param.kind == Parameter.KEYWORD_ONLY + and param.default == Parameter.empty + ] + if unfulfilled_kwonlyargs: + raise TypeCheckError( + f"has mandatory keyword-only arguments in its declaration: " + f'{", ".join(unfulfilled_kwonlyargs)}' + ) + + num_positional_args = num_mandatory_pos_args = 0 + has_varargs = False + for param in signature.parameters.values(): + if param.kind in ( + Parameter.POSITIONAL_ONLY, + Parameter.POSITIONAL_OR_KEYWORD, + ): + num_positional_args += 1 + if param.default is Parameter.empty: + num_mandatory_pos_args += 1 + elif param.kind == Parameter.VAR_POSITIONAL: + has_varargs = True + + if num_mandatory_pos_args > len(argument_types): + raise TypeCheckError( + f"has too many mandatory positional arguments in its declaration; " + f"expected {len(argument_types)} but {num_mandatory_pos_args} " + f"mandatory positional argument(s) declared" + ) + elif not has_varargs and num_positional_args < len(argument_types): + raise TypeCheckError( + f"has too few arguments in its declaration; expected " + f"{len(argument_types)} but {num_positional_args} argument(s) " + f"declared" + ) + + +def check_mapping( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if origin_type is Dict or origin_type is dict: + if not isinstance(value, dict): + raise TypeCheckError("is not a dict") + if origin_type is MutableMapping or origin_type is collections.abc.MutableMapping: + if not isinstance(value, collections.abc.MutableMapping): + raise TypeCheckError("is not a mutable mapping") + elif not isinstance(value, collections.abc.Mapping): + raise TypeCheckError("is not a mapping") + + if args: + key_type, value_type = args + if key_type is not Any or value_type is not Any: + samples = memo.config.collection_check_strategy.iterate_samples( + value.items() + ) + for k, v in samples: + try: + check_type_internal(k, key_type, memo) + 
except TypeCheckError as exc: + exc.append_path_element(f"key {k!r}") + raise + + try: + check_type_internal(v, value_type, memo) + except TypeCheckError as exc: + exc.append_path_element(f"value of key {k!r}") + raise + + +def check_typed_dict( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, dict): + raise TypeCheckError("is not a dict") + + declared_keys = frozenset(origin_type.__annotations__) + if hasattr(origin_type, "__required_keys__"): + required_keys = set(origin_type.__required_keys__) + else: # py3.8 and lower + required_keys = set(declared_keys) if origin_type.__total__ else set() + + existing_keys = set(value) + extra_keys = existing_keys - declared_keys + if extra_keys: + keys_formatted = ", ".join(f'"{key}"' for key in sorted(extra_keys, key=repr)) + raise TypeCheckError(f"has unexpected extra key(s): {keys_formatted}") + + # Detect NotRequired fields which are hidden by get_type_hints() + type_hints: dict[str, type] = {} + for key, annotation in origin_type.__annotations__.items(): + if isinstance(annotation, ForwardRef): + annotation = evaluate_forwardref(annotation, memo) + if get_origin(annotation) is NotRequired: + required_keys.discard(key) + annotation = get_args(annotation)[0] + + type_hints[key] = annotation + + missing_keys = required_keys - existing_keys + if missing_keys: + keys_formatted = ", ".join(f'"{key}"' for key in sorted(missing_keys, key=repr)) + raise TypeCheckError(f"is missing required key(s): {keys_formatted}") + + for key, argtype in type_hints.items(): + argvalue = value.get(key, _missing) + if argvalue is not _missing: + try: + check_type_internal(argvalue, argtype, memo) + except TypeCheckError as exc: + exc.append_path_element(f"value of key {key!r}") + raise + + +def check_list( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, list): + raise TypeCheckError("is not a list") + + if args and args != (Any,): + samples = memo.config.collection_check_strategy.iterate_samples(value) + for i, v in enumerate(samples): + try: + check_type_internal(v, args[0], memo) + except TypeCheckError as exc: + exc.append_path_element(f"item {i}") + raise + + +def check_sequence( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, collections.abc.Sequence): + raise TypeCheckError("is not a sequence") + + if args and args != (Any,): + samples = memo.config.collection_check_strategy.iterate_samples(value) + for i, v in enumerate(samples): + try: + check_type_internal(v, args[0], memo) + except TypeCheckError as exc: + exc.append_path_element(f"item {i}") + raise + + +def check_set( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if origin_type is frozenset: + if not isinstance(value, frozenset): + raise TypeCheckError("is not a frozenset") + elif not isinstance(value, AbstractSet): + raise TypeCheckError("is not a set") + + if args and args != (Any,): + samples = memo.config.collection_check_strategy.iterate_samples(value) + for v in samples: + try: + check_type_internal(v, args[0], memo) + except TypeCheckError as exc: + exc.append_path_element(f"[{v}]") + raise + + +def check_tuple( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + # Specialized check for NamedTuples + if field_types := getattr(origin_type, "__annotations__", None): + if not isinstance(value, 
origin_type): + raise TypeCheckError( + f"is not a named tuple of type {qualified_name(origin_type)}" + ) + + for name, field_type in field_types.items(): + try: + check_type_internal(getattr(value, name), field_type, memo) + except TypeCheckError as exc: + exc.append_path_element(f"attribute {name!r}") + raise + + return + elif not isinstance(value, tuple): + raise TypeCheckError("is not a tuple") + + if args: + use_ellipsis = args[-1] is Ellipsis + tuple_params = args[: -1 if use_ellipsis else None] + else: + # Unparametrized Tuple or plain tuple + return + + if use_ellipsis: + element_type = tuple_params[0] + samples = memo.config.collection_check_strategy.iterate_samples(value) + for i, element in enumerate(samples): + try: + check_type_internal(element, element_type, memo) + except TypeCheckError as exc: + exc.append_path_element(f"item {i}") + raise + elif tuple_params == ((),): + if value != (): + raise TypeCheckError("is not an empty tuple") + else: + if len(value) != len(tuple_params): + raise TypeCheckError( + f"has wrong number of elements (expected {len(tuple_params)}, got " + f"{len(value)} instead)" + ) + + for i, (element, element_type) in enumerate(zip(value, tuple_params)): + try: + check_type_internal(element, element_type, memo) + except TypeCheckError as exc: + exc.append_path_element(f"item {i}") + raise + + +def check_union( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + errors: dict[str, TypeCheckError] = {} + try: + for type_ in args: + try: + check_type_internal(value, type_, memo) + return + except TypeCheckError as exc: + errors[get_type_name(type_)] = exc + + formatted_errors = indent( + "\n".join(f"{key}: {error}" for key, error in errors.items()), " " + ) + finally: + del errors # avoid creating ref cycle + raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}") + + +def check_uniontype( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + errors: dict[str, TypeCheckError] = {} + for type_ in args: + try: + check_type_internal(value, type_, memo) + return + except TypeCheckError as exc: + errors[get_type_name(type_)] = exc + + formatted_errors = indent( + "\n".join(f"{key}: {error}" for key, error in errors.items()), " " + ) + raise TypeCheckError(f"did not match any element in the union:\n{formatted_errors}") + + +def check_class( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isclass(value) and not isinstance(value, generic_alias_types): + raise TypeCheckError("is not a class") + + if not args: + return + + if isinstance(args[0], ForwardRef): + expected_class = evaluate_forwardref(args[0], memo) + else: + expected_class = args[0] + + if expected_class is Any: + return + elif getattr(expected_class, "_is_protocol", False): + check_protocol(value, expected_class, (), memo) + elif isinstance(expected_class, TypeVar): + check_typevar(value, expected_class, (), memo, subclass_check=True) + elif get_origin(expected_class) is Union: + errors: dict[str, TypeCheckError] = {} + for arg in get_args(expected_class): + if arg is Any: + return + + try: + check_class(value, type, (arg,), memo) + return + except TypeCheckError as exc: + errors[get_type_name(arg)] = exc + else: + formatted_errors = indent( + "\n".join(f"{key}: {error}" for key, error in errors.items()), " " + ) + raise TypeCheckError( + f"did not match any element in the union:\n{formatted_errors}" + ) + elif not issubclass(value, 
expected_class): # type: ignore[arg-type] + raise TypeCheckError(f"is not a subclass of {qualified_name(expected_class)}") + + +def check_newtype( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + check_type_internal(value, origin_type.__supertype__, memo) + + +def check_instance( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, origin_type): + raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}") + + +def check_typevar( + value: Any, + origin_type: TypeVar, + args: tuple[Any, ...], + memo: TypeCheckMemo, + *, + subclass_check: bool = False, +) -> None: + if origin_type.__bound__ is not None: + annotation = ( + Type[origin_type.__bound__] if subclass_check else origin_type.__bound__ + ) + check_type_internal(value, annotation, memo) + elif origin_type.__constraints__: + for constraint in origin_type.__constraints__: + annotation = Type[constraint] if subclass_check else constraint + try: + check_type_internal(value, annotation, memo) + except TypeCheckError: + pass + else: + break + else: + formatted_constraints = ", ".join( + get_type_name(constraint) for constraint in origin_type.__constraints__ + ) + raise TypeCheckError( + f"does not match any of the constraints " f"({formatted_constraints})" + ) + + +if typing_extensions is None: + + def _is_literal_type(typ: object) -> bool: + return typ is typing.Literal + +else: + + def _is_literal_type(typ: object) -> bool: + return typ is typing.Literal or typ is typing_extensions.Literal + + +def check_literal( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + def get_literal_args(literal_args: tuple[Any, ...]) -> tuple[Any, ...]: + retval: list[Any] = [] + for arg in literal_args: + if _is_literal_type(get_origin(arg)): + retval.extend(get_literal_args(arg.__args__)) + elif arg is None or isinstance(arg, (int, str, bytes, bool, Enum)): + retval.append(arg) + else: + raise TypeError( + f"Illegal literal value: {arg}" + ) # TypeError here is deliberate + + return tuple(retval) + + final_args = tuple(get_literal_args(args)) + try: + index = final_args.index(value) + except ValueError: + pass + else: + if type(final_args[index]) is type(value): + return + + formatted_args = ", ".join(repr(arg) for arg in final_args) + raise TypeCheckError(f"is not any of ({formatted_args})") from None + + +def check_literal_string( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + check_type_internal(value, str, memo) + + +def check_typeguard( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + check_type_internal(value, bool, memo) + + +def check_none( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if value is not None: + raise TypeCheckError("is not None") + + +def check_number( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if origin_type is complex and not isinstance(value, (complex, float, int)): + raise TypeCheckError("is neither complex, float or int") + elif origin_type is float and not isinstance(value, (float, int)): + raise TypeCheckError("is neither float or int") + + +def check_io( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if origin_type is TextIO or (origin_type is IO and args == (str,)): + if not isinstance(value, TextIOBase): + 
raise TypeCheckError("is not a text based I/O object") + elif origin_type is BinaryIO or (origin_type is IO and args == (bytes,)): + if not isinstance(value, (RawIOBase, BufferedIOBase)): + raise TypeCheckError("is not a binary I/O object") + elif not isinstance(value, IOBase): + raise TypeCheckError("is not an I/O object") + + +def check_protocol( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + subject: type[Any] = value if isclass(value) else type(value) + + if subject in protocol_check_cache: + result_map = protocol_check_cache[subject] + if origin_type in result_map: + if exc := result_map[origin_type]: + raise exc + else: + return + + # Collect a set of methods and non-method attributes present in the protocol + ignored_attrs = set(dir(typing.Protocol)) | { + "__annotations__", + "__non_callable_proto_members__", + } + expected_methods: dict[str, tuple[Any, Any]] = {} + expected_noncallable_members: dict[str, Any] = {} + for attrname in dir(origin_type): + # Skip attributes present in typing.Protocol + if attrname in ignored_attrs: + continue + + member = getattr(origin_type, attrname) + if callable(member): + signature = inspect.signature(member) + argtypes = [ + (p.annotation if p.annotation is not Parameter.empty else Any) + for p in signature.parameters.values() + if p.kind is not Parameter.KEYWORD_ONLY + ] or Ellipsis + return_annotation = ( + signature.return_annotation + if signature.return_annotation is not Parameter.empty + else Any + ) + expected_methods[attrname] = argtypes, return_annotation + else: + expected_noncallable_members[attrname] = member + + for attrname, annotation in typing.get_type_hints(origin_type).items(): + expected_noncallable_members[attrname] = annotation + + subject_annotations = typing.get_type_hints(subject) + + # Check that all required methods are present and their signatures are compatible + result_map = protocol_check_cache.setdefault(subject, {}) + try: + for attrname, callable_args in expected_methods.items(): + try: + method = getattr(subject, attrname) + except AttributeError: + if attrname in subject_annotations: + raise TypeCheckError( + f"is not compatible with the {origin_type.__qualname__} protocol " + f"because its {attrname!r} attribute is not a method" + ) from None + else: + raise TypeCheckError( + f"is not compatible with the {origin_type.__qualname__} protocol " + f"because it has no method named {attrname!r}" + ) from None + + if not callable(method): + raise TypeCheckError( + f"is not compatible with the {origin_type.__qualname__} protocol " + f"because its {attrname!r} attribute is not a callable" + ) + + # TODO: raise exception on added keyword-only arguments without defaults + try: + check_callable(method, Callable, callable_args, memo) + except TypeCheckError as exc: + raise TypeCheckError( + f"is not compatible with the {origin_type.__qualname__} protocol " + f"because its {attrname!r} method {exc}" + ) from None + + # Check that all required non-callable members are present + for attrname in expected_noncallable_members: + # TODO: implement assignability checks for non-callable members + if attrname not in subject_annotations and not hasattr(subject, attrname): + raise TypeCheckError( + f"is not compatible with the {origin_type.__qualname__} protocol " + f"because it has no attribute named {attrname!r}" + ) + except TypeCheckError as exc: + result_map[origin_type] = exc + raise + else: + result_map[origin_type] = None + + +def check_byteslike( + value: Any, + origin_type: 
Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, (bytearray, bytes, memoryview)): + raise TypeCheckError("is not bytes-like") + + +def check_self( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if memo.self_type is None: + raise TypeCheckError("cannot be checked against Self outside of a method call") + + if isclass(value): + if not issubclass(value, memo.self_type): + raise TypeCheckError( + f"is not an instance of the self type " + f"({qualified_name(memo.self_type)})" + ) + elif not isinstance(value, memo.self_type): + raise TypeCheckError( + f"is not an instance of the self type ({qualified_name(memo.self_type)})" + ) + + +def check_paramspec( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + pass # No-op for now + + +def check_instanceof( + value: Any, + origin_type: Any, + args: tuple[Any, ...], + memo: TypeCheckMemo, +) -> None: + if not isinstance(value, origin_type): + raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}") + + +def check_type_internal( + value: Any, + annotation: Any, + memo: TypeCheckMemo, +) -> None: + """ + Check that the given object is compatible with the given type annotation. + + This function should only be used by type checker callables. Applications should use + :func:`~.check_type` instead. + + :param value: the value to check + :param annotation: the type annotation to check against + :param memo: a memo object containing configuration and information necessary for + looking up forward references + """ + + if isinstance(annotation, ForwardRef): + try: + annotation = evaluate_forwardref(annotation, memo) + except NameError: + if memo.config.forward_ref_policy is ForwardRefPolicy.ERROR: + raise + elif memo.config.forward_ref_policy is ForwardRefPolicy.WARN: + warnings.warn( + f"Cannot resolve forward reference {annotation.__forward_arg__!r}", + TypeHintWarning, + stacklevel=get_stacklevel(), + ) + + return + + if annotation is Any or annotation is SubclassableAny or isinstance(value, Mock): + return + + # Skip type checks if value is an instance of a class that inherits from Any + if not isclass(value) and SubclassableAny in type(value).__bases__: + return + + extras: tuple[Any, ...] 
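+    # (For example, `Annotated[int, "meta"]` is unwrapped below so the check
+    # runs against `int`, while ("meta",) reaches checker lookup functions via
+    # `extras`.)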
+ origin_type = get_origin(annotation) + if origin_type is Annotated: + annotation, *extras_ = get_args(annotation) + extras = tuple(extras_) + origin_type = get_origin(annotation) + else: + extras = () + + if origin_type is not None: + args = get_args(annotation) + + # Compatibility hack to distinguish between unparametrized and empty tuple + # (tuple[()]), necessary due to https://github.com/python/cpython/issues/91137 + if origin_type in (tuple, Tuple) and annotation is not Tuple and not args: + args = ((),) + else: + origin_type = annotation + args = () + + for lookup_func in checker_lookup_functions: + checker = lookup_func(origin_type, args, extras) + if checker: + checker(value, origin_type, args, memo) + return + + if isclass(origin_type): + if not isinstance(value, origin_type): + raise TypeCheckError(f"is not an instance of {qualified_name(origin_type)}") + elif type(origin_type) is str: # noqa: E721 + warnings.warn( + f"Skipping type check against {origin_type!r}; this looks like a " + f"string-form forward reference imported from another module", + TypeHintWarning, + stacklevel=get_stacklevel(), + ) + + +# Equality checks are applied to these +origin_type_checkers = { + bytes: check_byteslike, + AbstractSet: check_set, + BinaryIO: check_io, + Callable: check_callable, + collections.abc.Callable: check_callable, + complex: check_number, + dict: check_mapping, + Dict: check_mapping, + float: check_number, + frozenset: check_set, + IO: check_io, + list: check_list, + List: check_list, + typing.Literal: check_literal, + Mapping: check_mapping, + MutableMapping: check_mapping, + None: check_none, + collections.abc.Mapping: check_mapping, + collections.abc.MutableMapping: check_mapping, + Sequence: check_sequence, + collections.abc.Sequence: check_sequence, + collections.abc.Set: check_set, + set: check_set, + Set: check_set, + TextIO: check_io, + tuple: check_tuple, + Tuple: check_tuple, + type: check_class, + Type: check_class, + Union: check_union, +} +if sys.version_info >= (3, 10): + origin_type_checkers[types.UnionType] = check_uniontype + origin_type_checkers[typing.TypeGuard] = check_typeguard +if sys.version_info >= (3, 11): + origin_type_checkers.update( + {typing.LiteralString: check_literal_string, typing.Self: check_self} + ) +if typing_extensions is not None: + # On some Python versions, these may simply be re-exports from typing, + # but exactly which Python versions is subject to change, + # so it's best to err on the safe side + # and update the dictionary on all Python versions + # if typing_extensions is installed + origin_type_checkers[typing_extensions.Literal] = check_literal + origin_type_checkers[typing_extensions.LiteralString] = check_literal_string + origin_type_checkers[typing_extensions.Self] = check_self + origin_type_checkers[typing_extensions.TypeGuard] = check_typeguard + + +def builtin_checker_lookup( + origin_type: Any, args: tuple[Any, ...], extras: tuple[Any, ...] 
+) -> TypeCheckerCallable | None: + checker = origin_type_checkers.get(origin_type) + if checker is not None: + return checker + elif is_typeddict(origin_type): + return check_typed_dict + elif isclass(origin_type) and issubclass( + origin_type, + Tuple, # type: ignore[arg-type] + ): + # NamedTuple + return check_tuple + elif getattr(origin_type, "_is_protocol", False): + return check_protocol + elif isinstance(origin_type, ParamSpec): + return check_paramspec + elif isinstance(origin_type, TypeVar): + return check_typevar + elif origin_type.__class__ is NewType: + # typing.NewType on Python 3.10+ + return check_newtype + elif ( + isfunction(origin_type) + and getattr(origin_type, "__module__", None) == "typing" + and getattr(origin_type, "__qualname__", "").startswith("NewType.") + and hasattr(origin_type, "__supertype__") + ): + # typing.NewType on Python 3.9 and below + return check_newtype + + return None + + +checker_lookup_functions.append(builtin_checker_lookup) + + +def load_plugins() -> None: + """ + Load all type checker lookup functions from entry points. + + All entry points from the ``typeguard.checker_lookup`` group are loaded, and the + returned lookup functions are added to :data:`typeguard.checker_lookup_functions`. + + .. note:: This function is called implicitly on import, unless the + ``TYPEGUARD_DISABLE_PLUGIN_AUTOLOAD`` environment variable is present. + """ + + for ep in entry_points(group="typeguard.checker_lookup"): + try: + plugin = ep.load() + except Exception as exc: + warnings.warn( + f"Failed to load plugin {ep.name!r}: " f"{qualified_name(exc)}: {exc}", + stacklevel=2, + ) + continue + + if not callable(plugin): + warnings.warn( + f"Plugin {ep} returned a non-callable object: {plugin!r}", stacklevel=2 + ) + continue + + checker_lookup_functions.insert(0, plugin) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_config.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..36efad53965488e7e003c265262db8d5d902a930 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_config.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +from collections.abc import Iterable +from dataclasses import dataclass +from enum import Enum, auto +from typing import TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from ._functions import TypeCheckFailCallback + +T = TypeVar("T") + + +class ForwardRefPolicy(Enum): + """ + Defines how unresolved forward references are handled. + + Members: + + * ``ERROR``: propagate the :exc:`NameError` when the forward reference lookup fails + * ``WARN``: emit a :class:`~.TypeHintWarning` if the forward reference lookup fails + * ``IGNORE``: silently skip checks for unresolveable forward references + """ + + ERROR = auto() + WARN = auto() + IGNORE = auto() + + +class CollectionCheckStrategy(Enum): + """ + Specifies how thoroughly the contents of collections are type checked. 
+
+    This has an effect on the following built-in checkers:
+
+    * ``AbstractSet``
+    * ``Dict``
+    * ``List``
+    * ``Mapping``
+    * ``Set``
+    * ``Tuple[<type>, ...]`` (arbitrarily sized tuples)
+
+    Members:
+
+    * ``FIRST_ITEM``: check only the first item
+    * ``ALL_ITEMS``: check all items
+    """
+
+    FIRST_ITEM = auto()
+    ALL_ITEMS = auto()
+
+    def iterate_samples(self, collection: Iterable[T]) -> Iterable[T]:
+        if self is CollectionCheckStrategy.FIRST_ITEM:
+            try:
+                return [next(iter(collection))]
+            except StopIteration:
+                return ()
+        else:
+            return collection
+
+
+@dataclass
+class TypeCheckConfiguration:
+    """
+    You can change Typeguard's behavior with these settings.
+
+    .. attribute:: typecheck_fail_callback
+       :type: Callable[[TypeCheckError, TypeCheckMemo], Any]
+
+       Callable that is called when type checking fails.
+
+       Default: ``None`` (the :exc:`~.TypeCheckError` is raised directly)
+
+    .. attribute:: forward_ref_policy
+       :type: ForwardRefPolicy
+
+       Specifies what to do when a forward reference fails to resolve.
+
+       Default: ``WARN``
+
+    .. attribute:: collection_check_strategy
+       :type: CollectionCheckStrategy
+
+       Specifies how thoroughly the contents of collections (list, dict, etc.) are
+       type checked.
+
+       Default: ``FIRST_ITEM``
+
+    .. attribute:: debug_instrumentation
+       :type: bool
+
+       If set to ``True``, the code of modules or functions instrumented by typeguard
+       is printed to ``sys.stderr`` after the instrumentation is done
+
+       Requires Python 3.9 or newer.
+
+       Default: ``False``
+    """
+
+    forward_ref_policy: ForwardRefPolicy = ForwardRefPolicy.WARN
+    typecheck_fail_callback: TypeCheckFailCallback | None = None
+    collection_check_strategy: CollectionCheckStrategy = (
+        CollectionCheckStrategy.FIRST_ITEM
+    )
+    debug_instrumentation: bool = False
+
+
+global_config = TypeCheckConfiguration()
diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_decorators.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_decorators.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf3253351fe3b07af0b79f7c1ffac7b2d6127a19
--- /dev/null
+++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_decorators.py
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import ast
+import inspect
+import sys
+from collections.abc import Sequence
+from functools import partial
+from inspect import isclass, isfunction
+from types import CodeType, FrameType, FunctionType
+from typing import TYPE_CHECKING, Any, Callable, ForwardRef, TypeVar, cast, overload
+from warnings import warn
+
+from ._config import CollectionCheckStrategy, ForwardRefPolicy, global_config
+from ._exceptions import InstrumentationWarning
+from ._functions import TypeCheckFailCallback
+from ._transformer import TypeguardTransformer
+from ._utils import Unset, function_name, get_stacklevel, is_method_of, unset
+
+if TYPE_CHECKING:
+    from typeshed.stdlib.types import _Cell
+
+    _F = TypeVar("_F")
+
+    def typeguard_ignore(f: _F) -> _F:
+        """This decorator is a noop during static type-checking."""
+        return f
+
+else:
+    from typing import no_type_check as typeguard_ignore  # noqa: F401
+
+T_CallableOrType = TypeVar("T_CallableOrType", bound=Callable[..., Any])
+
+
+def make_cell(value: object) -> _Cell:
+    return (lambda: value).__closure__[0]  # type: ignore[index]
+
+
+def find_target_function(
+    new_code: CodeType, target_path: Sequence[str], firstlineno: int
+) -> CodeType | None:
+    target_name = target_path[0]
+    for const in new_code.co_consts:
+        if isinstance(const, CodeType):
+            if const.co_name == target_name:
+                if const.co_firstlineno == firstlineno:
+                    return const
+            elif len(target_path) > 1:
+                target_code = find_target_function(
+                    const, target_path[1:], firstlineno
+                )
+                if target_code:
+                    return target_code
+
+    return None
+
+
+def instrument(f: T_CallableOrType) -> FunctionType | str:
+    if not getattr(f, "__code__", None):
+        return "no code associated"
+    elif not getattr(f, "__module__", None):
+        return "__module__ attribute is not set"
+    elif f.__code__.co_filename == "<stdin>":
+        return "cannot instrument functions defined in a REPL"
+    elif hasattr(f, "__wrapped__"):
+        return (
+            "@typechecked only supports instrumenting functions wrapped with "
+            "@classmethod, @staticmethod or @property"
+        )
+
+    target_path = [item for item in f.__qualname__.split(".") if item != "<locals>"]
+    module_source = inspect.getsource(sys.modules[f.__module__])
+    module_ast = ast.parse(module_source)
+    instrumentor = TypeguardTransformer(target_path, f.__code__.co_firstlineno)
+    instrumentor.visit(module_ast)
+
+    if not instrumentor.target_node or instrumentor.target_lineno is None:
+        return "instrumentor did not find the target function"
+
+    module_code = compile(module_ast, f.__code__.co_filename, "exec", dont_inherit=True)
+    new_code = find_target_function(
+        module_code, target_path, instrumentor.target_lineno
+    )
+    if not new_code:
+        return "cannot find the target function in the AST"
+
+    if global_config.debug_instrumentation and sys.version_info >= (3, 9):
+        # Find the matching AST node, then unparse it to source and print to stderr
+        print(
+            f"Source code of {f.__qualname__}() after instrumentation:"
+            "\n----------------------------------------------",
+            file=sys.stderr,
+        )
+        print(ast.unparse(instrumentor.target_node), file=sys.stderr)
+        print(
+            "----------------------------------------------",
+            file=sys.stderr,
+        )
+
+    closure = f.__closure__
+    if new_code.co_freevars != f.__code__.co_freevars:
+        # Create a new closure and find values for the new free variables
+        frame = cast(FrameType, inspect.currentframe())
+        frame = cast(FrameType, frame.f_back)
+        frame_locals = cast(FrameType, frame.f_back).f_locals
+        cells: list[_Cell] = []
+        for key in new_code.co_freevars:
+            if key in instrumentor.names_used_in_annotations:
+                # Find the value and make a new cell from it
+                value = frame_locals.get(key) or ForwardRef(key)
+                cells.append(make_cell(value))
+            else:
+                # Reuse the cell from the existing closure
+                assert f.__closure__
+                cells.append(f.__closure__[f.__code__.co_freevars.index(key)])
+
+        closure = tuple(cells)
+
+    new_function = FunctionType(new_code, f.__globals__, f.__name__, closure=closure)
+    new_function.__module__ = f.__module__
+    new_function.__name__ = f.__name__
+    new_function.__qualname__ = f.__qualname__
+    new_function.__annotations__ = f.__annotations__
+    new_function.__doc__ = f.__doc__
+    new_function.__defaults__ = f.__defaults__
+    new_function.__kwdefaults__ = f.__kwdefaults__
+    return new_function
+
+
+@overload
+def typechecked(
+    *,
+    forward_ref_policy: ForwardRefPolicy | Unset = unset,
+    typecheck_fail_callback: TypeCheckFailCallback | Unset = unset,
+    collection_check_strategy: CollectionCheckStrategy | Unset = unset,
+    debug_instrumentation: bool | Unset = unset,
+) -> Callable[[T_CallableOrType], T_CallableOrType]: ...
+
+
+@overload
+def typechecked(target: T_CallableOrType) -> T_CallableOrType: ...
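+# Illustrative usage (a sketch, not part of the vendored module): decorating a
+# function makes calls whose arguments do not match the annotations raise
+# TypeCheckError:
+#
+#     @typechecked
+#     def greet(name: str) -> str:
+#         return "Hello, " + name
+#
+#     greet(123)  # raises TypeCheckError at call time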
+ + +def typechecked( + target: T_CallableOrType | None = None, + *, + forward_ref_policy: ForwardRefPolicy | Unset = unset, + typecheck_fail_callback: TypeCheckFailCallback | Unset = unset, + collection_check_strategy: CollectionCheckStrategy | Unset = unset, + debug_instrumentation: bool | Unset = unset, +) -> Any: + """ + Instrument the target function to perform run-time type checking. + + This decorator recompiles the target function, injecting code to type check + arguments, return values, yield values (excluding ``yield from``) and assignments to + annotated local variables. + + This can also be used as a class decorator. This will instrument all type annotated + methods, including :func:`@classmethod <classmethod>`, + :func:`@staticmethod <staticmethod>`, and :class:`@property <property>` decorated + methods in the class. + + .. note:: When Python is run in optimized mode (``-O`` or ``-OO``), this decorator + is a no-op. This is a feature meant for selectively introducing type checking + into a code base where the checks aren't meant to be run in production. + + :param target: the function or class to enable type checking for + :param forward_ref_policy: override for + :attr:`.TypeCheckConfiguration.forward_ref_policy` + :param typecheck_fail_callback: override for + :attr:`.TypeCheckConfiguration.typecheck_fail_callback` + :param collection_check_strategy: override for + :attr:`.TypeCheckConfiguration.collection_check_strategy` + :param debug_instrumentation: override for + :attr:`.TypeCheckConfiguration.debug_instrumentation` + + """ + if target is None: + return partial( + typechecked, + forward_ref_policy=forward_ref_policy, + typecheck_fail_callback=typecheck_fail_callback, + collection_check_strategy=collection_check_strategy, + debug_instrumentation=debug_instrumentation, + ) + + if not __debug__: + return target + + if isclass(target): + for key, attr in target.__dict__.items(): + if is_method_of(attr, target): + retval = instrument(attr) + if isfunction(retval): + setattr(target, key, retval) + elif isinstance(attr, (classmethod, staticmethod)): + if is_method_of(attr.__func__, target): + retval = instrument(attr.__func__) + if isfunction(retval): + wrapper = attr.__class__(retval) + setattr(target, key, wrapper) + elif isinstance(attr, property): + kwargs: dict[str, Any] = dict(doc=attr.__doc__) + for name in ("fset", "fget", "fdel"): + property_func = kwargs[name] = getattr(attr, name) + if is_method_of(property_func, target): + retval = instrument(property_func) + if isfunction(retval): + kwargs[name] = retval + + setattr(target, key, attr.__class__(**kwargs)) + + return target + + # Find either the first Python wrapper or the actual function + wrapper_class: ( + type[classmethod[Any, Any, Any]] | type[staticmethod[Any, Any]] | None + ) = None + if isinstance(target, (classmethod, staticmethod)): + wrapper_class = target.__class__ + target = target.__func__ + + retval = instrument(target) + if isinstance(retval, str): + warn( + f"{retval} -- not typechecking {function_name(target)}", + InstrumentationWarning, + stacklevel=get_stacklevel(), + ) + return target + + if wrapper_class is None: + return retval + else: + return wrapper_class(retval) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_exceptions.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..625437a64990b5fae898c9c98fe9a7bd6be90cfb --- /dev/null +++ 
b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_exceptions.py @@ -0,0 +1,42 @@ +from collections import deque +from typing import Deque + + +class TypeHintWarning(UserWarning): + """ + A warning that is emitted when a type hint in string form could not be resolved to + an actual type. + """ + + +class TypeCheckWarning(UserWarning): + """Emitted by typeguard's type checkers when a type mismatch is detected.""" + + def __init__(self, message: str): + super().__init__(message) + + +class InstrumentationWarning(UserWarning): + """Emitted when there's a problem with instrumenting a function for type checks.""" + + def __init__(self, message: str): + super().__init__(message) + + +class TypeCheckError(Exception): + """ + Raised by typeguard's type checkers when a type mismatch is detected. + """ + + def __init__(self, message: str): + super().__init__(message) + self._path: Deque[str] = deque() + + def append_path_element(self, element: str) -> None: + self._path.append(element) + + def __str__(self) -> str: + if self._path: + return " of ".join(self._path) + " " + str(self.args[0]) + else: + return str(self.args[0]) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_functions.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..28497856a325e1b00dbe4aadfd2ed1c6082cd23a --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_functions.py @@ -0,0 +1,308 @@ +from __future__ import annotations + +import sys +import warnings +from typing import Any, Callable, NoReturn, TypeVar, Union, overload + +from . import _suppression +from ._checkers import BINARY_MAGIC_METHODS, check_type_internal +from ._config import ( + CollectionCheckStrategy, + ForwardRefPolicy, + TypeCheckConfiguration, +) +from ._exceptions import TypeCheckError, TypeCheckWarning +from ._memo import TypeCheckMemo +from ._utils import get_stacklevel, qualified_name + +if sys.version_info >= (3, 11): + from typing import Literal, Never, TypeAlias +else: + from typing_extensions import Literal, Never, TypeAlias + +T = TypeVar("T") +TypeCheckFailCallback: TypeAlias = Callable[[TypeCheckError, TypeCheckMemo], Any] + + +@overload +def check_type( + value: object, + expected_type: type[T], + *, + forward_ref_policy: ForwardRefPolicy = ..., + typecheck_fail_callback: TypeCheckFailCallback | None = ..., + collection_check_strategy: CollectionCheckStrategy = ..., +) -> T: ... + + +@overload +def check_type( + value: object, + expected_type: Any, + *, + forward_ref_policy: ForwardRefPolicy = ..., + typecheck_fail_callback: TypeCheckFailCallback | None = ..., + collection_check_strategy: CollectionCheckStrategy = ..., +) -> Any: ... + + +def check_type( + value: object, + expected_type: Any, + *, + forward_ref_policy: ForwardRefPolicy = TypeCheckConfiguration().forward_ref_policy, + typecheck_fail_callback: TypeCheckFailCallback | None = ( + TypeCheckConfiguration().typecheck_fail_callback + ), + collection_check_strategy: CollectionCheckStrategy = ( + TypeCheckConfiguration().collection_check_strategy + ), +) -> Any: + """ + Ensure that ``value`` matches ``expected_type``. + + The types from the :mod:`typing` module do not support :func:`isinstance` or + :func:`issubclass` so a number of type specific checks are required. This function + knows which checker to call for which type. 
+ + This function wraps :func:`~.check_type_internal` in the following ways: + + * Respects type checking suppression (:func:`~.suppress_type_checks`) + * Forms a :class:`~.TypeCheckMemo` from the current stack frame + * Calls the configured type check fail callback if the check fails + + Note that this function is independent of the globally shared configuration in + :data:`typeguard.config`. This means that usage within libraries is safe from being + affected by configuration changes made by other libraries or by the integrating + application. Instead, configuration options have the same default values as their + corresponding fields in :class:`TypeCheckConfiguration`. + + :param value: value to be checked against ``expected_type`` + :param expected_type: a class or generic type instance, or a tuple of such things + :param forward_ref_policy: see :attr:`TypeCheckConfiguration.forward_ref_policy` + :param typecheck_fail_callback: + see :attr:`TypeCheckConfiguration.typecheck_fail_callback` + :param collection_check_strategy: + see :attr:`TypeCheckConfiguration.collection_check_strategy` + :return: ``value``, unmodified + :raises TypeCheckError: if there is a type mismatch + + """ + if type(expected_type) is tuple: + expected_type = Union[expected_type] + + config = TypeCheckConfiguration( + forward_ref_policy=forward_ref_policy, + typecheck_fail_callback=typecheck_fail_callback, + collection_check_strategy=collection_check_strategy, + ) + + if _suppression.type_checks_suppressed or expected_type is Any: + return value + + frame = sys._getframe(1) + memo = TypeCheckMemo(frame.f_globals, frame.f_locals, config=config) + try: + check_type_internal(value, expected_type, memo) + except TypeCheckError as exc: + exc.append_path_element(qualified_name(value, add_class_prefix=True)) + if config.typecheck_fail_callback: + config.typecheck_fail_callback(exc, memo) + else: + raise + + return value + + +def check_argument_types( + func_name: str, + arguments: dict[str, tuple[Any, Any]], + memo: TypeCheckMemo, +) -> Literal[True]: + if _suppression.type_checks_suppressed: + return True + + for argname, (value, annotation) in arguments.items(): + if annotation is NoReturn or annotation is Never: + exc = TypeCheckError( + f"{func_name}() was declared never to be called but it was" + ) + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise exc + + try: + check_type_internal(value, annotation, memo) + except TypeCheckError as exc: + qualname = qualified_name(value, add_class_prefix=True) + exc.append_path_element(f'argument "{argname}" ({qualname})') + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return True + + +def check_return_type( + func_name: str, + retval: T, + annotation: Any, + memo: TypeCheckMemo, +) -> T: + if _suppression.type_checks_suppressed: + return retval + + if annotation is NoReturn or annotation is Never: + exc = TypeCheckError(f"{func_name}() was declared never to return but it did") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise exc + + try: + check_type_internal(retval, annotation, memo) + except TypeCheckError as exc: + # Allow NotImplemented if this is a binary magic method (__eq__() et al) + if retval is NotImplemented and annotation is bool: + # This does not (and cannot) check if it's actually a method + func_name = func_name.rsplit(".", 1)[-1] + if func_name in BINARY_MAGIC_METHODS: + return retval + + 
qualname = qualified_name(retval, add_class_prefix=True) + exc.append_path_element(f"the return value ({qualname})") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return retval + + +def check_send_type( + func_name: str, + sendval: T, + annotation: Any, + memo: TypeCheckMemo, +) -> T: + if _suppression.type_checks_suppressed: + return sendval + + if annotation is NoReturn or annotation is Never: + exc = TypeCheckError( + f"{func_name}() was declared never to be sent a value to but it was" + ) + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise exc + + try: + check_type_internal(sendval, annotation, memo) + except TypeCheckError as exc: + qualname = qualified_name(sendval, add_class_prefix=True) + exc.append_path_element(f"the value sent to generator ({qualname})") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return sendval + + +def check_yield_type( + func_name: str, + yieldval: T, + annotation: Any, + memo: TypeCheckMemo, +) -> T: + if _suppression.type_checks_suppressed: + return yieldval + + if annotation is NoReturn or annotation is Never: + exc = TypeCheckError(f"{func_name}() was declared never to yield but it did") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise exc + + try: + check_type_internal(yieldval, annotation, memo) + except TypeCheckError as exc: + qualname = qualified_name(yieldval, add_class_prefix=True) + exc.append_path_element(f"the yielded value ({qualname})") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return yieldval + + +def check_variable_assignment( + value: object, varname: str, annotation: Any, memo: TypeCheckMemo +) -> Any: + if _suppression.type_checks_suppressed: + return value + + try: + check_type_internal(value, annotation, memo) + except TypeCheckError as exc: + qualname = qualified_name(value, add_class_prefix=True) + exc.append_path_element(f"value assigned to {varname} ({qualname})") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return value + + +def check_multi_variable_assignment( + value: Any, targets: list[dict[str, Any]], memo: TypeCheckMemo +) -> Any: + if max(len(target) for target in targets) == 1: + iterated_values = [value] + else: + iterated_values = list(value) + + if not _suppression.type_checks_suppressed: + for expected_types in targets: + value_index = 0 + for ann_index, (varname, expected_type) in enumerate( + expected_types.items() + ): + if varname.startswith("*"): + varname = varname[1:] + keys_left = len(expected_types) - 1 - ann_index + next_value_index = len(iterated_values) - keys_left + obj: object = iterated_values[value_index:next_value_index] + value_index = next_value_index + else: + obj = iterated_values[value_index] + value_index += 1 + + try: + check_type_internal(obj, expected_type, memo) + except TypeCheckError as exc: + qualname = qualified_name(obj, add_class_prefix=True) + exc.append_path_element(f"value assigned to {varname} ({qualname})") + if memo.config.typecheck_fail_callback: + memo.config.typecheck_fail_callback(exc, memo) + else: + raise + + return iterated_values[0] if len(iterated_values) == 1 else iterated_values + + +def warn_on_error(exc: TypeCheckError, memo: TypeCheckMemo) -> None: + """ + Emit a warning on a type mismatch. 
+ + This is intended to be used as an error handler in + :attr:`TypeCheckConfiguration.typecheck_fail_callback`. + + """ + warnings.warn(TypeCheckWarning(str(exc)), stacklevel=get_stacklevel()) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_importhook.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_importhook.py new file mode 100644 index 0000000000000000000000000000000000000000..8590540a5a2ace23bbfc8272aca8e066740474a6 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_importhook.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +import ast +import sys +import types +from collections.abc import Callable, Iterable +from importlib.abc import MetaPathFinder +from importlib.machinery import ModuleSpec, SourceFileLoader +from importlib.util import cache_from_source, decode_source +from inspect import isclass +from os import PathLike +from types import CodeType, ModuleType, TracebackType +from typing import Sequence, TypeVar +from unittest.mock import patch + +from ._config import global_config +from ._transformer import TypeguardTransformer + +if sys.version_info >= (3, 12): + from collections.abc import Buffer +else: + from typing_extensions import Buffer + +if sys.version_info >= (3, 11): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +if sys.version_info >= (3, 10): + from importlib.metadata import PackageNotFoundError, version +else: + from importlib_metadata import PackageNotFoundError, version + +try: + OPTIMIZATION = "typeguard" + "".join(version("typeguard").split(".")[:3]) +except PackageNotFoundError: + OPTIMIZATION = "typeguard" + +P = ParamSpec("P") +T = TypeVar("T") + + +# The name of this function is magical +def _call_with_frames_removed( + f: Callable[P, T], *args: P.args, **kwargs: P.kwargs +) -> T: + return f(*args, **kwargs) + + +def optimized_cache_from_source(path: str, debug_override: bool | None = None) -> str: + return cache_from_source(path, debug_override, optimization=OPTIMIZATION) + + +class TypeguardLoader(SourceFileLoader): + @staticmethod + def source_to_code( + data: Buffer | str | ast.Module | ast.Expression | ast.Interactive, + path: Buffer | str | PathLike[str] = "<string>", + ) -> CodeType: + if isinstance(data, (ast.Module, ast.Expression, ast.Interactive)): + tree = data + else: + if isinstance(data, str): + source = data + else: + source = decode_source(data) + + tree = _call_with_frames_removed( + ast.parse, + source, + path, + "exec", + ) + + tree = TypeguardTransformer().visit(tree) + ast.fix_missing_locations(tree) + + if global_config.debug_instrumentation and sys.version_info >= (3, 9): + print( + f"Source code of {path!r} after instrumentation:\n" + "----------------------------------------------", + file=sys.stderr, + ) + print(ast.unparse(tree), file=sys.stderr) + print("----------------------------------------------", file=sys.stderr) + + return _call_with_frames_removed( + compile, tree, path, "exec", 0, dont_inherit=True + ) + + def exec_module(self, module: ModuleType) -> None: + # Use a custom optimization marker – the import lock should make this monkey + # patch safe + with patch( + "importlib._bootstrap_external.cache_from_source", + optimized_cache_from_source, + ): + super().exec_module(module) + + +class TypeguardFinder(MetaPathFinder): + """ + Wraps another path finder and instruments the module with + :func:`@typechecked <typechecked>` if :meth:`should_instrument` returns + ``True``. 
+ + Should not be used directly, but rather via :func:`~.install_import_hook`. + + .. versionadded:: 2.6 + """ + + def __init__(self, packages: list[str] | None, original_pathfinder: MetaPathFinder): + self.packages = packages + self._original_pathfinder = original_pathfinder + + def find_spec( + self, + fullname: str, + path: Sequence[str] | None, + target: types.ModuleType | None = None, + ) -> ModuleSpec | None: + if self.should_instrument(fullname): + spec = self._original_pathfinder.find_spec(fullname, path, target) + if spec is not None and isinstance(spec.loader, SourceFileLoader): + spec.loader = TypeguardLoader(spec.loader.name, spec.loader.path) + return spec + + return None + + def should_instrument(self, module_name: str) -> bool: + """ + Determine whether the module with the given name should be instrumented. + + :param module_name: full name of the module that is about to be imported (e.g. + ``xyz.abc``) + + """ + if self.packages is None: + return True + + for package in self.packages: + if module_name == package or module_name.startswith(package + "."): + return True + + return False + + +class ImportHookManager: + """ + A handle that can be used to uninstall the Typeguard import hook. + """ + + def __init__(self, hook: MetaPathFinder): + self.hook = hook + + def __enter__(self) -> None: + pass + + def __exit__( + self, + exc_type: type[BaseException], + exc_val: BaseException, + exc_tb: TracebackType, + ) -> None: + self.uninstall() + + def uninstall(self) -> None: + """Uninstall the import hook.""" + try: + sys.meta_path.remove(self.hook) + except ValueError: + pass # already removed + + +def install_import_hook( + packages: Iterable[str] | None = None, + *, + cls: type[TypeguardFinder] = TypeguardFinder, +) -> ImportHookManager: + """ + Install an import hook that instruments functions for automatic type checking. + + This only affects modules loaded **after** this hook has been installed. + + :param packages: an iterable of package names to instrument, or ``None`` to + instrument all packages + :param cls: a custom meta path finder class + :return: a context manager that uninstalls the hook on exit (or when you call + ``.uninstall()``) + + .. versionadded:: 2.6 + + """ + if packages is None: + target_packages: list[str] | None = None + elif isinstance(packages, str): + target_packages = [packages] + else: + target_packages = list(packages) + + for finder in sys.meta_path: + if ( + isclass(finder) + and finder.__name__ == "PathFinder" + and hasattr(finder, "find_spec") + ): + break + else: + raise RuntimeError("Cannot find a PathFinder in sys.meta_path") + + hook = cls(target_packages, finder) + sys.meta_path.insert(0, hook) + return ImportHookManager(hook) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_memo.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_memo.py new file mode 100644 index 0000000000000000000000000000000000000000..1d0d80c66d788e2f8f5e24ce14249319c54054c0 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_memo.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import Any + +from typeguard._config import TypeCheckConfiguration, global_config + + +class TypeCheckMemo: + """ + Contains information necessary for type checkers to do their work. + + .. attribute:: globals + :type: dict[str, Any] + + Dictionary of global variables to use for resolving forward references. + + .. 
attribute:: locals + :type: dict[str, Any] + + Dictionary of local variables to use for resolving forward references. + + .. attribute:: self_type + :type: type | None + + When running type checks within an instance method or class method, this is the + class object that the first argument (usually named ``self`` or ``cls``) refers + to. + + .. attribute:: config + :type: TypeCheckConfiguration + + Contains the configuration for a particular set of type checking operations. + """ + + __slots__ = "globals", "locals", "self_type", "config" + + def __init__( + self, + globals: dict[str, Any], + locals: dict[str, Any], + *, + self_type: type | None = None, + config: TypeCheckConfiguration = global_config, + ): + self.globals = globals + self.locals = locals + self.self_type = self_type + self.config = config diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_pytest_plugin.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_pytest_plugin.py new file mode 100644 index 0000000000000000000000000000000000000000..7b2f494ec7dbe5333c2798c0fcebb83cc3ac9907 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_pytest_plugin.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +import sys +import warnings +from typing import TYPE_CHECKING, Any, Literal + +from typeguard._config import CollectionCheckStrategy, ForwardRefPolicy, global_config +from typeguard._exceptions import InstrumentationWarning +from typeguard._importhook import install_import_hook +from typeguard._utils import qualified_name, resolve_reference + +if TYPE_CHECKING: + from pytest import Config, Parser + + +def pytest_addoption(parser: Parser) -> None: + def add_ini_option( + opt_type: ( + Literal["string", "paths", "pathlist", "args", "linelist", "bool"] | None + ), + ) -> None: + parser.addini( + group.options[-1].names()[0][2:], + group.options[-1].attrs()["help"], + opt_type, + ) + + group = parser.getgroup("typeguard") + group.addoption( + "--typeguard-packages", + action="store", + help="comma separated name list of packages and modules to instrument for " + "type checking, or :all: to instrument all modules loaded after typeguard", + ) + add_ini_option("linelist") + + group.addoption( + "--typeguard-debug-instrumentation", + action="store_true", + help="print all instrumented code to stderr", + ) + add_ini_option("bool") + + group.addoption( + "--typeguard-typecheck-fail-callback", + action="store", + help=( + "a module:varname (e.g. 
typeguard:warn_on_error) reference to a function " + "that is called (with the exception, and memo object as arguments) to " + "handle a TypeCheckError" + ), + ) + add_ini_option("string") + + group.addoption( + "--typeguard-forward-ref-policy", + action="store", + choices=list(ForwardRefPolicy.__members__), + help=( + "determines how to deal with unresolvable forward references in type " + "annotations" + ), + ) + add_ini_option("string") + + group.addoption( + "--typeguard-collection-check-strategy", + action="store", + choices=list(CollectionCheckStrategy.__members__), + help="determines how thoroughly to check collections (list, dict, etc)", + ) + add_ini_option("string") + + +def pytest_configure(config: Config) -> None: + def getoption(name: str) -> Any: + return config.getoption(name.replace("-", "_")) or config.getini(name) + + packages: list[str] | None = [] + if packages_option := config.getoption("typeguard_packages"): + packages = [pkg.strip() for pkg in packages_option.split(",")] + elif packages_ini := config.getini("typeguard-packages"): + packages = packages_ini + + if packages: + if packages == [":all:"]: + packages = None + else: + already_imported_packages = sorted( + package for package in packages if package in sys.modules + ) + if already_imported_packages: + warnings.warn( + f"typeguard cannot check these packages because they are already " + f"imported: {', '.join(already_imported_packages)}", + InstrumentationWarning, + stacklevel=1, + ) + + install_import_hook(packages=packages) + + debug_option = getoption("typeguard-debug-instrumentation") + if debug_option: + global_config.debug_instrumentation = True + + fail_callback_option = getoption("typeguard-typecheck-fail-callback") + if fail_callback_option: + callback = resolve_reference(fail_callback_option) + if not callable(callback): + raise TypeError( + f"{fail_callback_option} ({qualified_name(callback.__class__)}) is not " + f"a callable" + ) + + global_config.typecheck_fail_callback = callback + + forward_ref_policy_option = getoption("typeguard-forward-ref-policy") + if forward_ref_policy_option: + forward_ref_policy = ForwardRefPolicy.__members__[forward_ref_policy_option] + global_config.forward_ref_policy = forward_ref_policy + + collection_check_strategy_option = getoption("typeguard-collection-check-strategy") + if collection_check_strategy_option: + collection_check_strategy = CollectionCheckStrategy.__members__[ + collection_check_strategy_option + ] + global_config.collection_check_strategy = collection_check_strategy diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_suppression.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_suppression.py new file mode 100644 index 0000000000000000000000000000000000000000..bbbfbfbe8ef364d45a1db581562c73b323819b84 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_suppression.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import sys +from collections.abc import Callable, Generator +from contextlib import contextmanager +from functools import update_wrapper +from threading import Lock +from typing import ContextManager, TypeVar, overload + +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + +type_checks_suppressed = 0 +type_checks_suppress_lock = Lock() + + +@overload +def suppress_type_checks(func: Callable[P, T]) -> Callable[P, T]: ... 
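# ---------------------------------------------------------------------------
# Editorial aside, not part of the vendored patch: a sketch of the two
# operating modes documented below, assuming ``check_type`` and
# ``suppress_type_checks`` are re-exported by the top-level ``typeguard``
# package.
#
#     from typeguard import check_type, suppress_type_checks
#
#     with suppress_type_checks():
#         check_type(123, str)  # skipped: no TypeCheckError is raised here
#
#     @suppress_type_checks
#     def unchecked() -> None:
#         check_type(123, str)  # also skipped while this function runs
# ---------------------------------------------------------------------------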
+ + +@overload +def suppress_type_checks() -> ContextManager[None]: ... + + +def suppress_type_checks( + func: Callable[P, T] | None = None, +) -> Callable[P, T] | ContextManager[None]: + """ + Temporarily suppress all type checking. + + This function has two operating modes, based on how it's used: + + #. as a context manager (``with suppress_type_checks(): ...``) + #. as a decorator (``@suppress_type_checks``) + + When used as a context manager, :func:`check_type` and any automatically + instrumented functions skip the actual type checking. These context managers can be + nested. + + When used as a decorator, all type checking is suppressed while the function is + running. + + Type checking will resume once no more context managers are active and no decorated + functions are running. + + Both operating modes are thread-safe. + + """ + + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + global type_checks_suppressed + + with type_checks_suppress_lock: + type_checks_suppressed += 1 + + assert func is not None + try: + return func(*args, **kwargs) + finally: + with type_checks_suppress_lock: + type_checks_suppressed -= 1 + + def cm() -> Generator[None, None, None]: + global type_checks_suppressed + + with type_checks_suppress_lock: + type_checks_suppressed += 1 + + try: + yield + finally: + with type_checks_suppress_lock: + type_checks_suppressed -= 1 + + if func is None: + # Context manager mode + return contextmanager(cm)() + else: + # Decorator mode + update_wrapper(wrapper, func) + return wrapper diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_transformer.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..13ac3630e6c29ca32b2a84a3e6a85665b439cedf --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_transformer.py @@ -0,0 +1,1229 @@ +from __future__ import annotations + +import ast +import builtins +import sys +import typing +from ast import ( + AST, + Add, + AnnAssign, + Assign, + AsyncFunctionDef, + Attribute, + AugAssign, + BinOp, + BitAnd, + BitOr, + BitXor, + Call, + ClassDef, + Constant, + Dict, + Div, + Expr, + Expression, + FloorDiv, + FunctionDef, + If, + Import, + ImportFrom, + Index, + List, + Load, + LShift, + MatMult, + Mod, + Module, + Mult, + Name, + NamedExpr, + NodeTransformer, + NodeVisitor, + Pass, + Pow, + Return, + RShift, + Starred, + Store, + Sub, + Subscript, + Tuple, + Yield, + YieldFrom, + alias, + copy_location, + expr, + fix_missing_locations, + keyword, + walk, +) +from collections import defaultdict +from collections.abc import Generator, Sequence +from contextlib import contextmanager +from copy import deepcopy +from dataclasses import dataclass, field +from typing import Any, ClassVar, cast, overload + +generator_names = ( + "typing.Generator", + "collections.abc.Generator", + "typing.Iterator", + "collections.abc.Iterator", + "typing.Iterable", + "collections.abc.Iterable", + "typing.AsyncIterator", + "collections.abc.AsyncIterator", + "typing.AsyncIterable", + "collections.abc.AsyncIterable", + "typing.AsyncGenerator", + "collections.abc.AsyncGenerator", +) +anytype_names = ( + "typing.Any", + "typing_extensions.Any", +) +literal_names = ( + "typing.Literal", + "typing_extensions.Literal", +) +annotated_names = ( + "typing.Annotated", + "typing_extensions.Annotated", +) +ignore_decorators = ( + "typing.no_type_check", + "typeguard.typeguard_ignore", +) +aug_assign_functions = { + Add: 
"iadd", + Sub: "isub", + Mult: "imul", + MatMult: "imatmul", + Div: "itruediv", + FloorDiv: "ifloordiv", + Mod: "imod", + Pow: "ipow", + LShift: "ilshift", + RShift: "irshift", + BitAnd: "iand", + BitXor: "ixor", + BitOr: "ior", +} + + +@dataclass +class TransformMemo: + node: Module | ClassDef | FunctionDef | AsyncFunctionDef | None + parent: TransformMemo | None + path: tuple[str, ...] + joined_path: Constant = field(init=False) + return_annotation: expr | None = None + yield_annotation: expr | None = None + send_annotation: expr | None = None + is_async: bool = False + local_names: set[str] = field(init=False, default_factory=set) + imported_names: dict[str, str] = field(init=False, default_factory=dict) + ignored_names: set[str] = field(init=False, default_factory=set) + load_names: defaultdict[str, dict[str, Name]] = field( + init=False, default_factory=lambda: defaultdict(dict) + ) + has_yield_expressions: bool = field(init=False, default=False) + has_return_expressions: bool = field(init=False, default=False) + memo_var_name: Name | None = field(init=False, default=None) + should_instrument: bool = field(init=False, default=True) + variable_annotations: dict[str, expr] = field(init=False, default_factory=dict) + configuration_overrides: dict[str, Any] = field(init=False, default_factory=dict) + code_inject_index: int = field(init=False, default=0) + + def __post_init__(self) -> None: + elements: list[str] = [] + memo = self + while isinstance(memo.node, (ClassDef, FunctionDef, AsyncFunctionDef)): + elements.insert(0, memo.node.name) + if not memo.parent: + break + + memo = memo.parent + if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)): + elements.insert(0, "") + + self.joined_path = Constant(".".join(elements)) + + # Figure out where to insert instrumentation code + if self.node: + for index, child in enumerate(self.node.body): + if isinstance(child, ImportFrom) and child.module == "__future__": + # (module only) __future__ imports must come first + continue + elif ( + isinstance(child, Expr) + and isinstance(child.value, Constant) + and isinstance(child.value.value, str) + ): + continue # docstring + + self.code_inject_index = index + break + + def get_unused_name(self, name: str) -> str: + memo: TransformMemo | None = self + while memo is not None: + if name in memo.local_names: + memo = self + name += "_" + else: + memo = memo.parent + + self.local_names.add(name) + return name + + def is_ignored_name(self, expression: expr | Expr | None) -> bool: + top_expression = ( + expression.value if isinstance(expression, Expr) else expression + ) + + if isinstance(top_expression, Attribute) and isinstance( + top_expression.value, Name + ): + name = top_expression.value.id + elif isinstance(top_expression, Name): + name = top_expression.id + else: + return False + + memo: TransformMemo | None = self + while memo is not None: + if name in memo.ignored_names: + return True + + memo = memo.parent + + return False + + def get_memo_name(self) -> Name: + if not self.memo_var_name: + self.memo_var_name = Name(id="memo", ctx=Load()) + + return self.memo_var_name + + def get_import(self, module: str, name: str) -> Name: + if module in self.load_names and name in self.load_names[module]: + return self.load_names[module][name] + + qualified_name = f"{module}.{name}" + if name in self.imported_names and self.imported_names[name] == qualified_name: + return Name(id=name, ctx=Load()) + + alias = self.get_unused_name(name) + node = self.load_names[module][name] = Name(id=alias, ctx=Load()) + 
self.imported_names[name] = qualified_name + return node + + def insert_imports(self, node: Module | FunctionDef | AsyncFunctionDef) -> None: + """Insert imports needed by injected code.""" + if not self.load_names: + return + + # Insert imports after any "from __future__ ..." imports and any docstring + for modulename, names in self.load_names.items(): + aliases = [ + alias(orig_name, new_name.id if orig_name != new_name.id else None) + for orig_name, new_name in sorted(names.items()) + ] + node.body.insert(self.code_inject_index, ImportFrom(modulename, aliases, 0)) + + def name_matches(self, expression: expr | Expr | None, *names: str) -> bool: + if expression is None: + return False + + path: list[str] = [] + top_expression = ( + expression.value if isinstance(expression, Expr) else expression + ) + + if isinstance(top_expression, Subscript): + top_expression = top_expression.value + elif isinstance(top_expression, Call): + top_expression = top_expression.func + + while isinstance(top_expression, Attribute): + path.insert(0, top_expression.attr) + top_expression = top_expression.value + + if not isinstance(top_expression, Name): + return False + + if top_expression.id in self.imported_names: + translated = self.imported_names[top_expression.id] + elif hasattr(builtins, top_expression.id): + translated = "builtins." + top_expression.id + else: + translated = top_expression.id + + path.insert(0, translated) + joined_path = ".".join(path) + if joined_path in names: + return True + elif self.parent: + return self.parent.name_matches(expression, *names) + else: + return False + + def get_config_keywords(self) -> list[keyword]: + if self.parent and isinstance(self.parent.node, ClassDef): + overrides = self.parent.configuration_overrides.copy() + else: + overrides = {} + + overrides.update(self.configuration_overrides) + return [keyword(key, value) for key, value in overrides.items()] + + +class NameCollector(NodeVisitor): + def __init__(self) -> None: + self.names: set[str] = set() + + def visit_Import(self, node: Import) -> None: + for name in node.names: + self.names.add(name.asname or name.name) + + def visit_ImportFrom(self, node: ImportFrom) -> None: + for name in node.names: + self.names.add(name.asname or name.name) + + def visit_Assign(self, node: Assign) -> None: + for target in node.targets: + if isinstance(target, Name): + self.names.add(target.id) + + def visit_NamedExpr(self, node: NamedExpr) -> Any: + if isinstance(node.target, Name): + self.names.add(node.target.id) + + def visit_FunctionDef(self, node: FunctionDef) -> None: + pass + + def visit_ClassDef(self, node: ClassDef) -> None: + pass + + +class GeneratorDetector(NodeVisitor): + """Detects if a function node is a generator function.""" + + contains_yields: bool = False + in_root_function: bool = False + + def visit_Yield(self, node: Yield) -> Any: + self.contains_yields = True + + def visit_YieldFrom(self, node: YieldFrom) -> Any: + self.contains_yields = True + + def visit_ClassDef(self, node: ClassDef) -> Any: + pass + + def visit_FunctionDef(self, node: FunctionDef | AsyncFunctionDef) -> Any: + if not self.in_root_function: + self.in_root_function = True + self.generic_visit(node) + self.in_root_function = False + + def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any: + self.visit_FunctionDef(node) + + +class AnnotationTransformer(NodeTransformer): + type_substitutions: ClassVar[dict[str, tuple[str, str]]] = { + "builtins.dict": ("typing", "Dict"), + "builtins.list": ("typing", "List"), + "builtins.tuple": 
("typing", "Tuple"), + "builtins.set": ("typing", "Set"), + "builtins.frozenset": ("typing", "FrozenSet"), + } + + def __init__(self, transformer: TypeguardTransformer): + self.transformer = transformer + self._memo = transformer._memo + self._level = 0 + + def visit(self, node: AST) -> Any: + # Don't process Literals + if isinstance(node, expr) and self._memo.name_matches(node, *literal_names): + return node + + self._level += 1 + new_node = super().visit(node) + self._level -= 1 + + if isinstance(new_node, Expression) and not hasattr(new_node, "body"): + return None + + # Return None if this new node matches a variation of typing.Any + if ( + self._level == 0 + and isinstance(new_node, expr) + and self._memo.name_matches(new_node, *anytype_names) + ): + return None + + return new_node + + def visit_BinOp(self, node: BinOp) -> Any: + self.generic_visit(node) + + if isinstance(node.op, BitOr): + # If either branch of the BinOp has been transformed to `None`, it means + # that a type in the union was ignored, so the entire annotation should e + # ignored + if not hasattr(node, "left") or not hasattr(node, "right"): + return None + + # Return Any if either side is Any + if self._memo.name_matches(node.left, *anytype_names): + return node.left + elif self._memo.name_matches(node.right, *anytype_names): + return node.right + + if sys.version_info < (3, 10): + union_name = self.transformer._get_import("typing", "Union") + return Subscript( + value=union_name, + slice=Index( + Tuple(elts=[node.left, node.right], ctx=Load()), ctx=Load() + ), + ctx=Load(), + ) + + return node + + def visit_Attribute(self, node: Attribute) -> Any: + if self._memo.is_ignored_name(node): + return None + + return node + + def visit_Subscript(self, node: Subscript) -> Any: + if self._memo.is_ignored_name(node.value): + return None + + # The subscript of typing(_extensions).Literal can be any arbitrary string, so + # don't try to evaluate it as code + if node.slice: + if isinstance(node.slice, Index): + # Python 3.8 + slice_value = node.slice.value # type: ignore[attr-defined] + else: + slice_value = node.slice + + if isinstance(slice_value, Tuple): + if self._memo.name_matches(node.value, *annotated_names): + # Only treat the first argument to typing.Annotated as a potential + # forward reference + items = cast( + typing.List[expr], + [self.visit(slice_value.elts[0])] + slice_value.elts[1:], + ) + else: + items = cast( + typing.List[expr], + [self.visit(item) for item in slice_value.elts], + ) + + # If this is a Union and any of the items is Any, erase the entire + # annotation + if self._memo.name_matches(node.value, "typing.Union") and any( + item is None + or ( + isinstance(item, expr) + and self._memo.name_matches(item, *anytype_names) + ) + for item in items + ): + return None + + # If all items in the subscript were Any, erase the subscript entirely + if all(item is None for item in items): + return node.value + + for index, item in enumerate(items): + if item is None: + items[index] = self.transformer._get_import("typing", "Any") + + slice_value.elts = items + else: + self.generic_visit(node) + + # If the transformer erased the slice entirely, just return the node + # value without the subscript (unless it's Optional, in which case erase + # the node entirely + if self._memo.name_matches( + node.value, "typing.Optional" + ) and not hasattr(node, "slice"): + return None + if sys.version_info >= (3, 9) and not hasattr(node, "slice"): + return node.value + elif sys.version_info < (3, 9) and not hasattr(node.slice, 
"value"): + return node.value + + return node + + def visit_Name(self, node: Name) -> Any: + if self._memo.is_ignored_name(node): + return None + + if sys.version_info < (3, 9): + for typename, substitute in self.type_substitutions.items(): + if self._memo.name_matches(node, typename): + new_node = self.transformer._get_import(*substitute) + return copy_location(new_node, node) + + return node + + def visit_Call(self, node: Call) -> Any: + # Don't recurse into calls + return node + + def visit_Constant(self, node: Constant) -> Any: + if isinstance(node.value, str): + expression = ast.parse(node.value, mode="eval") + new_node = self.visit(expression) + if new_node: + return copy_location(new_node.body, node) + else: + return None + + return node + + +class TypeguardTransformer(NodeTransformer): + def __init__( + self, target_path: Sequence[str] | None = None, target_lineno: int | None = None + ) -> None: + self._target_path = tuple(target_path) if target_path else None + self._memo = self._module_memo = TransformMemo(None, None, ()) + self.names_used_in_annotations: set[str] = set() + self.target_node: FunctionDef | AsyncFunctionDef | None = None + self.target_lineno = target_lineno + + def generic_visit(self, node: AST) -> AST: + has_non_empty_body_initially = bool(getattr(node, "body", None)) + initial_type = type(node) + + node = super().generic_visit(node) + + if ( + type(node) is initial_type + and has_non_empty_body_initially + and hasattr(node, "body") + and not node.body + ): + # If we have still the same node type after transformation + # but we've optimised it's body away, we add a `pass` statement. + node.body = [Pass()] + + return node + + @contextmanager + def _use_memo( + self, node: ClassDef | FunctionDef | AsyncFunctionDef + ) -> Generator[None, Any, None]: + new_memo = TransformMemo(node, self._memo, self._memo.path + (node.name,)) + old_memo = self._memo + self._memo = new_memo + + if isinstance(node, (FunctionDef, AsyncFunctionDef)): + new_memo.should_instrument = ( + self._target_path is None or new_memo.path == self._target_path + ) + if new_memo.should_instrument: + # Check if the function is a generator function + detector = GeneratorDetector() + detector.visit(node) + + # Extract yield, send and return types where possible from a subscripted + # annotation like Generator[int, str, bool] + return_annotation = deepcopy(node.returns) + if detector.contains_yields and new_memo.name_matches( + return_annotation, *generator_names + ): + if isinstance(return_annotation, Subscript): + annotation_slice = return_annotation.slice + + # Python < 3.9 + if isinstance(annotation_slice, Index): + annotation_slice = ( + annotation_slice.value # type: ignore[attr-defined] + ) + + if isinstance(annotation_slice, Tuple): + items = annotation_slice.elts + else: + items = [annotation_slice] + + if len(items) > 0: + new_memo.yield_annotation = self._convert_annotation( + items[0] + ) + + if len(items) > 1: + new_memo.send_annotation = self._convert_annotation( + items[1] + ) + + if len(items) > 2: + new_memo.return_annotation = self._convert_annotation( + items[2] + ) + else: + new_memo.return_annotation = self._convert_annotation( + return_annotation + ) + + if isinstance(node, AsyncFunctionDef): + new_memo.is_async = True + + yield + self._memo = old_memo + + def _get_import(self, module: str, name: str) -> Name: + memo = self._memo if self._target_path else self._module_memo + return memo.get_import(module, name) + + @overload + def _convert_annotation(self, annotation: None) -> None: 
... + + @overload + def _convert_annotation(self, annotation: expr) -> expr: ... + + def _convert_annotation(self, annotation: expr | None) -> expr | None: + if annotation is None: + return None + + # Convert PEP 604 unions (x | y) and generic built-in collections where + # necessary, and undo forward references + new_annotation = cast(expr, AnnotationTransformer(self).visit(annotation)) + if isinstance(new_annotation, expr): + new_annotation = ast.copy_location(new_annotation, annotation) + + # Store names used in the annotation + names = {node.id for node in walk(new_annotation) if isinstance(node, Name)} + self.names_used_in_annotations.update(names) + + return new_annotation + + def visit_Name(self, node: Name) -> Name: + self._memo.local_names.add(node.id) + return node + + def visit_Module(self, node: Module) -> Module: + self._module_memo = self._memo = TransformMemo(node, None, ()) + self.generic_visit(node) + self._module_memo.insert_imports(node) + + fix_missing_locations(node) + return node + + def visit_Import(self, node: Import) -> Import: + for name in node.names: + self._memo.local_names.add(name.asname or name.name) + self._memo.imported_names[name.asname or name.name] = name.name + + return node + + def visit_ImportFrom(self, node: ImportFrom) -> ImportFrom: + for name in node.names: + if name.name != "*": + alias = name.asname or name.name + self._memo.local_names.add(alias) + self._memo.imported_names[alias] = f"{node.module}.{name.name}" + + return node + + def visit_ClassDef(self, node: ClassDef) -> ClassDef | None: + self._memo.local_names.add(node.name) + + # Eliminate top level classes not belonging to the target path + if ( + self._target_path is not None + and not self._memo.path + and node.name != self._target_path[0] + ): + return None + + with self._use_memo(node): + for decorator in node.decorator_list.copy(): + if self._memo.name_matches(decorator, "typeguard.typechecked"): + # Remove the decorator to prevent duplicate instrumentation + node.decorator_list.remove(decorator) + + # Store any configuration overrides + if isinstance(decorator, Call) and decorator.keywords: + self._memo.configuration_overrides.update( + {kw.arg: kw.value for kw in decorator.keywords if kw.arg} + ) + + self.generic_visit(node) + return node + + def visit_FunctionDef( + self, node: FunctionDef | AsyncFunctionDef + ) -> FunctionDef | AsyncFunctionDef | None: + """ + Injects type checks for function arguments, and for a return of None if the + function is annotated to return something else than Any or None, and the body + ends without an explicit "return". 
+ + """ + self._memo.local_names.add(node.name) + + # Eliminate top level functions not belonging to the target path + if ( + self._target_path is not None + and not self._memo.path + and node.name != self._target_path[0] + ): + return None + + # Skip instrumentation if we're instrumenting the whole module and the function + # contains either @no_type_check or @typeguard_ignore + if self._target_path is None: + for decorator in node.decorator_list: + if self._memo.name_matches(decorator, *ignore_decorators): + return node + + with self._use_memo(node): + arg_annotations: dict[str, Any] = {} + if self._target_path is None or self._memo.path == self._target_path: + # Find line number we're supposed to match against + if node.decorator_list: + first_lineno = node.decorator_list[0].lineno + else: + first_lineno = node.lineno + + for decorator in node.decorator_list.copy(): + if self._memo.name_matches(decorator, "typing.overload"): + # Remove overloads entirely + return None + elif self._memo.name_matches(decorator, "typeguard.typechecked"): + # Remove the decorator to prevent duplicate instrumentation + node.decorator_list.remove(decorator) + + # Store any configuration overrides + if isinstance(decorator, Call) and decorator.keywords: + self._memo.configuration_overrides = { + kw.arg: kw.value for kw in decorator.keywords if kw.arg + } + + if self.target_lineno == first_lineno: + assert self.target_node is None + self.target_node = node + if node.decorator_list: + self.target_lineno = node.decorator_list[0].lineno + else: + self.target_lineno = node.lineno + + all_args = node.args.args + node.args.kwonlyargs + node.args.posonlyargs + + # Ensure that any type shadowed by the positional or keyword-only + # argument names are ignored in this function + for arg in all_args: + self._memo.ignored_names.add(arg.arg) + + # Ensure that any type shadowed by the variable positional argument name + # (e.g. "args" in *args) is ignored this function + if node.args.vararg: + self._memo.ignored_names.add(node.args.vararg.arg) + + # Ensure that any type shadowed by the variable keywrod argument name + # (e.g. 
"kwargs" in *kwargs) is ignored this function + if node.args.kwarg: + self._memo.ignored_names.add(node.args.kwarg.arg) + + for arg in all_args: + annotation = self._convert_annotation(deepcopy(arg.annotation)) + if annotation: + arg_annotations[arg.arg] = annotation + + if node.args.vararg: + annotation_ = self._convert_annotation(node.args.vararg.annotation) + if annotation_: + if sys.version_info >= (3, 9): + container = Name("tuple", ctx=Load()) + else: + container = self._get_import("typing", "Tuple") + + subscript_slice: Tuple | Index = Tuple( + [ + annotation_, + Constant(Ellipsis), + ], + ctx=Load(), + ) + if sys.version_info < (3, 9): + subscript_slice = Index(subscript_slice, ctx=Load()) + + arg_annotations[node.args.vararg.arg] = Subscript( + container, subscript_slice, ctx=Load() + ) + + if node.args.kwarg: + annotation_ = self._convert_annotation(node.args.kwarg.annotation) + if annotation_: + if sys.version_info >= (3, 9): + container = Name("dict", ctx=Load()) + else: + container = self._get_import("typing", "Dict") + + subscript_slice = Tuple( + [ + Name("str", ctx=Load()), + annotation_, + ], + ctx=Load(), + ) + if sys.version_info < (3, 9): + subscript_slice = Index(subscript_slice, ctx=Load()) + + arg_annotations[node.args.kwarg.arg] = Subscript( + container, subscript_slice, ctx=Load() + ) + + if arg_annotations: + self._memo.variable_annotations.update(arg_annotations) + + self.generic_visit(node) + + if arg_annotations: + annotations_dict = Dict( + keys=[Constant(key) for key in arg_annotations.keys()], + values=[ + Tuple([Name(key, ctx=Load()), annotation], ctx=Load()) + for key, annotation in arg_annotations.items() + ], + ) + func_name = self._get_import( + "typeguard._functions", "check_argument_types" + ) + args = [ + self._memo.joined_path, + annotations_dict, + self._memo.get_memo_name(), + ] + node.body.insert( + self._memo.code_inject_index, Expr(Call(func_name, args, [])) + ) + + # Add a checked "return None" to the end if there's no explicit return + # Skip if the return annotation is None or Any + if ( + self._memo.return_annotation + and (not self._memo.is_async or not self._memo.has_yield_expressions) + and not isinstance(node.body[-1], Return) + and ( + not isinstance(self._memo.return_annotation, Constant) + or self._memo.return_annotation.value is not None + ) + ): + func_name = self._get_import( + "typeguard._functions", "check_return_type" + ) + return_node = Return( + Call( + func_name, + [ + self._memo.joined_path, + Constant(None), + self._memo.return_annotation, + self._memo.get_memo_name(), + ], + [], + ) + ) + + # Replace a placeholder "pass" at the end + if isinstance(node.body[-1], Pass): + copy_location(return_node, node.body[-1]) + del node.body[-1] + + node.body.append(return_node) + + # Insert code to create the call memo, if it was ever needed for this + # function + if self._memo.memo_var_name: + memo_kwargs: dict[str, Any] = {} + if self._memo.parent and isinstance(self._memo.parent.node, ClassDef): + for decorator in node.decorator_list: + if ( + isinstance(decorator, Name) + and decorator.id == "staticmethod" + ): + break + elif ( + isinstance(decorator, Name) + and decorator.id == "classmethod" + ): + memo_kwargs["self_type"] = Name( + id=node.args.args[0].arg, ctx=Load() + ) + break + else: + if node.args.args: + if node.name == "__new__": + memo_kwargs["self_type"] = Name( + id=node.args.args[0].arg, ctx=Load() + ) + else: + memo_kwargs["self_type"] = Attribute( + Name(id=node.args.args[0].arg, ctx=Load()), + "__class__", + 
ctx=Load(), + ) + + # Construct the function reference + # Nested functions get special treatment: the function name is added + # to free variables (and the closure of the resulting function) + names: list[str] = [node.name] + memo = self._memo.parent + while memo: + if isinstance(memo.node, (FunctionDef, AsyncFunctionDef)): + # This is a nested function. Use the function name as-is. + del names[:-1] + break + elif not isinstance(memo.node, ClassDef): + break + + names.insert(0, memo.node.name) + memo = memo.parent + + config_keywords = self._memo.get_config_keywords() + if config_keywords: + memo_kwargs["config"] = Call( + self._get_import("dataclasses", "replace"), + [self._get_import("typeguard._config", "global_config")], + config_keywords, + ) + + self._memo.memo_var_name.id = self._memo.get_unused_name("memo") + memo_store_name = Name(id=self._memo.memo_var_name.id, ctx=Store()) + globals_call = Call(Name(id="globals", ctx=Load()), [], []) + locals_call = Call(Name(id="locals", ctx=Load()), [], []) + memo_expr = Call( + self._get_import("typeguard", "TypeCheckMemo"), + [globals_call, locals_call], + [keyword(key, value) for key, value in memo_kwargs.items()], + ) + node.body.insert( + self._memo.code_inject_index, + Assign([memo_store_name], memo_expr), + ) + + self._memo.insert_imports(node) + + # Special case the __new__() method to create a local alias from the + # class name to the first argument (usually "cls") + if ( + isinstance(node, FunctionDef) + and node.args + and self._memo.parent is not None + and isinstance(self._memo.parent.node, ClassDef) + and node.name == "__new__" + ): + first_args_expr = Name(node.args.args[0].arg, ctx=Load()) + cls_name = Name(self._memo.parent.node.name, ctx=Store()) + node.body.insert( + self._memo.code_inject_index, + Assign([cls_name], first_args_expr), + ) + + # Rmove any placeholder "pass" at the end + if isinstance(node.body[-1], Pass): + del node.body[-1] + + return node + + def visit_AsyncFunctionDef( + self, node: AsyncFunctionDef + ) -> FunctionDef | AsyncFunctionDef | None: + return self.visit_FunctionDef(node) + + def visit_Return(self, node: Return) -> Return: + """This injects type checks into "return" statements.""" + self.generic_visit(node) + if ( + self._memo.return_annotation + and self._memo.should_instrument + and not self._memo.is_ignored_name(self._memo.return_annotation) + ): + func_name = self._get_import("typeguard._functions", "check_return_type") + old_node = node + retval = old_node.value or Constant(None) + node = Return( + Call( + func_name, + [ + self._memo.joined_path, + retval, + self._memo.return_annotation, + self._memo.get_memo_name(), + ], + [], + ) + ) + copy_location(node, old_node) + + return node + + def visit_Yield(self, node: Yield) -> Yield | Call: + """ + This injects type checks into "yield" expressions, checking both the yielded + value and the value sent back to the generator, when appropriate. 
+ + """ + self._memo.has_yield_expressions = True + self.generic_visit(node) + + if ( + self._memo.yield_annotation + and self._memo.should_instrument + and not self._memo.is_ignored_name(self._memo.yield_annotation) + ): + func_name = self._get_import("typeguard._functions", "check_yield_type") + yieldval = node.value or Constant(None) + node.value = Call( + func_name, + [ + self._memo.joined_path, + yieldval, + self._memo.yield_annotation, + self._memo.get_memo_name(), + ], + [], + ) + + if ( + self._memo.send_annotation + and self._memo.should_instrument + and not self._memo.is_ignored_name(self._memo.send_annotation) + ): + func_name = self._get_import("typeguard._functions", "check_send_type") + old_node = node + call_node = Call( + func_name, + [ + self._memo.joined_path, + old_node, + self._memo.send_annotation, + self._memo.get_memo_name(), + ], + [], + ) + copy_location(call_node, old_node) + return call_node + + return node + + def visit_AnnAssign(self, node: AnnAssign) -> Any: + """ + This injects a type check into a local variable annotation-assignment within a + function body. + + """ + self.generic_visit(node) + + if ( + isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) + and node.annotation + and isinstance(node.target, Name) + ): + self._memo.ignored_names.add(node.target.id) + annotation = self._convert_annotation(deepcopy(node.annotation)) + if annotation: + self._memo.variable_annotations[node.target.id] = annotation + if node.value: + func_name = self._get_import( + "typeguard._functions", "check_variable_assignment" + ) + node.value = Call( + func_name, + [ + node.value, + Constant(node.target.id), + annotation, + self._memo.get_memo_name(), + ], + [], + ) + + return node + + def visit_Assign(self, node: Assign) -> Any: + """ + This injects a type check into a local variable assignment within a function + body. The variable must have been annotated earlier in the function body. 
+ + """ + self.generic_visit(node) + + # Only instrument function-local assignments + if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)): + targets: list[dict[Constant, expr | None]] = [] + check_required = False + for target in node.targets: + elts: Sequence[expr] + if isinstance(target, Name): + elts = [target] + elif isinstance(target, Tuple): + elts = target.elts + else: + continue + + annotations_: dict[Constant, expr | None] = {} + for exp in elts: + prefix = "" + if isinstance(exp, Starred): + exp = exp.value + prefix = "*" + + if isinstance(exp, Name): + self._memo.ignored_names.add(exp.id) + name = prefix + exp.id + annotation = self._memo.variable_annotations.get(exp.id) + if annotation: + annotations_[Constant(name)] = annotation + check_required = True + else: + annotations_[Constant(name)] = None + + targets.append(annotations_) + + if check_required: + # Replace missing annotations with typing.Any + for item in targets: + for key, expression in item.items(): + if expression is None: + item[key] = self._get_import("typing", "Any") + + if len(targets) == 1 and len(targets[0]) == 1: + func_name = self._get_import( + "typeguard._functions", "check_variable_assignment" + ) + target_varname = next(iter(targets[0])) + node.value = Call( + func_name, + [ + node.value, + target_varname, + targets[0][target_varname], + self._memo.get_memo_name(), + ], + [], + ) + elif targets: + func_name = self._get_import( + "typeguard._functions", "check_multi_variable_assignment" + ) + targets_arg = List( + [ + Dict(keys=list(target), values=list(target.values())) + for target in targets + ], + ctx=Load(), + ) + node.value = Call( + func_name, + [node.value, targets_arg, self._memo.get_memo_name()], + [], + ) + + return node + + def visit_NamedExpr(self, node: NamedExpr) -> Any: + """This injects a type check into an assignment expression (a := foo()).""" + self.generic_visit(node) + + # Only instrument function-local assignments + if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance( + node.target, Name + ): + self._memo.ignored_names.add(node.target.id) + + # Bail out if no matching annotation is found + annotation = self._memo.variable_annotations.get(node.target.id) + if annotation is None: + return node + + func_name = self._get_import( + "typeguard._functions", "check_variable_assignment" + ) + node.value = Call( + func_name, + [ + node.value, + Constant(node.target.id), + annotation, + self._memo.get_memo_name(), + ], + [], + ) + + return node + + def visit_AugAssign(self, node: AugAssign) -> Any: + """ + This injects a type check into an augmented assignment expression (a += 1). + + """ + self.generic_visit(node) + + # Only instrument function-local assignments + if isinstance(self._memo.node, (FunctionDef, AsyncFunctionDef)) and isinstance( + node.target, Name + ): + # Bail out if no matching annotation is found + annotation = self._memo.variable_annotations.get(node.target.id) + if annotation is None: + return node + + # Bail out if the operator is not found (newer Python version?) 
+ try: + operator_func_name = aug_assign_functions[node.op.__class__] + except KeyError: + return node + + operator_func = self._get_import("operator", operator_func_name) + operator_call = Call( + operator_func, [Name(node.target.id, ctx=Load()), node.value], [] + ) + check_call = Call( + self._get_import("typeguard._functions", "check_variable_assignment"), + [ + operator_call, + Constant(node.target.id), + annotation, + self._memo.get_memo_name(), + ], + [], + ) + return Assign(targets=[node.target], value=check_call) + + return node + + def visit_If(self, node: If) -> Any: + """ + This blocks names from being collected from a module-level + "if typing.TYPE_CHECKING:" block, so that they won't be type checked. + + """ + self.generic_visit(node) + + if ( + self._memo is self._module_memo + and isinstance(node.test, Name) + and self._memo.name_matches(node.test, "typing.TYPE_CHECKING") + ): + collector = NameCollector() + collector.visit(node) + self._memo.ignored_names.update(collector.names) + + return node diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_union_transformer.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_union_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..19617e6af5a0fd691b9d3793f9486ade2868d5bf --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_union_transformer.py @@ -0,0 +1,55 @@ +""" +Transforms lazily evaluated PEP 604 unions into typing.Unions, for compatibility with +Python versions older than 3.10. +""" + +from __future__ import annotations + +from ast import ( + BinOp, + BitOr, + Index, + Load, + Name, + NodeTransformer, + Subscript, + fix_missing_locations, + parse, +) +from ast import Tuple as ASTTuple +from types import CodeType +from typing import Any, Dict, FrozenSet, List, Set, Tuple, Union + +type_substitutions = { + "dict": Dict, + "list": List, + "tuple": Tuple, + "set": Set, + "frozenset": FrozenSet, + "Union": Union, +} + + +class UnionTransformer(NodeTransformer): + def __init__(self, union_name: Name | None = None): + self.union_name = union_name or Name(id="Union", ctx=Load()) + + def visit_BinOp(self, node: BinOp) -> Any: + self.generic_visit(node) + if isinstance(node.op, BitOr): + return Subscript( + value=self.union_name, + slice=Index( + ASTTuple(elts=[node.left, node.right], ctx=Load()), ctx=Load() + ), + ctx=Load(), + ) + + return node + + +def compile_type_hint(hint: str) -> CodeType: + parsed = parse(hint, "<string>", "eval") + UnionTransformer().visit(parsed) + fix_missing_locations(parsed) + return compile(parsed, "<string>", "eval", flags=0) diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_utils.py b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9bcc8417f8773115de078d307682b457af848052 --- /dev/null +++ b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/_utils.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import inspect +import sys +from importlib import import_module +from inspect import currentframe +from types import CodeType, FrameType, FunctionType +from typing import TYPE_CHECKING, Any, Callable, ForwardRef, Union, cast, final +from weakref import WeakValueDictionary + +if TYPE_CHECKING: + from ._memo import TypeCheckMemo + +if sys.version_info >= (3, 13): + from typing import get_args, get_origin + + def evaluate_forwardref(forwardref: ForwardRef, memo:
TypeCheckMemo) -> Any: + return forwardref._evaluate( + memo.globals, memo.locals, type_params=(), recursive_guard=frozenset() + ) + +elif sys.version_info >= (3, 10): + from typing import get_args, get_origin + + def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any: + return forwardref._evaluate( + memo.globals, memo.locals, recursive_guard=frozenset() + ) + +else: + from typing_extensions import get_args, get_origin + + evaluate_extra_args: tuple[frozenset[Any], ...] = ( + (frozenset(),) if sys.version_info >= (3, 9) else () + ) + + def evaluate_forwardref(forwardref: ForwardRef, memo: TypeCheckMemo) -> Any: + from ._union_transformer import compile_type_hint, type_substitutions + + if not forwardref.__forward_evaluated__: + forwardref.__forward_code__ = compile_type_hint(forwardref.__forward_arg__) + + try: + return forwardref._evaluate(memo.globals, memo.locals, *evaluate_extra_args) + except NameError: + if sys.version_info < (3, 10): + # Try again, with the type substitutions (list -> List etc.) in place + new_globals = memo.globals.copy() + new_globals.setdefault("Union", Union) + if sys.version_info < (3, 9): + new_globals.update(type_substitutions) + + return forwardref._evaluate( + new_globals, memo.locals or new_globals, *evaluate_extra_args + ) + + raise + + +_functions_map: WeakValueDictionary[CodeType, FunctionType] = WeakValueDictionary() + + +def get_type_name(type_: Any) -> str: + name: str + for attrname in "__name__", "_name", "__forward_arg__": + candidate = getattr(type_, attrname, None) + if isinstance(candidate, str): + name = candidate + break + else: + origin = get_origin(type_) + candidate = getattr(origin, "_name", None) + if candidate is None: + candidate = type_.__class__.__name__.strip("_") + + if isinstance(candidate, str): + name = candidate + else: + return "(unknown)" + + args = get_args(type_) + if args: + if name == "Literal": + formatted_args = ", ".join(repr(arg) for arg in args) + else: + formatted_args = ", ".join(get_type_name(arg) for arg in args) + + name += f"[{formatted_args}]" + + module = getattr(type_, "__module__", None) + if module and module not in (None, "typing", "typing_extensions", "builtins"): + name = module + "." + name + + return name + + +def qualified_name(obj: Any, *, add_class_prefix: bool = False) -> str: + """ + Return the qualified name (e.g. package.module.Type) for the given object. + + Builtins and types from the :mod:`typing` package get special treatment by having + the module name stripped from the generated name. + + """ + if obj is None: + return "None" + elif inspect.isclass(obj): + prefix = "class " if add_class_prefix else "" + type_ = obj + else: + prefix = "" + type_ = type(obj) + + module = type_.__module__ + qualname = type_.__qualname__ + name = qualname if module in ("typing", "builtins") else f"{module}.{qualname}" + return prefix + name + + +def function_name(func: Callable[..., Any]) -> str: + """ + Return the qualified name of the given function. + + Builtins and types from the :mod:`typing` package get special treatment by having + the module name stripped from the generated name. 
+ + """ + # For partial functions and objects with __call__ defined, __qualname__ does not + # exist + module = getattr(func, "__module__", "") + qualname = (module + ".") if module not in ("builtins", "") else "" + return qualname + getattr(func, "__qualname__", repr(func)) + + + def resolve_reference(reference: str) -> Any: + modulename, varname = reference.partition(":")[::2] + if not modulename or not varname: + raise ValueError(f"{reference!r} is not a module:varname reference") + + obj = import_module(modulename) + for attr in varname.split("."): + obj = getattr(obj, attr) + + return obj + + + def is_method_of(obj: object, cls: type) -> bool: + return ( + inspect.isfunction(obj) + and obj.__module__ == cls.__module__ + and obj.__qualname__.startswith(cls.__qualname__ + ".") + ) + + + def get_stacklevel() -> int: + level = 1 + frame = cast(FrameType, currentframe()).f_back + while frame and frame.f_globals.get("__name__", "").startswith("typeguard."): + level += 1 + frame = frame.f_back + + return level + + + @final + class Unset: + __slots__ = () + + def __repr__(self) -> str: + return "<unset>" + + + unset = Unset() diff --git a/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/py.typed b/falcon/lib/python3.10/site-packages/setuptools/_vendor/typeguard/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
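
The visitor methods patched in above (visit_Return, visit_Yield, visit_AnnAssign, visit_Assign, and friends) are the machinery behind typeguard's public @typechecked decorator, which recompiles a function with these checks injected. A minimal sketch of the observable effect, assuming a standalone typeguard 4.x installation (the vendored copy under setuptools/_vendor is not importable as "typeguard"):

```python
# Sketch only: uses the public typeguard 4.x API, not the vendored copy.
from typeguard import TypeCheckError, typechecked


@typechecked
def double(value: int) -> int:
    result: int = value * 2  # annotated local assignment (visit_AnnAssign)
    return result  # return value check (visit_Return)


try:
    double("oops")  # a wrong argument type raises TypeCheckError
except TypeCheckError as exc:
    print(exc)
```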
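compile_type_hint() in _union_transformer.py exists so that a lazily evaluated PEP 604 union such as "int | str" can still be evaluated on interpreters older than 3.10: each BitOr node is rewritten into a typing.Union subscript before compilation. A self-contained sketch of the same rewrite on a modern interpreter, where ast.Index is no longer required and the tuple can be used as the slice directly:

```python
import ast
from typing import Union


def rewrite_union(hint: str) -> object:
    """Rewrite PEP 604 unions ("int | str") to typing.Union and evaluate."""
    tree = ast.parse(hint, "<string>", "eval")

    class Rewriter(ast.NodeTransformer):
        def visit_BinOp(self, node: ast.BinOp) -> ast.AST:
            self.generic_visit(node)  # rewrite nested unions first
            if isinstance(node.op, ast.BitOr):
                # X | Y  ->  Union[X, Y]
                return ast.Subscript(
                    value=ast.Name(id="Union", ctx=ast.Load()),
                    slice=ast.Tuple(elts=[node.left, node.right], ctx=ast.Load()),
                    ctx=ast.Load(),
                )
            return node

    tree = ast.fix_missing_locations(Rewriter().visit(tree))
    return eval(compile(tree, "<string>", "eval"), {"Union": Union})


print(rewrite_union("int | str"))  # typing.Union[int, str]
```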
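get_type_name() in _utils.py leans on get_origin()/get_args() to recover printable names from generic aliases, falling back to _name and __forward_arg__ for typing constructs that lack __name__, and quoting Literal arguments with repr(). The stdlib behavior it builds on, for reference (Python 3.10+ assumed, matching the branch above that imports from typing rather than typing_extensions):

```python
from typing import Dict, Literal, Optional, get_args, get_origin

print(get_origin(Dict[str, int]))   # <class 'dict'>
print(get_args(Dict[str, int]))     # (<class 'str'>, <class 'int'>)
print(get_args(Literal["a", "b"]))  # ('a', 'b')
print(get_origin(Optional[int]))    # typing.Union
```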
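resolve_reference() accepts "module:varname" strings, allowing dotted attribute lookups after the colon, and raises ValueError for anything else. A usage sketch, assuming the standalone typeguard package (the private typeguard._utils path matches the module shown in this diff, but is an internal API):

```python
from typeguard._utils import resolve_reference

print(resolve_reference("collections.abc:Sequence"))  # <class 'collections.abc.Sequence'>

try:
    resolve_reference("no_colon_here")  # missing the module:varname separator
except ValueError as exc:
    print(exc)  # 'no_colon_here' is not a module:varname reference
```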