Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER +1 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE +20 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA +46 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD +44 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/REQUESTED +0 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL +6 -0
- janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt +2 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/__init__.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/__version__.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/adapters.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/api.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/auth.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/certs.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/compat.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/cookies.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/exceptions.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/help.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/hooks.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/packages.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/sessions.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__pycache__/status_codes.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/requests/__version__.py +14 -0
- janus/lib/python3.10/site-packages/requests/adapters.py +719 -0
- janus/lib/python3.10/site-packages/requests/auth.py +314 -0
- janus/lib/python3.10/site-packages/requests/cookies.py +561 -0
- janus/lib/python3.10/site-packages/requests/models.py +1037 -0
- janus/lib/python3.10/site-packages/requests/status_codes.py +128 -0
- janus/lib/python3.10/site-packages/requests/utils.py +1096 -0
- janus/lib/python3.10/site-packages/safetensors/_safetensors_rust.abi3.so +3 -0
- janus/lib/python3.10/site-packages/sympy/__init__.py +542 -0
- janus/lib/python3.10/site-packages/sympy/abc.py +111 -0
- janus/lib/python3.10/site-packages/sympy/conftest.py +96 -0
- janus/lib/python3.10/site-packages/sympy/discrete/__init__.py +20 -0
- janus/lib/python3.10/site-packages/sympy/discrete/__pycache__/recurrences.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/discrete/convolutions.py +597 -0
- janus/lib/python3.10/site-packages/sympy/discrete/recurrences.py +166 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/__init__.py +0 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/__init__.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/test_recurrences.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/test_transforms.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/test_recurrences.py +59 -0
- janus/lib/python3.10/site-packages/sympy/discrete/tests/test_transforms.py +154 -0
- janus/lib/python3.10/site-packages/sympy/discrete/transforms.py +425 -0
- janus/lib/python3.10/site-packages/sympy/galgebra.py +1 -0
- janus/lib/python3.10/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/multipledispatch/__pycache__/core.cpython-310.pyc +0 -0
- janus/lib/python3.10/site-packages/sympy/multipledispatch/dispatcher.py +413 -0
- janus/lib/python3.10/site-packages/sympy/multipledispatch/tests/__init__.py +0 -0
.gitattributes
CHANGED
|
@@ -443,3 +443,4 @@ janus/lib/python3.10/site-packages/transformers/generation/__pycache__/logits_pr
|
|
| 443 |
janus/lib/python3.10/site-packages/transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 444 |
janus/lib/python3.10/site-packages/transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 445 |
infer_4_33_0/bin/python filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 443 |
janus/lib/python3.10/site-packages/transformers/models/oneformer/__pycache__/modeling_oneformer.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 444 |
janus/lib/python3.10/site-packages/transformers/models/perceiver/__pycache__/modeling_perceiver.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 445 |
infer_4_33_0/bin/python filter=lfs diff=lfs merge=lfs -text
|
| 446 |
+
janus/lib/python3.10/site-packages/safetensors/_safetensors_rust.abi3.so filter=lfs diff=lfs merge=lfs -text
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2017-2021 Ingy döt Net
|
| 2 |
+
Copyright (c) 2006-2016 Kirill Simonov
|
| 3 |
+
|
| 4 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 5 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 6 |
+
the Software without restriction, including without limitation the rights to
|
| 7 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 8 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 9 |
+
so, subject to the following conditions:
|
| 10 |
+
|
| 11 |
+
The above copyright notice and this permission notice shall be included in all
|
| 12 |
+
copies or substantial portions of the Software.
|
| 13 |
+
|
| 14 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 15 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 16 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 17 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 18 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 19 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 20 |
+
SOFTWARE.
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: PyYAML
|
| 3 |
+
Version: 6.0.2
|
| 4 |
+
Summary: YAML parser and emitter for Python
|
| 5 |
+
Home-page: https://pyyaml.org/
|
| 6 |
+
Download-URL: https://pypi.org/project/PyYAML/
|
| 7 |
+
Author: Kirill Simonov
|
| 8 |
+
Author-email: xi@resolvent.net
|
| 9 |
+
License: MIT
|
| 10 |
+
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
| 11 |
+
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
| 12 |
+
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
| 13 |
+
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
| 14 |
+
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
| 15 |
+
Platform: Any
|
| 16 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 17 |
+
Classifier: Intended Audience :: Developers
|
| 18 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 19 |
+
Classifier: Operating System :: OS Independent
|
| 20 |
+
Classifier: Programming Language :: Cython
|
| 21 |
+
Classifier: Programming Language :: Python
|
| 22 |
+
Classifier: Programming Language :: Python :: 3
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 29 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 31 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 32 |
+
Classifier: Topic :: Text Processing :: Markup
|
| 33 |
+
Requires-Python: >=3.8
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
|
| 36 |
+
YAML is a data serialization format designed for human readability
|
| 37 |
+
and interaction with scripting languages. PyYAML is a YAML parser
|
| 38 |
+
and emitter for Python.
|
| 39 |
+
|
| 40 |
+
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
| 41 |
+
support, capable extension API, and sensible error messages. PyYAML
|
| 42 |
+
supports standard YAML tags and provides Python-specific tags that
|
| 43 |
+
allow to represent an arbitrary Python object.
|
| 44 |
+
|
| 45 |
+
PyYAML is applicable for a broad range of tasks from complex
|
| 46 |
+
configuration files to object serialization and persistence.
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
| 3 |
+
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
|
| 4 |
+
PyYAML-6.0.2.dist-info/RECORD,,
|
| 5 |
+
PyYAML-6.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
PyYAML-6.0.2.dist-info/WHEEL,sha256=baMMpUvyD0gnRdCe6fvqCg8rft4FNTdLqZQ01WfKJmc,152
|
| 7 |
+
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
| 8 |
+
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
| 9 |
+
_yaml/__pycache__/__init__.cpython-310.pyc,,
|
| 10 |
+
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
|
| 11 |
+
yaml/__pycache__/__init__.cpython-310.pyc,,
|
| 12 |
+
yaml/__pycache__/composer.cpython-310.pyc,,
|
| 13 |
+
yaml/__pycache__/constructor.cpython-310.pyc,,
|
| 14 |
+
yaml/__pycache__/cyaml.cpython-310.pyc,,
|
| 15 |
+
yaml/__pycache__/dumper.cpython-310.pyc,,
|
| 16 |
+
yaml/__pycache__/emitter.cpython-310.pyc,,
|
| 17 |
+
yaml/__pycache__/error.cpython-310.pyc,,
|
| 18 |
+
yaml/__pycache__/events.cpython-310.pyc,,
|
| 19 |
+
yaml/__pycache__/loader.cpython-310.pyc,,
|
| 20 |
+
yaml/__pycache__/nodes.cpython-310.pyc,,
|
| 21 |
+
yaml/__pycache__/parser.cpython-310.pyc,,
|
| 22 |
+
yaml/__pycache__/reader.cpython-310.pyc,,
|
| 23 |
+
yaml/__pycache__/representer.cpython-310.pyc,,
|
| 24 |
+
yaml/__pycache__/resolver.cpython-310.pyc,,
|
| 25 |
+
yaml/__pycache__/scanner.cpython-310.pyc,,
|
| 26 |
+
yaml/__pycache__/serializer.cpython-310.pyc,,
|
| 27 |
+
yaml/__pycache__/tokens.cpython-310.pyc,,
|
| 28 |
+
yaml/_yaml.cpython-310-x86_64-linux-gnu.so,sha256=20HV-cVpIFuOuVUTmQ1-PQIbyt0n8ctfXq7JCMIfbrU,2383664
|
| 29 |
+
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
| 30 |
+
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
| 31 |
+
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
| 32 |
+
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
| 33 |
+
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
| 34 |
+
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
| 35 |
+
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
| 36 |
+
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
| 37 |
+
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
| 38 |
+
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
| 39 |
+
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
| 40 |
+
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
| 41 |
+
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
| 42 |
+
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
| 43 |
+
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
| 44 |
+
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/REQUESTED
ADDED
|
File without changes
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.44.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp310-cp310-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp310-cp310-manylinux2014_x86_64
|
| 6 |
+
|
janus/lib/python3.10/site-packages/PyYAML-6.0.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_yaml
|
| 2 |
+
yaml
|
janus/lib/python3.10/site-packages/requests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (3.9 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/__version__.cpython-310.pyc
ADDED
|
Binary file (517 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/_internal_utils.cpython-310.pyc
ADDED
|
Binary file (1.59 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/adapters.cpython-310.pyc
ADDED
|
Binary file (22 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/api.cpython-310.pyc
ADDED
|
Binary file (6.69 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/auth.cpython-310.pyc
ADDED
|
Binary file (8.09 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/certs.cpython-310.pyc
ADDED
|
Binary file (594 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/compat.cpython-310.pyc
ADDED
|
Binary file (1.75 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/cookies.cpython-310.pyc
ADDED
|
Binary file (18.7 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/exceptions.cpython-310.pyc
ADDED
|
Binary file (6.2 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/help.cpython-310.pyc
ADDED
|
Binary file (2.82 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/hooks.cpython-310.pyc
ADDED
|
Binary file (961 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/packages.cpython-310.pyc
ADDED
|
Binary file (598 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/sessions.cpython-310.pyc
ADDED
|
Binary file (19.7 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__pycache__/status_codes.cpython-310.pyc
ADDED
|
Binary file (4.71 kB). View file
|
|
|
janus/lib/python3.10/site-packages/requests/__version__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# .-. .-. .-. . . .-. .-. .-. .-.
|
| 2 |
+
# |( |- |.| | | |- `-. | `-.
|
| 3 |
+
# ' ' `-' `-`.`-' `-' `-' ' `-'
|
| 4 |
+
|
| 5 |
+
__title__ = "requests"
|
| 6 |
+
__description__ = "Python HTTP for Humans."
|
| 7 |
+
__url__ = "https://requests.readthedocs.io"
|
| 8 |
+
__version__ = "2.32.3"
|
| 9 |
+
__build__ = 0x023203
|
| 10 |
+
__author__ = "Kenneth Reitz"
|
| 11 |
+
__author_email__ = "me@kennethreitz.org"
|
| 12 |
+
__license__ = "Apache-2.0"
|
| 13 |
+
__copyright__ = "Copyright Kenneth Reitz"
|
| 14 |
+
__cake__ = "\u2728 \U0001f370 \u2728"
|
janus/lib/python3.10/site-packages/requests/adapters.py
ADDED
|
@@ -0,0 +1,719 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.adapters
|
| 3 |
+
~~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the transport adapters that Requests uses to define
|
| 6 |
+
and maintain connections.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import os.path
|
| 10 |
+
import socket # noqa: F401
|
| 11 |
+
import typing
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
|
| 15 |
+
from urllib3.exceptions import HTTPError as _HTTPError
|
| 16 |
+
from urllib3.exceptions import InvalidHeader as _InvalidHeader
|
| 17 |
+
from urllib3.exceptions import (
|
| 18 |
+
LocationValueError,
|
| 19 |
+
MaxRetryError,
|
| 20 |
+
NewConnectionError,
|
| 21 |
+
ProtocolError,
|
| 22 |
+
)
|
| 23 |
+
from urllib3.exceptions import ProxyError as _ProxyError
|
| 24 |
+
from urllib3.exceptions import ReadTimeoutError, ResponseError
|
| 25 |
+
from urllib3.exceptions import SSLError as _SSLError
|
| 26 |
+
from urllib3.poolmanager import PoolManager, proxy_from_url
|
| 27 |
+
from urllib3.util import Timeout as TimeoutSauce
|
| 28 |
+
from urllib3.util import parse_url
|
| 29 |
+
from urllib3.util.retry import Retry
|
| 30 |
+
from urllib3.util.ssl_ import create_urllib3_context
|
| 31 |
+
|
| 32 |
+
from .auth import _basic_auth_str
|
| 33 |
+
from .compat import basestring, urlparse
|
| 34 |
+
from .cookies import extract_cookies_to_jar
|
| 35 |
+
from .exceptions import (
|
| 36 |
+
ConnectionError,
|
| 37 |
+
ConnectTimeout,
|
| 38 |
+
InvalidHeader,
|
| 39 |
+
InvalidProxyURL,
|
| 40 |
+
InvalidSchema,
|
| 41 |
+
InvalidURL,
|
| 42 |
+
ProxyError,
|
| 43 |
+
ReadTimeout,
|
| 44 |
+
RetryError,
|
| 45 |
+
SSLError,
|
| 46 |
+
)
|
| 47 |
+
from .models import Response
|
| 48 |
+
from .structures import CaseInsensitiveDict
|
| 49 |
+
from .utils import (
|
| 50 |
+
DEFAULT_CA_BUNDLE_PATH,
|
| 51 |
+
extract_zipped_paths,
|
| 52 |
+
get_auth_from_url,
|
| 53 |
+
get_encoding_from_headers,
|
| 54 |
+
prepend_scheme_if_needed,
|
| 55 |
+
select_proxy,
|
| 56 |
+
urldefragauth,
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
try:
|
| 60 |
+
from urllib3.contrib.socks import SOCKSProxyManager
|
| 61 |
+
except ImportError:
|
| 62 |
+
|
| 63 |
+
def SOCKSProxyManager(*args, **kwargs):
|
| 64 |
+
raise InvalidSchema("Missing dependencies for SOCKS support.")
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
if typing.TYPE_CHECKING:
|
| 68 |
+
from .models import PreparedRequest
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
DEFAULT_POOLBLOCK = False
|
| 72 |
+
DEFAULT_POOLSIZE = 10
|
| 73 |
+
DEFAULT_RETRIES = 0
|
| 74 |
+
DEFAULT_POOL_TIMEOUT = None
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
try:
|
| 78 |
+
import ssl # noqa: F401
|
| 79 |
+
|
| 80 |
+
_preloaded_ssl_context = create_urllib3_context()
|
| 81 |
+
_preloaded_ssl_context.load_verify_locations(
|
| 82 |
+
extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
|
| 83 |
+
)
|
| 84 |
+
except ImportError:
|
| 85 |
+
# Bypass default SSLContext creation when Python
|
| 86 |
+
# interpreter isn't built with the ssl module.
|
| 87 |
+
_preloaded_ssl_context = None
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _urllib3_request_context(
|
| 91 |
+
request: "PreparedRequest",
|
| 92 |
+
verify: "bool | str | None",
|
| 93 |
+
client_cert: "typing.Tuple[str, str] | str | None",
|
| 94 |
+
poolmanager: "PoolManager",
|
| 95 |
+
) -> "(typing.Dict[str, typing.Any], typing.Dict[str, typing.Any])":
|
| 96 |
+
host_params = {}
|
| 97 |
+
pool_kwargs = {}
|
| 98 |
+
parsed_request_url = urlparse(request.url)
|
| 99 |
+
scheme = parsed_request_url.scheme.lower()
|
| 100 |
+
port = parsed_request_url.port
|
| 101 |
+
|
| 102 |
+
# Determine if we have and should use our default SSLContext
|
| 103 |
+
# to optimize performance on standard requests.
|
| 104 |
+
poolmanager_kwargs = getattr(poolmanager, "connection_pool_kw", {})
|
| 105 |
+
has_poolmanager_ssl_context = poolmanager_kwargs.get("ssl_context")
|
| 106 |
+
should_use_default_ssl_context = (
|
| 107 |
+
_preloaded_ssl_context is not None and not has_poolmanager_ssl_context
|
| 108 |
+
)
|
| 109 |
+
|
| 110 |
+
cert_reqs = "CERT_REQUIRED"
|
| 111 |
+
if verify is False:
|
| 112 |
+
cert_reqs = "CERT_NONE"
|
| 113 |
+
elif verify is True and should_use_default_ssl_context:
|
| 114 |
+
pool_kwargs["ssl_context"] = _preloaded_ssl_context
|
| 115 |
+
elif isinstance(verify, str):
|
| 116 |
+
if not os.path.isdir(verify):
|
| 117 |
+
pool_kwargs["ca_certs"] = verify
|
| 118 |
+
else:
|
| 119 |
+
pool_kwargs["ca_cert_dir"] = verify
|
| 120 |
+
pool_kwargs["cert_reqs"] = cert_reqs
|
| 121 |
+
if client_cert is not None:
|
| 122 |
+
if isinstance(client_cert, tuple) and len(client_cert) == 2:
|
| 123 |
+
pool_kwargs["cert_file"] = client_cert[0]
|
| 124 |
+
pool_kwargs["key_file"] = client_cert[1]
|
| 125 |
+
else:
|
| 126 |
+
# According to our docs, we allow users to specify just the client
|
| 127 |
+
# cert path
|
| 128 |
+
pool_kwargs["cert_file"] = client_cert
|
| 129 |
+
host_params = {
|
| 130 |
+
"scheme": scheme,
|
| 131 |
+
"host": parsed_request_url.hostname,
|
| 132 |
+
"port": port,
|
| 133 |
+
}
|
| 134 |
+
return host_params, pool_kwargs
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
class BaseAdapter:
|
| 138 |
+
"""The Base Transport Adapter"""
|
| 139 |
+
|
| 140 |
+
def __init__(self):
|
| 141 |
+
super().__init__()
|
| 142 |
+
|
| 143 |
+
def send(
|
| 144 |
+
self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
|
| 145 |
+
):
|
| 146 |
+
"""Sends PreparedRequest object. Returns Response object.
|
| 147 |
+
|
| 148 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 149 |
+
:param stream: (optional) Whether to stream the request content.
|
| 150 |
+
:param timeout: (optional) How long to wait for the server to send
|
| 151 |
+
data before giving up, as a float, or a :ref:`(connect timeout,
|
| 152 |
+
read timeout) <timeouts>` tuple.
|
| 153 |
+
:type timeout: float or tuple
|
| 154 |
+
:param verify: (optional) Either a boolean, in which case it controls whether we verify
|
| 155 |
+
the server's TLS certificate, or a string, in which case it must be a path
|
| 156 |
+
to a CA bundle to use
|
| 157 |
+
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
| 158 |
+
:param proxies: (optional) The proxies dictionary to apply to the request.
|
| 159 |
+
"""
|
| 160 |
+
raise NotImplementedError
|
| 161 |
+
|
| 162 |
+
def close(self):
|
| 163 |
+
"""Cleans up adapter specific items."""
|
| 164 |
+
raise NotImplementedError
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class HTTPAdapter(BaseAdapter):
|
| 168 |
+
"""The built-in HTTP Adapter for urllib3.
|
| 169 |
+
|
| 170 |
+
Provides a general-case interface for Requests sessions to contact HTTP and
|
| 171 |
+
HTTPS urls by implementing the Transport Adapter interface. This class will
|
| 172 |
+
usually be created by the :class:`Session <Session>` class under the
|
| 173 |
+
covers.
|
| 174 |
+
|
| 175 |
+
:param pool_connections: The number of urllib3 connection pools to cache.
|
| 176 |
+
:param pool_maxsize: The maximum number of connections to save in the pool.
|
| 177 |
+
:param max_retries: The maximum number of retries each connection
|
| 178 |
+
should attempt. Note, this applies only to failed DNS lookups, socket
|
| 179 |
+
connections and connection timeouts, never to requests where data has
|
| 180 |
+
made it to the server. By default, Requests does not retry failed
|
| 181 |
+
connections. If you need granular control over the conditions under
|
| 182 |
+
which we retry a request, import urllib3's ``Retry`` class and pass
|
| 183 |
+
that instead.
|
| 184 |
+
:param pool_block: Whether the connection pool should block for connections.
|
| 185 |
+
|
| 186 |
+
Usage::
|
| 187 |
+
|
| 188 |
+
>>> import requests
|
| 189 |
+
>>> s = requests.Session()
|
| 190 |
+
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
|
| 191 |
+
>>> s.mount('http://', a)
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
__attrs__ = [
|
| 195 |
+
"max_retries",
|
| 196 |
+
"config",
|
| 197 |
+
"_pool_connections",
|
| 198 |
+
"_pool_maxsize",
|
| 199 |
+
"_pool_block",
|
| 200 |
+
]
|
| 201 |
+
|
| 202 |
+
def __init__(
|
| 203 |
+
self,
|
| 204 |
+
pool_connections=DEFAULT_POOLSIZE,
|
| 205 |
+
pool_maxsize=DEFAULT_POOLSIZE,
|
| 206 |
+
max_retries=DEFAULT_RETRIES,
|
| 207 |
+
pool_block=DEFAULT_POOLBLOCK,
|
| 208 |
+
):
|
| 209 |
+
if max_retries == DEFAULT_RETRIES:
|
| 210 |
+
self.max_retries = Retry(0, read=False)
|
| 211 |
+
else:
|
| 212 |
+
self.max_retries = Retry.from_int(max_retries)
|
| 213 |
+
self.config = {}
|
| 214 |
+
self.proxy_manager = {}
|
| 215 |
+
|
| 216 |
+
super().__init__()
|
| 217 |
+
|
| 218 |
+
self._pool_connections = pool_connections
|
| 219 |
+
self._pool_maxsize = pool_maxsize
|
| 220 |
+
self._pool_block = pool_block
|
| 221 |
+
|
| 222 |
+
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
| 223 |
+
|
| 224 |
+
def __getstate__(self):
|
| 225 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 226 |
+
|
| 227 |
+
def __setstate__(self, state):
|
| 228 |
+
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
| 229 |
+
# self.poolmanager uses a lambda function, which isn't pickleable.
|
| 230 |
+
self.proxy_manager = {}
|
| 231 |
+
self.config = {}
|
| 232 |
+
|
| 233 |
+
for attr, value in state.items():
|
| 234 |
+
setattr(self, attr, value)
|
| 235 |
+
|
| 236 |
+
self.init_poolmanager(
|
| 237 |
+
self._pool_connections, self._pool_maxsize, block=self._pool_block
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
def init_poolmanager(
|
| 241 |
+
self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
|
| 242 |
+
):
|
| 243 |
+
"""Initializes a urllib3 PoolManager.
|
| 244 |
+
|
| 245 |
+
This method should not be called from user code, and is only
|
| 246 |
+
exposed for use when subclassing the
|
| 247 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 248 |
+
|
| 249 |
+
:param connections: The number of urllib3 connection pools to cache.
|
| 250 |
+
:param maxsize: The maximum number of connections to save in the pool.
|
| 251 |
+
:param block: Block when no free connections are available.
|
| 252 |
+
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
|
| 253 |
+
"""
|
| 254 |
+
# save these values for pickling
|
| 255 |
+
self._pool_connections = connections
|
| 256 |
+
self._pool_maxsize = maxsize
|
| 257 |
+
self._pool_block = block
|
| 258 |
+
|
| 259 |
+
self.poolmanager = PoolManager(
|
| 260 |
+
num_pools=connections,
|
| 261 |
+
maxsize=maxsize,
|
| 262 |
+
block=block,
|
| 263 |
+
**pool_kwargs,
|
| 264 |
+
)
|
| 265 |
+
|
| 266 |
+
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        # Managers are cached per proxy URL; a cache hit ignores proxy_kwargs.
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith("socks"):
            # SOCKS proxies take credentials directly rather than via headers.
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )
        else:
            # HTTP(S) proxies get auth via Proxy-Authorization headers.
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )

        return manager
|
| 303 |
+
|
| 304 |
+
    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        :raises OSError: If a given CA bundle, cert file, or key file path
            does not exist.
        """
        if url.lower().startswith("https") and verify:
            conn.cert_reqs = "CERT_REQUIRED"

            # Only load the CA certificates if 'verify' is a string indicating the CA bundle to use.
            # Otherwise, if verify is a boolean, we don't load anything since
            # the connection will be using a context with the default certificates already loaded,
            # and this avoids a call to the slow load_verify_locations()
            if verify is not True:
                # `verify` must be a str with a path then
                cert_loc = verify

                if not os.path.exists(cert_loc):
                    raise OSError(
                        f"Could not find a suitable TLS CA certificate bundle, "
                        f"invalid path: {cert_loc}"
                    )

                # A file path is a CA bundle; a directory is a CA cert dir.
                if not os.path.isdir(cert_loc):
                    conn.ca_certs = cert_loc
                else:
                    conn.ca_cert_dir = cert_loc
        else:
            # Plain HTTP, or verification explicitly disabled.
            conn.cert_reqs = "CERT_NONE"
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # A non-string cert is treated as a (cert_file, key_file) pair;
            # a string is a single certificate file path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise OSError(
                    f"Could not find the TLS certificate file, "
                    f"invalid path: {conn.cert_file}"
                )
            if conn.key_file and not os.path.exists(conn.key_file):
                raise OSError(
                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
                )
|
| 358 |
+
|
| 359 |
+
    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, "status", None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        # URLs are stored as text; decode a bytes URL as UTF-8.
        if isinstance(req.url, bytes):
            response.url = req.url.decode("utf-8")
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response
|
| 395 |
+
|
| 396 |
+
    def build_connection_pool_key_attributes(self, request, verify, cert=None):
        """Build the PoolKey attributes used by urllib3 to return a connection.

        This looks at the PreparedRequest, the user-specified verify value,
        and the value of the cert parameter to determine what PoolKey values
        to use to select a connection from a given urllib3 Connection Pool.

        The SSL related pool key arguments are not consistently set. As of
        this writing, use the following to determine what keys may be in that
        dictionary:

        * If ``verify`` is ``True``, ``"ssl_context"`` will be set and will be the
          default Requests SSL Context
        * If ``verify`` is ``False``, ``"ssl_context"`` will not be set but
          ``"cert_reqs"`` will be set
        * If ``verify`` is a string, (i.e., it is a user-specified trust bundle)
          ``"ca_certs"`` will be set if the string is not a directory recognized
          by :py:func:`os.path.isdir`, otherwise ``"ca_certs_dir"`` will be
          set.
        * If ``"cert"`` is specified, ``"cert_file"`` will always be set. If
          ``"cert"`` is a tuple with a second item, ``"key_file"`` will also
          be present

        To override these settings, one may subclass this class, call this
        method and use the above logic to change parameters as desired. For
        example, if one wishes to use a custom :py:class:`ssl.SSLContext` one
        must both set ``"ssl_context"`` and based on what else they require,
        alter the other keys to ensure the desired behaviour.

        :param request:
            The PreparedRequest being sent over the connection.
        :type request:
            :class:`~requests.models.PreparedRequest`
        :param verify:
            Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use.
        :param cert:
            (optional) Any user-provided SSL certificate for client
            authentication (a.k.a., mTLS). This may be a string (i.e., just
            the path to a file which holds both certificate and key) or a
            tuple of length 2 with the certificate file path and key file
            path.
        :returns:
            A tuple of two dictionaries. The first is the "host parameters"
            portion of the Pool Key including scheme, hostname, and port. The
            second is a dictionary of SSLContext related parameters.
        """
        # Delegates entirely to the module-level helper so subclasses can
        # wrap/post-process the result.
        return _urllib3_request_context(request, verify, cert, self.poolmanager)
|
| 445 |
+
|
| 446 |
+
    def get_connection_with_tls_context(self, request, verify, proxies=None, cert=None):
        """Returns a urllib3 connection for the given request and TLS settings.
        This should not be called from user code, and is only exposed for use
        when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request:
            The :class:`PreparedRequest <PreparedRequest>` object to be sent
            over the connection.
        :param verify:
            Either a boolean, in which case it controls whether we verify the
            server's TLS certificate, or a string, in which case it must be a
            path to a CA bundle to use.
        :param proxies:
            (optional) The proxies dictionary to apply to the request.
        :param cert:
            (optional) Any user-provided SSL certificate to be used for client
            authentication (a.k.a., mTLS).
        :raises InvalidURL: If the pool key attributes cannot be built from
            the request URL.
        :raises InvalidProxyURL: If the selected proxy URL has no host.
        :rtype:
            urllib3.ConnectionPool
        """
        proxy = select_proxy(request.url, proxies)
        try:
            host_params, pool_kwargs = self.build_connection_pool_key_attributes(
                request,
                verify,
                cert,
            )
        except ValueError as e:
            raise InvalidURL(e, request=request)
        if proxy:
            # Proxies given without a scheme default to http://.
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_host(
                **host_params, pool_kwargs=pool_kwargs
            )
        else:
            # Only scheme should be lower case
            conn = self.poolmanager.connection_from_host(
                **host_params, pool_kwargs=pool_kwargs
            )

        return conn
|
| 494 |
+
|
| 495 |
+
    def get_connection(self, url, proxies=None):
        """DEPRECATED: Users should move to `get_connection_with_tls_context`
        for all subclasses of HTTPAdapter using Requests>=2.32.2.

        Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :raises InvalidProxyURL: If the selected proxy URL has no host.
        :rtype: urllib3.ConnectionPool
        """
        # Warn on every call: this code path does not carry TLS context and
        # subclasses should override get_connection_with_tls_context instead.
        warnings.warn(
            (
                "`get_connection` has been deprecated in favor of "
                "`get_connection_with_tls_context`. Custom HTTPAdapter subclasses "
                "will need to migrate for Requests>=2.32.2. Please see "
                "https://github.com/psf/requests/pull/6710 for more details."
            ),
            DeprecationWarning,
        )
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, "http")
            proxy_url = parse_url(proxy)
            if not proxy_url.host:
                raise InvalidProxyURL(
                    "Please check proxy URL. It is malformed "
                    "and could be missing the host."
                )
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn
|
| 535 |
+
|
| 536 |
+
def close(self):
|
| 537 |
+
"""Disposes of any internal state.
|
| 538 |
+
|
| 539 |
+
Currently, this closes the PoolManager and any active ProxyManager,
|
| 540 |
+
which closes any pooled connections.
|
| 541 |
+
"""
|
| 542 |
+
self.poolmanager.clear()
|
| 543 |
+
for proxy in self.proxy_manager.values():
|
| 544 |
+
proxy.clear()
|
| 545 |
+
|
| 546 |
+
def request_url(self, request, proxies):
|
| 547 |
+
"""Obtain the url to use when making the final request.
|
| 548 |
+
|
| 549 |
+
If the message is being sent through a HTTP proxy, the full URL has to
|
| 550 |
+
be used. Otherwise, we should only use the path portion of the URL.
|
| 551 |
+
|
| 552 |
+
This should not be called from user code, and is only exposed for use
|
| 553 |
+
when subclassing the
|
| 554 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 555 |
+
|
| 556 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
| 557 |
+
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
|
| 558 |
+
:rtype: str
|
| 559 |
+
"""
|
| 560 |
+
proxy = select_proxy(request.url, proxies)
|
| 561 |
+
scheme = urlparse(request.url).scheme
|
| 562 |
+
|
| 563 |
+
is_proxied_http_request = proxy and scheme != "https"
|
| 564 |
+
using_socks_proxy = False
|
| 565 |
+
if proxy:
|
| 566 |
+
proxy_scheme = urlparse(proxy).scheme.lower()
|
| 567 |
+
using_socks_proxy = proxy_scheme.startswith("socks")
|
| 568 |
+
|
| 569 |
+
url = request.path_url
|
| 570 |
+
if url.startswith("//"): # Don't confuse urllib3
|
| 571 |
+
url = f"/{url.lstrip('/')}"
|
| 572 |
+
|
| 573 |
+
if is_proxied_http_request and not using_socks_proxy:
|
| 574 |
+
url = urldefragauth(request.url)
|
| 575 |
+
|
| 576 |
+
return url
|
| 577 |
+
|
| 578 |
+
def add_headers(self, request, **kwargs):
|
| 579 |
+
"""Add any headers needed by the connection. As of v2.0 this does
|
| 580 |
+
nothing by default, but is left for overriding by users that subclass
|
| 581 |
+
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 582 |
+
|
| 583 |
+
This should not be called from user code, and is only exposed for use
|
| 584 |
+
when subclassing the
|
| 585 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 586 |
+
|
| 587 |
+
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
|
| 588 |
+
:param kwargs: The keyword arguments from the call to send().
|
| 589 |
+
"""
|
| 590 |
+
pass
|
| 591 |
+
|
| 592 |
+
def proxy_headers(self, proxy):
|
| 593 |
+
"""Returns a dictionary of the headers to add to any request sent
|
| 594 |
+
through a proxy. This works with urllib3 magic to ensure that they are
|
| 595 |
+
correctly sent to the proxy, rather than in a tunnelled request if
|
| 596 |
+
CONNECT is being used.
|
| 597 |
+
|
| 598 |
+
This should not be called from user code, and is only exposed for use
|
| 599 |
+
when subclassing the
|
| 600 |
+
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
| 601 |
+
|
| 602 |
+
:param proxy: The url of the proxy being used for this request.
|
| 603 |
+
:rtype: dict
|
| 604 |
+
"""
|
| 605 |
+
headers = {}
|
| 606 |
+
username, password = get_auth_from_url(proxy)
|
| 607 |
+
|
| 608 |
+
if username:
|
| 609 |
+
headers["Proxy-Authorization"] = _basic_auth_str(username, password)
|
| 610 |
+
|
| 611 |
+
return headers
|
| 612 |
+
|
| 613 |
+
    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        try:
            conn = self.get_connection_with_tls_context(
                request, verify, proxies=proxies, cert=cert
            )
        except LocationValueError as e:
            raise InvalidURL(e, request=request)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )

        # Use chunked transfer only when there is a body and the caller did
        # not already supply a Content-Length header.
        chunked = not (request.body is None or "Content-Length" in request.headers)

        # Normalize the timeout into a urllib3 Timeout object.
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

        except (ProtocolError, OSError) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            # Map urllib3 retry failures onto the requests exception
            # hierarchy, most specific reason first.
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            elif isinstance(e, _InvalidHeader):
                raise InvalidHeader(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
|
janus/lib/python3.10/site-packages/requests/auth.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.auth
|
| 3 |
+
~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the authentication handlers for Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import hashlib
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import threading
|
| 12 |
+
import time
|
| 13 |
+
import warnings
|
| 14 |
+
from base64 import b64encode
|
| 15 |
+
|
| 16 |
+
from ._internal_utils import to_native_string
|
| 17 |
+
from .compat import basestring, str, urlparse
|
| 18 |
+
from .cookies import extract_cookies_to_jar
|
| 19 |
+
from .utils import parse_dict_header
|
| 20 |
+
|
| 21 |
+
CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded"
|
| 22 |
+
CONTENT_TYPE_MULTI_PART = "multipart/form-data"
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""

    # Backwards-compatibility shim: non-string credentials (e.g. ints) are
    # coerced to str with a deprecation warning. This behaviour is kept only
    # because callers rely on it, and is slated for removal in 3.0.0.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(type(password)),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    # Encode text credentials as latin1 before joining, matching the
    # historical wire encoding used by this implementation.
    credentials = [username, password]
    for index, value in enumerate(credentials):
        if isinstance(value, str):
            credentials[index] = value.encode("latin1")

    token = b64encode(b":".join(credentials)).strip()
    return "Basic " + to_native_string(token)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class AuthBase:
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        # Subclasses must attach authentication to ``r`` and return it;
        # the base class only enforces the callable contract.
        raise NotImplementedError("Auth hooks must be callable.")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Compare via getattr so non-auth objects compare unequal rather
        # than raising.
        same_username = self.username == getattr(other, "username", None)
        same_password = self.password == getattr(other, "password", None)
        return all([same_username, same_password])

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # Attach the Authorization header on each prepared request.
        r.headers["Authorization"] = _basic_auth_str(self.username, self.password)
        return r
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential encoding as basic auth, but placed on the
        # Proxy-Authorization header so the proxy (not the origin) reads it.
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers["Proxy-Authorization"] = auth_header
        return r
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class HTTPDigestAuth(AuthBase):
|
| 108 |
+
"""Attaches HTTP Digest Authentication to the given Request object."""
|
| 109 |
+
|
| 110 |
+
def __init__(self, username, password):
|
| 111 |
+
self.username = username
|
| 112 |
+
self.password = password
|
| 113 |
+
# Keep state in per-thread local storage
|
| 114 |
+
self._thread_local = threading.local()
|
| 115 |
+
|
| 116 |
+
def init_per_thread_state(self):
|
| 117 |
+
# Ensure state is initialized just once per-thread
|
| 118 |
+
if not hasattr(self._thread_local, "init"):
|
| 119 |
+
self._thread_local.init = True
|
| 120 |
+
self._thread_local.last_nonce = ""
|
| 121 |
+
self._thread_local.nonce_count = 0
|
| 122 |
+
self._thread_local.chal = {}
|
| 123 |
+
self._thread_local.pos = None
|
| 124 |
+
self._thread_local.num_401_calls = None
|
| 125 |
+
|
| 126 |
+
def build_digest_header(self, method, url):
|
| 127 |
+
"""
|
| 128 |
+
:rtype: str
|
| 129 |
+
"""
|
| 130 |
+
|
| 131 |
+
realm = self._thread_local.chal["realm"]
|
| 132 |
+
nonce = self._thread_local.chal["nonce"]
|
| 133 |
+
qop = self._thread_local.chal.get("qop")
|
| 134 |
+
algorithm = self._thread_local.chal.get("algorithm")
|
| 135 |
+
opaque = self._thread_local.chal.get("opaque")
|
| 136 |
+
hash_utf8 = None
|
| 137 |
+
|
| 138 |
+
if algorithm is None:
|
| 139 |
+
_algorithm = "MD5"
|
| 140 |
+
else:
|
| 141 |
+
_algorithm = algorithm.upper()
|
| 142 |
+
# lambdas assume digest modules are imported at the top level
|
| 143 |
+
if _algorithm == "MD5" or _algorithm == "MD5-SESS":
|
| 144 |
+
|
| 145 |
+
def md5_utf8(x):
|
| 146 |
+
if isinstance(x, str):
|
| 147 |
+
x = x.encode("utf-8")
|
| 148 |
+
return hashlib.md5(x).hexdigest()
|
| 149 |
+
|
| 150 |
+
hash_utf8 = md5_utf8
|
| 151 |
+
elif _algorithm == "SHA":
|
| 152 |
+
|
| 153 |
+
def sha_utf8(x):
|
| 154 |
+
if isinstance(x, str):
|
| 155 |
+
x = x.encode("utf-8")
|
| 156 |
+
return hashlib.sha1(x).hexdigest()
|
| 157 |
+
|
| 158 |
+
hash_utf8 = sha_utf8
|
| 159 |
+
elif _algorithm == "SHA-256":
|
| 160 |
+
|
| 161 |
+
def sha256_utf8(x):
|
| 162 |
+
if isinstance(x, str):
|
| 163 |
+
x = x.encode("utf-8")
|
| 164 |
+
return hashlib.sha256(x).hexdigest()
|
| 165 |
+
|
| 166 |
+
hash_utf8 = sha256_utf8
|
| 167 |
+
elif _algorithm == "SHA-512":
|
| 168 |
+
|
| 169 |
+
def sha512_utf8(x):
|
| 170 |
+
if isinstance(x, str):
|
| 171 |
+
x = x.encode("utf-8")
|
| 172 |
+
return hashlib.sha512(x).hexdigest()
|
| 173 |
+
|
| 174 |
+
hash_utf8 = sha512_utf8
|
| 175 |
+
|
| 176 |
+
KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731
|
| 177 |
+
|
| 178 |
+
if hash_utf8 is None:
|
| 179 |
+
return None
|
| 180 |
+
|
| 181 |
+
# XXX not implemented yet
|
| 182 |
+
entdig = None
|
| 183 |
+
p_parsed = urlparse(url)
|
| 184 |
+
#: path is request-uri defined in RFC 2616 which should not be empty
|
| 185 |
+
path = p_parsed.path or "/"
|
| 186 |
+
if p_parsed.query:
|
| 187 |
+
path += f"?{p_parsed.query}"
|
| 188 |
+
|
| 189 |
+
A1 = f"{self.username}:{realm}:{self.password}"
|
| 190 |
+
A2 = f"{method}:{path}"
|
| 191 |
+
|
| 192 |
+
HA1 = hash_utf8(A1)
|
| 193 |
+
HA2 = hash_utf8(A2)
|
| 194 |
+
|
| 195 |
+
if nonce == self._thread_local.last_nonce:
|
| 196 |
+
self._thread_local.nonce_count += 1
|
| 197 |
+
else:
|
| 198 |
+
self._thread_local.nonce_count = 1
|
| 199 |
+
ncvalue = f"{self._thread_local.nonce_count:08x}"
|
| 200 |
+
s = str(self._thread_local.nonce_count).encode("utf-8")
|
| 201 |
+
s += nonce.encode("utf-8")
|
| 202 |
+
s += time.ctime().encode("utf-8")
|
| 203 |
+
s += os.urandom(8)
|
| 204 |
+
|
| 205 |
+
cnonce = hashlib.sha1(s).hexdigest()[:16]
|
| 206 |
+
if _algorithm == "MD5-SESS":
|
| 207 |
+
HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}")
|
| 208 |
+
|
| 209 |
+
if not qop:
|
| 210 |
+
respdig = KD(HA1, f"{nonce}:{HA2}")
|
| 211 |
+
elif qop == "auth" or "auth" in qop.split(","):
|
| 212 |
+
noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}"
|
| 213 |
+
respdig = KD(HA1, noncebit)
|
| 214 |
+
else:
|
| 215 |
+
# XXX handle auth-int.
|
| 216 |
+
return None
|
| 217 |
+
|
| 218 |
+
self._thread_local.last_nonce = nonce
|
| 219 |
+
|
| 220 |
+
# XXX should the partial digests be encoded too?
|
| 221 |
+
base = (
|
| 222 |
+
f'username="{self.username}", realm="{realm}", nonce="{nonce}", '
|
| 223 |
+
f'uri="{path}", response="{respdig}"'
|
| 224 |
+
)
|
| 225 |
+
if opaque:
|
| 226 |
+
base += f', opaque="{opaque}"'
|
| 227 |
+
if algorithm:
|
| 228 |
+
base += f', algorithm="{algorithm}"'
|
| 229 |
+
if entdig:
|
| 230 |
+
base += f', digest="{entdig}"'
|
| 231 |
+
if qop:
|
| 232 |
+
base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"'
|
| 233 |
+
|
| 234 |
+
return f"Digest {base}"
|
| 235 |
+
|
| 236 |
+
def handle_redirect(self, r, **kwargs):
|
| 237 |
+
"""Reset num_401_calls counter on redirects."""
|
| 238 |
+
if r.is_redirect:
|
| 239 |
+
self._thread_local.num_401_calls = 1
|
| 240 |
+
|
| 241 |
+
def handle_401(self, r, **kwargs):
|
| 242 |
+
"""
|
| 243 |
+
Takes the given response and tries digest-auth, if needed.
|
| 244 |
+
|
| 245 |
+
:rtype: requests.Response
|
| 246 |
+
"""
|
| 247 |
+
|
| 248 |
+
# If response is not 4xx, do not auth
|
| 249 |
+
# See https://github.com/psf/requests/issues/3772
|
| 250 |
+
if not 400 <= r.status_code < 500:
|
| 251 |
+
self._thread_local.num_401_calls = 1
|
| 252 |
+
return r
|
| 253 |
+
|
| 254 |
+
if self._thread_local.pos is not None:
|
| 255 |
+
# Rewind the file position indicator of the body to where
|
| 256 |
+
# it was to resend the request.
|
| 257 |
+
r.request.body.seek(self._thread_local.pos)
|
| 258 |
+
s_auth = r.headers.get("www-authenticate", "")
|
| 259 |
+
|
| 260 |
+
if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2:
|
| 261 |
+
self._thread_local.num_401_calls += 1
|
| 262 |
+
pat = re.compile(r"digest ", flags=re.IGNORECASE)
|
| 263 |
+
self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1))
|
| 264 |
+
|
| 265 |
+
# Consume content and release the original connection
|
| 266 |
+
# to allow our new request to reuse the same one.
|
| 267 |
+
r.content
|
| 268 |
+
r.close()
|
| 269 |
+
prep = r.request.copy()
|
| 270 |
+
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
|
| 271 |
+
prep.prepare_cookies(prep._cookies)
|
| 272 |
+
|
| 273 |
+
prep.headers["Authorization"] = self.build_digest_header(
|
| 274 |
+
prep.method, prep.url
|
| 275 |
+
)
|
| 276 |
+
_r = r.connection.send(prep, **kwargs)
|
| 277 |
+
_r.history.append(r)
|
| 278 |
+
_r.request = prep
|
| 279 |
+
|
| 280 |
+
return _r
|
| 281 |
+
|
| 282 |
+
self._thread_local.num_401_calls = 1
|
| 283 |
+
return r
|
| 284 |
+
|
| 285 |
+
def __call__(self, r):
    """Attach digest-auth state and response hooks to an outgoing request."""
    # Initialize per-thread state, if needed
    self.init_per_thread_state()
    # If we have a saved nonce, skip the 401
    if self._thread_local.last_nonce:
        r.headers["Authorization"] = self.build_digest_header(r.method, r.url)
    try:
        # Remember the body's file position so a 401 retry can rewind it.
        self._thread_local.pos = r.body.tell()
    except AttributeError:
        # In the case of HTTPDigestAuth being reused and the body of
        # the previous request was a file-like object, pos has the
        # file position of the previous body. Ensure it's set to
        # None.
        self._thread_local.pos = None
    r.register_hook("response", self.handle_401)
    r.register_hook("response", self.handle_redirect)
    self._thread_local.num_401_calls = 1

    return r
|
| 304 |
+
|
| 305 |
+
def __eq__(self, other):
    """Digest-auth handlers compare equal when both credentials match."""
    same_username = self.username == getattr(other, "username", None)
    same_password = self.password == getattr(other, "password", None)
    return all((same_username, same_password))
|
| 312 |
+
|
| 313 |
+
def __ne__(self, other):
    """Inverse of __eq__ (delegates to the equality comparison)."""
    return not self == other
|
janus/lib/python3.10/site-packages/requests/cookies.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.cookies
|
| 3 |
+
~~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
Compatibility code to be able to use `http.cookiejar.CookieJar` with requests.
|
| 6 |
+
|
| 7 |
+
requests.utils imports from here, so be careful with imports.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import calendar
|
| 11 |
+
import copy
|
| 12 |
+
import time
|
| 13 |
+
|
| 14 |
+
from ._internal_utils import to_native_string
|
| 15 |
+
from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
import threading
|
| 19 |
+
except ImportError:
|
| 20 |
+
import dummy_threading as threading
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class MockRequest:
    """Adapter presenting a `requests.Request` through the `urllib2.Request`
    interface that `http.cookiejar.CookieJar` expects.

    The cookiejar uses this interface to decide whether a cookie may be set
    given the request's and cookie's domains. The wrapped request is never
    mutated: headers the cookiejar wants to add are collected separately and
    exposed via `get_new_headers()` (you probably want `get_cookie_header`,
    defined below).
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Without a user-supplied Host header the request URL is authoritative.
        host_header = self._r.headers.get("Host")
        if not host_header:
            return self._r.url
        # Otherwise rebuild the URL so its netloc reflects the Host header.
        host = to_native_string(self._r.headers["Host"], encoding="utf-8")
        parsed = urlparse(self._r.url)
        return urlunparse(
            [
                parsed.scheme,
                host,
                parsed.path,
                parsed.params,
                parsed.query,
                parsed.fragment,
            ]
        )

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookiejar has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError(
            "Cookie headers should be added with add_unredirected_header()"
        )

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class MockResponse:
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `http.cookiejar` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookiejar` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        """Return the wrapped header object (what the cookiejar calls)."""
        return self._headers

    def getheaders(self, name):
        """Return all values of header *name* from the wrapped message.

        Fix: the original invoked the underlying ``getheaders`` but discarded
        its result, so this method always returned None.
        """
        return self._headers.getheaders(name)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: http.cookiejar.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Only responses that still carry the wrapped httplib.HTTPResponse
    # (the `_original_response` attribute) expose readable headers.
    if hasattr(response, "_original_response") and response._original_response:
        req = MockRequest(request)
        # Pull out the HTTPMessage with the headers and put it in the mock.
        res = MockResponse(response._original_response.msg)
        jar.extract_cookies(res, req)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    mock = MockRequest(request)
    # The jar writes any Cookie header into the mock's new-header store.
    jar.add_cookie_header(mock)
    return mock.get_new_headers().get("Cookie")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: clear() mutates the jar, so it must not be
    # called while iterating.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
# Raised by RequestsCookieJar lookups when one name matches several cookies.
class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific.
    """
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a http.cookiejar.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(
                self, name, domain=kwargs.get("domain"), path=kwargs.get("path")
            )
            return

        # A Morsel carries its own attributes; otherwise build a Cookie
        # from the supplied kwargs.
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (
                path is None or cookie.path == path
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A conflict means the name matched more than once, which still
        # counts as membership.
        try:
            return super().__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``http.cookiejar.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped embedded quotes from quoted cookie values before
        # storing, so values round-trip cleanly.
        if (
            hasattr(cookie.value, "startswith")
            and cookie.value.startswith('"')
            and cookie.value.endswith('"')
        ):
            cookie.value = cookie.value.replace('\\"', "")
        return super().set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super().update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:
                            # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError(
                                f"There are multiple cookies with name, {name!r}"
                            )
                        # we will eventually return this as long as no cookie conflict
                        toReturn = cookie.value

        # NOTE(review): a matching cookie with a falsy value (e.g. "") falls
        # through to KeyError here — presumably intentional upstream; confirm.
        if toReturn:
            return toReturn
        raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}")

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop("_cookies_lock")
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if "_cookies_lock" not in self.__dict__:
            # Recreate the lock dropped by __getstate__.
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.set_policy(self.get_policy())
        new_cj.update(self)
        return new_cj

    def get_policy(self):
        """Return the CookiePolicy instance used."""
        return self._policy
|
| 438 |
+
|
| 439 |
+
|
| 440 |
+
def _copy_cookie_jar(jar):
    """Return a copy of *jar*; a None jar passes through unchanged."""
    if jar is None:
        return None

    if hasattr(jar, "copy"):
        # We're dealing with an instance of RequestsCookieJar
        return jar.copy()

    # We're dealing with a generic CookieJar instance: clone the jar object
    # itself, then repopulate it with copies of each cookie.
    duplicate = copy.copy(jar)
    duplicate.clear()
    for cookie in jar:
        duplicate.set_cookie(copy.copy(cookie))
    return duplicate
|
| 453 |
+
|
| 454 |
+
|
| 455 |
+
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Every overridable Cookie constructor argument with its permissive default.
    result = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain="",
        path="/",
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={"HttpOnly": None},
        rfc2109=False,
    )

    badargs = set(kwargs) - set(result)
    if badargs:
        raise TypeError(
            f"create_cookie() got unexpected keyword arguments: {list(badargs)}"
        )

    result.update(kwargs)
    # Derive the *_specified flags that cookielib.Cookie also requires.
    result["port_specified"] = bool(result["port"])
    result["domain_specified"] = bool(result["domain"])
    result["domain_initial_dot"] = result["domain"].startswith(".")
    result["path_specified"] = bool(result["path"])

    return cookielib.Cookie(**result)
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""

    expires = None
    max_age = morsel["max-age"]
    if max_age:
        # A relative max-age takes precedence over an absolute expires date.
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError(f"max-age: {morsel['max-age']} must be integer")
    elif morsel["expires"]:
        # Parse the fixed-format expiry timestamp into epoch seconds.
        time_template = "%a, %d-%b-%Y %H:%M:%S GMT"
        expires = calendar.timegm(time.strptime(morsel["expires"], time_template))
    return create_cookie(
        name=morsel.key,
        value=morsel.value,
        version=morsel["version"] or 0,
        port=None,
        domain=morsel["domain"],
        path=morsel["path"],
        secure=bool(morsel["secure"]),
        expires=expires,
        discard=False,
        comment=morsel["comment"],
        comment_url=bool(morsel["comment"]),
        rest={"HttpOnly": morsel["httponly"]},
        rfc2109=False,
    )
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    :rtype: CookieJar
    """
    jar = RequestsCookieJar() if cookiejar is None else cookiejar

    if cookie_dict is not None:
        # Snapshot existing names once so the overwrite check is O(1).
        existing_names = {cookie.name for cookie in jar}
        for name in cookie_dict:
            if overwrite or (name not in existing_names):
                jar.set_cookie(create_cookie(name, cookie_dict[name]))

    return jar
|
| 540 |
+
|
| 541 |
+
|
| 542 |
+
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError("You can only merge into CookieJar")

    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJars have no update(); copy cookies over one by one.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
|
janus/lib/python3.10/site-packages/requests/models.py
ADDED
|
@@ -0,0 +1,1037 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.models
|
| 3 |
+
~~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module contains the primary objects that power Requests.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import datetime
|
| 9 |
+
|
| 10 |
+
# Import encoding now, to avoid implicit import later.
|
| 11 |
+
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
|
| 12 |
+
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
|
| 13 |
+
import encodings.idna # noqa: F401
|
| 14 |
+
from io import UnsupportedOperation
|
| 15 |
+
|
| 16 |
+
from urllib3.exceptions import (
|
| 17 |
+
DecodeError,
|
| 18 |
+
LocationParseError,
|
| 19 |
+
ProtocolError,
|
| 20 |
+
ReadTimeoutError,
|
| 21 |
+
SSLError,
|
| 22 |
+
)
|
| 23 |
+
from urllib3.fields import RequestField
|
| 24 |
+
from urllib3.filepost import encode_multipart_formdata
|
| 25 |
+
from urllib3.util import parse_url
|
| 26 |
+
|
| 27 |
+
from ._internal_utils import to_native_string, unicode_is_ascii
|
| 28 |
+
from .auth import HTTPBasicAuth
|
| 29 |
+
from .compat import (
|
| 30 |
+
Callable,
|
| 31 |
+
JSONDecodeError,
|
| 32 |
+
Mapping,
|
| 33 |
+
basestring,
|
| 34 |
+
builtin_str,
|
| 35 |
+
chardet,
|
| 36 |
+
cookielib,
|
| 37 |
+
)
|
| 38 |
+
from .compat import json as complexjson
|
| 39 |
+
from .compat import urlencode, urlsplit, urlunparse
|
| 40 |
+
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
|
| 41 |
+
from .exceptions import (
|
| 42 |
+
ChunkedEncodingError,
|
| 43 |
+
ConnectionError,
|
| 44 |
+
ContentDecodingError,
|
| 45 |
+
HTTPError,
|
| 46 |
+
InvalidJSONError,
|
| 47 |
+
InvalidURL,
|
| 48 |
+
)
|
| 49 |
+
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
|
| 50 |
+
from .exceptions import MissingSchema
|
| 51 |
+
from .exceptions import SSLError as RequestsSSLError
|
| 52 |
+
from .exceptions import StreamConsumedError
|
| 53 |
+
from .hooks import default_hooks
|
| 54 |
+
from .status_codes import codes
|
| 55 |
+
from .structures import CaseInsensitiveDict
|
| 56 |
+
from .utils import (
|
| 57 |
+
check_header_validity,
|
| 58 |
+
get_auth_from_url,
|
| 59 |
+
guess_filename,
|
| 60 |
+
guess_json_utf,
|
| 61 |
+
iter_slices,
|
| 62 |
+
parse_header_links,
|
| 63 |
+
requote_uri,
|
| 64 |
+
stream_decode_response_unicode,
|
| 65 |
+
super_len,
|
| 66 |
+
to_key_val_list,
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
#: The set of HTTP status codes that indicate an automatically
|
| 70 |
+
#: processable redirect.
|
| 71 |
+
REDIRECT_STATI = (
|
| 72 |
+
codes.moved, # 301
|
| 73 |
+
codes.found, # 302
|
| 74 |
+
codes.other, # 303
|
| 75 |
+
codes.temporary_redirect, # 307
|
| 76 |
+
codes.permanent_redirect, # 308
|
| 77 |
+
)
|
| 78 |
+
|
| 79 |
+
DEFAULT_REDIRECT_LIMIT = 30
|
| 80 |
+
CONTENT_CHUNK_SIZE = 10 * 1024
|
| 81 |
+
ITER_CHUNK_SIZE = 512
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class RequestEncodingMixin:
    """Encoding helpers shared by request objects.

    Relies on the host class exposing a ``url`` attribute (used by
    :attr:`path_url`); the two encoding helpers are static.
    """

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            # An empty path is not a valid request target; default to root.
            path = "/"

        url.append(path)

        query = p.query
        if query:
            url.append("?")
            url.append(query)

        return "".join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            # Already a raw/encoded payload -- pass through untouched.
            return data
        elif hasattr(data, "read"):
            # File-like object: streamed by the caller, nothing to encode here.
            return data
        elif hasattr(data, "__iter__"):
            result = []
            for k, vs in to_key_val_list(data):
                # Wrap scalar values in a list so single values and
                # multi-valued keys (e.g. {'a': ['1', '2']}) share one path.
                if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (
                                k.encode("utf-8") if isinstance(k, str) else k,
                                v.encode("utf-8") if isinstance(v, str) else v,
                            )
                        )
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if not files:
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # First, fold the plain form fields into the multipart field list.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (
                            field.decode("utf-8")
                            if isinstance(field, bytes)
                            else field,
                            v.encode("utf-8") if isinstance(v, str) else v,
                        )
                    )

        for k, v in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, "read"):
                fdata = fp.read()
            elif fp is None:
                # A None file value is skipped entirely.
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class RequestHooksMixin:
    """Hook registration behaviour shared by request objects.

    Expects the host class to provide a ``hooks`` mapping of
    event name -> list of callables.
    """

    def register_hook(self, event, hook):
        """Properly register a hook.

        *hook* may be a single callable or an iterable of callables;
        non-callables inside an iterable are silently dropped.
        """
        if event not in self.hooks:
            raise ValueError(f'Unsupported event specified, with event name "{event}"')

        handlers = self.hooks[event]
        if isinstance(hook, Callable):
            handlers.append(hook)
            return
        if hasattr(hook, "__iter__"):
            handlers.extend([h for h in hook if isinstance(h, Callable)])

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """
        registered = self.hooks[event]
        try:
            registered.remove(hook)
        except ValueError:
            return False
        return True
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        # Substitute fresh empty containers for omitted collection arguments
        # (never use mutable defaults in the signature itself).
        if data is None:
            data = []
        if files is None:
            files = []
        if headers is None:
            headers = {}
        if params is None:
            params = {}
        if hooks is None:
            hooks = {}

        # Start from the default hook table, then layer on caller-supplied hooks.
        self.hooks = default_hooks()
        for event, hook in hooks.items():
            self.register_hook(event=event, hook=hook)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return f"<Request [{self.method}]>"

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        """Prepares the entire request with the given parameters.

        Ordering matters: cookies must be prepared after headers (the Cookie
        header is written into ``self.headers``) and before the body/auth steps.
        """

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return f"<PreparedRequest [{self.method}]>"

    def copy(self):
        """Return a shallow-ish copy: headers and cookies are copied,
        body and hooks are shared with the original."""
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method (upper-cased, native string)."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        """IDNA-encode a non-ASCII hostname; raise UnicodeError on failure."""
        # Imported lazily so the dependency is only needed for non-ASCII hosts.
        import idna

        try:
            host = idna.encode(host, uts46=True).decode("utf-8")
        except idna.IDNAError:
            # Normalised to UnicodeError so callers need only one except clause.
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode("utf8")
        else:
            url = str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ":" in url and not url.lower().startswith("http"):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            raise MissingSchema(
                f"Invalid URL {url!r}: No scheme supplied. "
                f"Perhaps you meant https://{url}?"
            )

        if not host:
            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL("URL has an invalid label.")
        elif host.startswith(("*", ".")):
            raise InvalidURL("URL has an invalid label.")

        # Carefully reconstruct the network location
        netloc = auth or ""
        if netloc:
            netloc += "@"
        netloc += host
        if port:
            netloc += f":{port}"

        # Bare domains aren't valid URLs.
        if not path:
            path = "/"

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Merge caller-supplied params into any query string already in the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = f"{query}&{enc_params}"
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers into a CaseInsensitiveDict."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = "application/json"

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode("utf-8")

        # A "stream" is any iterable that isn't a string/list/tuple/mapping
        # (e.g. a file object or generator).
        is_stream = all(
            [
                hasattr(data, "__iter__"),
                not isinstance(data, (basestring, list, tuple, Mapping)),
            ]
        )

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, "tell", None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except OSError:
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError(
                    "Streamed bodies and files are mutually exclusive."
                )

            if length:
                self.headers["Content-Length"] = builtin_str(length)
            else:
                self.headers["Transfer-Encoding"] = "chunked"
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, "read"):
                        content_type = None
                    else:
                        content_type = "application/x-www-form-urlencoded"

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ("content-type" not in self.headers):
                self.headers["Content-Type"] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers["Content-Length"] = builtin_str(length)
        elif (
            self.method not in ("GET", "HEAD")
            and self.headers.get("Content-Length") is None
        ):
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers["Content-Length"] = "0"

    def prepare_auth(self, auth, url=""):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers["Cookie"] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
class Response:
|
| 641 |
+
"""The :class:`Response <Response>` object, which contains a
|
| 642 |
+
server's response to an HTTP request.
|
| 643 |
+
"""
|
| 644 |
+
|
| 645 |
+
__attrs__ = [
|
| 646 |
+
"_content",
|
| 647 |
+
"status_code",
|
| 648 |
+
"headers",
|
| 649 |
+
"url",
|
| 650 |
+
"history",
|
| 651 |
+
"encoding",
|
| 652 |
+
"reason",
|
| 653 |
+
"cookies",
|
| 654 |
+
"elapsed",
|
| 655 |
+
"request",
|
| 656 |
+
]
|
| 657 |
+
|
| 658 |
+
def __init__(self):
|
| 659 |
+
self._content = False
|
| 660 |
+
self._content_consumed = False
|
| 661 |
+
self._next = None
|
| 662 |
+
|
| 663 |
+
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
|
| 664 |
+
self.status_code = None
|
| 665 |
+
|
| 666 |
+
#: Case-insensitive Dictionary of Response Headers.
|
| 667 |
+
#: For example, ``headers['content-encoding']`` will return the
|
| 668 |
+
#: value of a ``'Content-Encoding'`` response header.
|
| 669 |
+
self.headers = CaseInsensitiveDict()
|
| 670 |
+
|
| 671 |
+
#: File-like object representation of response (for advanced usage).
|
| 672 |
+
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
|
| 673 |
+
#: This requirement does not apply for use internally to Requests.
|
| 674 |
+
self.raw = None
|
| 675 |
+
|
| 676 |
+
#: Final URL location of Response.
|
| 677 |
+
self.url = None
|
| 678 |
+
|
| 679 |
+
#: Encoding to decode with when accessing r.text.
|
| 680 |
+
self.encoding = None
|
| 681 |
+
|
| 682 |
+
#: A list of :class:`Response <Response>` objects from
|
| 683 |
+
#: the history of the Request. Any redirect responses will end
|
| 684 |
+
#: up here. The list is sorted from the oldest to the most recent request.
|
| 685 |
+
self.history = []
|
| 686 |
+
|
| 687 |
+
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
|
| 688 |
+
self.reason = None
|
| 689 |
+
|
| 690 |
+
#: A CookieJar of Cookies the server sent back.
|
| 691 |
+
self.cookies = cookiejar_from_dict({})
|
| 692 |
+
|
| 693 |
+
#: The amount of time elapsed between sending the request
|
| 694 |
+
#: and the arrival of the response (as a timedelta).
|
| 695 |
+
#: This property specifically measures the time taken between sending
|
| 696 |
+
#: the first byte of the request and finishing parsing the headers. It
|
| 697 |
+
#: is therefore unaffected by consuming the response content or the
|
| 698 |
+
#: value of the ``stream`` keyword argument.
|
| 699 |
+
self.elapsed = datetime.timedelta(0)
|
| 700 |
+
|
| 701 |
+
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
|
| 702 |
+
#: is a response.
|
| 703 |
+
self.request = None
|
| 704 |
+
|
| 705 |
+
def __enter__(self):
|
| 706 |
+
return self
|
| 707 |
+
|
| 708 |
+
def __exit__(self, *args):
|
| 709 |
+
self.close()
|
| 710 |
+
|
| 711 |
+
def __getstate__(self):
|
| 712 |
+
# Consume everything; accessing the content attribute makes
|
| 713 |
+
# sure the content has been fully read.
|
| 714 |
+
if not self._content_consumed:
|
| 715 |
+
self.content
|
| 716 |
+
|
| 717 |
+
return {attr: getattr(self, attr, None) for attr in self.__attrs__}
|
| 718 |
+
|
| 719 |
+
def __setstate__(self, state):
|
| 720 |
+
for name, value in state.items():
|
| 721 |
+
setattr(self, name, value)
|
| 722 |
+
|
| 723 |
+
# pickled objects do not have .raw
|
| 724 |
+
setattr(self, "_content_consumed", True)
|
| 725 |
+
setattr(self, "raw", None)
|
| 726 |
+
|
| 727 |
+
def __repr__(self):
|
| 728 |
+
return f"<Response [{self.status_code}]>"
|
| 729 |
+
|
| 730 |
+
def __bool__(self):
|
| 731 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
| 732 |
+
|
| 733 |
+
This attribute checks if the status code of the response is between
|
| 734 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 735 |
+
the status code, is between 200 and 400, this will return True. This
|
| 736 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 737 |
+
"""
|
| 738 |
+
return self.ok
|
| 739 |
+
|
| 740 |
+
def __nonzero__(self):
|
| 741 |
+
"""Returns True if :attr:`status_code` is less than 400.
|
| 742 |
+
|
| 743 |
+
This attribute checks if the status code of the response is between
|
| 744 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 745 |
+
the status code, is between 200 and 400, this will return True. This
|
| 746 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 747 |
+
"""
|
| 748 |
+
return self.ok
|
| 749 |
+
|
| 750 |
+
def __iter__(self):
|
| 751 |
+
"""Allows you to use a response as an iterator."""
|
| 752 |
+
return self.iter_content(128)
|
| 753 |
+
|
| 754 |
+
@property
|
| 755 |
+
def ok(self):
|
| 756 |
+
"""Returns True if :attr:`status_code` is less than 400, False if not.
|
| 757 |
+
|
| 758 |
+
This attribute checks if the status code of the response is between
|
| 759 |
+
400 and 600 to see if there was a client error or a server error. If
|
| 760 |
+
the status code is between 200 and 400, this will return True. This
|
| 761 |
+
is **not** a check to see if the response code is ``200 OK``.
|
| 762 |
+
"""
|
| 763 |
+
try:
|
| 764 |
+
self.raise_for_status()
|
| 765 |
+
except HTTPError:
|
| 766 |
+
return False
|
| 767 |
+
return True
|
| 768 |
+
|
| 769 |
+
@property
|
| 770 |
+
def is_redirect(self):
|
| 771 |
+
"""True if this Response is a well-formed HTTP redirect that could have
|
| 772 |
+
been processed automatically (by :meth:`Session.resolve_redirects`).
|
| 773 |
+
"""
|
| 774 |
+
return "location" in self.headers and self.status_code in REDIRECT_STATI
|
| 775 |
+
|
| 776 |
+
@property
|
| 777 |
+
def is_permanent_redirect(self):
|
| 778 |
+
"""True if this Response one of the permanent versions of redirect."""
|
| 779 |
+
return "location" in self.headers and self.status_code in (
|
| 780 |
+
codes.moved_permanently,
|
| 781 |
+
codes.permanent_redirect,
|
| 782 |
+
)
|
| 783 |
+
|
| 784 |
+
@property
|
| 785 |
+
def next(self):
|
| 786 |
+
"""Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
|
| 787 |
+
return self._next
|
| 788 |
+
|
| 789 |
+
@property
|
| 790 |
+
def apparent_encoding(self):
|
| 791 |
+
"""The apparent encoding, provided by the charset_normalizer or chardet libraries."""
|
| 792 |
+
if chardet is not None:
|
| 793 |
+
return chardet.detect(self.content)["encoding"]
|
| 794 |
+
else:
|
| 795 |
+
# If no character detection library is available, we'll fall back
|
| 796 |
+
# to a standard Python utf-8 str.
|
| 797 |
+
return "utf-8"
|
| 798 |
+
|
| 799 |
+
def iter_content(self, chunk_size=1, decode_unicode=False):
|
| 800 |
+
"""Iterates over the response data. When stream=True is set on the
|
| 801 |
+
request, this avoids reading the content at once into memory for
|
| 802 |
+
large responses. The chunk size is the number of bytes it should
|
| 803 |
+
read into memory. This is not necessarily the length of each item
|
| 804 |
+
returned as decoding can take place.
|
| 805 |
+
|
| 806 |
+
chunk_size must be of type int or None. A value of None will
|
| 807 |
+
function differently depending on the value of `stream`.
|
| 808 |
+
stream=True will read data as it arrives in whatever size the
|
| 809 |
+
chunks are received. If stream=False, data is returned as
|
| 810 |
+
a single chunk.
|
| 811 |
+
|
| 812 |
+
If decode_unicode is True, content will be decoded using the best
|
| 813 |
+
available encoding based on the response.
|
| 814 |
+
"""
|
| 815 |
+
|
| 816 |
+
def generate():
|
| 817 |
+
# Special case for urllib3.
|
| 818 |
+
if hasattr(self.raw, "stream"):
|
| 819 |
+
try:
|
| 820 |
+
yield from self.raw.stream(chunk_size, decode_content=True)
|
| 821 |
+
except ProtocolError as e:
|
| 822 |
+
raise ChunkedEncodingError(e)
|
| 823 |
+
except DecodeError as e:
|
| 824 |
+
raise ContentDecodingError(e)
|
| 825 |
+
except ReadTimeoutError as e:
|
| 826 |
+
raise ConnectionError(e)
|
| 827 |
+
except SSLError as e:
|
| 828 |
+
raise RequestsSSLError(e)
|
| 829 |
+
else:
|
| 830 |
+
# Standard file-like object.
|
| 831 |
+
while True:
|
| 832 |
+
chunk = self.raw.read(chunk_size)
|
| 833 |
+
if not chunk:
|
| 834 |
+
break
|
| 835 |
+
yield chunk
|
| 836 |
+
|
| 837 |
+
self._content_consumed = True
|
| 838 |
+
|
| 839 |
+
if self._content_consumed and isinstance(self._content, bool):
|
| 840 |
+
raise StreamConsumedError()
|
| 841 |
+
elif chunk_size is not None and not isinstance(chunk_size, int):
|
| 842 |
+
raise TypeError(
|
| 843 |
+
f"chunk_size must be an int, it is instead a {type(chunk_size)}."
|
| 844 |
+
)
|
| 845 |
+
# simulate reading small chunks of the content
|
| 846 |
+
reused_chunks = iter_slices(self._content, chunk_size)
|
| 847 |
+
|
| 848 |
+
stream_chunks = generate()
|
| 849 |
+
|
| 850 |
+
chunks = reused_chunks if self._content_consumed else stream_chunks
|
| 851 |
+
|
| 852 |
+
if decode_unicode:
|
| 853 |
+
chunks = stream_decode_response_unicode(chunks, self)
|
| 854 |
+
|
| 855 |
+
return chunks
|
| 856 |
+
|
| 857 |
+
def iter_lines(
|
| 858 |
+
self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
|
| 859 |
+
):
|
| 860 |
+
"""Iterates over the response data, one line at a time. When
|
| 861 |
+
stream=True is set on the request, this avoids reading the
|
| 862 |
+
content at once into memory for large responses.
|
| 863 |
+
|
| 864 |
+
.. note:: This method is not reentrant safe.
|
| 865 |
+
"""
|
| 866 |
+
|
| 867 |
+
pending = None
|
| 868 |
+
|
| 869 |
+
for chunk in self.iter_content(
|
| 870 |
+
chunk_size=chunk_size, decode_unicode=decode_unicode
|
| 871 |
+
):
|
| 872 |
+
if pending is not None:
|
| 873 |
+
chunk = pending + chunk
|
| 874 |
+
|
| 875 |
+
if delimiter:
|
| 876 |
+
lines = chunk.split(delimiter)
|
| 877 |
+
else:
|
| 878 |
+
lines = chunk.splitlines()
|
| 879 |
+
|
| 880 |
+
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
|
| 881 |
+
pending = lines.pop()
|
| 882 |
+
else:
|
| 883 |
+
pending = None
|
| 884 |
+
|
| 885 |
+
yield from lines
|
| 886 |
+
|
| 887 |
+
if pending is not None:
|
| 888 |
+
yield pending
|
| 889 |
+
|
| 890 |
+
    @property
    def content(self):
        """Content of the response, in bytes.

        The first access reads the whole body from the raw stream and caches
        it; later accesses return the cached bytes.
        """

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                # The stream was already iterated (e.g. via iter_content), so
                # the bytes are gone and cannot be re-read.
                raise RuntimeError("The content for this response was already consumed")

            if self.status_code == 0 or self.raw is None:
                # Connection-level failure: there is no body to read.
                self._content = None
            else:
                # ``or b""`` normalizes an empty join result to bytes.
                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content
|
| 908 |
+
|
| 909 |
+
@property
|
| 910 |
+
def text(self):
|
| 911 |
+
"""Content of the response, in unicode.
|
| 912 |
+
|
| 913 |
+
If Response.encoding is None, encoding will be guessed using
|
| 914 |
+
``charset_normalizer`` or ``chardet``.
|
| 915 |
+
|
| 916 |
+
The encoding of the response content is determined based solely on HTTP
|
| 917 |
+
headers, following RFC 2616 to the letter. If you can take advantage of
|
| 918 |
+
non-HTTP knowledge to make a better guess at the encoding, you should
|
| 919 |
+
set ``r.encoding`` appropriately before accessing this property.
|
| 920 |
+
"""
|
| 921 |
+
|
| 922 |
+
# Try charset from content-type
|
| 923 |
+
content = None
|
| 924 |
+
encoding = self.encoding
|
| 925 |
+
|
| 926 |
+
if not self.content:
|
| 927 |
+
return ""
|
| 928 |
+
|
| 929 |
+
# Fallback to auto-detected encoding.
|
| 930 |
+
if self.encoding is None:
|
| 931 |
+
encoding = self.apparent_encoding
|
| 932 |
+
|
| 933 |
+
# Decode unicode from given encoding.
|
| 934 |
+
try:
|
| 935 |
+
content = str(self.content, encoding, errors="replace")
|
| 936 |
+
except (LookupError, TypeError):
|
| 937 |
+
# A LookupError is raised if the encoding was not found which could
|
| 938 |
+
# indicate a misspelling or similar mistake.
|
| 939 |
+
#
|
| 940 |
+
# A TypeError can be raised if encoding is None
|
| 941 |
+
#
|
| 942 |
+
# So we try blindly encoding.
|
| 943 |
+
content = str(self.content, errors="replace")
|
| 944 |
+
|
| 945 |
+
return content
|
| 946 |
+
|
| 947 |
+
    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises requests.exceptions.JSONDecodeError: If the response body does not
            contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using charset_normalizer to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
                except JSONDecodeError as e:
                    # Normalize decode failures to requests' own error type.
                    raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

        try:
            return complexjson.loads(self.text, **kwargs)
        except JSONDecodeError as e:
            # Catch JSON-related errors and raise as requests.JSONDecodeError
            # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
|
| 979 |
+
|
| 980 |
+
@property
|
| 981 |
+
def links(self):
|
| 982 |
+
"""Returns the parsed header links of the response, if any."""
|
| 983 |
+
|
| 984 |
+
header = self.headers.get("link")
|
| 985 |
+
|
| 986 |
+
resolved_links = {}
|
| 987 |
+
|
| 988 |
+
if header:
|
| 989 |
+
links = parse_header_links(header)
|
| 990 |
+
|
| 991 |
+
for link in links:
|
| 992 |
+
key = link.get("rel") or link.get("url")
|
| 993 |
+
resolved_links[key] = link
|
| 994 |
+
|
| 995 |
+
return resolved_links
|
| 996 |
+
|
| 997 |
+
def raise_for_status(self):
|
| 998 |
+
"""Raises :class:`HTTPError`, if one occurred."""
|
| 999 |
+
|
| 1000 |
+
http_error_msg = ""
|
| 1001 |
+
if isinstance(self.reason, bytes):
|
| 1002 |
+
# We attempt to decode utf-8 first because some servers
|
| 1003 |
+
# choose to localize their reason strings. If the string
|
| 1004 |
+
# isn't utf-8, we fall back to iso-8859-1 for all other
|
| 1005 |
+
# encodings. (See PR #3538)
|
| 1006 |
+
try:
|
| 1007 |
+
reason = self.reason.decode("utf-8")
|
| 1008 |
+
except UnicodeDecodeError:
|
| 1009 |
+
reason = self.reason.decode("iso-8859-1")
|
| 1010 |
+
else:
|
| 1011 |
+
reason = self.reason
|
| 1012 |
+
|
| 1013 |
+
if 400 <= self.status_code < 500:
|
| 1014 |
+
http_error_msg = (
|
| 1015 |
+
f"{self.status_code} Client Error: {reason} for url: {self.url}"
|
| 1016 |
+
)
|
| 1017 |
+
|
| 1018 |
+
elif 500 <= self.status_code < 600:
|
| 1019 |
+
http_error_msg = (
|
| 1020 |
+
f"{self.status_code} Server Error: {reason} for url: {self.url}"
|
| 1021 |
+
)
|
| 1022 |
+
|
| 1023 |
+
if http_error_msg:
|
| 1024 |
+
raise HTTPError(http_error_msg, response=self)
|
| 1025 |
+
|
| 1026 |
+
def close(self):
|
| 1027 |
+
"""Releases the connection back to the pool. Once this method has been
|
| 1028 |
+
called the underlying ``raw`` object must not be accessed again.
|
| 1029 |
+
|
| 1030 |
+
*Note: Should not normally need to be called explicitly.*
|
| 1031 |
+
"""
|
| 1032 |
+
if not self._content_consumed:
|
| 1033 |
+
self.raw.close()
|
| 1034 |
+
|
| 1035 |
+
release_conn = getattr(self.raw, "release_conn", None)
|
| 1036 |
+
if release_conn is not None:
|
| 1037 |
+
release_conn()
|
janus/lib/python3.10/site-packages/requests/status_codes.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
r"""
|
| 2 |
+
The ``codes`` object defines a mapping from common names for HTTP statuses
|
| 3 |
+
to their numerical codes, accessible either as attributes or as dictionary
|
| 4 |
+
items.
|
| 5 |
+
|
| 6 |
+
Example::
|
| 7 |
+
|
| 8 |
+
>>> import requests
|
| 9 |
+
>>> requests.codes['temporary_redirect']
|
| 10 |
+
307
|
| 11 |
+
>>> requests.codes.teapot
|
| 12 |
+
418
|
| 13 |
+
>>> requests.codes['\o/']
|
| 14 |
+
200
|
| 15 |
+
|
| 16 |
+
Some codes have multiple names, and both upper- and lower-case versions of
|
| 17 |
+
the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
|
| 18 |
+
``codes.okay`` all correspond to the HTTP status code 200.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
from .structures import LookupDict
|
| 22 |
+
|
| 23 |
+
_codes = {
|
| 24 |
+
# Informational.
|
| 25 |
+
100: ("continue",),
|
| 26 |
+
101: ("switching_protocols",),
|
| 27 |
+
102: ("processing", "early-hints"),
|
| 28 |
+
103: ("checkpoint",),
|
| 29 |
+
122: ("uri_too_long", "request_uri_too_long"),
|
| 30 |
+
200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"),
|
| 31 |
+
201: ("created",),
|
| 32 |
+
202: ("accepted",),
|
| 33 |
+
203: ("non_authoritative_info", "non_authoritative_information"),
|
| 34 |
+
204: ("no_content",),
|
| 35 |
+
205: ("reset_content", "reset"),
|
| 36 |
+
206: ("partial_content", "partial"),
|
| 37 |
+
207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"),
|
| 38 |
+
208: ("already_reported",),
|
| 39 |
+
226: ("im_used",),
|
| 40 |
+
# Redirection.
|
| 41 |
+
300: ("multiple_choices",),
|
| 42 |
+
301: ("moved_permanently", "moved", "\\o-"),
|
| 43 |
+
302: ("found",),
|
| 44 |
+
303: ("see_other", "other"),
|
| 45 |
+
304: ("not_modified",),
|
| 46 |
+
305: ("use_proxy",),
|
| 47 |
+
306: ("switch_proxy",),
|
| 48 |
+
307: ("temporary_redirect", "temporary_moved", "temporary"),
|
| 49 |
+
308: (
|
| 50 |
+
"permanent_redirect",
|
| 51 |
+
"resume_incomplete",
|
| 52 |
+
"resume",
|
| 53 |
+
), # "resume" and "resume_incomplete" to be removed in 3.0
|
| 54 |
+
# Client Error.
|
| 55 |
+
400: ("bad_request", "bad"),
|
| 56 |
+
401: ("unauthorized",),
|
| 57 |
+
402: ("payment_required", "payment"),
|
| 58 |
+
403: ("forbidden",),
|
| 59 |
+
404: ("not_found", "-o-"),
|
| 60 |
+
405: ("method_not_allowed", "not_allowed"),
|
| 61 |
+
406: ("not_acceptable",),
|
| 62 |
+
407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"),
|
| 63 |
+
408: ("request_timeout", "timeout"),
|
| 64 |
+
409: ("conflict",),
|
| 65 |
+
410: ("gone",),
|
| 66 |
+
411: ("length_required",),
|
| 67 |
+
412: ("precondition_failed", "precondition"),
|
| 68 |
+
413: ("request_entity_too_large", "content_too_large"),
|
| 69 |
+
414: ("request_uri_too_large", "uri_too_long"),
|
| 70 |
+
415: ("unsupported_media_type", "unsupported_media", "media_type"),
|
| 71 |
+
416: (
|
| 72 |
+
"requested_range_not_satisfiable",
|
| 73 |
+
"requested_range",
|
| 74 |
+
"range_not_satisfiable",
|
| 75 |
+
),
|
| 76 |
+
417: ("expectation_failed",),
|
| 77 |
+
418: ("im_a_teapot", "teapot", "i_am_a_teapot"),
|
| 78 |
+
421: ("misdirected_request",),
|
| 79 |
+
422: ("unprocessable_entity", "unprocessable", "unprocessable_content"),
|
| 80 |
+
423: ("locked",),
|
| 81 |
+
424: ("failed_dependency", "dependency"),
|
| 82 |
+
425: ("unordered_collection", "unordered", "too_early"),
|
| 83 |
+
426: ("upgrade_required", "upgrade"),
|
| 84 |
+
428: ("precondition_required", "precondition"),
|
| 85 |
+
429: ("too_many_requests", "too_many"),
|
| 86 |
+
431: ("header_fields_too_large", "fields_too_large"),
|
| 87 |
+
444: ("no_response", "none"),
|
| 88 |
+
449: ("retry_with", "retry"),
|
| 89 |
+
450: ("blocked_by_windows_parental_controls", "parental_controls"),
|
| 90 |
+
451: ("unavailable_for_legal_reasons", "legal_reasons"),
|
| 91 |
+
499: ("client_closed_request",),
|
| 92 |
+
# Server Error.
|
| 93 |
+
500: ("internal_server_error", "server_error", "/o\\", "✗"),
|
| 94 |
+
501: ("not_implemented",),
|
| 95 |
+
502: ("bad_gateway",),
|
| 96 |
+
503: ("service_unavailable", "unavailable"),
|
| 97 |
+
504: ("gateway_timeout",),
|
| 98 |
+
505: ("http_version_not_supported", "http_version"),
|
| 99 |
+
506: ("variant_also_negotiates",),
|
| 100 |
+
507: ("insufficient_storage",),
|
| 101 |
+
509: ("bandwidth_limit_exceeded", "bandwidth"),
|
| 102 |
+
510: ("not_extended",),
|
| 103 |
+
511: ("network_authentication_required", "network_auth", "network_authentication"),
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
codes = LookupDict(name="status_codes")
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def _init():
    """Populate ``codes`` with attributes for every known status name."""
    for code, titles in _codes.items():
        for title in titles:
            setattr(codes, title, code)
            if not title.startswith(("\\", "/")):
                # ASCII-art names such as "\\o/" have no meaningful
                # upper-case variant; skip them.
                setattr(codes, title.upper(), code)

    def doc(code):
        # One bullet line per status code for the module docstring.
        names = ", ".join(f"``{n}``" for n in _codes[code])
        return "* %d: %s" % (code, names)

    global __doc__
    # Append the generated listing to the module docstring. __doc__ is None
    # under ``python -OO``, in which case we leave it alone.
    __doc__ = (
        __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
        if __doc__ is not None
        else None
    )


_init()
|
janus/lib/python3.10/site-packages/requests/utils.py
ADDED
|
@@ -0,0 +1,1096 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
requests.utils
|
| 3 |
+
~~~~~~~~~~~~~~
|
| 4 |
+
|
| 5 |
+
This module provides utility functions that are used within Requests
|
| 6 |
+
that are also useful for external consumption.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import codecs
|
| 10 |
+
import contextlib
|
| 11 |
+
import io
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
import socket
|
| 15 |
+
import struct
|
| 16 |
+
import sys
|
| 17 |
+
import tempfile
|
| 18 |
+
import warnings
|
| 19 |
+
import zipfile
|
| 20 |
+
from collections import OrderedDict
|
| 21 |
+
|
| 22 |
+
from urllib3.util import make_headers, parse_url
|
| 23 |
+
|
| 24 |
+
from . import certs
|
| 25 |
+
from .__version__ import __version__
|
| 26 |
+
|
| 27 |
+
# to_native_string is unused here, but imported here for backwards compatibility
|
| 28 |
+
from ._internal_utils import ( # noqa: F401
|
| 29 |
+
_HEADER_VALIDATORS_BYTE,
|
| 30 |
+
_HEADER_VALIDATORS_STR,
|
| 31 |
+
HEADER_VALIDATORS,
|
| 32 |
+
to_native_string,
|
| 33 |
+
)
|
| 34 |
+
from .compat import (
|
| 35 |
+
Mapping,
|
| 36 |
+
basestring,
|
| 37 |
+
bytes,
|
| 38 |
+
getproxies,
|
| 39 |
+
getproxies_environment,
|
| 40 |
+
integer_types,
|
| 41 |
+
)
|
| 42 |
+
from .compat import parse_http_list as _parse_list_header
|
| 43 |
+
from .compat import (
|
| 44 |
+
proxy_bypass,
|
| 45 |
+
proxy_bypass_environment,
|
| 46 |
+
quote,
|
| 47 |
+
str,
|
| 48 |
+
unquote,
|
| 49 |
+
urlparse,
|
| 50 |
+
urlunparse,
|
| 51 |
+
)
|
| 52 |
+
from .cookies import cookiejar_from_dict
|
| 53 |
+
from .exceptions import (
|
| 54 |
+
FileModeWarning,
|
| 55 |
+
InvalidHeader,
|
| 56 |
+
InvalidURL,
|
| 57 |
+
UnrewindableBodyError,
|
| 58 |
+
)
|
| 59 |
+
from .structures import CaseInsensitiveDict
|
| 60 |
+
|
| 61 |
+
NETRC_FILES = (".netrc", "_netrc")
|
| 62 |
+
|
| 63 |
+
DEFAULT_CA_BUNDLE_PATH = certs.where()
|
| 64 |
+
|
| 65 |
+
DEFAULT_PORTS = {"http": 80, "https": 443}
|
| 66 |
+
|
| 67 |
+
# Ensure that ', ' is used to preserve previous delimiter behavior.
|
| 68 |
+
DEFAULT_ACCEPT_ENCODING = ", ".join(
|
| 69 |
+
re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
if sys.platform == "win32":
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        """Check the Windows registry proxy-override list for *host*."""
        try:
            import winreg
        except ImportError:
            return False

        try:
            internetSettings = winreg.OpenKey(
                winreg.HKEY_CURRENT_USER,
                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
            )
            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
            # ProxyOverride is almost always a string
            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
        except (OSError, ValueError):
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(";")
        # filter out empty strings to avoid re.match return true in the following code.
        proxyOverride = filter(None, proxyOverride)
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == "<local>":
                # "<local>" means bypass for any host without a dot.
                if "." not in host:
                    return True
            # Translate the glob-style override entry into a regex.
            test = test.replace(".", r"\.")  # mask dots
            test = test.replace("*", r".*")  # change glob sequence
            test = test.replace("?", r".")  # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def dict_to_sequence(d):
    """Returns an internal sequence dictionary update.

    Mapping-like inputs are converted to their ``items()`` view; anything
    else is passed through untouched.
    """
    return d.items() if hasattr(d, "items") else d
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def super_len(o):
    """Best-effort remaining length, in bytes, of *o* (for Content-Length).

    Handles strings, objects with ``__len__`` or a ``len`` attribute, real
    files (via ``fstat``), and seekable file-likes; subtracts the current
    read position when it can be determined.
    """
    total_length = None
    current_position = 0

    if isinstance(o, str):
        # Length must be measured in encoded bytes, not characters.
        o = o.encode("utf-8")

    if hasattr(o, "__len__"):
        total_length = len(o)

    elif hasattr(o, "len"):
        # Some file-like wrappers (e.g. urllib3's) expose a ``len`` attribute.
        total_length = o.len

    elif hasattr(o, "fileno"):
        try:
            fileno = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            # AttributeError is a surprising exception, seeing as how we've just checked
            # that `hasattr(o, 'fileno')`. It happens for objects obtained via
            # `Tarfile.extractfile()`, per issue 5229.
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if "b" not in o.mode:
                warnings.warn(
                    (
                        "Requests has determined the content-length for this "
                        "request using the binary size of the file: however, the "
                        "file has been opened in text mode (i.e. without the 'b' "
                        "flag in the mode). This may lead to an incorrect "
                        "content-length. In Requests 3.0, support will be removed "
                        "for files in text mode."
                    ),
                    FileModeWarning,
                )

    if hasattr(o, "tell"):
        try:
            current_position = o.tell()
        except OSError:
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, "seek") and total_length is None:
                # StringIO and BytesIO have seek but no usable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except OSError:
                    total_length = 0

    if total_length is None:
        total_length = 0

    # Never report a negative length (position may exceed the known size).
    return max(0, total_length - current_position)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc.

    :param url: URL whose host is looked up in the netrc file.
    :param raise_errors: When True, re-raise netrc parse/permission errors
        instead of silently returning None.
    """

    # $NETRC overrides the default search locations.
    netrc_file = os.environ.get("NETRC")
    if netrc_file is not None:
        netrc_locations = (netrc_file,)
    else:
        netrc_locations = (f"~/{f}" for f in NETRC_FILES)

    try:
        from netrc import NetrcParseError, netrc

        netrc_path = None

        for f in netrc_locations:
            try:
                loc = os.path.expanduser(f)
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See https://bugs.python.org/issue20164 &
                # https://github.com/psf/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b":"
        if isinstance(url, str):
            splitstr = splitstr.decode("ascii")
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = 0 if _netrc[0] else 1
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, OSError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # App Engine hackiness.
    except (ImportError, AttributeError):
        pass
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    name = getattr(obj, "name", None)
    # Only trust real string names; "<stdin>"-style pseudo names are skipped.
    if not (name and isinstance(name, basestring)):
        return None
    if name[0] == "<" or name[-1] == ">":
        return None
    return os.path.basename(name)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def extract_zipped_paths(path):
    """Replace nonexistent paths that look like they refer to a member of a zip
    archive with the location of an extracted copy of the target, or else
    just return the provided path unchanged.
    """
    if os.path.exists(path):
        # this is already a valid path, no need to do anything further
        return path

    # find the first valid part of the provided path and treat that as a zip archive
    # assume the rest of the path is the name of a member in the archive
    archive, member = os.path.split(path)
    while archive and not os.path.exists(archive):
        archive, prefix = os.path.split(archive)
        if not prefix:
            # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
            # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
            break
        member = "/".join([prefix, member])

    if not zipfile.is_zipfile(archive):
        # The first existing ancestor is not an archive: nothing to extract.
        return path

    zip_file = zipfile.ZipFile(archive)
    if member not in zip_file.namelist():
        return path

    # we have a valid zip archive and a valid member of that archive
    # Extract to the temp dir, flattening any directory structure.
    tmp = tempfile.gettempdir()
    extracted_path = os.path.join(tmp, member.split("/")[-1])
    if not os.path.exists(extracted_path):
        # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
        with atomic_open(extracted_path) as file_handler:
            file_handler.write(zip_file.read(member))
    return extracted_path
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
@contextlib.contextmanager
def atomic_open(filename):
    """Write a file to the disk in an atomic fashion"""
    # Stage the bytes in a sibling temp file, then rename over the target so
    # readers never observe a partially written file.
    fd, staging_path = tempfile.mkstemp(dir=os.path.dirname(filename))
    try:
        with os.fdopen(fd, "wb") as staging:
            yield staging
        os.replace(staging_path, filename)
    except BaseException:
        # The caller (or the write) failed: discard the staging file.
        os.remove(staging_path)
        raise
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def from_key_val_list(value):
    """Coerce *value* into an :class:`OrderedDict` of key/value pairs.

    ``None`` passes through unchanged. Scalar types that cannot represent
    a sequence of 2-tuples (str, bytes, bool, int) raise ``ValueError``.

    ::

        >>> from_key_val_list([('key', 'val')])
        OrderedDict([('key', 'val')])
        >>> from_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples
        >>> from_key_val_list({'key': 'val'})
        OrderedDict([('key', 'val')])

    :rtype: OrderedDict
    """
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError("cannot encode objects that are not 2-tuples")
    if value is None:
        return None
    return OrderedDict(value)
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
def to_key_val_list(value):
    """Coerce *value* into a list of (key, value) tuples.

    ``None`` passes through unchanged. Scalar types that cannot represent
    a sequence of 2-tuples (str, bytes, bool, int) raise ``ValueError``.

    ::

        >>> to_key_val_list([('key', 'val')])
        [('key', 'val')]
        >>> to_key_val_list({'key': 'val'})
        [('key', 'val')]
        >>> to_key_val_list('string')
        Traceback (most recent call last):
        ...
        ValueError: cannot encode objects that are not 2-tuples

    :rtype: list
    """
    if isinstance(value, (str, bytes, bool, int)):
        raise ValueError("cannot encode objects that are not 2-tuples")
    if value is None:
        return None
    pairs = value.items() if isinstance(value, Mapping) else value
    return list(pairs)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
# From mitsuhiko/werkzeug (used with permission).
|
| 375 |
+
def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings. A quoted-string could
    contain a comma. A non-quoted string could have quotes in the
    middle. Quotes are removed automatically after parsing.

    The return value is a standard :class:`list`:

    >>> parse_list_header('token, "quoted value"')
    ['token', 'quoted value']

    :param value: a string with a list header.
    :return: :class:`list`
    :rtype: list
    """
    # Items wrapped in double quotes are unquoted browser-style; everything
    # else is kept verbatim (case preserved, duplicates allowed).
    return [
        unquote_header_value(item[1:-1]) if item[:1] == item[-1:] == '"' else item
        for item in _parse_list_header(value)
    ]
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
# From mitsuhiko/werkzeug (used with permission).
|
| 407 |
+
def parse_dict_header(value):
    """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
    convert them into a python dict:

    >>> d = parse_dict_header('foo="is a fish", bar="as well"')
    >>> sorted(d.items())
    [('bar', 'as well'), ('foo', 'is a fish')]

    If there is no value for a key it will be `None`:

    >>> parse_dict_header('key_without_value')
    {'key_without_value': None}

    :param value: a string with a dict header.
    :return: :class:`dict`
    :rtype: dict
    """
    parsed = {}
    for item in _parse_list_header(value):
        if "=" in item:
            key, raw = item.split("=", 1)
            # Double-quoted values get browser-style unquoting.
            if raw[:1] == raw[-1:] == '"':
                raw = unquote_header_value(raw[1:-1])
            parsed[key] = raw
        else:
            # A bare token maps to None.
            parsed[item] = None
    return parsed
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
# From mitsuhiko/werkzeug (used with permission).
|
| 442 |
+
def unquote_header_value(value, is_filename=False):
    r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
    This does not use the real unquoting but what browsers are actually
    using for quoting.

    :param value: the header value to unquote.
    :rtype: str
    """
    # Anything not wrapped in double quotes is returned untouched.
    if not (value and value[0] == value[-1] == '"'):
        return value

    # Deliberately not RFC-strict: strict unquoting breaks IE and other
    # browsers, which e.g. upload files with "C:\foo\bar.txt" as filename.
    value = value[1:-1]

    # A filename that starts like a UNC path keeps its leading double
    # backslash: collapsing it would corrupt the path. See #458.
    if is_filename and value[:2] == "\\\\":
        return value
    return value.replace("\\\\", "\\").replace('\\"', '"')
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def dict_from_cookiejar(cj):
    """Returns a key/value dictionary from a CookieJar.

    :param cj: CookieJar object to extract cookies from.
    :rtype: dict
    """
    # Later cookies with the same name overwrite earlier ones.
    return {c.name: c.value for c in cj}
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
def add_dict_to_cookiejar(cj, cookie_dict):
    """Returns a CookieJar from a key/value dictionary.

    :param cj: CookieJar to insert cookies into.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :rtype: CookieJar
    """
    # Delegates entirely to cookiejar_from_dict, which mutates and
    # returns the supplied jar.
    return cookiejar_from_dict(cookie_dict, cj)
|
| 487 |
+
|
| 488 |
+
|
| 489 |
+
def get_encodings_from_content(content):
    """Returns encodings from given content string.

    Deprecated; slated for removal in requests 3.0 (see issue #2266).

    :param content: bytestring to extract encodings from.
    """
    warnings.warn(
        (
            "In requests 3.0, get_encodings_from_content will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    # Probe, in order: <meta charset=...>, the http-equiv pragma form,
    # and an XML declaration's encoding attribute.
    patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )
    found = []
    for pattern in patterns:
        found.extend(pattern.findall(content))
    return found
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def _parse_content_type_header(header):
    """Returns content type and parameters from given header

    :param header: string
    :return: tuple containing content type and dictionary of
        parameters
    """
    strip_chars = "\"' "
    content_type, _, remainder = header.partition(";")
    params_dict = {}

    for segment in remainder.split(";"):
        segment = segment.strip()
        if not segment:
            continue
        key, sep, raw_value = segment.partition("=")
        if sep:
            # key=value parameter: strip quotes/spaces from both sides.
            params_dict[key.strip(strip_chars).lower()] = raw_value.strip(strip_chars)
        else:
            # Bare flag parameter maps to True.
            params_dict[segment.lower()] = True

    return content_type.strip(), params_dict
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def get_encoding_from_headers(headers):
    """Returns encodings from given HTTP Header Dict.

    :param headers: dictionary to extract encoding from.
    :rtype: str
    """
    content_type = headers.get("content-type")
    if not content_type:
        return None

    content_type, params = _parse_content_type_header(content_type)

    if "charset" in params:
        # An explicit charset parameter always wins; drop stray quotes.
        return params["charset"].strip("'\"")
    if "text" in content_type:
        # HTTP/1.1 historical default for text/* without a declared charset.
        return "ISO-8859-1"
    if "application/json" in content_type:
        # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
        return "utf-8"
    return None
|
| 562 |
+
|
| 563 |
+
|
| 564 |
+
def stream_decode_response_unicode(iterator, r):
    """Stream decodes an iterator."""
    encoding = r.encoding
    if encoding is None:
        # No encoding known: pass the raw byte chunks straight through.
        yield from iterator
        return

    decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
    for chunk in iterator:
        decoded = decoder.decode(chunk)
        if decoded:
            yield decoded
    # Flush whatever the decoder buffered from a trailing partial sequence.
    tail = decoder.decode(b"", final=True)
    if tail:
        yield tail
|
| 579 |
+
|
| 580 |
+
|
| 581 |
+
def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    if slice_length is None or slice_length <= 0:
        # Missing / non-positive length means "everything in one slice".
        slice_length = len(string)
    total = len(string)
    position = 0
    while position < total:
        yield string[position : position + slice_length]
        position += slice_length
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    Deprecated; slated for removal in requests 3.0 (see issue #2266).

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn(
        (
            "In requests 3.0, get_unicode_from_response will be removed. For "
            "more information, please see the discussion on issue #2266. (This"
            " warning should only appear once.)"
        ),
        DeprecationWarning,
    )

    tried_encodings = []

    # First attempt: the charset advertised in the content-type header.
    encoding = get_encoding_from_headers(r.headers)
    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fallback: decode permissively. When `encoding` is None, str() raises
    # TypeError and the raw bytes are returned unchanged.
    try:
        return str(r.content, encoding, errors="replace")
    except TypeError:
        return r.content
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
# The unreserved URI characters (RFC 3986): ALPHA / DIGIT / "-" / "." / "_" / "~"
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
)
|
| 634 |
+
|
| 635 |
+
|
| 636 |
+
def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    parts = uri.split("%")
    # parts[0] precedes any '%'; every later part starts right after one.
    for i in range(1, len(parts)):
        hex_pair = parts[i][0:2]
        if len(hex_pair) != 2 or not hex_pair.isalnum():
            # Truncated or clearly non-hex escape: keep the '%' literally.
            parts[i] = f"%{parts[i]}"
            continue
        try:
            char = chr(int(hex_pair, 16))
        except ValueError:
            # isalnum() admits non-hex alphanumerics; reject those here.
            raise InvalidURL(f"Invalid percent-escape sequence: '{hex_pair}'")
        if char in UNRESERVED_SET:
            parts[i] = char + parts[i][2:]
        else:
            parts[i] = f"%{parts[i]}"
    return "".join(parts)
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Normal path: unquote only the unreserved characters, then quote
        # only illegal ones (reserved, unreserved and '%' stay as-is).
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # The URI contained a bad percent-escape, so we can't unquote it.
        # Quote it as-is, letting bare '%' characters get escaped (note
        # '%' is absent from the safe set here).
        return quote(uri, safe=safe_without_percent)
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    # Compare the masked forms of both addresses as native-endian integers.
    ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0]
    netaddr, bits = net.split("/")
    netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0]
    network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask
    return (ipaddr & netmask) == (network & netmask)
|
| 695 |
+
|
| 696 |
+
|
| 697 |
+
def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Set the top `mask` bits of a 32-bit word and clear the host bits.
    host_bits = 32 - mask
    netmask_value = (0xFFFFFFFF >> host_bits) << host_bits
    return socket.inet_ntoa(struct.pack(">I", netmask_value))
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    # inet_aton raises OSError for anything it can't parse as IPv4.
    try:
        socket.inet_aton(string_ip)
        return True
    except OSError:
        return False
|
| 717 |
+
|
| 718 |
+
|
| 719 |
+
def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    # Exactly one '/' separating address and prefix length is required.
    if string_network.count("/") != 1:
        return False

    address, _, mask_text = string_network.partition("/")
    try:
        mask = int(mask_text)
    except ValueError:
        return False
    if not 1 <= mask <= 32:
        return False

    try:
        socket.inet_aton(address)
    except OSError:
        return False
    return True
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
@contextlib.contextmanager
def set_environ(env_name, value):
    """Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothing"""
    if value is None:
        # Nothing to change; still behave as a context manager.
        yield
        return

    old_value = os.environ.get(env_name)
    os.environ[env_name] = value
    try:
        yield
    finally:
        # Restore the prior state: delete if it didn't exist before.
        if old_value is None:
            del os.environ[env_name]
        else:
            os.environ[env_name] = old_value
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :param url: the URL being requested.
    :param no_proxy: comma-separated host list overriding the ``no_proxy``
        environment variable, or ``None`` to read it from the environment.
    :rtype: bool
    """

    # Prioritize lowercase environment variables over uppercase
    # to keep a consistent behaviour with other http projects (curl, wget).
    def get_proxy(key):
        return os.environ.get(key) or os.environ.get(key.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = get_proxy("no_proxy")
    parsed = urlparse(url)

    if parsed.hostname is None:
        # URLs don't always have hostnames, e.g. file:/// urls.
        return True

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the hostname, both with and without the port.
        # Whitespace is stripped and empty entries are discarded.
        no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host)

        if is_ipv4_address(parsed.hostname):
            # Literal IPv4 host: compare against CIDR blocks or plain IPs.
            for proxy_ip in no_proxy:
                if is_valid_cidr(proxy_ip):
                    if address_in_network(parsed.hostname, proxy_ip):
                        return True
                elif parsed.hostname == proxy_ip:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            # Hostname case: suffix-match each entry, with and without port.
            host_with_port = parsed.hostname
            if parsed.port:
                host_with_port += f":{parsed.port}"

            for host in no_proxy:
                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    # Defer to the platform's proxy_bypass (e.g. Windows registry, macOS
    # SystemConfiguration) with the caller's no_proxy temporarily applied.
    with set_environ("no_proxy", no_proxy_arg):
        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False
|
| 824 |
+
|
| 825 |
+
|
| 826 |
+
def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    # An empty mapping when the URL is exempt; otherwise whatever the
    # platform's environment (HTTP_PROXY etc.) advertises.
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()
|
| 836 |
+
|
| 837 |
+
|
| 838 |
+
def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    parts = urlparse(url)
    if parts.hostname is None:
        # No host (e.g. file:///) — fall back to scheme-level lookup.
        return proxies.get(parts.scheme, proxies.get("all"))

    # Most specific key wins: scheme+host, scheme, all+host, all.
    candidate_keys = (
        parts.scheme + "://" + parts.hostname,
        parts.scheme,
        "all://" + parts.hostname,
        "all",
    )
    for key in candidate_keys:
        if key in proxies:
            return proxies[key]
    return None
|
| 862 |
+
|
| 863 |
+
|
| 864 |
+
def resolve_proxies(request, proxies, trust_env=True):
    """This method takes proxy information from a request and configuration
    input to resolve a mapping of target proxies. This will consider settings
    such as NO_PROXY to strip proxy configurations.

    :param request: Request or PreparedRequest
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    :param trust_env: Boolean declaring whether to trust environment configs

    :rtype: dict
    """
    proxies = {} if proxies is None else proxies
    resolved = proxies.copy()
    url = request.url
    no_proxy = proxies.get("no_proxy")

    if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
        # Pull an environment proxy for this scheme, but never override an
        # explicitly configured one (setdefault).
        scheme = urlparse(url).scheme
        environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        environment_proxy = environ_proxies.get(scheme, environ_proxies.get("all"))
        if environment_proxy:
            resolved.setdefault(scheme, environment_proxy)
    return resolved
|
| 889 |
+
|
| 890 |
+
|
| 891 |
+
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    # "<name>/<version>" per the usual User-Agent product-token format.
    return "/".join((name, __version__))
|
| 898 |
+
|
| 899 |
+
|
| 900 |
+
def default_headers():
    """
    :rtype: requests.structures.CaseInsensitiveDict
    """
    # The baseline header set every Session starts from.
    headers = {
        "User-Agent": default_user_agent(),
        "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
        "Accept": "*/*",
        "Connection": "keep-alive",
    }
    return CaseInsensitiveDict(headers)
|
| 912 |
+
|
| 913 |
+
|
| 914 |
+
def parse_header_links(value):
    """Return a list of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """
    strip_chars = " '\""

    value = value.strip(strip_chars)
    if not value:
        return []

    links = []
    for segment in re.split(", *<", value):
        # Split the URL part off from its ;-separated parameters.
        url, _, params = segment.partition(";")
        link = {"url": url.strip("<> '\"")}

        for param in params.split(";"):
            try:
                key, param_value = param.split("=")
            except ValueError:
                # Malformed parameter (no '=' or more than one): stop
                # processing parameters for this link, as before.
                break
            link[key.strip(strip_chars)] = param_value.strip(strip_chars)

        links.append(link)
    return links
|
| 949 |
+
|
| 950 |
+
|
| 951 |
+
# Null bytes; no need to recreate these on each call to guess_json_utf
_null = b"\x00"
_null2 = _null * 2
_null3 = _null * 3
|
| 955 |
+
|
| 956 |
+
|
| 957 |
+
def guess_json_utf(data):
    """
    :rtype: str
    """
    # JSON always starts with two ASCII characters, so detection is as
    # easy as counting the nulls and from their location and count
    # determine the encoding. Also detect a BOM, if present.
    sample = data[:4]
    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return "utf-32"  # BOM included
    if sample[:3] == codecs.BOM_UTF8:
        return "utf-8-sig"  # BOM included, MS style (discouraged)
    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return "utf-16"  # BOM included

    null = b"\x00"
    nullcount = sample.count(null)
    if nullcount == 0:
        return "utf-8"
    if nullcount == 2:
        if sample[::2] == null * 2:  # 1st and 3rd are null
            return "utf-16-be"
        if sample[1::2] == null * 2:  # 2nd and 4th are null
            return "utf-16-le"
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nullcount == 3:
        if sample[:3] == null * 3:
            return "utf-32-be"
        if sample[1:] == null * 3:
            return "utf-32-le"
        # Did not detect a valid UTF-32 ascii-range character
    return None
|
| 987 |
+
|
| 988 |
+
|
| 989 |
+
def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed

    # A defect in urlparse determines that there isn't a netloc present in some
    # urls. We previously assumed parsing was overly cautious, and swapped the
    # netloc and path. Due to a lack of tests on the original defect, this is
    # maintained with parse_url for backwards compatibility.
    netloc = parsed.netloc
    if not netloc:
        netloc, path = path, netloc

    if auth:
        # parse_url doesn't provide the netloc with auth
        # so we'll add it ourselves.
        netloc = "@".join([auth, netloc])
    if scheme is None:
        scheme = new_scheme

    return urlunparse((scheme, netloc, path or "", "", query, fragment))
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parsed = urlparse(url)
    try:
        # unquote(None) raises TypeError, so a URL without full userinfo
        # (missing username or password) falls back to empty credentials.
        return (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        return ("", "")
|
| 1032 |
+
|
| 1033 |
+
|
| 1034 |
+
def check_header_validity(header):
    """Verifies that header parts don't contain leading whitespace
    reserved characters, or return characters.

    :param header: tuple, in the format (name, value).
    """
    name, value = header
    # Index 0 selects the name validator, index 1 the value validator.
    _validate_header_part(header, name, 0)
    _validate_header_part(header, value, 1)
|
| 1043 |
+
|
| 1044 |
+
|
| 1045 |
+
def _validate_header_part(header, header_part, header_validator_index):
    # Choose the validator matching the part's type; isinstance (not a
    # type-keyed table) so str/bytes subclasses are accepted too.
    if isinstance(header_part, str):
        part_validator = _HEADER_VALIDATORS_STR[header_validator_index]
    elif isinstance(header_part, bytes):
        part_validator = _HEADER_VALIDATORS_BYTE[header_validator_index]
    else:
        raise InvalidHeader(
            f"Header part ({header_part!r}) from {header} "
            f"must be of type str or bytes, not {type(header_part)}"
        )

    if not part_validator.match(header_part):
        # Index 0 validates header names, index 1 validates header values.
        header_kind = "name" if header_validator_index == 0 else "value"
        raise InvalidHeader(
            f"Invalid leading whitespace, reserved character(s), or return "
            f"character(s) in header {header_kind}: {header_part!r}"
        )
|
| 1062 |
+
|
| 1063 |
+
|
| 1064 |
+
def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parts = urlparse(url)
    netloc, path = parts.netloc, parts.path

    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Drop any "user:pass@" prefix; keep everything after the last '@'.
    netloc = netloc.rsplit("@", 1)[-1]

    # Rebuild with an empty fragment.
    return urlunparse((parts.scheme, netloc, path, parts.params, parts.query, ""))
|
| 1079 |
+
|
| 1080 |
+
|
| 1081 |
+
def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    body_seek = getattr(prepared_request.body, "seek", None)
    # Only seekable bodies with a recorded integer start position can rewind.
    if body_seek is None or not isinstance(
        prepared_request._body_position, integer_types
    ):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
    try:
        body_seek(prepared_request._body_position)
    except OSError:
        raise UnrewindableBodyError(
            "An error occurred when rewinding request body for redirect."
        )
|
janus/lib/python3.10/site-packages/safetensors/_safetensors_rust.abi3.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d0b3dba08af60863aafd4d9f18abf0b3299ae852f869860655fd869af56bebb7
|
| 3 |
+
size 1089040
|
janus/lib/python3.10/site-packages/sympy/__init__.py
ADDED
|
@@ -0,0 +1,542 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SymPy is a Python library for symbolic mathematics. It aims to become a
|
| 3 |
+
full-featured computer algebra system (CAS) while keeping the code as simple
|
| 4 |
+
as possible in order to be comprehensible and easily extensible. SymPy is
|
| 5 |
+
written entirely in Python. It depends on mpmath, and other external libraries
|
| 6 |
+
may be optionally for things like plotting support.
|
| 7 |
+
|
| 8 |
+
See the webpage for more information and documentation:
|
| 9 |
+
|
| 10 |
+
https://sympy.org
|
| 11 |
+
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
import sys
|
| 16 |
+
if sys.version_info < (3, 8):
|
| 17 |
+
raise ImportError("Python version 3.8 or above is required for SymPy.")
|
| 18 |
+
del sys
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
try:
|
| 22 |
+
import mpmath
|
| 23 |
+
except ImportError:
|
| 24 |
+
raise ImportError("SymPy now depends on mpmath as an external library. "
|
| 25 |
+
"See https://docs.sympy.org/latest/install.html#mpmath for more information.")
|
| 26 |
+
|
| 27 |
+
del mpmath
|
| 28 |
+
|
| 29 |
+
from sympy.release import __version__
|
| 30 |
+
from sympy.core.cache import lazy_function
|
| 31 |
+
|
| 32 |
+
if 'dev' in __version__:
|
| 33 |
+
def enable_warnings():
|
| 34 |
+
import warnings
|
| 35 |
+
warnings.filterwarnings('default', '.*', DeprecationWarning, module='sympy.*')
|
| 36 |
+
del warnings
|
| 37 |
+
enable_warnings()
|
| 38 |
+
del enable_warnings
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def __sympy_debug():
|
| 42 |
+
# helper function so we don't import os globally
|
| 43 |
+
import os
|
| 44 |
+
debug_str = os.getenv('SYMPY_DEBUG', 'False')
|
| 45 |
+
if debug_str in ('True', 'False'):
|
| 46 |
+
return eval(debug_str)
|
| 47 |
+
else:
|
| 48 |
+
raise RuntimeError("unrecognized value for SYMPY_DEBUG: %s" %
|
| 49 |
+
debug_str)
|
| 50 |
+
SYMPY_DEBUG = __sympy_debug() # type: bool
|
| 51 |
+
|
| 52 |
+
from .core import (sympify, SympifyError, cacheit, Basic, Atom,
|
| 53 |
+
preorder_traversal, S, Expr, AtomicExpr, UnevaluatedExpr, Symbol,
|
| 54 |
+
Wild, Dummy, symbols, var, Number, Float, Rational, Integer,
|
| 55 |
+
NumberSymbol, RealNumber, igcd, ilcm, seterr, E, I, nan, oo, pi, zoo,
|
| 56 |
+
AlgebraicNumber, comp, mod_inverse, Pow, integer_nthroot, integer_log,
|
| 57 |
+
trailing, Mul, prod, Add, Mod, Rel, Eq, Ne, Lt, Le, Gt, Ge, Equality,
|
| 58 |
+
GreaterThan, LessThan, Unequality, StrictGreaterThan, StrictLessThan,
|
| 59 |
+
vectorize, Lambda, WildFunction, Derivative, diff, FunctionClass,
|
| 60 |
+
Function, Subs, expand, PoleError, count_ops, expand_mul, expand_log,
|
| 61 |
+
expand_func, expand_trig, expand_complex, expand_multinomial, nfloat,
|
| 62 |
+
expand_power_base, expand_power_exp, arity, PrecisionExhausted, N,
|
| 63 |
+
evalf, Tuple, Dict, gcd_terms, factor_terms, factor_nc, evaluate,
|
| 64 |
+
Catalan, EulerGamma, GoldenRatio, TribonacciConstant, bottom_up, use,
|
| 65 |
+
postorder_traversal, default_sort_key, ordered, num_digits)
|
| 66 |
+
|
| 67 |
+
from .logic import (to_cnf, to_dnf, to_nnf, And, Or, Not, Xor, Nand, Nor,
|
| 68 |
+
Implies, Equivalent, ITE, POSform, SOPform, simplify_logic, bool_map,
|
| 69 |
+
true, false, satisfiable)
|
| 70 |
+
|
| 71 |
+
from .assumptions import (AppliedPredicate, Predicate, AssumptionsContext,
|
| 72 |
+
assuming, Q, ask, register_handler, remove_handler, refine)
|
| 73 |
+
|
| 74 |
+
from .polys import (Poly, PurePoly, poly_from_expr, parallel_poly_from_expr,
|
| 75 |
+
degree, total_degree, degree_list, LC, LM, LT, pdiv, prem, pquo,
|
| 76 |
+
pexquo, div, rem, quo, exquo, half_gcdex, gcdex, invert,
|
| 77 |
+
subresultants, resultant, discriminant, cofactors, gcd_list, gcd,
|
| 78 |
+
lcm_list, lcm, terms_gcd, trunc, monic, content, primitive, compose,
|
| 79 |
+
decompose, sturm, gff_list, gff, sqf_norm, sqf_part, sqf_list, sqf,
|
| 80 |
+
factor_list, factor, intervals, refine_root, count_roots, all_roots,
|
| 81 |
+
real_roots, nroots, ground_roots, nth_power_roots_poly, cancel,
|
| 82 |
+
reduced, groebner, is_zero_dimensional, GroebnerBasis, poly,
|
| 83 |
+
symmetrize, horner, interpolate, rational_interpolate, viete, together,
|
| 84 |
+
BasePolynomialError, ExactQuotientFailed, PolynomialDivisionFailed,
|
| 85 |
+
OperationNotSupported, HeuristicGCDFailed, HomomorphismFailed,
|
| 86 |
+
IsomorphismFailed, ExtraneousFactors, EvaluationFailed,
|
| 87 |
+
RefinementFailed, CoercionFailed, NotInvertible, NotReversible,
|
| 88 |
+
NotAlgebraic, DomainError, PolynomialError, UnificationFailed,
|
| 89 |
+
GeneratorsError, GeneratorsNeeded, ComputationFailed,
|
| 90 |
+
UnivariatePolynomialError, MultivariatePolynomialError,
|
| 91 |
+
PolificationFailed, OptionError, FlagError, minpoly,
|
| 92 |
+
minimal_polynomial, primitive_element, field_isomorphism,
|
| 93 |
+
to_number_field, isolate, round_two, prime_decomp, prime_valuation,
|
| 94 |
+
galois_group, itermonomials, Monomial, lex, grlex,
|
| 95 |
+
grevlex, ilex, igrlex, igrevlex, CRootOf, rootof, RootOf,
|
| 96 |
+
ComplexRootOf, RootSum, roots, Domain, FiniteField, IntegerRing,
|
| 97 |
+
RationalField, RealField, ComplexField, PythonFiniteField,
|
| 98 |
+
GMPYFiniteField, PythonIntegerRing, GMPYIntegerRing, PythonRational,
|
| 99 |
+
GMPYRationalField, AlgebraicField, PolynomialRing, FractionField,
|
| 100 |
+
ExpressionDomain, FF_python, FF_gmpy, ZZ_python, ZZ_gmpy, QQ_python,
|
| 101 |
+
QQ_gmpy, GF, FF, ZZ, QQ, ZZ_I, QQ_I, RR, CC, EX, EXRAW,
|
| 102 |
+
construct_domain, swinnerton_dyer_poly, cyclotomic_poly,
|
| 103 |
+
symmetric_poly, random_poly, interpolating_poly, jacobi_poly,
|
| 104 |
+
chebyshevt_poly, chebyshevu_poly, hermite_poly, hermite_prob_poly,
|
| 105 |
+
legendre_poly, laguerre_poly, apart, apart_list, assemble_partfrac_list,
|
| 106 |
+
Options, ring, xring, vring, sring, field, xfield, vfield, sfield)
|
| 107 |
+
|
| 108 |
+
from .series import (Order, O, limit, Limit, gruntz, series, approximants,
|
| 109 |
+
residue, EmptySequence, SeqPer, SeqFormula, sequence, SeqAdd, SeqMul,
|
| 110 |
+
fourier_series, fps, difference_delta, limit_seq)
|
| 111 |
+
|
| 112 |
+
from .functions import (factorial, factorial2, rf, ff, binomial,
|
| 113 |
+
RisingFactorial, FallingFactorial, subfactorial, carmichael,
|
| 114 |
+
fibonacci, lucas, motzkin, tribonacci, harmonic, bernoulli, bell, euler,
|
| 115 |
+
catalan, genocchi, andre, partition, divisor_sigma, legendre_symbol,
|
| 116 |
+
jacobi_symbol, kronecker_symbol, mobius, primenu, primeomega,
|
| 117 |
+
totient, reduced_totient, primepi, sqrt, root, Min, Max, Id,
|
| 118 |
+
real_root, Rem, cbrt, re, im, sign, Abs, conjugate, arg, polar_lift,
|
| 119 |
+
periodic_argument, unbranched_argument, principal_branch, transpose,
|
| 120 |
+
adjoint, polarify, unpolarify, sin, cos, tan, sec, csc, cot, sinc,
|
| 121 |
+
asin, acos, atan, asec, acsc, acot, atan2, exp_polar, exp, ln, log,
|
| 122 |
+
LambertW, sinh, cosh, tanh, coth, sech, csch, asinh, acosh, atanh,
|
| 123 |
+
acoth, asech, acsch, floor, ceiling, frac, Piecewise, piecewise_fold,
|
| 124 |
+
piecewise_exclusive, erf, erfc, erfi, erf2, erfinv, erfcinv, erf2inv,
|
| 125 |
+
Ei, expint, E1, li, Li, Si, Ci, Shi, Chi, fresnels, fresnelc, gamma,
|
| 126 |
+
lowergamma, uppergamma, polygamma, loggamma, digamma, trigamma,
|
| 127 |
+
multigamma, dirichlet_eta, zeta, lerchphi, polylog, stieltjes, Eijk,
|
| 128 |
+
LeviCivita, KroneckerDelta, SingularityFunction, DiracDelta, Heaviside,
|
| 129 |
+
bspline_basis, bspline_basis_set, interpolating_spline, besselj,
|
| 130 |
+
bessely, besseli, besselk, hankel1, hankel2, jn, yn, jn_zeros, hn1,
|
| 131 |
+
hn2, airyai, airybi, airyaiprime, airybiprime, marcumq, hyper,
|
| 132 |
+
meijerg, appellf1, legendre, assoc_legendre, hermite, hermite_prob,
|
| 133 |
+
chebyshevt, chebyshevu, chebyshevu_root, chebyshevt_root, laguerre,
|
| 134 |
+
assoc_laguerre, gegenbauer, jacobi, jacobi_normalized, Ynm, Ynm_c,
|
| 135 |
+
Znm, elliptic_k, elliptic_f, elliptic_e, elliptic_pi, beta, mathieus,
|
| 136 |
+
mathieuc, mathieusprime, mathieucprime, riemann_xi, betainc, betainc_regularized)
|
| 137 |
+
|
| 138 |
+
from .ntheory import (nextprime, prevprime, prime, primerange,
|
| 139 |
+
randprime, Sieve, sieve, primorial, cycle_length, composite,
|
| 140 |
+
compositepi, isprime, divisors, proper_divisors, factorint,
|
| 141 |
+
multiplicity, perfect_power, pollard_pm1, pollard_rho, primefactors,
|
| 142 |
+
divisor_count, proper_divisor_count,
|
| 143 |
+
factorrat,
|
| 144 |
+
mersenne_prime_exponent, is_perfect, is_mersenne_prime, is_abundant,
|
| 145 |
+
is_deficient, is_amicable, is_carmichael, abundance, npartitions, is_primitive_root,
|
| 146 |
+
is_quad_residue, n_order, sqrt_mod,
|
| 147 |
+
quadratic_residues, primitive_root, nthroot_mod, is_nthpow_residue,
|
| 148 |
+
sqrt_mod_iter, discrete_log, quadratic_congruence,
|
| 149 |
+
binomial_coefficients, binomial_coefficients_list,
|
| 150 |
+
multinomial_coefficients, continued_fraction_periodic,
|
| 151 |
+
continued_fraction_iterator, continued_fraction_reduce,
|
| 152 |
+
continued_fraction_convergents, continued_fraction, egyptian_fraction)
|
| 153 |
+
|
| 154 |
+
from .concrete import product, Product, summation, Sum
|
| 155 |
+
|
| 156 |
+
from .discrete import (fft, ifft, ntt, intt, fwht, ifwht, mobius_transform,
|
| 157 |
+
inverse_mobius_transform, convolution, covering_product,
|
| 158 |
+
intersecting_product)
|
| 159 |
+
|
| 160 |
+
from .simplify import (simplify, hypersimp, hypersimilar, logcombine,
|
| 161 |
+
separatevars, posify, besselsimp, kroneckersimp, signsimp,
|
| 162 |
+
nsimplify, FU, fu, sqrtdenest, cse, epath, EPath, hyperexpand,
|
| 163 |
+
collect, rcollect, radsimp, collect_const, fraction, numer, denom,
|
| 164 |
+
trigsimp, exptrigsimp, powsimp, powdenest, combsimp, gammasimp,
|
| 165 |
+
ratsimp, ratsimpmodprime)
|
| 166 |
+
|
| 167 |
+
from .sets import (Set, Interval, Union, EmptySet, FiniteSet, ProductSet,
|
| 168 |
+
Intersection, DisjointUnion, imageset, Complement, SymmetricDifference, ImageSet,
|
| 169 |
+
Range, ComplexRegion, Complexes, Reals, Contains, ConditionSet, Ordinal,
|
| 170 |
+
OmegaPower, ord0, PowerSet, Naturals, Naturals0, UniversalSet,
|
| 171 |
+
Integers, Rationals)
|
| 172 |
+
|
| 173 |
+
from .solvers import (solve, solve_linear_system, solve_linear_system_LU,
|
| 174 |
+
solve_undetermined_coeffs, nsolve, solve_linear, checksol, det_quick,
|
| 175 |
+
inv_quick, check_assumptions, failing_assumptions, diophantine,
|
| 176 |
+
rsolve, rsolve_poly, rsolve_ratio, rsolve_hyper, checkodesol,
|
| 177 |
+
classify_ode, dsolve, homogeneous_order, solve_poly_system,
|
| 178 |
+
solve_triangulated, pde_separate, pde_separate_add, pde_separate_mul,
|
| 179 |
+
pdsolve, classify_pde, checkpdesol, ode_order, reduce_inequalities,
|
| 180 |
+
reduce_abs_inequality, reduce_abs_inequalities, solve_poly_inequality,
|
| 181 |
+
solve_rational_inequalities, solve_univariate_inequality, decompogen,
|
| 182 |
+
solveset, linsolve, linear_eq_to_matrix, nonlinsolve, substitution)
|
| 183 |
+
|
| 184 |
+
from .matrices import (ShapeError, NonSquareMatrixError, GramSchmidt,
|
| 185 |
+
casoratian, diag, eye, hessian, jordan_cell, list2numpy, matrix2numpy,
|
| 186 |
+
matrix_multiply_elementwise, ones, randMatrix, rot_axis1, rot_axis2,
|
| 187 |
+
rot_axis3, symarray, wronskian, zeros, MutableDenseMatrix,
|
| 188 |
+
DeferredVector, MatrixBase, Matrix, MutableMatrix,
|
| 189 |
+
MutableSparseMatrix, banded, ImmutableDenseMatrix,
|
| 190 |
+
ImmutableSparseMatrix, ImmutableMatrix, SparseMatrix, MatrixSlice,
|
| 191 |
+
BlockDiagMatrix, BlockMatrix, FunctionMatrix, Identity, Inverse,
|
| 192 |
+
MatAdd, MatMul, MatPow, MatrixExpr, MatrixSymbol, Trace, Transpose,
|
| 193 |
+
ZeroMatrix, OneMatrix, blockcut, block_collapse, matrix_symbols,
|
| 194 |
+
Adjoint, hadamard_product, HadamardProduct, HadamardPower,
|
| 195 |
+
Determinant, det, diagonalize_vector, DiagMatrix, DiagonalMatrix,
|
| 196 |
+
DiagonalOf, trace, DotProduct, kronecker_product, KroneckerProduct,
|
| 197 |
+
PermutationMatrix, MatrixPermute, Permanent, per, rot_ccw_axis1,
|
| 198 |
+
rot_ccw_axis2, rot_ccw_axis3, rot_givens)
|
| 199 |
+
|
| 200 |
+
from .geometry import (Point, Point2D, Point3D, Line, Ray, Segment, Line2D,
|
| 201 |
+
Segment2D, Ray2D, Line3D, Segment3D, Ray3D, Plane, Ellipse, Circle,
|
| 202 |
+
Polygon, RegularPolygon, Triangle, rad, deg, are_similar, centroid,
|
| 203 |
+
convex_hull, idiff, intersection, closest_points, farthest_points,
|
| 204 |
+
GeometryError, Curve, Parabola)
|
| 205 |
+
|
| 206 |
+
from .utilities import (flatten, group, take, subsets, variations,
|
| 207 |
+
numbered_symbols, cartes, capture, dict_merge, prefixes, postfixes,
|
| 208 |
+
sift, topological_sort, unflatten, has_dups, has_variety, reshape,
|
| 209 |
+
rotations, filldedent, lambdify,
|
| 210 |
+
threaded, xthreaded, public, memoize_property, timed)
|
| 211 |
+
|
| 212 |
+
from .integrals import (integrate, Integral, line_integrate, mellin_transform,
|
| 213 |
+
inverse_mellin_transform, MellinTransform, InverseMellinTransform,
|
| 214 |
+
laplace_transform, laplace_correspondence, laplace_initial_conds,
|
| 215 |
+
inverse_laplace_transform, LaplaceTransform,
|
| 216 |
+
InverseLaplaceTransform, fourier_transform, inverse_fourier_transform,
|
| 217 |
+
FourierTransform, InverseFourierTransform, sine_transform,
|
| 218 |
+
inverse_sine_transform, SineTransform, InverseSineTransform,
|
| 219 |
+
cosine_transform, inverse_cosine_transform, CosineTransform,
|
| 220 |
+
InverseCosineTransform, hankel_transform, inverse_hankel_transform,
|
| 221 |
+
HankelTransform, InverseHankelTransform, singularityintegrate)
|
| 222 |
+
|
| 223 |
+
from .tensor import (IndexedBase, Idx, Indexed, get_contraction_structure,
|
| 224 |
+
get_indices, shape, MutableDenseNDimArray, ImmutableDenseNDimArray,
|
| 225 |
+
MutableSparseNDimArray, ImmutableSparseNDimArray, NDimArray,
|
| 226 |
+
tensorproduct, tensorcontraction, tensordiagonal, derive_by_array,
|
| 227 |
+
permutedims, Array, DenseNDimArray, SparseNDimArray)
|
| 228 |
+
|
| 229 |
+
from .parsing import parse_expr
|
| 230 |
+
|
| 231 |
+
from .calculus import (euler_equations, singularities, is_increasing,
|
| 232 |
+
is_strictly_increasing, is_decreasing, is_strictly_decreasing,
|
| 233 |
+
is_monotonic, finite_diff_weights, apply_finite_diff,
|
| 234 |
+
differentiate_finite, periodicity, not_empty_in, AccumBounds,
|
| 235 |
+
is_convex, stationary_points, minimum, maximum)
|
| 236 |
+
|
| 237 |
+
from .algebras import Quaternion
|
| 238 |
+
|
| 239 |
+
from .printing import (pager_print, pretty, pretty_print, pprint,
|
| 240 |
+
pprint_use_unicode, pprint_try_use_unicode, latex, print_latex,
|
| 241 |
+
multiline_latex, mathml, print_mathml, python, print_python, pycode,
|
| 242 |
+
ccode, print_ccode, smtlib_code, glsl_code, print_glsl, cxxcode, fcode,
|
| 243 |
+
print_fcode, rcode, print_rcode, jscode, print_jscode, julia_code,
|
| 244 |
+
mathematica_code, octave_code, rust_code, print_gtk, preview, srepr,
|
| 245 |
+
print_tree, StrPrinter, sstr, sstrrepr, TableForm, dotprint,
|
| 246 |
+
maple_code, print_maple_code)
|
| 247 |
+
|
| 248 |
+
test = lazy_function('sympy.testing.runtests_pytest', 'test')
|
| 249 |
+
doctest = lazy_function('sympy.testing.runtests', 'doctest')
|
| 250 |
+
|
| 251 |
+
# This module causes conflicts with other modules:
|
| 252 |
+
# from .stats import *
|
| 253 |
+
# Adds about .04-.05 seconds of import time
|
| 254 |
+
# from combinatorics import *
|
| 255 |
+
# This module is slow to import:
|
| 256 |
+
#from physics import units
|
| 257 |
+
from .plotting import plot, textplot, plot_backends, plot_implicit, plot_parametric
|
| 258 |
+
from .interactive import init_session, init_printing, interactive_traversal
|
| 259 |
+
|
| 260 |
+
evalf._create_evalf_table()
|
| 261 |
+
|
| 262 |
+
__all__ = [
|
| 263 |
+
'__version__',
|
| 264 |
+
|
| 265 |
+
# sympy.core
|
| 266 |
+
'sympify', 'SympifyError', 'cacheit', 'Basic', 'Atom',
|
| 267 |
+
'preorder_traversal', 'S', 'Expr', 'AtomicExpr', 'UnevaluatedExpr',
|
| 268 |
+
'Symbol', 'Wild', 'Dummy', 'symbols', 'var', 'Number', 'Float',
|
| 269 |
+
'Rational', 'Integer', 'NumberSymbol', 'RealNumber', 'igcd', 'ilcm',
|
| 270 |
+
'seterr', 'E', 'I', 'nan', 'oo', 'pi', 'zoo', 'AlgebraicNumber', 'comp',
|
| 271 |
+
'mod_inverse', 'Pow', 'integer_nthroot', 'integer_log', 'trailing', 'Mul', 'prod',
|
| 272 |
+
'Add', 'Mod', 'Rel', 'Eq', 'Ne', 'Lt', 'Le', 'Gt', 'Ge', 'Equality',
|
| 273 |
+
'GreaterThan', 'LessThan', 'Unequality', 'StrictGreaterThan',
|
| 274 |
+
'StrictLessThan', 'vectorize', 'Lambda', 'WildFunction', 'Derivative',
|
| 275 |
+
'diff', 'FunctionClass', 'Function', 'Subs', 'expand', 'PoleError',
|
| 276 |
+
'count_ops', 'expand_mul', 'expand_log', 'expand_func', 'expand_trig',
|
| 277 |
+
'expand_complex', 'expand_multinomial', 'nfloat', 'expand_power_base',
|
| 278 |
+
'expand_power_exp', 'arity', 'PrecisionExhausted', 'N', 'evalf', 'Tuple',
|
| 279 |
+
'Dict', 'gcd_terms', 'factor_terms', 'factor_nc', 'evaluate', 'Catalan',
|
| 280 |
+
'EulerGamma', 'GoldenRatio', 'TribonacciConstant', 'bottom_up', 'use',
|
| 281 |
+
'postorder_traversal', 'default_sort_key', 'ordered', 'num_digits',
|
| 282 |
+
|
| 283 |
+
# sympy.logic
|
| 284 |
+
'to_cnf', 'to_dnf', 'to_nnf', 'And', 'Or', 'Not', 'Xor', 'Nand', 'Nor',
|
| 285 |
+
'Implies', 'Equivalent', 'ITE', 'POSform', 'SOPform', 'simplify_logic',
|
| 286 |
+
'bool_map', 'true', 'false', 'satisfiable',
|
| 287 |
+
|
| 288 |
+
# sympy.assumptions
|
| 289 |
+
'AppliedPredicate', 'Predicate', 'AssumptionsContext', 'assuming', 'Q',
|
| 290 |
+
'ask', 'register_handler', 'remove_handler', 'refine',
|
| 291 |
+
|
| 292 |
+
# sympy.polys
|
| 293 |
+
'Poly', 'PurePoly', 'poly_from_expr', 'parallel_poly_from_expr', 'degree',
|
| 294 |
+
'total_degree', 'degree_list', 'LC', 'LM', 'LT', 'pdiv', 'prem', 'pquo',
|
| 295 |
+
'pexquo', 'div', 'rem', 'quo', 'exquo', 'half_gcdex', 'gcdex', 'invert',
|
| 296 |
+
'subresultants', 'resultant', 'discriminant', 'cofactors', 'gcd_list',
|
| 297 |
+
'gcd', 'lcm_list', 'lcm', 'terms_gcd', 'trunc', 'monic', 'content',
|
| 298 |
+
'primitive', 'compose', 'decompose', 'sturm', 'gff_list', 'gff',
|
| 299 |
+
'sqf_norm', 'sqf_part', 'sqf_list', 'sqf', 'factor_list', 'factor',
|
| 300 |
+
'intervals', 'refine_root', 'count_roots', 'all_roots', 'real_roots',
|
| 301 |
+
'nroots', 'ground_roots', 'nth_power_roots_poly', 'cancel', 'reduced',
|
| 302 |
+
'groebner', 'is_zero_dimensional', 'GroebnerBasis', 'poly', 'symmetrize',
|
| 303 |
+
'horner', 'interpolate', 'rational_interpolate', 'viete', 'together',
|
| 304 |
+
'BasePolynomialError', 'ExactQuotientFailed', 'PolynomialDivisionFailed',
|
| 305 |
+
'OperationNotSupported', 'HeuristicGCDFailed', 'HomomorphismFailed',
|
| 306 |
+
'IsomorphismFailed', 'ExtraneousFactors', 'EvaluationFailed',
|
| 307 |
+
'RefinementFailed', 'CoercionFailed', 'NotInvertible', 'NotReversible',
|
| 308 |
+
'NotAlgebraic', 'DomainError', 'PolynomialError', 'UnificationFailed',
|
| 309 |
+
'GeneratorsError', 'GeneratorsNeeded', 'ComputationFailed',
|
| 310 |
+
'UnivariatePolynomialError', 'MultivariatePolynomialError',
|
| 311 |
+
'PolificationFailed', 'OptionError', 'FlagError', 'minpoly',
|
| 312 |
+
'minimal_polynomial', 'primitive_element', 'field_isomorphism',
|
| 313 |
+
'to_number_field', 'isolate', 'round_two', 'prime_decomp',
|
| 314 |
+
'prime_valuation', 'galois_group', 'itermonomials', 'Monomial', 'lex', 'grlex',
|
| 315 |
+
'grevlex', 'ilex', 'igrlex', 'igrevlex', 'CRootOf', 'rootof', 'RootOf',
|
| 316 |
+
'ComplexRootOf', 'RootSum', 'roots', 'Domain', 'FiniteField',
|
| 317 |
+
'IntegerRing', 'RationalField', 'RealField', 'ComplexField',
|
| 318 |
+
'PythonFiniteField', 'GMPYFiniteField', 'PythonIntegerRing',
|
| 319 |
+
'GMPYIntegerRing', 'PythonRational', 'GMPYRationalField',
|
| 320 |
+
'AlgebraicField', 'PolynomialRing', 'FractionField', 'ExpressionDomain',
|
| 321 |
+
'FF_python', 'FF_gmpy', 'ZZ_python', 'ZZ_gmpy', 'QQ_python', 'QQ_gmpy',
|
| 322 |
+
'GF', 'FF', 'ZZ', 'QQ', 'ZZ_I', 'QQ_I', 'RR', 'CC', 'EX', 'EXRAW',
|
| 323 |
+
'construct_domain', 'swinnerton_dyer_poly', 'cyclotomic_poly',
|
| 324 |
+
'symmetric_poly', 'random_poly', 'interpolating_poly', 'jacobi_poly',
|
| 325 |
+
'chebyshevt_poly', 'chebyshevu_poly', 'hermite_poly', 'hermite_prob_poly',
|
| 326 |
+
'legendre_poly', 'laguerre_poly', 'apart', 'apart_list', 'assemble_partfrac_list',
|
| 327 |
+
'Options', 'ring', 'xring', 'vring', 'sring', 'field', 'xfield', 'vfield',
|
| 328 |
+
'sfield',
|
| 329 |
+
|
| 330 |
+
# sympy.series
|
| 331 |
+
'Order', 'O', 'limit', 'Limit', 'gruntz', 'series', 'approximants',
|
| 332 |
+
'residue', 'EmptySequence', 'SeqPer', 'SeqFormula', 'sequence', 'SeqAdd',
|
| 333 |
+
'SeqMul', 'fourier_series', 'fps', 'difference_delta', 'limit_seq',
|
| 334 |
+
|
| 335 |
+
# sympy.functions
|
| 336 |
+
'factorial', 'factorial2', 'rf', 'ff', 'binomial', 'RisingFactorial',
|
| 337 |
+
'FallingFactorial', 'subfactorial', 'carmichael', 'fibonacci', 'lucas',
|
| 338 |
+
'motzkin', 'tribonacci', 'harmonic', 'bernoulli', 'bell', 'euler', 'catalan',
|
| 339 |
+
'genocchi', 'andre', 'partition', 'divisor_sigma', 'legendre_symbol', 'jacobi_symbol',
|
| 340 |
+
'kronecker_symbol', 'mobius', 'primenu', 'primeomega', 'totient', 'primepi',
|
| 341 |
+
'reduced_totient', 'sqrt', 'root', 'Min', 'Max', 'Id', 'real_root',
|
| 342 |
+
'Rem', 'cbrt', 're', 'im', 'sign', 'Abs', 'conjugate', 'arg', 'polar_lift',
|
| 343 |
+
'periodic_argument', 'unbranched_argument', 'principal_branch',
|
| 344 |
+
'transpose', 'adjoint', 'polarify', 'unpolarify', 'sin', 'cos', 'tan',
|
| 345 |
+
'sec', 'csc', 'cot', 'sinc', 'asin', 'acos', 'atan', 'asec', 'acsc',
|
| 346 |
+
'acot', 'atan2', 'exp_polar', 'exp', 'ln', 'log', 'LambertW', 'sinh',
|
| 347 |
+
'cosh', 'tanh', 'coth', 'sech', 'csch', 'asinh', 'acosh', 'atanh',
|
| 348 |
+
'acoth', 'asech', 'acsch', 'floor', 'ceiling', 'frac', 'Piecewise',
|
| 349 |
+
'piecewise_fold', 'piecewise_exclusive', 'erf', 'erfc', 'erfi', 'erf2',
|
| 350 |
+
'erfinv', 'erfcinv', 'erf2inv', 'Ei', 'expint', 'E1', 'li', 'Li', 'Si',
|
| 351 |
+
'Ci', 'Shi', 'Chi', 'fresnels', 'fresnelc', 'gamma', 'lowergamma',
|
| 352 |
+
'uppergamma', 'polygamma', 'loggamma', 'digamma', 'trigamma', 'multigamma',
|
| 353 |
+
'dirichlet_eta', 'zeta', 'lerchphi', 'polylog', 'stieltjes', 'Eijk', 'LeviCivita',
|
| 354 |
+
'KroneckerDelta', 'SingularityFunction', 'DiracDelta', 'Heaviside',
|
| 355 |
+
'bspline_basis', 'bspline_basis_set', 'interpolating_spline', 'besselj',
|
| 356 |
+
'bessely', 'besseli', 'besselk', 'hankel1', 'hankel2', 'jn', 'yn',
|
| 357 |
+
'jn_zeros', 'hn1', 'hn2', 'airyai', 'airybi', 'airyaiprime',
|
| 358 |
+
'airybiprime', 'marcumq', 'hyper', 'meijerg', 'appellf1', 'legendre',
|
| 359 |
+
'assoc_legendre', 'hermite', 'hermite_prob', 'chebyshevt', 'chebyshevu',
|
| 360 |
+
'chebyshevu_root', 'chebyshevt_root', 'laguerre', 'assoc_laguerre',
|
| 361 |
+
'gegenbauer', 'jacobi', 'jacobi_normalized', 'Ynm', 'Ynm_c', 'Znm',
|
| 362 |
+
'elliptic_k', 'elliptic_f', 'elliptic_e', 'elliptic_pi', 'beta',
|
| 363 |
+
'mathieus', 'mathieuc', 'mathieusprime', 'mathieucprime', 'riemann_xi','betainc',
|
| 364 |
+
'betainc_regularized',
|
| 365 |
+
|
| 366 |
+
# sympy.ntheory
|
| 367 |
+
'nextprime', 'prevprime', 'prime', 'primerange', 'randprime',
|
| 368 |
+
'Sieve', 'sieve', 'primorial', 'cycle_length', 'composite', 'compositepi',
|
| 369 |
+
'isprime', 'divisors', 'proper_divisors', 'factorint', 'multiplicity',
|
| 370 |
+
'perfect_power', 'pollard_pm1', 'pollard_rho', 'primefactors',
|
| 371 |
+
'divisor_count', 'proper_divisor_count',
|
| 372 |
+
'factorrat',
|
| 373 |
+
'mersenne_prime_exponent', 'is_perfect', 'is_mersenne_prime',
|
| 374 |
+
'is_abundant', 'is_deficient', 'is_amicable', 'is_carmichael', 'abundance',
|
| 375 |
+
'npartitions',
|
| 376 |
+
'is_primitive_root', 'is_quad_residue',
|
| 377 |
+
'n_order', 'sqrt_mod', 'quadratic_residues',
|
| 378 |
+
'primitive_root', 'nthroot_mod', 'is_nthpow_residue', 'sqrt_mod_iter',
|
| 379 |
+
'discrete_log', 'quadratic_congruence', 'binomial_coefficients',
|
| 380 |
+
'binomial_coefficients_list', 'multinomial_coefficients',
|
| 381 |
+
'continued_fraction_periodic', 'continued_fraction_iterator',
|
| 382 |
+
'continued_fraction_reduce', 'continued_fraction_convergents',
|
| 383 |
+
'continued_fraction', 'egyptian_fraction',
|
| 384 |
+
|
| 385 |
+
# sympy.concrete
|
| 386 |
+
'product', 'Product', 'summation', 'Sum',
|
| 387 |
+
|
| 388 |
+
# sympy.discrete
|
| 389 |
+
'fft', 'ifft', 'ntt', 'intt', 'fwht', 'ifwht', 'mobius_transform',
|
| 390 |
+
'inverse_mobius_transform', 'convolution', 'covering_product',
|
| 391 |
+
'intersecting_product',
|
| 392 |
+
|
| 393 |
+
# sympy.simplify
|
| 394 |
+
'simplify', 'hypersimp', 'hypersimilar', 'logcombine', 'separatevars',
|
| 395 |
+
'posify', 'besselsimp', 'kroneckersimp', 'signsimp',
|
| 396 |
+
'nsimplify', 'FU', 'fu', 'sqrtdenest', 'cse', 'epath', 'EPath',
|
| 397 |
+
'hyperexpand', 'collect', 'rcollect', 'radsimp', 'collect_const',
|
| 398 |
+
'fraction', 'numer', 'denom', 'trigsimp', 'exptrigsimp', 'powsimp',
|
| 399 |
+
'powdenest', 'combsimp', 'gammasimp', 'ratsimp', 'ratsimpmodprime',
|
| 400 |
+
|
| 401 |
+
# sympy.sets
|
| 402 |
+
'Set', 'Interval', 'Union', 'EmptySet', 'FiniteSet', 'ProductSet',
|
| 403 |
+
'Intersection', 'imageset', 'DisjointUnion', 'Complement', 'SymmetricDifference',
|
| 404 |
+
'ImageSet', 'Range', 'ComplexRegion', 'Reals', 'Contains', 'ConditionSet',
|
| 405 |
+
'Ordinal', 'OmegaPower', 'ord0', 'PowerSet', 'Naturals',
|
| 406 |
+
'Naturals0', 'UniversalSet', 'Integers', 'Rationals', 'Complexes',
|
| 407 |
+
|
| 408 |
+
# sympy.solvers
|
| 409 |
+
'solve', 'solve_linear_system', 'solve_linear_system_LU',
|
| 410 |
+
'solve_undetermined_coeffs', 'nsolve', 'solve_linear', 'checksol',
|
| 411 |
+
'det_quick', 'inv_quick', 'check_assumptions', 'failing_assumptions',
|
| 412 |
+
'diophantine', 'rsolve', 'rsolve_poly', 'rsolve_ratio', 'rsolve_hyper',
|
| 413 |
+
'checkodesol', 'classify_ode', 'dsolve', 'homogeneous_order',
|
| 414 |
+
'solve_poly_system', 'solve_triangulated', 'pde_separate',
|
| 415 |
+
'pde_separate_add', 'pde_separate_mul', 'pdsolve', 'classify_pde',
|
| 416 |
+
'checkpdesol', 'ode_order', 'reduce_inequalities',
|
| 417 |
+
'reduce_abs_inequality', 'reduce_abs_inequalities',
|
| 418 |
+
'solve_poly_inequality', 'solve_rational_inequalities',
|
| 419 |
+
'solve_univariate_inequality', 'decompogen', 'solveset', 'linsolve',
|
| 420 |
+
'linear_eq_to_matrix', 'nonlinsolve', 'substitution',
|
| 421 |
+
|
| 422 |
+
# sympy.matrices
|
| 423 |
+
'ShapeError', 'NonSquareMatrixError', 'GramSchmidt', 'casoratian', 'diag',
|
| 424 |
+
'eye', 'hessian', 'jordan_cell', 'list2numpy', 'matrix2numpy',
|
| 425 |
+
'matrix_multiply_elementwise', 'ones', 'randMatrix', 'rot_axis1',
|
| 426 |
+
'rot_axis2', 'rot_axis3', 'symarray', 'wronskian', 'zeros',
|
| 427 |
+
'MutableDenseMatrix', 'DeferredVector', 'MatrixBase', 'Matrix',
|
| 428 |
+
'MutableMatrix', 'MutableSparseMatrix', 'banded', 'ImmutableDenseMatrix',
|
| 429 |
+
'ImmutableSparseMatrix', 'ImmutableMatrix', 'SparseMatrix', 'MatrixSlice',
|
| 430 |
+
'BlockDiagMatrix', 'BlockMatrix', 'FunctionMatrix', 'Identity', 'Inverse',
|
| 431 |
+
'MatAdd', 'MatMul', 'MatPow', 'MatrixExpr', 'MatrixSymbol', 'Trace',
|
| 432 |
+
'Transpose', 'ZeroMatrix', 'OneMatrix', 'blockcut', 'block_collapse',
|
| 433 |
+
'matrix_symbols', 'Adjoint', 'hadamard_product', 'HadamardProduct',
|
| 434 |
+
'HadamardPower', 'Determinant', 'det', 'diagonalize_vector', 'DiagMatrix',
|
| 435 |
+
'DiagonalMatrix', 'DiagonalOf', 'trace', 'DotProduct',
|
| 436 |
+
'kronecker_product', 'KroneckerProduct', 'PermutationMatrix',
|
| 437 |
+
'MatrixPermute', 'Permanent', 'per', 'rot_ccw_axis1', 'rot_ccw_axis2',
|
| 438 |
+
'rot_ccw_axis3', 'rot_givens',
|
| 439 |
+
|
| 440 |
+
# sympy.geometry
|
| 441 |
+
'Point', 'Point2D', 'Point3D', 'Line', 'Ray', 'Segment', 'Line2D',
|
| 442 |
+
'Segment2D', 'Ray2D', 'Line3D', 'Segment3D', 'Ray3D', 'Plane', 'Ellipse',
|
| 443 |
+
'Circle', 'Polygon', 'RegularPolygon', 'Triangle', 'rad', 'deg',
|
| 444 |
+
'are_similar', 'centroid', 'convex_hull', 'idiff', 'intersection',
|
| 445 |
+
'closest_points', 'farthest_points', 'GeometryError', 'Curve', 'Parabola',
|
| 446 |
+
|
| 447 |
+
# sympy.utilities
|
| 448 |
+
'flatten', 'group', 'take', 'subsets', 'variations', 'numbered_symbols',
|
| 449 |
+
'cartes', 'capture', 'dict_merge', 'prefixes', 'postfixes', 'sift',
|
| 450 |
+
'topological_sort', 'unflatten', 'has_dups', 'has_variety', 'reshape',
|
| 451 |
+
'rotations', 'filldedent', 'lambdify', 'threaded', 'xthreaded',
|
| 452 |
+
'public', 'memoize_property', 'timed',
|
| 453 |
+
|
| 454 |
+
# sympy.integrals
|
| 455 |
+
'integrate', 'Integral', 'line_integrate', 'mellin_transform',
|
| 456 |
+
'inverse_mellin_transform', 'MellinTransform', 'InverseMellinTransform',
|
| 457 |
+
'laplace_transform', 'inverse_laplace_transform', 'LaplaceTransform',
|
| 458 |
+
'laplace_correspondence', 'laplace_initial_conds',
|
| 459 |
+
'InverseLaplaceTransform', 'fourier_transform',
|
| 460 |
+
'inverse_fourier_transform', 'FourierTransform',
|
| 461 |
+
'InverseFourierTransform', 'sine_transform', 'inverse_sine_transform',
|
| 462 |
+
'SineTransform', 'InverseSineTransform', 'cosine_transform',
|
| 463 |
+
'inverse_cosine_transform', 'CosineTransform', 'InverseCosineTransform',
|
| 464 |
+
'hankel_transform', 'inverse_hankel_transform', 'HankelTransform',
|
| 465 |
+
'InverseHankelTransform', 'singularityintegrate',
|
| 466 |
+
|
| 467 |
+
# sympy.tensor
|
| 468 |
+
'IndexedBase', 'Idx', 'Indexed', 'get_contraction_structure',
|
| 469 |
+
'get_indices', 'shape', 'MutableDenseNDimArray', 'ImmutableDenseNDimArray',
|
| 470 |
+
'MutableSparseNDimArray', 'ImmutableSparseNDimArray', 'NDimArray',
|
| 471 |
+
'tensorproduct', 'tensorcontraction', 'tensordiagonal', 'derive_by_array',
|
| 472 |
+
'permutedims', 'Array', 'DenseNDimArray', 'SparseNDimArray',
|
| 473 |
+
|
| 474 |
+
# sympy.parsing
|
| 475 |
+
'parse_expr',
|
| 476 |
+
|
| 477 |
+
# sympy.calculus
|
| 478 |
+
'euler_equations', 'singularities', 'is_increasing',
|
| 479 |
+
'is_strictly_increasing', 'is_decreasing', 'is_strictly_decreasing',
|
| 480 |
+
'is_monotonic', 'finite_diff_weights', 'apply_finite_diff',
|
| 481 |
+
'differentiate_finite', 'periodicity', 'not_empty_in',
|
| 482 |
+
'AccumBounds', 'is_convex', 'stationary_points', 'minimum', 'maximum',
|
| 483 |
+
|
| 484 |
+
# sympy.algebras
|
| 485 |
+
'Quaternion',
|
| 486 |
+
|
| 487 |
+
# sympy.printing
|
| 488 |
+
'pager_print', 'pretty', 'pretty_print', 'pprint', 'pprint_use_unicode',
|
| 489 |
+
'pprint_try_use_unicode', 'latex', 'print_latex', 'multiline_latex',
|
| 490 |
+
'mathml', 'print_mathml', 'python', 'print_python', 'pycode', 'ccode',
|
| 491 |
+
'print_ccode', 'smtlib_code', 'glsl_code', 'print_glsl', 'cxxcode', 'fcode',
|
| 492 |
+
'print_fcode', 'rcode', 'print_rcode', 'jscode', 'print_jscode',
|
| 493 |
+
'julia_code', 'mathematica_code', 'octave_code', 'rust_code', 'print_gtk',
|
| 494 |
+
'preview', 'srepr', 'print_tree', 'StrPrinter', 'sstr', 'sstrrepr',
|
| 495 |
+
'TableForm', 'dotprint', 'maple_code', 'print_maple_code',
|
| 496 |
+
|
| 497 |
+
# sympy.plotting
|
| 498 |
+
'plot', 'textplot', 'plot_backends', 'plot_implicit', 'plot_parametric',
|
| 499 |
+
|
| 500 |
+
# sympy.interactive
|
| 501 |
+
'init_session', 'init_printing', 'interactive_traversal',
|
| 502 |
+
|
| 503 |
+
# sympy.testing
|
| 504 |
+
'test', 'doctest',
|
| 505 |
+
]
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
#===========================================================================#
|
| 509 |
+
# #
|
| 510 |
+
# XXX: The names below were importable before SymPy 1.6 using #
|
| 511 |
+
# #
|
| 512 |
+
# from sympy import * #
|
| 513 |
+
# #
|
| 514 |
+
# This happened implicitly because there was no __all__ defined in this #
|
| 515 |
+
# __init__.py file. Not every package is imported. The list matches what #
|
| 516 |
+
# would have been imported before. It is possible that these packages will #
|
| 517 |
+
# not be imported by a star-import from sympy in future. #
|
| 518 |
+
# #
|
| 519 |
+
#===========================================================================#
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
__all__.extend((
|
| 523 |
+
'algebras',
|
| 524 |
+
'assumptions',
|
| 525 |
+
'calculus',
|
| 526 |
+
'concrete',
|
| 527 |
+
'discrete',
|
| 528 |
+
'external',
|
| 529 |
+
'functions',
|
| 530 |
+
'geometry',
|
| 531 |
+
'interactive',
|
| 532 |
+
'multipledispatch',
|
| 533 |
+
'ntheory',
|
| 534 |
+
'parsing',
|
| 535 |
+
'plotting',
|
| 536 |
+
'polys',
|
| 537 |
+
'printing',
|
| 538 |
+
'release',
|
| 539 |
+
'strategies',
|
| 540 |
+
'tensor',
|
| 541 |
+
'utilities',
|
| 542 |
+
))
|
janus/lib/python3.10/site-packages/sympy/abc.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module exports all latin and greek letters as Symbols, so you can
|
| 3 |
+
conveniently do
|
| 4 |
+
|
| 5 |
+
>>> from sympy.abc import x, y
|
| 6 |
+
|
| 7 |
+
instead of the slightly more clunky-looking
|
| 8 |
+
|
| 9 |
+
>>> from sympy import symbols
|
| 10 |
+
>>> x, y = symbols('x y')
|
| 11 |
+
|
| 12 |
+
Caveats
|
| 13 |
+
=======
|
| 14 |
+
|
| 15 |
+
1. As of the time of writing this, the names ``O``, ``S``, ``I``, ``N``,
|
| 16 |
+
``E``, and ``Q`` are colliding with names defined in SymPy. If you import them
|
| 17 |
+
from both ``sympy.abc`` and ``sympy``, the second import will "win".
|
| 18 |
+
This is an issue only for * imports, which should only be used for short-lived
|
| 19 |
+
code such as interactive sessions and throwaway scripts that do not survive
|
| 20 |
+
until the next SymPy upgrade, where ``sympy`` may contain a different set of
|
| 21 |
+
names.
|
| 22 |
+
|
| 23 |
+
2. This module does not define symbol names on demand, i.e.
|
| 24 |
+
``from sympy.abc import foo`` will be reported as an error because
|
| 25 |
+
``sympy.abc`` does not contain the name ``foo``. To get a symbol named ``foo``,
|
| 26 |
+
you still need to use ``Symbol('foo')`` or ``symbols('foo')``.
|
| 27 |
+
You can freely mix usage of ``sympy.abc`` and ``Symbol``/``symbols``, though
|
| 28 |
+
sticking with one and only one way to get the symbols does tend to make the code
|
| 29 |
+
more readable.
|
| 30 |
+
|
| 31 |
+
The module also defines some special names to help detect which names clash
|
| 32 |
+
with the default SymPy namespace.
|
| 33 |
+
|
| 34 |
+
``_clash1`` defines all the single letter variables that clash with
|
| 35 |
+
SymPy objects; ``_clash2`` defines the multi-letter clashing symbols;
|
| 36 |
+
and ``_clash`` is the union of both. These can be passed for ``locals``
|
| 37 |
+
during sympification if one desires Symbols rather than the non-Symbol
|
| 38 |
+
objects for those names.
|
| 39 |
+
|
| 40 |
+
Examples
|
| 41 |
+
========
|
| 42 |
+
|
| 43 |
+
>>> from sympy import S
|
| 44 |
+
>>> from sympy.abc import _clash1, _clash2, _clash
|
| 45 |
+
>>> S("Q & C", locals=_clash1)
|
| 46 |
+
C & Q
|
| 47 |
+
>>> S('pi(x)', locals=_clash2)
|
| 48 |
+
pi(x)
|
| 49 |
+
>>> S('pi(C, Q)', locals=_clash)
|
| 50 |
+
pi(C, Q)
|
| 51 |
+
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
from typing import Any, Dict as tDict

import string

from .core import Symbol, symbols
from .core.alphabets import greeks
from sympy.parsing.sympy_parser import null

##### Symbol definitions #####

# Implementation note: The easiest way to avoid typos in the symbols()
# parameter is to copy it from the left-hand side of the assignment.

a, b, c, d, e, f, g, h, i, j = symbols('a, b, c, d, e, f, g, h, i, j')
k, l, m, n, o, p, q, r, s, t = symbols('k, l, m, n, o, p, q, r, s, t')
u, v, w, x, y, z = symbols('u, v, w, x, y, z')

A, B, C, D, E, F, G, H, I, J = symbols('A, B, C, D, E, F, G, H, I, J')
K, L, M, N, O, P, Q, R, S, T = symbols('K, L, M, N, O, P, Q, R, S, T')
U, V, W, X, Y, Z = symbols('U, V, W, X, Y, Z')

alpha, beta, gamma, delta = symbols('alpha, beta, gamma, delta')
epsilon, zeta, eta, theta = symbols('epsilon, zeta, eta, theta')
iota, kappa, lamda, mu = symbols('iota, kappa, lamda, mu')
nu, xi, omicron, pi = symbols('nu, xi, omicron, pi')
rho, sigma, tau, upsilon = symbols('rho, sigma, tau, upsilon')
phi, chi, psi, omega = symbols('phi, chi, psi, omega')


##### Clashing-symbols diagnostics #####

# We want to know which names in SymPy collide with those in here.
# This is mostly for diagnosing SymPy's namespace during SymPy development.

_latin = list(string.ascii_letters)
# QOSINE should not be imported as they clash; gamma, pi and zeta clash, too
_greek = list(greeks)  # make a copy, so we can mutate it
# Note: We import lamda since lambda is a reserved keyword in Python
_greek.remove("lambda")
_greek.append("lamda")

# Populate ``ns`` with every name a star-import from sympy brings in, then
# drain it, recording each name that shadows a single-letter latin symbol
# (into _clash1) or a spelled-out greek-letter symbol (into _clash2).
ns: tDict[str, Any] = {}
exec('from sympy import *', ns)
_clash1: tDict[str, Any] = {}
_clash2: tDict[str, Any] = {}
while ns:
    _k, _ = ns.popitem()
    if _k in _greek:
        _clash2[_k] = null
        _greek.remove(_k)
    elif _k in _latin:
        _clash1[_k] = null
        _latin.remove(_k)
# _clash is the union of both dictionaries; each clashing name maps to
# ``null`` so the dict can be passed as ``locals`` during sympification.
_clash = {}
_clash.update(_clash1)
_clash.update(_clash2)

# Remove module-level helpers so they are not mistaken for exported symbols.
del _latin, _greek, Symbol, _k, null
|
janus/lib/python3.10/site-packages/sympy/conftest.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys

# Flag read elsewhere in SymPy to detect that the suite runs under pytest.
sys._running_pytest = True  # type: ignore
from sympy.external.importtools import version_tuple

import pytest
from sympy.core.cache import clear_cache, USE_CACHE
from sympy.external.gmpy import GROUND_TYPES
from sympy.utilities.misc import ARCH
import re

try:
    import hypothesis

    # Disable hypothesis' per-example deadline: SymPy examples can be slow.
    hypothesis.settings.register_profile("sympy_hypothesis_profile", deadline=None)
    hypothesis.settings.load_profile("sympy_hypothesis_profile")
except ImportError:
    raise ImportError(
        "hypothesis is a required dependency to run the SymPy test suite. "
        "Install it with 'pip install hypothesis' or 'conda install -c conda-forge hypothesis'"
    )


# Pattern for the --split option value: "a/b" where b >= 1.
sp = re.compile(r"([0-9]+)/([1-9][0-9]*)")
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def process_split(config, items):
    """Trim *items* in place to the chunk selected by ``--split``.

    The option value has the form ``a/b`` (e.g. ``2/5``): the collected
    test items are divided into ``b`` nearly equal consecutive chunks and
    only the ``a``-th chunk is kept.  An empty option leaves *items*
    untouched; a malformed one raises ``ValueError``.
    """
    spec = config.getoption("--split")
    if not spec:
        return
    parsed = re.match(r"([0-9]+)/([1-9][0-9]*)", spec)
    if parsed is None:
        raise ValueError(
            "split must be a string of the form a/b " "where a and b are ints."
        )
    part, total = (int(g) for g in parsed.groups())
    lo = (part - 1) * len(items) // total
    hi = part * len(items) // total

    if part < total:
        # Drop the tail first so the index below still refers to the
        # original positions.
        del items[hi:]
    del items[:lo]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def pytest_report_header(config):
    """Return extra header lines (architecture, cache, ground types)
    shown at the top of pytest's report."""
    version = ""
    if GROUND_TYPES == "gmpy":
        import gmpy2

        version = gmpy2.version()
    elif GROUND_TYPES == "flint":
        try:
            from flint import __version__
        except ImportError:
            version = "unknown"
        else:
            version = f'(python-flint=={__version__})'
    header_lines = [
        "architecture: %s" % ARCH,
        "cache: %s" % USE_CACHE,
        "ground types: %s %s" % (GROUND_TYPES, version),
        "",  # keep the trailing newline of the original format
    ]
    return "\n".join(header_lines)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def pytest_terminal_summary(terminalreporter):
    """Print a loud warning after the run when any test errored or failed."""
    stats = terminalreporter.stats
    if stats.get("error") or stats.get("failed"):
        terminalreporter.write_sep(" ", "DO *NOT* COMMIT!", red=True, bold=True)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def pytest_addoption(parser):
    """Register the ``--split`` command line option (consumed by
    ``process_split``) with pytest."""
    parser.addoption("--split", action="store", default="", help="split tests")
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def pytest_collection_modifyitems(config, items):
    """pytest hook: post-process the collected test items."""
    # handle splits (--split a/b keeps only the a-th of b chunks)
    process_split(config, items)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@pytest.fixture(autouse=True, scope="module")
def file_clear_cache():
    # Start every test module with an empty SymPy cache so cached results
    # cannot leak between modules.
    clear_cache()
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@pytest.fixture(autouse=True, scope="module")
def check_disabled(request):
    # Skip whole modules that declare themselves unavailable via a
    # module-level ``disabled`` flag, and guard IPython-based test modules
    # against incompatible pytest setups.
    if getattr(request.module, "disabled", False):
        pytest.skip("test requirements not met.")
    elif getattr(request.module, "ipython", False):
        # need to check version and options for ipython tests
        if (
            version_tuple(pytest.__version__) < version_tuple("2.6.3")
            and pytest.config.getvalue("-s") != "no"
        ):
            pytest.skip("run py.test with -s or upgrade to newer version.")
|
janus/lib/python3.10/site-packages/sympy/discrete/__init__.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""This module contains functions which operate on discrete sequences.
|
| 2 |
+
|
| 3 |
+
Transforms - ``fft``, ``ifft``, ``ntt``, ``intt``, ``fwht``, ``ifwht``,
|
| 4 |
+
``mobius_transform``, ``inverse_mobius_transform``
|
| 5 |
+
|
| 6 |
+
Convolutions - ``convolution``, ``convolution_fft``, ``convolution_ntt``,
|
| 7 |
+
``convolution_fwht``, ``convolution_subset``,
|
| 8 |
+
``covering_product``, ``intersecting_product``
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from .transforms import (fft, ifft, ntt, intt, fwht, ifwht,
|
| 12 |
+
mobius_transform, inverse_mobius_transform)
|
| 13 |
+
from .convolutions import convolution, covering_product, intersecting_product
|
| 14 |
+
|
| 15 |
+
__all__ = [
|
| 16 |
+
'fft', 'ifft', 'ntt', 'intt', 'fwht', 'ifwht', 'mobius_transform',
|
| 17 |
+
'inverse_mobius_transform',
|
| 18 |
+
|
| 19 |
+
'convolution', 'covering_product', 'intersecting_product',
|
| 20 |
+
]
|
janus/lib/python3.10/site-packages/sympy/discrete/__pycache__/recurrences.cpython-310.pyc
ADDED
|
Binary file (5.08 kB). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/discrete/convolutions.py
ADDED
|
@@ -0,0 +1,597 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Convolution (using **FFT**, **NTT**, **FWHT**), Subset Convolution,
|
| 3 |
+
Covering Product, Intersecting Product
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from sympy.core import S, sympify, Rational
|
| 7 |
+
from sympy.core.function import expand_mul
|
| 8 |
+
from sympy.discrete.transforms import (
|
| 9 |
+
fft, ifft, ntt, intt, fwht, ifwht,
|
| 10 |
+
mobius_transform, inverse_mobius_transform)
|
| 11 |
+
from sympy.external.gmpy import MPZ, lcm
|
| 12 |
+
from sympy.utilities.iterables import iterable
|
| 13 |
+
from sympy.utilities.misc import as_int
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def convolution(a, b, cycle=0, dps=None, prime=None, dyadic=None, subset=None):
    """
    Performs convolution by determining the type of desired
    convolution using hints.

    Exactly one of ``dps``, ``prime``, ``dyadic``, ``subset`` arguments
    should be specified explicitly for identifying the type of convolution,
    and the argument ``cycle`` can be specified optionally.

    For the default arguments, linear convolution is performed using **FFT**.

    Parameters
    ==========

    a, b : iterables
        The sequences for which convolution is performed.
    cycle : Integer
        Specifies the length for doing cyclic convolution.
    dps : Integer
        Specifies the number of decimal digits for precision for
        performing **FFT** on the sequence.
    prime : Integer
        Prime modulus of the form `(m 2^k + 1)` to be used for
        performing **NTT** on the sequence.
    dyadic : bool
        Identifies the convolution type as dyadic (*bitwise-XOR*)
        convolution, which is performed using **FWHT**.
    subset : bool
        Identifies the convolution type as subset convolution.

    Examples
    ========

    >>> from sympy import convolution, symbols, S, I
    >>> u, v, w, x, y, z = symbols('u v w x y z')

    >>> convolution([1 + 2*I, 4 + 3*I], [S(5)/4, 6], dps=3)
    [1.25 + 2.5*I, 11.0 + 15.8*I, 24.0 + 18.0*I]
    >>> convolution([1, 2, 3], [4, 5, 6], cycle=3)
    [31, 31, 28]

    >>> convolution([111, 777], [888, 444], prime=19*2**10 + 1)
    [1283, 19351, 14219]
    >>> convolution([111, 777], [888, 444], prime=19*2**10 + 1, cycle=2)
    [15502, 19351]

    >>> convolution([u, v], [x, y, z], dyadic=True)
    [u*x + v*y, u*y + v*x, u*z, v*z]
    >>> convolution([u, v], [x, y, z], dyadic=True, cycle=2)
    [u*x + u*z + v*y, u*y + v*x + v*z]

    >>> convolution([u, v, w], [x, y, z], subset=True)
    [u*x, u*y + v*x, u*z + w*x, v*z + w*y]
    >>> convolution([u, v, w], [x, y, z], subset=True, cycle=3)
    [u*x + v*z + w*y, u*y + v*x, u*z + w*x]

    """

    c = as_int(cycle)
    if c < 0:
        raise ValueError("The length for cyclic convolution "
                        "must be non-negative")

    # Normalize the boolean hints to True/None so exactly-one-hint
    # checking below can treat all four hints uniformly.
    dyadic = True if dyadic else None
    subset = True if subset else None
    if sum(x is not None for x in (prime, dps, dyadic, subset)) > 1:
        raise TypeError("Ambiguity in determining the type of convolution")

    if prime is not None:
        ls = convolution_ntt(a, b, prime=prime)
        # Fold the linear result down to length c (mod prime) for cyclic
        # convolution.
        return ls if not c else [sum(ls[i::c]) % prime for i in range(c)]

    if dyadic:
        ls = convolution_fwht(a, b)
    elif subset:
        ls = convolution_subset(a, b)
    else:
        def loop(a):
            # Return (scaled-integer sequence, common denominator) when
            # every element is an int or Rational, else None so the caller
            # falls back to the FFT path.
            dens = []
            for i in a:
                if isinstance(i, Rational) and i.q - 1:
                    dens.append(i.q)
                elif not isinstance(i, int):
                    return
            if dens:
                l = lcm(*dens)
                return [i*l if type(i) is int else i.p*(l//i.q) for i in a], l
            # no lcm of den to deal with
            return a, 1
        ls = None
        da = loop(a)
        if da is not None:
            db = loop(b)
            if db is not None:
                # Both inputs are exact rationals: convolve the scaled
                # integers exactly, then divide out the denominators.
                (ia, ma), (ib, mb) = da, db
                den = ma*mb
                ls = convolution_int(ia, ib)
                if den != 1:
                    ls = [Rational(i, den) for i in ls]
        if ls is None:
            ls = convolution_fft(a, b, dps)

    # Fold the linear result down to length c for cyclic convolution.
    return ls if not c else [sum(ls[i::c]) for i in range(c)]
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
#----------------------------------------------------------------------------#
|
| 122 |
+
# #
|
| 123 |
+
# Convolution for Complex domain #
|
| 124 |
+
# #
|
| 125 |
+
#----------------------------------------------------------------------------#
|
| 126 |
+
|
| 127 |
+
def convolution_fft(a, b, dps=None):
    """Linear convolution of *a* and *b* using the Fast Fourier Transform.

    Parameters
    ==========

    a, b : iterables
        The sequences for which convolution is performed.
    dps : Integer
        Specifies the number of decimal digits for precision.

    Examples
    ========

    >>> from sympy import S, I
    >>> from sympy.discrete.convolutions import convolution_fft

    >>> convolution_fft([2, 3], [4, 5])
    [8, 22, 15]
    >>> convolution_fft([1 + 2*I, 4 + 3*I], [S(5)/4, 6])
    [5/4 + 5*I/2, 11 + 63*I/4, 24 + 18*I]

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Convolution_theorem
    .. [2] https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general%29

    """
    x, y = a[:], b[:]
    out_len = len(x) + len(y) - 1  # length of the linear convolution

    # The radix-2 FFT needs a power-of-two number of sample points.
    size = out_len
    if size > 0 and size & (size - 1):
        size = 2**size.bit_length()

    x += [S.Zero]*(size - len(x))
    y += [S.Zero]*(size - len(y))

    fx, fy = fft(x, dps), fft(y, dps)
    spectrum = [expand_mul(u*v) for u, v in zip(fx, fy)]
    # Transform back and discard the padding.
    return ifft(spectrum, dps)[:out_len]
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
#----------------------------------------------------------------------------#
|
| 178 |
+
# #
|
| 179 |
+
# Convolution for GF(p) #
|
| 180 |
+
# #
|
| 181 |
+
#----------------------------------------------------------------------------#
|
| 182 |
+
|
| 183 |
+
def convolution_ntt(a, b, prime):
    """Linear convolution of integer sequences using the Number Theoretic
    Transform, with all arithmetic done modulo *prime*.

    Parameters
    ==========

    a, b : iterables
        The sequences for which convolution is performed.
    prime : Integer
        Prime modulus of the form `(m 2^k + 1)` to be used for performing
        **NTT** on the sequence.

    Examples
    ========

    >>> from sympy.discrete.convolutions import convolution_ntt
    >>> convolution_ntt([2, 3], [4, 5], prime=19*2**10 + 1)
    [8, 22, 15]

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Convolution_theorem
    .. [2] https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general%29

    """
    x, y, p = a[:], b[:], as_int(prime)
    out_len = len(x) + len(y) - 1  # length of the linear convolution

    # The radix-2 NTT needs a power-of-two number of sample points.
    size = out_len
    if size > 0 and size & (size - 1):
        size = 2**size.bit_length()

    x += [0]*(size - len(x))
    y += [0]*(size - len(y))

    fx, fy = ntt(x, p), ntt(y, p)
    spectrum = [u*v % p for u, v in zip(fx, fy)]
    # Transform back and discard the padding.
    return intt(spectrum, p)[:out_len]
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
#----------------------------------------------------------------------------#
|
| 233 |
+
# #
|
| 234 |
+
# Convolution for 2**n-group #
|
| 235 |
+
# #
|
| 236 |
+
#----------------------------------------------------------------------------#
|
| 237 |
+
|
| 238 |
+
def convolution_fwht(a, b):
    """Dyadic (*bitwise-XOR*) convolution using the Fast Walsh Hadamard
    Transform.

    The inputs are padded to the right with zeros, as the *radix-2 FWHT*
    requires the number of sample points to be a power of 2.

    Parameters
    ==========

    a, b : iterables
        The sequences for which convolution is performed.

    Examples
    ========

    >>> from sympy import symbols
    >>> from sympy.discrete.convolutions import convolution_fwht
    >>> u, v, x, y = symbols('u v x y')
    >>> convolution_fwht([u, v], [x, y])
    [u*x + v*y, u*y + v*x]
    >>> convolution_fwht([2, 3], [4, 5])
    [23, 22]

    References
    ==========

    .. [1] https://www.radioeng.cz/fulltexts/2002/02_03_40_42.pdf
    .. [2] https://en.wikipedia.org/wiki/Hadamard_transform

    """
    if not a or not b:
        return []

    x, y = a[:], b[:]
    size = max(len(x), len(y))

    if size & (size - 1):  # round up to a power of two
        size = 2**size.bit_length()

    x += [S.Zero]*(size - len(x))
    y += [S.Zero]*(size - len(y))

    fx, fy = fwht(x), fwht(y)
    spectrum = [expand_mul(u*v) for u, v in zip(fx, fy)]
    return ifwht(spectrum)
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
#----------------------------------------------------------------------------#
|
| 299 |
+
# #
|
| 300 |
+
# Subset Convolution #
|
| 301 |
+
# #
|
| 302 |
+
#----------------------------------------------------------------------------#
|
| 303 |
+
|
| 304 |
+
def convolution_subset(a, b):
    """Subset convolution of the given sequences.

    The indices of each argument, considered as bit strings, correspond to
    subsets of a finite set; entry ``mask`` of the result is the sum of
    ``a[sub]*b[mask ^ sub]`` over all submasks ``sub`` of ``mask``.

    The inputs are padded to the right with zeros, as the bitmask-based
    definition of subsets requires the size of the sequence to be a power
    of 2.

    Parameters
    ==========

    a, b : iterables
        The sequences for which convolution is performed.

    Examples
    ========

    >>> from sympy import symbols
    >>> from sympy.discrete.convolutions import convolution_subset
    >>> u, v, x, y = symbols('u v x y')
    >>> convolution_subset([u, v], [x, y])
    [u*x, u*y + v*x]

    References
    ==========

    .. [1] https://people.csail.mit.edu/rrw/presentations/subset-conv.pdf

    """
    if not a or not b:
        return []

    if not iterable(a) or not iterable(b):
        raise TypeError("Expected a sequence of coefficients for convolution")

    x = [sympify(arg) for arg in a]
    y = [sympify(arg) for arg in b]
    size = max(len(x), len(y))

    if size & (size - 1):  # round up to a power of two
        size = 2**size.bit_length()

    x += [S.Zero]*(size - len(x))
    y += [S.Zero]*(size - len(y))

    result = [S.Zero]*size

    for mask in range(size):
        # Enumerate every submask of ``mask`` (including 0) exactly once.
        sub = mask
        while True:
            result[mask] += expand_mul(x[sub] * y[mask ^ sub])
            if not sub:
                break
            sub = (sub - 1) & mask

    return result
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
#----------------------------------------------------------------------------#
|
| 376 |
+
# #
|
| 377 |
+
# Covering Product #
|
| 378 |
+
# #
|
| 379 |
+
#----------------------------------------------------------------------------#
|
| 380 |
+
|
| 381 |
+
def covering_product(a, b):
    """Covering product of the given sequences.

    The indices of each argument, considered as bit strings, correspond to
    subsets of a finite set; the result groups sums of products of elements
    by the *bitwise-OR* of the corresponding indices.

    The inputs are padded to the right with zeros, as the bitmask-based
    definition of subsets requires the size of the sequence to be a power
    of 2.

    Parameters
    ==========

    a, b : iterables
        The sequences for which covering product is to be obtained.

    Examples
    ========

    >>> from sympy import symbols, covering_product
    >>> u, v, x, y = symbols('u v x y')
    >>> covering_product([u, v], [x, y])
    [u*x, u*y + v*x + v*y]

    References
    ==========

    .. [1] https://people.csail.mit.edu/rrw/presentations/subset-conv.pdf

    """
    if not a or not b:
        return []

    x, y = a[:], b[:]
    size = max(len(x), len(y))

    if size & (size - 1):  # round up to a power of two
        size = 2**size.bit_length()

    x += [S.Zero]*(size - len(x))
    y += [S.Zero]*(size - len(y))

    # Pointwise product in the Mobius (zeta-transformed) domain realizes
    # the OR-convolution.
    tx, ty = mobius_transform(x), mobius_transform(y)
    pointwise = [expand_mul(u*v) for u, v in zip(tx, ty)]
    return inverse_mobius_transform(pointwise)
|
| 443 |
+
|
| 444 |
+
|
| 445 |
+
#----------------------------------------------------------------------------#
|
| 446 |
+
# #
|
| 447 |
+
# Intersecting Product #
|
| 448 |
+
# #
|
| 449 |
+
#----------------------------------------------------------------------------#
|
| 450 |
+
|
| 451 |
+
def intersecting_product(a, b):
    """
    Compute the intersecting product of two sequences.

    Each index, read as a bit string, names a subset of a finite set.
    The result at index ``i`` is the sum of ``a[j]*b[k]`` over all pairs
    of indices whose *bitwise-AND* equals ``i``.

    Both inputs are padded on the right with zeros up to the next power
    of 2, since the bitmask-as-subset interpretation requires ``2**k``
    positions.

    Parameters
    ==========

    a, b : iterables
        The sequences for which intersecting product is to be obtained.

    Examples
    ========

    >>> from sympy import symbols, S, I, intersecting_product
    >>> u, v, x, y, z = symbols('u v x y z')

    >>> intersecting_product([u, v], [x, y])
    [u*x + u*y + v*x, v*y]
    >>> intersecting_product([u, v, x], [y, z])
    [u*y + u*z + v*y + x*y + x*z, v*z, 0, 0]

    >>> intersecting_product([1, S(2)/3], [3, 4 + 5*I])
    [9 + 5*I, 8/3 + 10*I/3]
    >>> intersecting_product([1, 3, S(5)/7], [7, 8])
    [327/7, 24, 0, 0]

    References
    ==========

    .. [1] https://people.csail.mit.edu/rrw/presentations/subset-conv.pdf

    """
    if not a or not b:
        return []

    # Work on copies so the caller's sequences are never mutated.
    seq1, seq2 = a[:], b[:]

    # Round the common length up to the next power of two.
    size = 1
    while size < max(len(seq1), len(seq2)):
        size <<= 1

    seq1 += [S.Zero]*(size - len(seq1))
    seq2 += [S.Zero]*(size - len(seq2))

    # Superset-sum (mobius) transform, pointwise multiply, then invert.
    t1 = mobius_transform(seq1, subset=False)
    t2 = mobius_transform(seq2, subset=False)
    pointwise = [expand_mul(p*q) for p, q in zip(t1, t2)]
    return inverse_mobius_transform(pointwise, subset=False)
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
#----------------------------------------------------------------------------#
|
| 516 |
+
# #
|
| 517 |
+
# Integer Convolutions #
|
| 518 |
+
# #
|
| 519 |
+
#----------------------------------------------------------------------------#
|
| 520 |
+
|
| 521 |
+
def convolution_int(a, b):
    """Return the convolution of two sequences as a list.

    The iterables must consist solely of integers.

    Parameters
    ==========

    a, b : Sequence
        The sequences for which convolution is performed.

    Explanation
    ===========

    This function performs the convolution of ``a`` and ``b`` by packing
    each into a single integer, multiplying them together, and then
    unpacking the result from the product. The intuition behind this is
    that if we evaluate some polynomial [1]:

    .. math ::
        1156x^6 + 3808x^5 + 8440x^4 + 14856x^3 + 16164x^2 + 14040x + 8100

    at say $x = 10^5$ we obtain $1156038080844014856161641404008100$.
    Note we can read off the coefficients for each term every five digits.
    If the $x$ we chose to evaluate at is large enough, the same will hold
    for the product.

    The idea now is since big integer multiplication in libraries such
    as GMP is highly optimised, this will be reasonably fast.

    Examples
    ========

    >>> from sympy.discrete.convolutions import convolution_int

    >>> convolution_int([2, 3], [4, 5])
    [8, 22, 15]
    >>> convolution_int([1, 1, -1], [1, 1])
    [1, 2, 0, -1]

    References
    ==========

    .. [1] Fateman, Richard J.
        Can you save time in multiplying polynomials by encoding them as integers?
        University of California, Berkeley, California (2004).
        https://people.eecs.berkeley.edu/~fateman/papers/polysbyGMP.pdf
    """
    # An upper bound on the largest coefficient in p(x)q(x) is given by (1 + min(dp, dq))N(p)N(q)
    # where dp = deg(p), dq = deg(q), N(f) denotes the coefficient of largest modulus in f [1]
    B = max(abs(c) for c in a)*max(abs(c) for c in b)*(1 + min(len(a) - 1, len(b) - 1))
    # Find the smallest power-of-two radix x = 2**power that exceeds 2*B;
    # each coefficient of the product then fits in one `power`-bit digit.
    x, power = MPZ(1), 0
    while x <= (2*B): # multiply by two for negative coefficients, see [1]
        x <<= 1
        power += 1

    def to_integer(poly):
        # Pack `poly` into a single big integer in base 2**power.
        # `mul` records the sign of the highest-index nonzero coefficient;
        # the polynomial is negated if needed so the packed value is built
        # from a leading non-negative digit (signs are restored on unpack).
        n, mul = MPZ(0), 0
        for c in reversed(poly):
            if c and not mul: mul = -1 if c < 0 else 1
            n <<= power
            n += mul*int(c)
        return mul, n

    # Perform packing and multiplication
    (a_mul, a_packed), (b_mul, b_packed) = to_integer(a), to_integer(b)
    result = a_packed * b_packed

    # Perform unpacking
    # `mul` is the overall sign; digits >= half encode negative coefficients
    # (two's-complement style), which propagate a borrow to the next digit.
    mul = a_mul * b_mul
    mask, half, borrow, poly = x - 1, x >> 1, 0, []
    while result or borrow:
        coeff = (result & mask) + borrow
        result >>= power
        borrow = coeff >= half
        poly.append(mul * int(coeff if coeff < half else coeff - x))
    return poly or [0]
|
janus/lib/python3.10/site-packages/sympy/discrete/recurrences.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Recurrences
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from sympy.core import S, sympify
|
| 6 |
+
from sympy.utilities.iterables import iterable
|
| 7 |
+
from sympy.utilities.misc import as_int
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def linrec(coeffs, init, n):
    r"""
    Evaluate a homogeneous univariate linear recurrence with coefficients
    independent of the recurrence variable at the point ``n``.

    Parameters
    ==========

    coeffs : iterable
        Coefficients of the recurrence.
    init : iterable
        Initial values of the recurrence.
    n : Integer
        Point of evaluation for the recurrence.

    Notes
    =====

    With ``c`` the coefficients, ``b`` the initial values and
    ``k = len(c)`` the order, the sequence is

    .. math :: y(n) = \begin{cases} b_n & 0 \le n < k \\
        c_0 y(n-1) + c_1 y(n-2) + \cdots + c_{k-1} y(n-k) & n \ge k
        \end{cases}

    The value is obtained by reducing `x^n` modulo the characteristic
    polynomial `p(x) = x^k - c_0 x^{k-1} - \cdots - c_{k-1}` using
    exponentiation by squaring (see ``linrec_coeffs`` and [1]_), then
    taking the corresponding linear combination of the initial values.

    Examples
    ========

    >>> from sympy.discrete.recurrences import linrec
    >>> from sympy.abc import x, y, z

    >>> linrec(coeffs=[1, 1], init=[0, 1], n=10)
    55

    >>> linrec(coeffs=[1, 1], init=[x, y], n=10)
    34*x + 55*y

    >>> linrec(coeffs=[x, y], init=[0, 1], n=5)
    x**2*y + x*(x**3 + 2*x*y) + y**2

    >>> linrec(coeffs=[1, 2, 3, 0, 0, 4], init=[x, y, z], n=16)
    13576*x + 5676*y + 2356*z

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Exponentiation_by_squaring
    .. [2] https://en.wikipedia.org/w/index.php?title=Modular_exponentiation&section=6#Matrices

    See Also
    ========

    sympy.polys.agca.extensions.ExtensionElement.__pow__

    """
    # An empty coefficient sequence defines the identically-zero sequence.
    if not coeffs:
        return S.Zero

    if not iterable(coeffs):
        raise TypeError("Expected a sequence of coefficients for"
                        " the recurrence")

    if not iterable(init):
        raise TypeError("Expected a sequence of values for the initialization"
                        " of the recurrence")

    n = as_int(n)
    if n < 0:
        raise ValueError("Point of evaluation of recurrence must be a "
                        "non-negative integer")

    rec_coeffs = [sympify(value) for value in coeffs]
    base = [sympify(value) for value in init]
    order = len(rec_coeffs)

    if len(base) > order:
        raise TypeError("Count of initial values should not exceed the "
                        "order of the recurrence")
    # Unsupplied initial values default to zero.
    base += [S.Zero]*(order - len(base))

    # Within the initial segment the answer is just the stored base value.
    if n < order:
        return base[n]

    # y(n) = sum of (coefficient of x**i in x**n mod p(x)) * y(i).
    products = [weight*value
                for weight, value in zip(linrec_coeffs(rec_coeffs, n), base)]
    # Fold starting from the last product so the accumulator is always a
    # sympy object (mirrors sum(products[:-1], products[-1])).
    total = products[-1]
    for term in products[:-1]:
        total = total + term
    return total
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def linrec_coeffs(c, n):
    r"""
    Compute the coefficients of the n'th term of the linear recursion
    sequence defined by ``c``, i.e. the representation of `x^n` modulo

    `x^k = c_0 x^{k-1} + c_1 x^{k-2} + \cdots + c_{k-1}`.

    The computation uses binary (square-and-multiply) exponentiation.
    This function is used by `linrec` and `_eval_pow_by_cayley`.

    Parameters
    ==========

    c = coefficients of the divisor polynomial
    n = exponent of x, so dividend is x^n

    """
    order = len(c)

    def _square_mod(poly, shift):
        # Square the residue `poly` (and multiply by an extra factor of x
        # when shift == 1), producing a polynomial of degree up to
        # 2*(order-1) + shift ...
        squared = [S.Zero]*(2*len(poly) - 1 + shift)
        for i, p in enumerate(poly):
            for j, q in enumerate(poly):
                squared[shift + i + j] += p*q

        # ... then reduce it back below degree `order`, replacing each high
        # power via x^order = c_0 x^{order-1} + ... + c_{order-1},
        # highest degree first.
        for deg in range(len(squared) - 1, order - 1, -1):
            for idx in range(order):
                squared[deg - idx - 1] += squared[deg]*c[idx]

        return squared[:order]

    def _residue(m):
        # Residue of x**m modulo the characteristic polynomial, as a
        # coefficient list of length `order`.
        if m < order:
            unit = [S.Zero]*order
            unit[m] = S.One
            return unit
        # x**m = (x**(m//2))**2 * x**(m % 2)
        return _square_mod(_residue(m // 2), m % 2)

    return _residue(n)
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/__init__.py
ADDED
|
File without changes
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (171 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/test_recurrences.cpython-310.pyc
ADDED
|
Binary file (3.43 kB). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/__pycache__/test_transforms.cpython-310.pyc
ADDED
|
Binary file (6.72 kB). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/test_recurrences.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sympy.core.numbers import Rational
|
| 2 |
+
from sympy.functions.combinatorial.numbers import fibonacci
|
| 3 |
+
from sympy.core import S, symbols
|
| 4 |
+
from sympy.testing.pytest import raises
|
| 5 |
+
from sympy.discrete.recurrences import linrec
|
| 6 |
+
|
| 7 |
+
def test_linrec():
    """Regression tests for linrec: integer, rational and symbolic
    recurrences, large evaluation points, and argument validation."""
    # Integer recurrences with known values (Fibonacci-like and sparse).
    assert linrec(coeffs=[1, 1], init=[1, 1], n=20) == 10946
    assert linrec(coeffs=[1, 2, 3, 4, 5], init=[1, 1, 0, 2], n=10) == 1040
    assert linrec(coeffs=[0, 0, 11, 13], init=[23, 27], n=25) == 59628567384
    assert linrec(coeffs=[0, 0, 1, 1, 2], init=[1, 5, 3], n=15) == 165
    assert linrec(coeffs=[11, 13, 15, 17], init=[1, 2, 3, 4], n=70) == \
        56889923441670659718376223533331214868804815612050381493741233489928913241
    assert linrec(coeffs=[0]*55 + [1, 1, 2, 3], init=[0]*50 + [1, 2, 3], n=4000) == \
        702633573874937994980598979769135096432444135301118916539

    # Large evaluation points: only checks that a truthy result is produced.
    assert linrec(coeffs=[11, 13, 15, 17], init=[1, 2, 3, 4], n=10**4)
    assert linrec(coeffs=[11, 13, 15, 17], init=[1, 2, 3, 4], n=10**5)

    # Agreement with the closed-form Fibonacci numbers.
    assert all(linrec(coeffs=[1, 1], init=[0, 1], n=n) == fibonacci(n)
        for n in range(95, 115))

    assert all(linrec(coeffs=[1, 1], init=[1, 1], n=n) == fibonacci(n + 1)
        for n in range(595, 615))

    # Rational coefficients and initial values.
    a = [S.Half, Rational(3, 4), Rational(5, 6), 7, Rational(8, 9), Rational(3, 5)]
    b = [1, 2, 8, Rational(5, 7), Rational(3, 7), Rational(2, 9), 6]
    x, y, z = symbols('x y z')

    assert linrec(coeffs=a[:5], init=b[:4], n=80) == \
        Rational(1726244235456268979436592226626304376013002142588105090705187189,
            1960143456748895967474334873705475211264)

    assert linrec(coeffs=a[:4], init=b[:4], n=50) == \
        Rational(368949940033050147080268092104304441, 504857282956046106624)

    assert linrec(coeffs=a[3:], init=b[:3], n=35) == \
        Rational(97409272177295731943657945116791049305244422833125109,
            814315512679031689453125)

    assert linrec(coeffs=[0]*60 + [Rational(2, 3), Rational(4, 5)], init=b, n=3000) == \
        Rational(26777668739896791448594650497024, 48084516708184142230517578125)

    # Argument validation: too many initial values, negative n,
    # non-iterable coefficients/initial values.
    raises(TypeError, lambda: linrec(coeffs=[11, 13, 15, 17], init=[1, 2, 3, 4, 5], n=1))
    raises(TypeError, lambda: linrec(coeffs=a[:4], init=b[:5], n=10000))
    raises(ValueError, lambda: linrec(coeffs=a[:4], init=b[:4], n=-10000))
    raises(TypeError, lambda: linrec(x, b, n=10000))
    raises(TypeError, lambda: linrec(a, y, n=10000))

    # Symbolic coefficients and/or initial values.
    assert linrec(coeffs=[x, y, z], init=[1, 1, 1], n=4) == \
        x**2 + x*y + x*z + y + z
    assert linrec(coeffs=[1, 2, 1], init=[x, y, z], n=20) == \
        269542*x + 664575*y + 578949*z
    assert linrec(coeffs=[0, 3, 1, 2], init=[x, y], n=30) == \
        58516436*x + 56372788*y
    assert linrec(coeffs=[0]*50 + [1, 2, 3], init=[x, y, z], n=1000) == \
        11477135884896*x + 25999077948732*y + 41975630244216*z
    # Empty coefficients define the zero sequence; n < order returns init[n].
    assert linrec(coeffs=[], init=[1, 1], n=20) == 0
    assert linrec(coeffs=[x, y, z], init=[1, 2, 3], n=2) == 3
|
janus/lib/python3.10/site-packages/sympy/discrete/tests/test_transforms.py
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sympy.functions.elementary.miscellaneous import sqrt
|
| 2 |
+
from sympy.core import S, Symbol, symbols, I, Rational
|
| 3 |
+
from sympy.discrete import (fft, ifft, ntt, intt, fwht, ifwht,
|
| 4 |
+
mobius_transform, inverse_mobius_transform)
|
| 5 |
+
from sympy.testing.pytest import raises
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def test_fft_ifft():
    """Tests for fft/ifft: trivial sequences, round-trips with zero
    padding, and rejection of non-sequence / symbolic input."""
    # Length 0 and 1 sequences are returned unchanged.
    assert all(tf(ls) == ls for tf in (fft, ifft)
        for ls in ([], [Rational(5, 3)]))

    # Forward transform of an exact sequence; inverse recovers it
    # (padded to the next power of 2 with zeros).
    ls = list(range(6))
    fls = [15, -7*sqrt(2)/2 - 4 - sqrt(2)*I/2 + 2*I, 2 + 3*I,
        -4 + 7*sqrt(2)/2 - 2*I - sqrt(2)*I/2, -3,
        -4 + 7*sqrt(2)/2 + sqrt(2)*I/2 + 2*I,
        2 - 3*I, -7*sqrt(2)/2 - 4 - 2*I + sqrt(2)*I/2]

    assert fft(ls) == fls
    assert ifft(fls) == ls + [S.Zero]*2

    ls = [1 + 2*I, 3 + 4*I, 5 + 6*I]
    ifls = [Rational(9, 4) + 3*I, I*Rational(-7, 4), Rational(3, 4) + I, -2 - I/4]

    assert ifft(ls) == ifls
    assert fft(ifls) == ls + [S.Zero]

    # Non-iterable input raises TypeError; symbolic coefficients ValueError.
    x = Symbol('x', real=True)
    raises(TypeError, lambda: fft(x))
    raises(ValueError, lambda: ifft([x, 2*x, 3*x**2, 4*x**3]))
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def test_ntt_intt():
    """Tests for ntt/intt: round-trips modulo a suitable prime and
    rejection of invalid sequences and moduli."""
    # prime moduli of the form (m*2**k + 1), sequence length
    # should be a divisor of 2**k
    p = 7*17*2**23 + 1
    q = 2*500000003 + 1 # only for sequences of length 1 or 2
    r = 2*3*5*7 # composite modulus

    # Length 0 and 1 sequences are returned unchanged.
    assert all(tf(ls, p) == ls for tf in (ntt, intt)
        for ls in ([], [5]))

    ls = list(range(6))
    nls = [15, 801133602, 738493201, 334102277, 998244350, 849020224,
        259751156, 12232587]

    assert ntt(ls, p) == nls
    assert intt(nls, p) == ls + [0]*2

    # Invalid inputs: complex/symbolic/float entries, and moduli that are
    # composite or lack a large enough power-of-two factor in p - 1.
    ls = [1 + 2*I, 3 + 4*I, 5 + 6*I]
    x = Symbol('x', integer=True)

    raises(TypeError, lambda: ntt(x, p))
    raises(ValueError, lambda: intt([x, 2*x, 3*x**2, 4*x**3], p))
    raises(ValueError, lambda: intt(ls, p))
    raises(ValueError, lambda: ntt([1.2, 2.1, 3.5], p))
    raises(ValueError, lambda: ntt([3, 5, 6], q))
    raises(ValueError, lambda: ntt([4, 5, 7], r))
    raises(ValueError, lambda: ntt([1.0, 2.0, 3.0], p))
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def test_fwht_ifwht():
    """Tests for fwht/ifwht: numeric and symbolic round-trips with
    zero padding, and the fwht == 8*ifwht scaling identity."""
    # Length 0 and 1 sequences are returned unchanged.
    assert all(tf(ls) == ls for tf in (fwht, ifwht) \
        for ls in ([], [Rational(7, 4)]))

    ls = [213, 321, 43235, 5325, 312, 53]
    fls = [49459, 38061, -47661, -37759, 48729, 37543, -48391, -38277]

    assert fwht(ls) == fls
    assert ifwht(fls) == ls + [S.Zero]*2

    ls = [S.Half + 2*I, Rational(3, 7) + 4*I, Rational(5, 6) + 6*I, Rational(7, 3), Rational(9, 4)]
    ifls = [Rational(533, 672) + I*3/2, Rational(23, 224) + I/2, Rational(1, 672), Rational(107, 224) - I,
        Rational(155, 672) + I*3/2, Rational(-103, 224) + I/2, Rational(-377, 672), Rational(-19, 224) - I]

    assert ifwht(ls) == ifls
    assert fwht(ifls) == ls + [S.Zero]*3

    # Symbolic sequences round-trip as well; non-iterables raise.
    x, y = symbols('x y')

    raises(TypeError, lambda: fwht(x))

    ls = [x, 2*x, 3*x**2, 4*x**3]
    ifls = [x**3 + 3*x**2/4 + x*Rational(3, 4),
        -x**3 + 3*x**2/4 - x/4,
        -x**3 - 3*x**2/4 + x*Rational(3, 4),
        x**3 - 3*x**2/4 - x/4]

    assert ifwht(ls) == ifls
    assert fwht(ifls) == ls

    ls = [x, y, x**2, y**2, x*y]
    fls = [x**2 + x*y + x + y**2 + y,
        x**2 + x*y + x - y**2 - y,
        -x**2 + x*y + x - y**2 + y,
        -x**2 + x*y + x + y**2 - y,
        x**2 - x*y + x + y**2 + y,
        x**2 - x*y + x - y**2 - y,
        -x**2 - x*y + x - y**2 + y,
        -x**2 - x*y + x + y**2 - y]

    assert fwht(ls) == fls
    assert ifwht(fls) == ls + [S.Zero]*3

    # fwht and ifwht differ only by the 1/n (here 1/8) scaling factor.
    ls = list(range(6))

    assert fwht(ls) == [x*8 for x in ifwht(ls)]
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def test_mobius_transform():
    """Tests for mobius_transform/inverse_mobius_transform in both
    subset (subset=True) and superset (subset=False) modes."""
    # Length 0 and 1 sequences are fixed points of both transforms.
    assert all(tf(ls, subset=subset) == ls
        for ls in ([], [Rational(7, 4)]) for subset in (True, False)
        for tf in (mobius_transform, inverse_mobius_transform))

    w, x, y, z = symbols('w x y z')

    # Order-2 cases: subset-sum vs superset-sum conventions.
    assert mobius_transform([x, y]) == [x, x + y]
    assert inverse_mobius_transform([x, x + y]) == [x, y]
    assert mobius_transform([x, y], subset=False) == [x + y, y]
    assert inverse_mobius_transform([x + y, y], subset=False) == [x, y]

    # Order-4 cases.
    assert mobius_transform([w, x, y, z]) == [w, w + x, w + y, w + x + y + z]
    assert inverse_mobius_transform([w, w + x, w + y, w + x + y + z]) == \
        [w, x, y, z]
    assert mobius_transform([w, x, y, z], subset=False) == \
        [w + x + y + z, x + z, y + z, z]
    assert inverse_mobius_transform([w + x + y + z, x + z, y + z, z], subset=False) == \
        [w, x, y, z]

    # Rational/complex sequence of length 5: padded to length 8.
    ls = [Rational(2, 3), Rational(6, 7), Rational(5, 8), 9, Rational(5, 3) + 7*I]
    mls = [Rational(2, 3), Rational(32, 21), Rational(31, 24), Rational(1873, 168),
        Rational(7, 3) + 7*I, Rational(67, 21) + 7*I, Rational(71, 24) + 7*I,
        Rational(2153, 168) + 7*I]

    assert mobius_transform(ls) == mls
    assert inverse_mobius_transform(mls) == ls + [S.Zero]*3

    mls = [Rational(2153, 168) + 7*I, Rational(69, 7), Rational(77, 8), 9, Rational(5, 3) + 7*I, 0, 0, 0]

    assert mobius_transform(ls, subset=False) == mls
    assert inverse_mobius_transform(mls, subset=False) == ls + [S.Zero]*3

    # Length 4 (already a power of 2): no padding occurs.
    ls = ls[:-1]
    mls = [Rational(2, 3), Rational(32, 21), Rational(31, 24), Rational(1873, 168)]

    assert mobius_transform(ls) == mls
    assert inverse_mobius_transform(mls) == ls

    mls = [Rational(1873, 168), Rational(69, 7), Rational(77, 8), 9]

    assert mobius_transform(ls, subset=False) == mls
    assert inverse_mobius_transform(mls, subset=False) == ls

    # Non-iterable inputs raise TypeError.
    raises(TypeError, lambda: mobius_transform(x, subset=True))
    raises(TypeError, lambda: inverse_mobius_transform(y, subset=False))
|
janus/lib/python3.10/site-packages/sympy/discrete/transforms.py
ADDED
|
@@ -0,0 +1,425 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Discrete Fourier Transform, Number Theoretic Transform,
|
| 3 |
+
Walsh Hadamard Transform, Mobius Transform
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from sympy.core import S, Symbol, sympify
|
| 7 |
+
from sympy.core.function import expand_mul
|
| 8 |
+
from sympy.core.numbers import pi, I
|
| 9 |
+
from sympy.functions.elementary.trigonometric import sin, cos
|
| 10 |
+
from sympy.ntheory import isprime, primitive_root
|
| 11 |
+
from sympy.utilities.iterables import ibin, iterable
|
| 12 |
+
from sympy.utilities.misc import as_int
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
#----------------------------------------------------------------------------#
|
| 16 |
+
# #
|
| 17 |
+
# Discrete Fourier Transform #
|
| 18 |
+
# #
|
| 19 |
+
#----------------------------------------------------------------------------#
|
| 20 |
+
|
| 21 |
+
def _fourier_transform(seq, dps, inverse=False):
    """Utility function for the Discrete Fourier Transform.

    Runs an iterative, in-place radix-2 Cooley-Tukey FFT on the
    sympified coefficients of ``seq``, padding to the next power of 2.

    Parameters
    ==========

    seq : iterable
        Numeric (non-symbolic) coefficients to transform.
    dps : Integer or None
        Decimal digits of precision; ``None`` keeps results exact.
    inverse : bool
        When True, conjugate roots of unity are used and the result is
        divided by the (padded) length, giving the inverse transform.
    """

    if not iterable(seq):
        raise TypeError("Expected a sequence of numeric coefficients "
                        "for Fourier Transform")

    a = [sympify(arg) for arg in seq]
    if any(x.has(Symbol) for x in a):
        raise ValueError("Expected non-symbolic coefficients")

    n = len(a)
    if n < 2:
        return a

    # b = number of bits so that the padded length is n = 2**b.
    b = n.bit_length() - 1
    if n&(n - 1): # not a power of 2
        b += 1
        n = 2**b

    a += [S.Zero]*(n - len(a))
    # Bit-reversal permutation so the butterflies can run in place.
    for i in range(1, n):
        j = int(ibin(i, b, str=True)[::-1], 2)
        if i < j:
            a[i], a[j] = a[j], a[i]

    # Angle of the primitive n'th root of unity (conjugated for inverse).
    ang = -2*pi/n if inverse else 2*pi/n

    if dps is not None:
        ang = ang.evalf(dps + 2)

    # Precomputed twiddle factors w[i] = e**(I*ang*i).
    w = [cos(ang*i) + I*sin(ang*i) for i in range(n // 2)]

    # Butterfly passes with doubling block size h.
    h = 2
    while h <= n:
        hf, ut = h // 2, n // h
        for i in range(0, n, h):
            for j in range(hf):
                u, v = a[i + j], expand_mul(a[i + j + hf]*w[ut * j])
                a[i + j], a[i + j + hf] = u + v, u - v
        h *= 2

    # Inverse transform carries the 1/n normalization factor.
    if inverse:
        a = [(x/n).evalf(dps) for x in a] if dps is not None \
            else [x/n for x in a]

    return a
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def fft(seq, dps=None):
    r"""
    Performs the Discrete Fourier Transform (**DFT**) in the complex domain.

    The sequence is automatically padded to the right with zeros, as the
    *radix-2 FFT* requires the number of sample points to be a power of 2.

    This method should be used with default arguments only for short sequences
    as the complexity of expressions increases with the size of the sequence.

    Parameters
    ==========

    seq : iterable
        The sequence on which **DFT** is to be applied.
    dps : Integer
        Specifies the number of decimal digits for precision.

    Examples
    ========

    >>> from sympy import fft, ifft

    >>> fft([1, 2, 3, 4])
    [10, -2 - 2*I, -2, -2 + 2*I]
    >>> ifft(_)
    [1, 2, 3, 4]

    >>> ifft([1, 2, 3, 4])
    [5/2, -1/2 + I/2, -1/2, -1/2 - I/2]
    >>> fft(_)
    [1, 2, 3, 4]

    >>> ifft([1, 7, 3, 4], dps=15)
    [3.75, -0.5 - 0.75*I, -1.75, -0.5 + 0.75*I]
    >>> fft(_)
    [1.0, 7.0, 3.0, 4.0]

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Cooley%E2%80%93Tukey_FFT_algorithm
    .. [2] https://mathworld.wolfram.com/FastFourierTransform.html

    """
    # Forward transform: delegate to the shared radix-2 helper.
    return _fourier_transform(seq, dps=dps, inverse=False)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def ifft(seq, dps=None):
    # Inverse DFT: same butterfly network as fft, but with conjugate
    # roots of unity and a 1/n normalization (see _fourier_transform).
    return _fourier_transform(seq, dps=dps, inverse=True)

ifft.__doc__ = fft.__doc__  # ifft shares fft's docstring and examples
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
#----------------------------------------------------------------------------#
|
| 126 |
+
# #
|
| 127 |
+
# Number Theoretic Transform #
|
| 128 |
+
# #
|
| 129 |
+
#----------------------------------------------------------------------------#
|
| 130 |
+
|
| 131 |
+
def _number_theoretic_transform(seq, prime, inverse=False):
|
| 132 |
+
"""Utility function for the Number Theoretic Transform"""
|
| 133 |
+
|
| 134 |
+
if not iterable(seq):
|
| 135 |
+
raise TypeError("Expected a sequence of integer coefficients "
|
| 136 |
+
"for Number Theoretic Transform")
|
| 137 |
+
|
| 138 |
+
p = as_int(prime)
|
| 139 |
+
if not isprime(p):
|
| 140 |
+
raise ValueError("Expected prime modulus for "
|
| 141 |
+
"Number Theoretic Transform")
|
| 142 |
+
|
| 143 |
+
a = [as_int(x) % p for x in seq]
|
| 144 |
+
|
| 145 |
+
n = len(a)
|
| 146 |
+
if n < 1:
|
| 147 |
+
return a
|
| 148 |
+
|
| 149 |
+
b = n.bit_length() - 1
|
| 150 |
+
if n&(n - 1):
|
| 151 |
+
b += 1
|
| 152 |
+
n = 2**b
|
| 153 |
+
|
| 154 |
+
if (p - 1) % n:
|
| 155 |
+
raise ValueError("Expected prime modulus of the form (m*2**k + 1)")
|
| 156 |
+
|
| 157 |
+
a += [0]*(n - len(a))
|
| 158 |
+
for i in range(1, n):
|
| 159 |
+
j = int(ibin(i, b, str=True)[::-1], 2)
|
| 160 |
+
if i < j:
|
| 161 |
+
a[i], a[j] = a[j], a[i]
|
| 162 |
+
|
| 163 |
+
pr = primitive_root(p)
|
| 164 |
+
|
| 165 |
+
rt = pow(pr, (p - 1) // n, p)
|
| 166 |
+
if inverse:
|
| 167 |
+
rt = pow(rt, p - 2, p)
|
| 168 |
+
|
| 169 |
+
w = [1]*(n // 2)
|
| 170 |
+
for i in range(1, n // 2):
|
| 171 |
+
w[i] = w[i - 1]*rt % p
|
| 172 |
+
|
| 173 |
+
h = 2
|
| 174 |
+
while h <= n:
|
| 175 |
+
hf, ut = h // 2, n // h
|
| 176 |
+
for i in range(0, n, h):
|
| 177 |
+
for j in range(hf):
|
| 178 |
+
u, v = a[i + j], a[i + j + hf]*w[ut * j]
|
| 179 |
+
a[i + j], a[i + j + hf] = (u + v) % p, (u - v) % p
|
| 180 |
+
h *= 2
|
| 181 |
+
|
| 182 |
+
if inverse:
|
| 183 |
+
rv = pow(n, p - 2, p)
|
| 184 |
+
a = [x*rv % p for x in a]
|
| 185 |
+
|
| 186 |
+
return a
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
def ntt(seq, prime):
|
| 190 |
+
r"""
|
| 191 |
+
Performs the Number Theoretic Transform (**NTT**), which specializes the
|
| 192 |
+
Discrete Fourier Transform (**DFT**) over quotient ring `Z/pZ` for prime
|
| 193 |
+
`p` instead of complex numbers `C`.
|
| 194 |
+
|
| 195 |
+
The sequence is automatically padded to the right with zeros, as the
|
| 196 |
+
*radix-2 NTT* requires the number of sample points to be a power of 2.
|
| 197 |
+
|
| 198 |
+
Parameters
|
| 199 |
+
==========
|
| 200 |
+
|
| 201 |
+
seq : iterable
|
| 202 |
+
The sequence on which **DFT** is to be applied.
|
| 203 |
+
prime : Integer
|
| 204 |
+
Prime modulus of the form `(m 2^k + 1)` to be used for performing
|
| 205 |
+
**NTT** on the sequence.
|
| 206 |
+
|
| 207 |
+
Examples
|
| 208 |
+
========
|
| 209 |
+
|
| 210 |
+
>>> from sympy import ntt, intt
|
| 211 |
+
>>> ntt([1, 2, 3, 4], prime=3*2**8 + 1)
|
| 212 |
+
[10, 643, 767, 122]
|
| 213 |
+
>>> intt(_, 3*2**8 + 1)
|
| 214 |
+
[1, 2, 3, 4]
|
| 215 |
+
>>> intt([1, 2, 3, 4], prime=3*2**8 + 1)
|
| 216 |
+
[387, 415, 384, 353]
|
| 217 |
+
>>> ntt(_, prime=3*2**8 + 1)
|
| 218 |
+
[1, 2, 3, 4]
|
| 219 |
+
|
| 220 |
+
References
|
| 221 |
+
==========
|
| 222 |
+
|
| 223 |
+
.. [1] http://www.apfloat.org/ntt.html
|
| 224 |
+
.. [2] https://mathworld.wolfram.com/NumberTheoreticTransform.html
|
| 225 |
+
.. [3] https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general%29
|
| 226 |
+
|
| 227 |
+
"""
|
| 228 |
+
|
| 229 |
+
return _number_theoretic_transform(seq, prime=prime)
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def intt(seq, prime):
|
| 233 |
+
return _number_theoretic_transform(seq, prime=prime, inverse=True)
|
| 234 |
+
|
| 235 |
+
intt.__doc__ = ntt.__doc__
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
#----------------------------------------------------------------------------#
|
| 239 |
+
# #
|
| 240 |
+
# Walsh Hadamard Transform #
|
| 241 |
+
# #
|
| 242 |
+
#----------------------------------------------------------------------------#
|
| 243 |
+
|
| 244 |
+
def _walsh_hadamard_transform(seq, inverse=False):
|
| 245 |
+
"""Utility function for the Walsh Hadamard Transform"""
|
| 246 |
+
|
| 247 |
+
if not iterable(seq):
|
| 248 |
+
raise TypeError("Expected a sequence of coefficients "
|
| 249 |
+
"for Walsh Hadamard Transform")
|
| 250 |
+
|
| 251 |
+
a = [sympify(arg) for arg in seq]
|
| 252 |
+
n = len(a)
|
| 253 |
+
if n < 2:
|
| 254 |
+
return a
|
| 255 |
+
|
| 256 |
+
if n&(n - 1):
|
| 257 |
+
n = 2**n.bit_length()
|
| 258 |
+
|
| 259 |
+
a += [S.Zero]*(n - len(a))
|
| 260 |
+
h = 2
|
| 261 |
+
while h <= n:
|
| 262 |
+
hf = h // 2
|
| 263 |
+
for i in range(0, n, h):
|
| 264 |
+
for j in range(hf):
|
| 265 |
+
u, v = a[i + j], a[i + j + hf]
|
| 266 |
+
a[i + j], a[i + j + hf] = u + v, u - v
|
| 267 |
+
h *= 2
|
| 268 |
+
|
| 269 |
+
if inverse:
|
| 270 |
+
a = [x/n for x in a]
|
| 271 |
+
|
| 272 |
+
return a
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def fwht(seq):
|
| 276 |
+
r"""
|
| 277 |
+
Performs the Walsh Hadamard Transform (**WHT**), and uses Hadamard
|
| 278 |
+
ordering for the sequence.
|
| 279 |
+
|
| 280 |
+
The sequence is automatically padded to the right with zeros, as the
|
| 281 |
+
*radix-2 FWHT* requires the number of sample points to be a power of 2.
|
| 282 |
+
|
| 283 |
+
Parameters
|
| 284 |
+
==========
|
| 285 |
+
|
| 286 |
+
seq : iterable
|
| 287 |
+
The sequence on which WHT is to be applied.
|
| 288 |
+
|
| 289 |
+
Examples
|
| 290 |
+
========
|
| 291 |
+
|
| 292 |
+
>>> from sympy import fwht, ifwht
|
| 293 |
+
>>> fwht([4, 2, 2, 0, 0, 2, -2, 0])
|
| 294 |
+
[8, 0, 8, 0, 8, 8, 0, 0]
|
| 295 |
+
>>> ifwht(_)
|
| 296 |
+
[4, 2, 2, 0, 0, 2, -2, 0]
|
| 297 |
+
|
| 298 |
+
>>> ifwht([19, -1, 11, -9, -7, 13, -15, 5])
|
| 299 |
+
[2, 0, 4, 0, 3, 10, 0, 0]
|
| 300 |
+
>>> fwht(_)
|
| 301 |
+
[19, -1, 11, -9, -7, 13, -15, 5]
|
| 302 |
+
|
| 303 |
+
References
|
| 304 |
+
==========
|
| 305 |
+
|
| 306 |
+
.. [1] https://en.wikipedia.org/wiki/Hadamard_transform
|
| 307 |
+
.. [2] https://en.wikipedia.org/wiki/Fast_Walsh%E2%80%93Hadamard_transform
|
| 308 |
+
|
| 309 |
+
"""
|
| 310 |
+
|
| 311 |
+
return _walsh_hadamard_transform(seq)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def ifwht(seq):
|
| 315 |
+
return _walsh_hadamard_transform(seq, inverse=True)
|
| 316 |
+
|
| 317 |
+
ifwht.__doc__ = fwht.__doc__
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
#----------------------------------------------------------------------------#
|
| 321 |
+
# #
|
| 322 |
+
# Mobius Transform for Subset Lattice #
|
| 323 |
+
# #
|
| 324 |
+
#----------------------------------------------------------------------------#
|
| 325 |
+
|
| 326 |
+
def _mobius_transform(seq, sgn, subset):
|
| 327 |
+
r"""Utility function for performing Mobius Transform using
|
| 328 |
+
Yate's Dynamic Programming method"""
|
| 329 |
+
|
| 330 |
+
if not iterable(seq):
|
| 331 |
+
raise TypeError("Expected a sequence of coefficients")
|
| 332 |
+
|
| 333 |
+
a = [sympify(arg) for arg in seq]
|
| 334 |
+
|
| 335 |
+
n = len(a)
|
| 336 |
+
if n < 2:
|
| 337 |
+
return a
|
| 338 |
+
|
| 339 |
+
if n&(n - 1):
|
| 340 |
+
n = 2**n.bit_length()
|
| 341 |
+
|
| 342 |
+
a += [S.Zero]*(n - len(a))
|
| 343 |
+
|
| 344 |
+
if subset:
|
| 345 |
+
i = 1
|
| 346 |
+
while i < n:
|
| 347 |
+
for j in range(n):
|
| 348 |
+
if j & i:
|
| 349 |
+
a[j] += sgn*a[j ^ i]
|
| 350 |
+
i *= 2
|
| 351 |
+
|
| 352 |
+
else:
|
| 353 |
+
i = 1
|
| 354 |
+
while i < n:
|
| 355 |
+
for j in range(n):
|
| 356 |
+
if j & i:
|
| 357 |
+
continue
|
| 358 |
+
a[j] += sgn*a[j ^ i]
|
| 359 |
+
i *= 2
|
| 360 |
+
|
| 361 |
+
return a
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def mobius_transform(seq, subset=True):
|
| 365 |
+
r"""
|
| 366 |
+
Performs the Mobius Transform for subset lattice with indices of
|
| 367 |
+
sequence as bitmasks.
|
| 368 |
+
|
| 369 |
+
The indices of each argument, considered as bit strings, correspond
|
| 370 |
+
to subsets of a finite set.
|
| 371 |
+
|
| 372 |
+
The sequence is automatically padded to the right with zeros, as the
|
| 373 |
+
definition of subset/superset based on bitmasks (indices) requires
|
| 374 |
+
the size of sequence to be a power of 2.
|
| 375 |
+
|
| 376 |
+
Parameters
|
| 377 |
+
==========
|
| 378 |
+
|
| 379 |
+
seq : iterable
|
| 380 |
+
The sequence on which Mobius Transform is to be applied.
|
| 381 |
+
subset : bool
|
| 382 |
+
Specifies if Mobius Transform is applied by enumerating subsets
|
| 383 |
+
or supersets of the given set.
|
| 384 |
+
|
| 385 |
+
Examples
|
| 386 |
+
========
|
| 387 |
+
|
| 388 |
+
>>> from sympy import symbols
|
| 389 |
+
>>> from sympy import mobius_transform, inverse_mobius_transform
|
| 390 |
+
>>> x, y, z = symbols('x y z')
|
| 391 |
+
|
| 392 |
+
>>> mobius_transform([x, y, z])
|
| 393 |
+
[x, x + y, x + z, x + y + z]
|
| 394 |
+
>>> inverse_mobius_transform(_)
|
| 395 |
+
[x, y, z, 0]
|
| 396 |
+
|
| 397 |
+
>>> mobius_transform([x, y, z], subset=False)
|
| 398 |
+
[x + y + z, y, z, 0]
|
| 399 |
+
>>> inverse_mobius_transform(_, subset=False)
|
| 400 |
+
[x, y, z, 0]
|
| 401 |
+
|
| 402 |
+
>>> mobius_transform([1, 2, 3, 4])
|
| 403 |
+
[1, 3, 4, 10]
|
| 404 |
+
>>> inverse_mobius_transform(_)
|
| 405 |
+
[1, 2, 3, 4]
|
| 406 |
+
>>> mobius_transform([1, 2, 3, 4], subset=False)
|
| 407 |
+
[10, 6, 7, 4]
|
| 408 |
+
>>> inverse_mobius_transform(_, subset=False)
|
| 409 |
+
[1, 2, 3, 4]
|
| 410 |
+
|
| 411 |
+
References
|
| 412 |
+
==========
|
| 413 |
+
|
| 414 |
+
.. [1] https://en.wikipedia.org/wiki/M%C3%B6bius_inversion_formula
|
| 415 |
+
.. [2] https://people.csail.mit.edu/rrw/presentations/subset-conv.pdf
|
| 416 |
+
.. [3] https://arxiv.org/pdf/1211.0189.pdf
|
| 417 |
+
|
| 418 |
+
"""
|
| 419 |
+
|
| 420 |
+
return _mobius_transform(seq, sgn=+1, subset=subset)
|
| 421 |
+
|
| 422 |
+
def inverse_mobius_transform(seq, subset=True):
|
| 423 |
+
return _mobius_transform(seq, sgn=-1, subset=subset)
|
| 424 |
+
|
| 425 |
+
inverse_mobius_transform.__doc__ = mobius_transform.__doc__
|
janus/lib/python3.10/site-packages/sympy/galgebra.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
raise ImportError("""As of SymPy 1.0 the galgebra module is maintained separately at https://github.com/pygae/galgebra""")
|
janus/lib/python3.10/site-packages/sympy/multipledispatch/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (410 Bytes). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/multipledispatch/__pycache__/core.cpython-310.pyc
ADDED
|
Binary file (2.37 kB). View file
|
|
|
janus/lib/python3.10/site-packages/sympy/multipledispatch/dispatcher.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from warnings import warn
|
| 4 |
+
import inspect
|
| 5 |
+
from .conflict import ordering, ambiguities, super_signature, AmbiguityWarning
|
| 6 |
+
from .utils import expand_tuples
|
| 7 |
+
import itertools as itl
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class MDNotImplementedError(NotImplementedError):
|
| 11 |
+
""" A NotImplementedError for multiple dispatch """
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
### Functions for on_ambiguity
|
| 15 |
+
|
| 16 |
+
def ambiguity_warn(dispatcher, ambiguities):
|
| 17 |
+
""" Raise warning when ambiguity is detected
|
| 18 |
+
|
| 19 |
+
Parameters
|
| 20 |
+
----------
|
| 21 |
+
dispatcher : Dispatcher
|
| 22 |
+
The dispatcher on which the ambiguity was detected
|
| 23 |
+
ambiguities : set
|
| 24 |
+
Set of type signature pairs that are ambiguous within this dispatcher
|
| 25 |
+
|
| 26 |
+
See Also:
|
| 27 |
+
Dispatcher.add
|
| 28 |
+
warning_text
|
| 29 |
+
"""
|
| 30 |
+
warn(warning_text(dispatcher.name, ambiguities), AmbiguityWarning)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class RaiseNotImplementedError:
|
| 34 |
+
"""Raise ``NotImplementedError`` when called."""
|
| 35 |
+
|
| 36 |
+
def __init__(self, dispatcher):
|
| 37 |
+
self.dispatcher = dispatcher
|
| 38 |
+
|
| 39 |
+
def __call__(self, *args, **kwargs):
|
| 40 |
+
types = tuple(type(a) for a in args)
|
| 41 |
+
raise NotImplementedError(
|
| 42 |
+
"Ambiguous signature for %s: <%s>" % (
|
| 43 |
+
self.dispatcher.name, str_signature(types)
|
| 44 |
+
))
|
| 45 |
+
|
| 46 |
+
def ambiguity_register_error_ignore_dup(dispatcher, ambiguities):
|
| 47 |
+
"""
|
| 48 |
+
If super signature for ambiguous types is duplicate types, ignore it.
|
| 49 |
+
Else, register instance of ``RaiseNotImplementedError`` for ambiguous types.
|
| 50 |
+
|
| 51 |
+
Parameters
|
| 52 |
+
----------
|
| 53 |
+
dispatcher : Dispatcher
|
| 54 |
+
The dispatcher on which the ambiguity was detected
|
| 55 |
+
ambiguities : set
|
| 56 |
+
Set of type signature pairs that are ambiguous within this dispatcher
|
| 57 |
+
|
| 58 |
+
See Also:
|
| 59 |
+
Dispatcher.add
|
| 60 |
+
ambiguity_warn
|
| 61 |
+
"""
|
| 62 |
+
for amb in ambiguities:
|
| 63 |
+
signature = tuple(super_signature(amb))
|
| 64 |
+
if len(set(signature)) == 1:
|
| 65 |
+
continue
|
| 66 |
+
dispatcher.add(
|
| 67 |
+
signature, RaiseNotImplementedError(dispatcher),
|
| 68 |
+
on_ambiguity=ambiguity_register_error_ignore_dup
|
| 69 |
+
)
|
| 70 |
+
|
| 71 |
+
###
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
_unresolved_dispatchers: set[Dispatcher] = set()
|
| 75 |
+
_resolve = [True]
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def halt_ordering():
|
| 79 |
+
_resolve[0] = False
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def restart_ordering(on_ambiguity=ambiguity_warn):
|
| 83 |
+
_resolve[0] = True
|
| 84 |
+
while _unresolved_dispatchers:
|
| 85 |
+
dispatcher = _unresolved_dispatchers.pop()
|
| 86 |
+
dispatcher.reorder(on_ambiguity=on_ambiguity)
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class Dispatcher:
|
| 90 |
+
""" Dispatch methods based on type signature
|
| 91 |
+
|
| 92 |
+
Use ``dispatch`` to add implementations
|
| 93 |
+
|
| 94 |
+
Examples
|
| 95 |
+
--------
|
| 96 |
+
|
| 97 |
+
>>> from sympy.multipledispatch import dispatch
|
| 98 |
+
>>> @dispatch(int)
|
| 99 |
+
... def f(x):
|
| 100 |
+
... return x + 1
|
| 101 |
+
|
| 102 |
+
>>> @dispatch(float)
|
| 103 |
+
... def f(x): # noqa: F811
|
| 104 |
+
... return x - 1
|
| 105 |
+
|
| 106 |
+
>>> f(3)
|
| 107 |
+
4
|
| 108 |
+
>>> f(3.0)
|
| 109 |
+
2.0
|
| 110 |
+
"""
|
| 111 |
+
__slots__ = '__name__', 'name', 'funcs', 'ordering', '_cache', 'doc'
|
| 112 |
+
|
| 113 |
+
def __init__(self, name, doc=None):
|
| 114 |
+
self.name = self.__name__ = name
|
| 115 |
+
self.funcs = {}
|
| 116 |
+
self._cache = {}
|
| 117 |
+
self.ordering = []
|
| 118 |
+
self.doc = doc
|
| 119 |
+
|
| 120 |
+
def register(self, *types, **kwargs):
|
| 121 |
+
""" Register dispatcher with new implementation
|
| 122 |
+
|
| 123 |
+
>>> from sympy.multipledispatch.dispatcher import Dispatcher
|
| 124 |
+
>>> f = Dispatcher('f')
|
| 125 |
+
>>> @f.register(int)
|
| 126 |
+
... def inc(x):
|
| 127 |
+
... return x + 1
|
| 128 |
+
|
| 129 |
+
>>> @f.register(float)
|
| 130 |
+
... def dec(x):
|
| 131 |
+
... return x - 1
|
| 132 |
+
|
| 133 |
+
>>> @f.register(list)
|
| 134 |
+
... @f.register(tuple)
|
| 135 |
+
... def reverse(x):
|
| 136 |
+
... return x[::-1]
|
| 137 |
+
|
| 138 |
+
>>> f(1)
|
| 139 |
+
2
|
| 140 |
+
|
| 141 |
+
>>> f(1.0)
|
| 142 |
+
0.0
|
| 143 |
+
|
| 144 |
+
>>> f([1, 2, 3])
|
| 145 |
+
[3, 2, 1]
|
| 146 |
+
"""
|
| 147 |
+
def _(func):
|
| 148 |
+
self.add(types, func, **kwargs)
|
| 149 |
+
return func
|
| 150 |
+
return _
|
| 151 |
+
|
| 152 |
+
@classmethod
|
| 153 |
+
def get_func_params(cls, func):
|
| 154 |
+
if hasattr(inspect, "signature"):
|
| 155 |
+
sig = inspect.signature(func)
|
| 156 |
+
return sig.parameters.values()
|
| 157 |
+
|
| 158 |
+
@classmethod
|
| 159 |
+
def get_func_annotations(cls, func):
|
| 160 |
+
""" Get annotations of function positional parameters
|
| 161 |
+
"""
|
| 162 |
+
params = cls.get_func_params(func)
|
| 163 |
+
if params:
|
| 164 |
+
Parameter = inspect.Parameter
|
| 165 |
+
|
| 166 |
+
params = (param for param in params
|
| 167 |
+
if param.kind in
|
| 168 |
+
(Parameter.POSITIONAL_ONLY,
|
| 169 |
+
Parameter.POSITIONAL_OR_KEYWORD))
|
| 170 |
+
|
| 171 |
+
annotations = tuple(
|
| 172 |
+
param.annotation
|
| 173 |
+
for param in params)
|
| 174 |
+
|
| 175 |
+
if not any(ann is Parameter.empty for ann in annotations):
|
| 176 |
+
return annotations
|
| 177 |
+
|
| 178 |
+
def add(self, signature, func, on_ambiguity=ambiguity_warn):
|
| 179 |
+
""" Add new types/method pair to dispatcher
|
| 180 |
+
|
| 181 |
+
>>> from sympy.multipledispatch import Dispatcher
|
| 182 |
+
>>> D = Dispatcher('add')
|
| 183 |
+
>>> D.add((int, int), lambda x, y: x + y)
|
| 184 |
+
>>> D.add((float, float), lambda x, y: x + y)
|
| 185 |
+
|
| 186 |
+
>>> D(1, 2)
|
| 187 |
+
3
|
| 188 |
+
>>> D(1, 2.0)
|
| 189 |
+
Traceback (most recent call last):
|
| 190 |
+
...
|
| 191 |
+
NotImplementedError: Could not find signature for add: <int, float>
|
| 192 |
+
|
| 193 |
+
When ``add`` detects a warning it calls the ``on_ambiguity`` callback
|
| 194 |
+
with a dispatcher/itself, and a set of ambiguous type signature pairs
|
| 195 |
+
as inputs. See ``ambiguity_warn`` for an example.
|
| 196 |
+
"""
|
| 197 |
+
# Handle annotations
|
| 198 |
+
if not signature:
|
| 199 |
+
annotations = self.get_func_annotations(func)
|
| 200 |
+
if annotations:
|
| 201 |
+
signature = annotations
|
| 202 |
+
|
| 203 |
+
# Handle union types
|
| 204 |
+
if any(isinstance(typ, tuple) for typ in signature):
|
| 205 |
+
for typs in expand_tuples(signature):
|
| 206 |
+
self.add(typs, func, on_ambiguity)
|
| 207 |
+
return
|
| 208 |
+
|
| 209 |
+
for typ in signature:
|
| 210 |
+
if not isinstance(typ, type):
|
| 211 |
+
str_sig = ', '.join(c.__name__ if isinstance(c, type)
|
| 212 |
+
else str(c) for c in signature)
|
| 213 |
+
raise TypeError("Tried to dispatch on non-type: %s\n"
|
| 214 |
+
"In signature: <%s>\n"
|
| 215 |
+
"In function: %s" %
|
| 216 |
+
(typ, str_sig, self.name))
|
| 217 |
+
|
| 218 |
+
self.funcs[signature] = func
|
| 219 |
+
self.reorder(on_ambiguity=on_ambiguity)
|
| 220 |
+
self._cache.clear()
|
| 221 |
+
|
| 222 |
+
def reorder(self, on_ambiguity=ambiguity_warn):
|
| 223 |
+
if _resolve[0]:
|
| 224 |
+
self.ordering = ordering(self.funcs)
|
| 225 |
+
amb = ambiguities(self.funcs)
|
| 226 |
+
if amb:
|
| 227 |
+
on_ambiguity(self, amb)
|
| 228 |
+
else:
|
| 229 |
+
_unresolved_dispatchers.add(self)
|
| 230 |
+
|
| 231 |
+
def __call__(self, *args, **kwargs):
|
| 232 |
+
types = tuple([type(arg) for arg in args])
|
| 233 |
+
try:
|
| 234 |
+
func = self._cache[types]
|
| 235 |
+
except KeyError:
|
| 236 |
+
func = self.dispatch(*types)
|
| 237 |
+
if not func:
|
| 238 |
+
raise NotImplementedError(
|
| 239 |
+
'Could not find signature for %s: <%s>' %
|
| 240 |
+
(self.name, str_signature(types)))
|
| 241 |
+
self._cache[types] = func
|
| 242 |
+
try:
|
| 243 |
+
return func(*args, **kwargs)
|
| 244 |
+
|
| 245 |
+
except MDNotImplementedError:
|
| 246 |
+
funcs = self.dispatch_iter(*types)
|
| 247 |
+
next(funcs) # burn first
|
| 248 |
+
for func in funcs:
|
| 249 |
+
try:
|
| 250 |
+
return func(*args, **kwargs)
|
| 251 |
+
except MDNotImplementedError:
|
| 252 |
+
pass
|
| 253 |
+
raise NotImplementedError("Matching functions for "
|
| 254 |
+
"%s: <%s> found, but none completed successfully"
|
| 255 |
+
% (self.name, str_signature(types)))
|
| 256 |
+
|
| 257 |
+
def __str__(self):
|
| 258 |
+
return "<dispatched %s>" % self.name
|
| 259 |
+
__repr__ = __str__
|
| 260 |
+
|
| 261 |
+
def dispatch(self, *types):
|
| 262 |
+
""" Deterimine appropriate implementation for this type signature
|
| 263 |
+
|
| 264 |
+
This method is internal. Users should call this object as a function.
|
| 265 |
+
Implementation resolution occurs within the ``__call__`` method.
|
| 266 |
+
|
| 267 |
+
>>> from sympy.multipledispatch import dispatch
|
| 268 |
+
>>> @dispatch(int)
|
| 269 |
+
... def inc(x):
|
| 270 |
+
... return x + 1
|
| 271 |
+
|
| 272 |
+
>>> implementation = inc.dispatch(int)
|
| 273 |
+
>>> implementation(3)
|
| 274 |
+
4
|
| 275 |
+
|
| 276 |
+
>>> print(inc.dispatch(float))
|
| 277 |
+
None
|
| 278 |
+
|
| 279 |
+
See Also:
|
| 280 |
+
``sympy.multipledispatch.conflict`` - module to determine resolution order
|
| 281 |
+
"""
|
| 282 |
+
|
| 283 |
+
if types in self.funcs:
|
| 284 |
+
return self.funcs[types]
|
| 285 |
+
|
| 286 |
+
try:
|
| 287 |
+
return next(self.dispatch_iter(*types))
|
| 288 |
+
except StopIteration:
|
| 289 |
+
return None
|
| 290 |
+
|
| 291 |
+
def dispatch_iter(self, *types):
|
| 292 |
+
n = len(types)
|
| 293 |
+
for signature in self.ordering:
|
| 294 |
+
if len(signature) == n and all(map(issubclass, types, signature)):
|
| 295 |
+
result = self.funcs[signature]
|
| 296 |
+
yield result
|
| 297 |
+
|
| 298 |
+
def resolve(self, types):
|
| 299 |
+
""" Deterimine appropriate implementation for this type signature
|
| 300 |
+
|
| 301 |
+
.. deprecated:: 0.4.4
|
| 302 |
+
Use ``dispatch(*types)`` instead
|
| 303 |
+
"""
|
| 304 |
+
warn("resolve() is deprecated, use dispatch(*types)",
|
| 305 |
+
DeprecationWarning)
|
| 306 |
+
|
| 307 |
+
return self.dispatch(*types)
|
| 308 |
+
|
| 309 |
+
def __getstate__(self):
|
| 310 |
+
return {'name': self.name,
|
| 311 |
+
'funcs': self.funcs}
|
| 312 |
+
|
| 313 |
+
def __setstate__(self, d):
|
| 314 |
+
self.name = d['name']
|
| 315 |
+
self.funcs = d['funcs']
|
| 316 |
+
self.ordering = ordering(self.funcs)
|
| 317 |
+
self._cache = {}
|
| 318 |
+
|
| 319 |
+
@property
|
| 320 |
+
def __doc__(self):
|
| 321 |
+
docs = ["Multiply dispatched method: %s" % self.name]
|
| 322 |
+
|
| 323 |
+
if self.doc:
|
| 324 |
+
docs.append(self.doc)
|
| 325 |
+
|
| 326 |
+
other = []
|
| 327 |
+
for sig in self.ordering[::-1]:
|
| 328 |
+
func = self.funcs[sig]
|
| 329 |
+
if func.__doc__:
|
| 330 |
+
s = 'Inputs: <%s>\n' % str_signature(sig)
|
| 331 |
+
s += '-' * len(s) + '\n'
|
| 332 |
+
s += func.__doc__.strip()
|
| 333 |
+
docs.append(s)
|
| 334 |
+
else:
|
| 335 |
+
other.append(str_signature(sig))
|
| 336 |
+
|
| 337 |
+
if other:
|
| 338 |
+
docs.append('Other signatures:\n ' + '\n '.join(other))
|
| 339 |
+
|
| 340 |
+
return '\n\n'.join(docs)
|
| 341 |
+
|
| 342 |
+
def _help(self, *args):
|
| 343 |
+
return self.dispatch(*map(type, args)).__doc__
|
| 344 |
+
|
| 345 |
+
def help(self, *args, **kwargs):
|
| 346 |
+
""" Print docstring for the function corresponding to inputs """
|
| 347 |
+
print(self._help(*args))
|
| 348 |
+
|
| 349 |
+
def _source(self, *args):
|
| 350 |
+
func = self.dispatch(*map(type, args))
|
| 351 |
+
if not func:
|
| 352 |
+
raise TypeError("No function found")
|
| 353 |
+
return source(func)
|
| 354 |
+
|
| 355 |
+
def source(self, *args, **kwargs):
|
| 356 |
+
""" Print source code for the function corresponding to inputs """
|
| 357 |
+
print(self._source(*args))
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def source(func):
|
| 361 |
+
s = 'File: %s\n\n' % inspect.getsourcefile(func)
|
| 362 |
+
s = s + inspect.getsource(func)
|
| 363 |
+
return s
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class MethodDispatcher(Dispatcher):
|
| 367 |
+
""" Dispatch methods based on type signature
|
| 368 |
+
|
| 369 |
+
See Also:
|
| 370 |
+
Dispatcher
|
| 371 |
+
"""
|
| 372 |
+
|
| 373 |
+
@classmethod
|
| 374 |
+
def get_func_params(cls, func):
|
| 375 |
+
if hasattr(inspect, "signature"):
|
| 376 |
+
sig = inspect.signature(func)
|
| 377 |
+
return itl.islice(sig.parameters.values(), 1, None)
|
| 378 |
+
|
| 379 |
+
def __get__(self, instance, owner):
|
| 380 |
+
self.obj = instance
|
| 381 |
+
self.cls = owner
|
| 382 |
+
return self
|
| 383 |
+
|
| 384 |
+
def __call__(self, *args, **kwargs):
|
| 385 |
+
types = tuple([type(arg) for arg in args])
|
| 386 |
+
func = self.dispatch(*types)
|
| 387 |
+
if not func:
|
| 388 |
+
raise NotImplementedError('Could not find signature for %s: <%s>' %
|
| 389 |
+
(self.name, str_signature(types)))
|
| 390 |
+
return func(self.obj, *args, **kwargs)
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def str_signature(sig):
|
| 394 |
+
""" String representation of type signature
|
| 395 |
+
|
| 396 |
+
>>> from sympy.multipledispatch.dispatcher import str_signature
|
| 397 |
+
>>> str_signature((int, float))
|
| 398 |
+
'int, float'
|
| 399 |
+
"""
|
| 400 |
+
return ', '.join(cls.__name__ for cls in sig)
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
def warning_text(name, amb):
|
| 404 |
+
""" The text for ambiguity warnings """
|
| 405 |
+
text = "\nAmbiguities exist in dispatched function %s\n\n" % (name)
|
| 406 |
+
text += "The following signatures may result in ambiguous behavior:\n"
|
| 407 |
+
for pair in amb:
|
| 408 |
+
text += "\t" + \
|
| 409 |
+
', '.join('[' + str_signature(s) + ']' for s in pair) + "\n"
|
| 410 |
+
text += "\n\nConsider making the following additions:\n\n"
|
| 411 |
+
text += '\n\n'.join(['@dispatch(' + str_signature(super_signature(s))
|
| 412 |
+
+ ')\ndef %s(...)' % name for s in amb])
|
| 413 |
+
return text
|
janus/lib/python3.10/site-packages/sympy/multipledispatch/tests/__init__.py
ADDED
|
File without changes
|