Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/LICENSE +20 -0
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/METADATA +46 -0
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/RECORD +43 -0
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/WHEEL +6 -0
- .venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/top_level.txt +2 -0
- .venv/lib/python3.11/site-packages/__pycache__/example.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/__pycache__/google_auth_httplib2.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/__pycache__/isympy.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/__pycache__/nest_asyncio.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/__pycache__/py.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/__pycache__/six.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/cffi/_cffi_errors.h +149 -0
- .venv/lib/python3.11/site-packages/cffi/_embedding.h +550 -0
- .venv/lib/python3.11/site-packages/cffi/backend_ctypes.py +1121 -0
- .venv/lib/python3.11/site-packages/cffi/cffi_opcode.py +187 -0
- .venv/lib/python3.11/site-packages/cffi/commontypes.py +82 -0
- .venv/lib/python3.11/site-packages/cffi/error.py +31 -0
- .venv/lib/python3.11/site-packages/cffi/lock.py +30 -0
- .venv/lib/python3.11/site-packages/cffi/recompiler.py +1598 -0
- .venv/lib/python3.11/site-packages/cffi/setuptools_ext.py +216 -0
- .venv/lib/python3.11/site-packages/cffi/vengine_gen.py +679 -0
- .venv/lib/python3.11/site-packages/diskcache/__init__.py +68 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/cli.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/djangocache.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/fanout.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/persistent.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/__pycache__/recipes.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/diskcache/cli.py +1 -0
- .venv/lib/python3.11/site-packages/diskcache/core.py +2452 -0
- .venv/lib/python3.11/site-packages/diskcache/djangocache.py +456 -0
- .venv/lib/python3.11/site-packages/diskcache/fanout.py +687 -0
- .venv/lib/python3.11/site-packages/diskcache/persistent.py +1245 -0
- .venv/lib/python3.11/site-packages/diskcache/recipes.py +488 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA +203 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD +54 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL +4 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt +2 -0
- .venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md +12 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/LICENSE +201 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/METADATA +503 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/RECORD +100 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/WHEEL +5 -0
- .venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/top_level.txt +1 -0
- .venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/options.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/playground.cpython-311.pyc +0 -0
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2017-2021 Ingy döt Net
|
| 2 |
+
Copyright (c) 2006-2016 Kirill Simonov
|
| 3 |
+
|
| 4 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 5 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 6 |
+
the Software without restriction, including without limitation the rights to
|
| 7 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 8 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 9 |
+
so, subject to the following conditions:
|
| 10 |
+
|
| 11 |
+
The above copyright notice and this permission notice shall be included in all
|
| 12 |
+
copies or substantial portions of the Software.
|
| 13 |
+
|
| 14 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 15 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 16 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 17 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 18 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 19 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 20 |
+
SOFTWARE.
|
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: PyYAML
|
| 3 |
+
Version: 6.0.2
|
| 4 |
+
Summary: YAML parser and emitter for Python
|
| 5 |
+
Home-page: https://pyyaml.org/
|
| 6 |
+
Download-URL: https://pypi.org/project/PyYAML/
|
| 7 |
+
Author: Kirill Simonov
|
| 8 |
+
Author-email: xi@resolvent.net
|
| 9 |
+
License: MIT
|
| 10 |
+
Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues
|
| 11 |
+
Project-URL: CI, https://github.com/yaml/pyyaml/actions
|
| 12 |
+
Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation
|
| 13 |
+
Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core
|
| 14 |
+
Project-URL: Source Code, https://github.com/yaml/pyyaml
|
| 15 |
+
Platform: Any
|
| 16 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 17 |
+
Classifier: Intended Audience :: Developers
|
| 18 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 19 |
+
Classifier: Operating System :: OS Independent
|
| 20 |
+
Classifier: Programming Language :: Cython
|
| 21 |
+
Classifier: Programming Language :: Python
|
| 22 |
+
Classifier: Programming Language :: Python :: 3
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 29 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 30 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 31 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
| 32 |
+
Classifier: Topic :: Text Processing :: Markup
|
| 33 |
+
Requires-Python: >=3.8
|
| 34 |
+
License-File: LICENSE
|
| 35 |
+
|
| 36 |
+
YAML is a data serialization format designed for human readability
|
| 37 |
+
and interaction with scripting languages. PyYAML is a YAML parser
|
| 38 |
+
and emitter for Python.
|
| 39 |
+
|
| 40 |
+
PyYAML features a complete YAML 1.1 parser, Unicode support, pickle
|
| 41 |
+
support, capable extension API, and sensible error messages. PyYAML
|
| 42 |
+
supports standard YAML tags and provides Python-specific tags that
|
| 43 |
+
allow to represent an arbitrary Python object.
|
| 44 |
+
|
| 45 |
+
PyYAML is applicable for a broad range of tasks from complex
|
| 46 |
+
configuration files to object serialization and persistence.
|
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101
|
| 3 |
+
PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060
|
| 4 |
+
PyYAML-6.0.2.dist-info/RECORD,,
|
| 5 |
+
PyYAML-6.0.2.dist-info/WHEEL,sha256=YWWHkv6sHhBDPNqgSfLklIm4KZnZJH4x2lIHOwCoU7Q,152
|
| 6 |
+
PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11
|
| 7 |
+
_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402
|
| 8 |
+
_yaml/__pycache__/__init__.cpython-311.pyc,,
|
| 9 |
+
yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311
|
| 10 |
+
yaml/__pycache__/__init__.cpython-311.pyc,,
|
| 11 |
+
yaml/__pycache__/composer.cpython-311.pyc,,
|
| 12 |
+
yaml/__pycache__/constructor.cpython-311.pyc,,
|
| 13 |
+
yaml/__pycache__/cyaml.cpython-311.pyc,,
|
| 14 |
+
yaml/__pycache__/dumper.cpython-311.pyc,,
|
| 15 |
+
yaml/__pycache__/emitter.cpython-311.pyc,,
|
| 16 |
+
yaml/__pycache__/error.cpython-311.pyc,,
|
| 17 |
+
yaml/__pycache__/events.cpython-311.pyc,,
|
| 18 |
+
yaml/__pycache__/loader.cpython-311.pyc,,
|
| 19 |
+
yaml/__pycache__/nodes.cpython-311.pyc,,
|
| 20 |
+
yaml/__pycache__/parser.cpython-311.pyc,,
|
| 21 |
+
yaml/__pycache__/reader.cpython-311.pyc,,
|
| 22 |
+
yaml/__pycache__/representer.cpython-311.pyc,,
|
| 23 |
+
yaml/__pycache__/resolver.cpython-311.pyc,,
|
| 24 |
+
yaml/__pycache__/scanner.cpython-311.pyc,,
|
| 25 |
+
yaml/__pycache__/serializer.cpython-311.pyc,,
|
| 26 |
+
yaml/__pycache__/tokens.cpython-311.pyc,,
|
| 27 |
+
yaml/_yaml.cpython-311-x86_64-linux-gnu.so,sha256=sZBsAqPs6VM8YzOkHpNL0qKIfR0zNM9gttjzjoVPaiI,2466120
|
| 28 |
+
yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883
|
| 29 |
+
yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639
|
| 30 |
+
yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851
|
| 31 |
+
yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837
|
| 32 |
+
yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006
|
| 33 |
+
yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533
|
| 34 |
+
yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445
|
| 35 |
+
yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061
|
| 36 |
+
yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440
|
| 37 |
+
yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495
|
| 38 |
+
yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794
|
| 39 |
+
yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190
|
| 40 |
+
yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004
|
| 41 |
+
yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279
|
| 42 |
+
yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165
|
| 43 |
+
yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573
|
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.44.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp311-cp311-manylinux2014_x86_64
|
| 6 |
+
|
.venv/lib/python3.11/site-packages/PyYAML-6.0.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
_yaml
|
| 2 |
+
yaml
|
.venv/lib/python3.11/site-packages/__pycache__/example.cpython-311.pyc
ADDED
|
Binary file (6.47 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/__pycache__/google_auth_httplib2.cpython-311.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/__pycache__/isympy.cpython-311.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/__pycache__/nest_asyncio.cpython-311.pyc
ADDED
|
Binary file (13.2 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/__pycache__/py.cpython-311.pyc
ADDED
|
Binary file (524 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/__pycache__/six.cpython-311.pyc
ADDED
|
Binary file (46.6 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/cffi/_cffi_errors.h
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#ifndef CFFI_MESSAGEBOX
|
| 2 |
+
# ifdef _MSC_VER
|
| 3 |
+
# define CFFI_MESSAGEBOX 1
|
| 4 |
+
# else
|
| 5 |
+
# define CFFI_MESSAGEBOX 0
|
| 6 |
+
# endif
|
| 7 |
+
#endif
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
#if CFFI_MESSAGEBOX
|
| 11 |
+
/* Windows only: logic to take the Python-CFFI embedding logic
|
| 12 |
+
initialization errors and display them in a background thread
|
| 13 |
+
with MessageBox. The idea is that if the whole program closes
|
| 14 |
+
as a result of this problem, then likely it is already a console
|
| 15 |
+
program and you can read the stderr output in the console too.
|
| 16 |
+
If it is not a console program, then it will likely show its own
|
| 17 |
+
dialog to complain, or generally not abruptly close, and for this
|
| 18 |
+
case the background thread should stay alive.
|
| 19 |
+
*/
|
| 20 |
+
static void *volatile _cffi_bootstrap_text;
|
| 21 |
+
|
| 22 |
+
static PyObject *_cffi_start_error_capture(void)
|
| 23 |
+
{
|
| 24 |
+
PyObject *result = NULL;
|
| 25 |
+
PyObject *x, *m, *bi;
|
| 26 |
+
|
| 27 |
+
if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
|
| 28 |
+
(void *)1, NULL) != NULL)
|
| 29 |
+
return (PyObject *)1;
|
| 30 |
+
|
| 31 |
+
m = PyImport_AddModule("_cffi_error_capture");
|
| 32 |
+
if (m == NULL)
|
| 33 |
+
goto error;
|
| 34 |
+
|
| 35 |
+
result = PyModule_GetDict(m);
|
| 36 |
+
if (result == NULL)
|
| 37 |
+
goto error;
|
| 38 |
+
|
| 39 |
+
#if PY_MAJOR_VERSION >= 3
|
| 40 |
+
bi = PyImport_ImportModule("builtins");
|
| 41 |
+
#else
|
| 42 |
+
bi = PyImport_ImportModule("__builtin__");
|
| 43 |
+
#endif
|
| 44 |
+
if (bi == NULL)
|
| 45 |
+
goto error;
|
| 46 |
+
PyDict_SetItemString(result, "__builtins__", bi);
|
| 47 |
+
Py_DECREF(bi);
|
| 48 |
+
|
| 49 |
+
x = PyRun_String(
|
| 50 |
+
"import sys\n"
|
| 51 |
+
"class FileLike:\n"
|
| 52 |
+
" def write(self, x):\n"
|
| 53 |
+
" try:\n"
|
| 54 |
+
" of.write(x)\n"
|
| 55 |
+
" except: pass\n"
|
| 56 |
+
" self.buf += x\n"
|
| 57 |
+
" def flush(self):\n"
|
| 58 |
+
" pass\n"
|
| 59 |
+
"fl = FileLike()\n"
|
| 60 |
+
"fl.buf = ''\n"
|
| 61 |
+
"of = sys.stderr\n"
|
| 62 |
+
"sys.stderr = fl\n"
|
| 63 |
+
"def done():\n"
|
| 64 |
+
" sys.stderr = of\n"
|
| 65 |
+
" return fl.buf\n", /* make sure the returned value stays alive */
|
| 66 |
+
Py_file_input,
|
| 67 |
+
result, result);
|
| 68 |
+
Py_XDECREF(x);
|
| 69 |
+
|
| 70 |
+
error:
|
| 71 |
+
if (PyErr_Occurred())
|
| 72 |
+
{
|
| 73 |
+
PyErr_WriteUnraisable(Py_None);
|
| 74 |
+
PyErr_Clear();
|
| 75 |
+
}
|
| 76 |
+
return result;
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
#pragma comment(lib, "user32.lib")
|
| 80 |
+
|
| 81 |
+
static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
|
| 82 |
+
{
|
| 83 |
+
Sleep(666); /* may be interrupted if the whole process is closing */
|
| 84 |
+
#if PY_MAJOR_VERSION >= 3
|
| 85 |
+
MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
|
| 86 |
+
L"Python-CFFI error",
|
| 87 |
+
MB_OK | MB_ICONERROR);
|
| 88 |
+
#else
|
| 89 |
+
MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
|
| 90 |
+
"Python-CFFI error",
|
| 91 |
+
MB_OK | MB_ICONERROR);
|
| 92 |
+
#endif
|
| 93 |
+
_cffi_bootstrap_text = NULL;
|
| 94 |
+
return 0;
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
static void _cffi_stop_error_capture(PyObject *ecap)
|
| 98 |
+
{
|
| 99 |
+
PyObject *s;
|
| 100 |
+
void *text;
|
| 101 |
+
|
| 102 |
+
if (ecap == (PyObject *)1)
|
| 103 |
+
return;
|
| 104 |
+
|
| 105 |
+
if (ecap == NULL)
|
| 106 |
+
goto error;
|
| 107 |
+
|
| 108 |
+
s = PyRun_String("done()", Py_eval_input, ecap, ecap);
|
| 109 |
+
if (s == NULL)
|
| 110 |
+
goto error;
|
| 111 |
+
|
| 112 |
+
/* Show a dialog box, but in a background thread, and
|
| 113 |
+
never show multiple dialog boxes at once. */
|
| 114 |
+
#if PY_MAJOR_VERSION >= 3
|
| 115 |
+
text = PyUnicode_AsWideCharString(s, NULL);
|
| 116 |
+
#else
|
| 117 |
+
text = PyString_AsString(s);
|
| 118 |
+
#endif
|
| 119 |
+
|
| 120 |
+
_cffi_bootstrap_text = text;
|
| 121 |
+
|
| 122 |
+
if (text != NULL)
|
| 123 |
+
{
|
| 124 |
+
HANDLE h;
|
| 125 |
+
h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
|
| 126 |
+
NULL, 0, NULL);
|
| 127 |
+
if (h != NULL)
|
| 128 |
+
CloseHandle(h);
|
| 129 |
+
}
|
| 130 |
+
/* decref the string, but it should stay alive as 'fl.buf'
|
| 131 |
+
in the small module above. It will really be freed only if
|
| 132 |
+
we later get another similar error. So it's a leak of at
|
| 133 |
+
most one copy of the small module. That's fine for this
|
| 134 |
+
situation which is usually a "fatal error" anyway. */
|
| 135 |
+
Py_DECREF(s);
|
| 136 |
+
PyErr_Clear();
|
| 137 |
+
return;
|
| 138 |
+
|
| 139 |
+
error:
|
| 140 |
+
_cffi_bootstrap_text = NULL;
|
| 141 |
+
PyErr_Clear();
|
| 142 |
+
}
|
| 143 |
+
|
| 144 |
+
#else
|
| 145 |
+
|
| 146 |
+
static PyObject *_cffi_start_error_capture(void) { return NULL; }
|
| 147 |
+
static void _cffi_stop_error_capture(PyObject *ecap) { }
|
| 148 |
+
|
| 149 |
+
#endif
|
.venv/lib/python3.11/site-packages/cffi/_embedding.h
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
/***** Support code for embedding *****/
|
| 3 |
+
|
| 4 |
+
#ifdef __cplusplus
|
| 5 |
+
extern "C" {
|
| 6 |
+
#endif
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
#if defined(_WIN32)
|
| 10 |
+
# define CFFI_DLLEXPORT __declspec(dllexport)
|
| 11 |
+
#elif defined(__GNUC__)
|
| 12 |
+
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
|
| 13 |
+
#else
|
| 14 |
+
# define CFFI_DLLEXPORT /* nothing */
|
| 15 |
+
#endif
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
/* There are two global variables of type _cffi_call_python_fnptr:
|
| 19 |
+
|
| 20 |
+
* _cffi_call_python, which we declare just below, is the one called
|
| 21 |
+
by ``extern "Python"`` implementations.
|
| 22 |
+
|
| 23 |
+
* _cffi_call_python_org, which on CPython is actually part of the
|
| 24 |
+
_cffi_exports[] array, is the function pointer copied from
|
| 25 |
+
_cffi_backend. If _cffi_start_python() fails, then this is set
|
| 26 |
+
to NULL; otherwise, it should never be NULL.
|
| 27 |
+
|
| 28 |
+
After initialization is complete, both are equal. However, the
|
| 29 |
+
first one remains equal to &_cffi_start_and_call_python until the
|
| 30 |
+
very end of initialization, when we are (or should be) sure that
|
| 31 |
+
concurrent threads also see a completely initialized world, and
|
| 32 |
+
only then is it changed.
|
| 33 |
+
*/
|
| 34 |
+
#undef _cffi_call_python
|
| 35 |
+
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
|
| 36 |
+
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
|
| 37 |
+
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
#ifndef _MSC_VER
|
| 41 |
+
/* --- Assuming a GCC not infinitely old --- */
|
| 42 |
+
# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
|
| 43 |
+
# define cffi_write_barrier() __sync_synchronize()
|
| 44 |
+
# if !defined(__amd64__) && !defined(__x86_64__) && \
|
| 45 |
+
!defined(__i386__) && !defined(__i386)
|
| 46 |
+
# define cffi_read_barrier() __sync_synchronize()
|
| 47 |
+
# else
|
| 48 |
+
# define cffi_read_barrier() (void)0
|
| 49 |
+
# endif
|
| 50 |
+
#else
|
| 51 |
+
/* --- Windows threads version --- */
|
| 52 |
+
# include <Windows.h>
|
| 53 |
+
# define cffi_compare_and_swap(l,o,n) \
|
| 54 |
+
(InterlockedCompareExchangePointer(l,n,o) == (o))
|
| 55 |
+
# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
|
| 56 |
+
# define cffi_read_barrier() (void)0
|
| 57 |
+
static volatile LONG _cffi_dummy;
|
| 58 |
+
#endif
|
| 59 |
+
|
| 60 |
+
#ifdef WITH_THREAD
|
| 61 |
+
# ifndef _MSC_VER
|
| 62 |
+
# include <pthread.h>
|
| 63 |
+
static pthread_mutex_t _cffi_embed_startup_lock;
|
| 64 |
+
# else
|
| 65 |
+
static CRITICAL_SECTION _cffi_embed_startup_lock;
|
| 66 |
+
# endif
|
| 67 |
+
static char _cffi_embed_startup_lock_ready = 0;
|
| 68 |
+
#endif
|
| 69 |
+
|
| 70 |
+
static void _cffi_acquire_reentrant_mutex(void)
|
| 71 |
+
{
|
| 72 |
+
static void *volatile lock = NULL;
|
| 73 |
+
|
| 74 |
+
while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
|
| 75 |
+
/* should ideally do a spin loop instruction here, but
|
| 76 |
+
hard to do it portably and doesn't really matter I
|
| 77 |
+
think: pthread_mutex_init() should be very fast, and
|
| 78 |
+
this is only run at start-up anyway. */
|
| 79 |
+
}
|
| 80 |
+
|
| 81 |
+
#ifdef WITH_THREAD
|
| 82 |
+
if (!_cffi_embed_startup_lock_ready) {
|
| 83 |
+
# ifndef _MSC_VER
|
| 84 |
+
pthread_mutexattr_t attr;
|
| 85 |
+
pthread_mutexattr_init(&attr);
|
| 86 |
+
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
|
| 87 |
+
pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
|
| 88 |
+
# else
|
| 89 |
+
InitializeCriticalSection(&_cffi_embed_startup_lock);
|
| 90 |
+
# endif
|
| 91 |
+
_cffi_embed_startup_lock_ready = 1;
|
| 92 |
+
}
|
| 93 |
+
#endif
|
| 94 |
+
|
| 95 |
+
while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
|
| 96 |
+
;
|
| 97 |
+
|
| 98 |
+
#ifndef _MSC_VER
|
| 99 |
+
pthread_mutex_lock(&_cffi_embed_startup_lock);
|
| 100 |
+
#else
|
| 101 |
+
EnterCriticalSection(&_cffi_embed_startup_lock);
|
| 102 |
+
#endif
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
static void _cffi_release_reentrant_mutex(void)
|
| 106 |
+
{
|
| 107 |
+
#ifndef _MSC_VER
|
| 108 |
+
pthread_mutex_unlock(&_cffi_embed_startup_lock);
|
| 109 |
+
#else
|
| 110 |
+
LeaveCriticalSection(&_cffi_embed_startup_lock);
|
| 111 |
+
#endif
|
| 112 |
+
}
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
/********** CPython-specific section **********/
|
| 116 |
+
#ifndef PYPY_VERSION
|
| 117 |
+
|
| 118 |
+
#include "_cffi_errors.h"
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
|
| 122 |
+
|
| 123 |
+
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
|
| 124 |
+
|
| 125 |
+
static void _cffi_py_initialize(void)
|
| 126 |
+
{
|
| 127 |
+
/* XXX use initsigs=0, which "skips initialization registration of
|
| 128 |
+
signal handlers, which might be useful when Python is
|
| 129 |
+
embedded" according to the Python docs. But review and think
|
| 130 |
+
if it should be a user-controllable setting.
|
| 131 |
+
|
| 132 |
+
XXX we should also give a way to write errors to a buffer
|
| 133 |
+
instead of to stderr.
|
| 134 |
+
|
| 135 |
+
XXX if importing 'site' fails, CPython (any version) calls
|
| 136 |
+
exit(). Should we try to work around this behavior here?
|
| 137 |
+
*/
|
| 138 |
+
Py_InitializeEx(0);
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
static int _cffi_initialize_python(void)
|
| 142 |
+
{
|
| 143 |
+
/* This initializes Python, imports _cffi_backend, and then the
|
| 144 |
+
present .dll/.so is set up as a CPython C extension module.
|
| 145 |
+
*/
|
| 146 |
+
int result;
|
| 147 |
+
PyGILState_STATE state;
|
| 148 |
+
PyObject *pycode=NULL, *global_dict=NULL, *x;
|
| 149 |
+
PyObject *builtins;
|
| 150 |
+
|
| 151 |
+
state = PyGILState_Ensure();
|
| 152 |
+
|
| 153 |
+
/* Call the initxxx() function from the present module. It will
|
| 154 |
+
create and initialize us as a CPython extension module, instead
|
| 155 |
+
of letting the startup Python code do it---it might reimport
|
| 156 |
+
the same .dll/.so and get maybe confused on some platforms.
|
| 157 |
+
It might also have troubles locating the .dll/.so again for all
|
| 158 |
+
I know.
|
| 159 |
+
*/
|
| 160 |
+
(void)_CFFI_PYTHON_STARTUP_FUNC();
|
| 161 |
+
if (PyErr_Occurred())
|
| 162 |
+
goto error;
|
| 163 |
+
|
| 164 |
+
/* Now run the Python code provided to ffi.embedding_init_code().
|
| 165 |
+
*/
|
| 166 |
+
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
|
| 167 |
+
"<init code for '" _CFFI_MODULE_NAME "'>",
|
| 168 |
+
Py_file_input);
|
| 169 |
+
if (pycode == NULL)
|
| 170 |
+
goto error;
|
| 171 |
+
global_dict = PyDict_New();
|
| 172 |
+
if (global_dict == NULL)
|
| 173 |
+
goto error;
|
| 174 |
+
builtins = PyEval_GetBuiltins();
|
| 175 |
+
if (builtins == NULL)
|
| 176 |
+
goto error;
|
| 177 |
+
if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
|
| 178 |
+
goto error;
|
| 179 |
+
x = PyEval_EvalCode(
|
| 180 |
+
#if PY_MAJOR_VERSION < 3
|
| 181 |
+
(PyCodeObject *)
|
| 182 |
+
#endif
|
| 183 |
+
pycode, global_dict, global_dict);
|
| 184 |
+
if (x == NULL)
|
| 185 |
+
goto error;
|
| 186 |
+
Py_DECREF(x);
|
| 187 |
+
|
| 188 |
+
/* Done! Now if we've been called from
|
| 189 |
+
_cffi_start_and_call_python() in an ``extern "Python"``, we can
|
| 190 |
+
only hope that the Python code did correctly set up the
|
| 191 |
+
corresponding @ffi.def_extern() function. Otherwise, the
|
| 192 |
+
general logic of ``extern "Python"`` functions (inside the
|
| 193 |
+
_cffi_backend module) will find that the reference is still
|
| 194 |
+
missing and print an error.
|
| 195 |
+
*/
|
| 196 |
+
result = 0;
|
| 197 |
+
done:
|
| 198 |
+
Py_XDECREF(pycode);
|
| 199 |
+
Py_XDECREF(global_dict);
|
| 200 |
+
PyGILState_Release(state);
|
| 201 |
+
return result;
|
| 202 |
+
|
| 203 |
+
error:;
|
| 204 |
+
{
|
| 205 |
+
/* Print as much information as potentially useful.
|
| 206 |
+
Debugging load-time failures with embedding is not fun
|
| 207 |
+
*/
|
| 208 |
+
PyObject *ecap;
|
| 209 |
+
PyObject *exception, *v, *tb, *f, *modules, *mod;
|
| 210 |
+
PyErr_Fetch(&exception, &v, &tb);
|
| 211 |
+
ecap = _cffi_start_error_capture();
|
| 212 |
+
f = PySys_GetObject((char *)"stderr");
|
| 213 |
+
if (f != NULL && f != Py_None) {
|
| 214 |
+
PyFile_WriteString(
|
| 215 |
+
"Failed to initialize the Python-CFFI embedding logic:\n\n", f);
|
| 216 |
+
}
|
| 217 |
+
|
| 218 |
+
if (exception != NULL) {
|
| 219 |
+
PyErr_NormalizeException(&exception, &v, &tb);
|
| 220 |
+
PyErr_Display(exception, v, tb);
|
| 221 |
+
}
|
| 222 |
+
Py_XDECREF(exception);
|
| 223 |
+
Py_XDECREF(v);
|
| 224 |
+
Py_XDECREF(tb);
|
| 225 |
+
|
| 226 |
+
if (f != NULL && f != Py_None) {
|
| 227 |
+
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
|
| 228 |
+
"\ncompiled with cffi version: 1.17.1"
|
| 229 |
+
"\n_cffi_backend module: ", f);
|
| 230 |
+
modules = PyImport_GetModuleDict();
|
| 231 |
+
mod = PyDict_GetItemString(modules, "_cffi_backend");
|
| 232 |
+
if (mod == NULL) {
|
| 233 |
+
PyFile_WriteString("not loaded", f);
|
| 234 |
+
}
|
| 235 |
+
else {
|
| 236 |
+
v = PyObject_GetAttrString(mod, "__file__");
|
| 237 |
+
PyFile_WriteObject(v, f, 0);
|
| 238 |
+
Py_XDECREF(v);
|
| 239 |
+
}
|
| 240 |
+
PyFile_WriteString("\nsys.path: ", f);
|
| 241 |
+
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
|
| 242 |
+
PyFile_WriteString("\n\n", f);
|
| 243 |
+
}
|
| 244 |
+
_cffi_stop_error_capture(ecap);
|
| 245 |
+
}
|
| 246 |
+
result = -1;
|
| 247 |
+
goto done;
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
#if PY_VERSION_HEX < 0x03080000
|
| 251 |
+
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
|
| 252 |
+
#endif
|
| 253 |
+
|
| 254 |
+
static int _cffi_carefully_make_gil(void)
|
| 255 |
+
{
|
| 256 |
+
/* This does the basic initialization of Python. It can be called
|
| 257 |
+
completely concurrently from unrelated threads. It assumes
|
| 258 |
+
that we don't hold the GIL before (if it exists), and we don't
|
| 259 |
+
hold it afterwards.
|
| 260 |
+
|
| 261 |
+
(What it really does used to be completely different in Python 2
|
| 262 |
+
and Python 3, with the Python 2 solution avoiding the spin-lock
|
| 263 |
+
around the Py_InitializeEx() call. However, after recent changes
|
| 264 |
+
to CPython 2.7 (issue #358) it no longer works. So we use the
|
| 265 |
+
Python 3 solution everywhere.)
|
| 266 |
+
|
| 267 |
+
This initializes Python by calling Py_InitializeEx().
|
| 268 |
+
Important: this must not be called concurrently at all.
|
| 269 |
+
So we use a global variable as a simple spin lock. This global
|
| 270 |
+
variable must be from 'libpythonX.Y.so', not from this
|
| 271 |
+
cffi-based extension module, because it must be shared from
|
| 272 |
+
different cffi-based extension modules.
|
| 273 |
+
|
| 274 |
+
In Python < 3.8, we choose
|
| 275 |
+
_PyParser_TokenNames[0] as a completely arbitrary pointer value
|
| 276 |
+
that is never written to. The default is to point to the
|
| 277 |
+
string "ENDMARKER". We change it temporarily to point to the
|
| 278 |
+
next character in that string. (Yes, I know it's REALLY
|
| 279 |
+
obscure.)
|
| 280 |
+
|
| 281 |
+
In Python >= 3.8, this string array is no longer writable, so
|
| 282 |
+
instead we pick PyCapsuleType.tp_version_tag. We can't change
|
| 283 |
+
Python < 3.8 because someone might use a mixture of cffi
|
| 284 |
+
embedded modules, some of which were compiled before this file
|
| 285 |
+
changed.
|
| 286 |
+
|
| 287 |
+
In Python >= 3.12, this stopped working because that particular
|
| 288 |
+
tp_version_tag gets modified during interpreter startup. It's
|
| 289 |
+
arguably a bad idea before 3.12 too, but again we can't change
|
| 290 |
+
that because someone might use a mixture of cffi embedded
|
| 291 |
+
modules, and no-one reported a bug so far. In Python >= 3.12
|
| 292 |
+
we go instead for PyCapsuleType.tp_as_buffer, which is supposed
|
| 293 |
+
to always be NULL. We write to it temporarily a pointer to
|
| 294 |
+
a struct full of NULLs, which is semantically the same.
|
| 295 |
+
*/
|
| 296 |
+
|
| 297 |
+
#ifdef WITH_THREAD
|
| 298 |
+
# if PY_VERSION_HEX < 0x03080000
|
| 299 |
+
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
|
| 300 |
+
char *old_value, *locked_value;
|
| 301 |
+
|
| 302 |
+
while (1) { /* spin loop */
|
| 303 |
+
old_value = *lock;
|
| 304 |
+
locked_value = old_value + 1;
|
| 305 |
+
if (old_value[0] == 'E') {
|
| 306 |
+
assert(old_value[1] == 'N');
|
| 307 |
+
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
| 308 |
+
break;
|
| 309 |
+
}
|
| 310 |
+
else {
|
| 311 |
+
assert(old_value[0] == 'N');
|
| 312 |
+
/* should ideally do a spin loop instruction here, but
|
| 313 |
+
hard to do it portably and doesn't really matter I
|
| 314 |
+
think: PyEval_InitThreads() should be very fast, and
|
| 315 |
+
this is only run at start-up anyway. */
|
| 316 |
+
}
|
| 317 |
+
}
|
| 318 |
+
# else
|
| 319 |
+
# if PY_VERSION_HEX < 0x030C0000
|
| 320 |
+
int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
|
| 321 |
+
int old_value, locked_value = -42;
|
| 322 |
+
assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
|
| 323 |
+
# else
|
| 324 |
+
static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs;
|
| 325 |
+
empty_buffer_procs.mark = -42;
|
| 326 |
+
PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *)
|
| 327 |
+
&PyCapsule_Type.tp_as_buffer;
|
| 328 |
+
PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf;
|
| 329 |
+
# endif
|
| 330 |
+
|
| 331 |
+
while (1) { /* spin loop */
|
| 332 |
+
old_value = *lock;
|
| 333 |
+
if (old_value == 0) {
|
| 334 |
+
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
| 335 |
+
break;
|
| 336 |
+
}
|
| 337 |
+
else {
|
| 338 |
+
# if PY_VERSION_HEX < 0x030C0000
|
| 339 |
+
assert(old_value == locked_value);
|
| 340 |
+
# else
|
| 341 |
+
/* The pointer should point to a possibly different
|
| 342 |
+
empty_buffer_procs from another C extension module */
|
| 343 |
+
assert(((struct ebp_s *)old_value)->mark == -42);
|
| 344 |
+
# endif
|
| 345 |
+
/* should ideally do a spin loop instruction here, but
|
| 346 |
+
hard to do it portably and doesn't really matter I
|
| 347 |
+
think: PyEval_InitThreads() should be very fast, and
|
| 348 |
+
this is only run at start-up anyway. */
|
| 349 |
+
}
|
| 350 |
+
}
|
| 351 |
+
# endif
|
| 352 |
+
#endif
|
| 353 |
+
|
| 354 |
+
/* call Py_InitializeEx() */
|
| 355 |
+
if (!Py_IsInitialized()) {
|
| 356 |
+
_cffi_py_initialize();
|
| 357 |
+
#if PY_VERSION_HEX < 0x03070000
|
| 358 |
+
PyEval_InitThreads();
|
| 359 |
+
#endif
|
| 360 |
+
PyEval_SaveThread(); /* release the GIL */
|
| 361 |
+
/* the returned tstate must be the one that has been stored into the
|
| 362 |
+
autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
|
| 363 |
+
}
|
| 364 |
+
else {
|
| 365 |
+
#if PY_VERSION_HEX < 0x03070000
|
| 366 |
+
/* PyEval_InitThreads() is always a no-op from CPython 3.7 */
|
| 367 |
+
PyGILState_STATE state = PyGILState_Ensure();
|
| 368 |
+
PyEval_InitThreads();
|
| 369 |
+
PyGILState_Release(state);
|
| 370 |
+
#endif
|
| 371 |
+
}
|
| 372 |
+
|
| 373 |
+
#ifdef WITH_THREAD
|
| 374 |
+
/* release the lock */
|
| 375 |
+
while (!cffi_compare_and_swap(lock, locked_value, old_value))
|
| 376 |
+
;
|
| 377 |
+
#endif
|
| 378 |
+
|
| 379 |
+
return 0;
|
| 380 |
+
}
|
| 381 |
+
|
| 382 |
+
/********** end CPython-specific section **********/
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
#else
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
/********** PyPy-specific section **********/
|
| 389 |
+
|
| 390 |
+
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
|
| 391 |
+
|
| 392 |
+
static struct _cffi_pypy_init_s {
|
| 393 |
+
const char *name;
|
| 394 |
+
void *func; /* function pointer */
|
| 395 |
+
const char *code;
|
| 396 |
+
} _cffi_pypy_init = {
|
| 397 |
+
_CFFI_MODULE_NAME,
|
| 398 |
+
_CFFI_PYTHON_STARTUP_FUNC,
|
| 399 |
+
_CFFI_PYTHON_STARTUP_CODE,
|
| 400 |
+
};
|
| 401 |
+
|
| 402 |
+
extern int pypy_carefully_make_gil(const char *);
|
| 403 |
+
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
|
| 404 |
+
|
| 405 |
+
static int _cffi_carefully_make_gil(void)
|
| 406 |
+
{
|
| 407 |
+
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
|
| 408 |
+
}
|
| 409 |
+
|
| 410 |
+
static int _cffi_initialize_python(void)
|
| 411 |
+
{
|
| 412 |
+
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
|
| 413 |
+
}
|
| 414 |
+
|
| 415 |
+
/********** end PyPy-specific section **********/
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
#endif
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
#ifdef __GNUC__
|
| 422 |
+
__attribute__((noinline))
|
| 423 |
+
#endif
|
| 424 |
+
static _cffi_call_python_fnptr _cffi_start_python(void)
|
| 425 |
+
{
|
| 426 |
+
/* Delicate logic to initialize Python. This function can be
|
| 427 |
+
called multiple times concurrently, e.g. when the process calls
|
| 428 |
+
its first ``extern "Python"`` functions in multiple threads at
|
| 429 |
+
once. It can also be called recursively, in which case we must
|
| 430 |
+
ignore it. We also have to consider what occurs if several
|
| 431 |
+
different cffi-based extensions reach this code in parallel
|
| 432 |
+
threads---it is a different copy of the code, then, and we
|
| 433 |
+
can't have any shared global variable unless it comes from
|
| 434 |
+
'libpythonX.Y.so'.
|
| 435 |
+
|
| 436 |
+
Idea:
|
| 437 |
+
|
| 438 |
+
* _cffi_carefully_make_gil(): "carefully" call
|
| 439 |
+
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
|
| 440 |
+
|
| 441 |
+
* then we use a (local) custom lock to make sure that a call to this
|
| 442 |
+
cffi-based extension will wait if another call to the *same*
|
| 443 |
+
extension is running the initialization in another thread.
|
| 444 |
+
It is reentrant, so that a recursive call will not block, but
|
| 445 |
+
only one from a different thread.
|
| 446 |
+
|
| 447 |
+
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
|
| 448 |
+
At this point, concurrent calls to Py_InitializeEx() are not
|
| 449 |
+
possible: we have the GIL.
|
| 450 |
+
|
| 451 |
+
* do the rest of the specific initialization, which may
|
| 452 |
+
temporarily release the GIL but not the custom lock.
|
| 453 |
+
Only release the custom lock when we are done.
|
| 454 |
+
*/
|
| 455 |
+
static char called = 0;
|
| 456 |
+
|
| 457 |
+
if (_cffi_carefully_make_gil() != 0)
|
| 458 |
+
return NULL;
|
| 459 |
+
|
| 460 |
+
_cffi_acquire_reentrant_mutex();
|
| 461 |
+
|
| 462 |
+
/* Here the GIL exists, but we don't have it. We're only protected
|
| 463 |
+
from concurrency by the reentrant mutex. */
|
| 464 |
+
|
| 465 |
+
/* This file only initializes the embedded module once, the first
|
| 466 |
+
time this is called, even if there are subinterpreters. */
|
| 467 |
+
if (!called) {
|
| 468 |
+
called = 1; /* invoke _cffi_initialize_python() only once,
|
| 469 |
+
but don't set '_cffi_call_python' right now,
|
| 470 |
+
otherwise concurrent threads won't call
|
| 471 |
+
this function at all (we need them to wait) */
|
| 472 |
+
if (_cffi_initialize_python() == 0) {
|
| 473 |
+
/* now initialization is finished. Switch to the fast-path. */
|
| 474 |
+
|
| 475 |
+
/* We would like nobody to see the new value of
|
| 476 |
+
'_cffi_call_python' without also seeing the rest of the
|
| 477 |
+
data initialized. However, this is not possible. But
|
| 478 |
+
the new value of '_cffi_call_python' is the function
|
| 479 |
+
'cffi_call_python()' from _cffi_backend. So: */
|
| 480 |
+
cffi_write_barrier();
|
| 481 |
+
/* ^^^ we put a write barrier here, and a corresponding
|
| 482 |
+
read barrier at the start of cffi_call_python(). This
|
| 483 |
+
ensures that after that read barrier, we see everything
|
| 484 |
+
done here before the write barrier.
|
| 485 |
+
*/
|
| 486 |
+
|
| 487 |
+
assert(_cffi_call_python_org != NULL);
|
| 488 |
+
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
|
| 489 |
+
}
|
| 490 |
+
else {
|
| 491 |
+
/* initialization failed. Reset this to NULL, even if it was
|
| 492 |
+
already set to some other value. Future calls to
|
| 493 |
+
_cffi_start_python() are still forced to occur, and will
|
| 494 |
+
always return NULL from now on. */
|
| 495 |
+
_cffi_call_python_org = NULL;
|
| 496 |
+
}
|
| 497 |
+
}
|
| 498 |
+
|
| 499 |
+
_cffi_release_reentrant_mutex();
|
| 500 |
+
|
| 501 |
+
return (_cffi_call_python_fnptr)_cffi_call_python_org;
|
| 502 |
+
}
|
| 503 |
+
|
| 504 |
+
static
|
| 505 |
+
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
|
| 506 |
+
{
|
| 507 |
+
_cffi_call_python_fnptr fnptr;
|
| 508 |
+
int current_err = errno;
|
| 509 |
+
#ifdef _MSC_VER
|
| 510 |
+
int current_lasterr = GetLastError();
|
| 511 |
+
#endif
|
| 512 |
+
fnptr = _cffi_start_python();
|
| 513 |
+
if (fnptr == NULL) {
|
| 514 |
+
fprintf(stderr, "function %s() called, but initialization code "
|
| 515 |
+
"failed. Returning 0.\n", externpy->name);
|
| 516 |
+
memset(args, 0, externpy->size_of_result);
|
| 517 |
+
}
|
| 518 |
+
#ifdef _MSC_VER
|
| 519 |
+
SetLastError(current_lasterr);
|
| 520 |
+
#endif
|
| 521 |
+
errno = current_err;
|
| 522 |
+
|
| 523 |
+
if (fnptr != NULL)
|
| 524 |
+
fnptr(externpy, args);
|
| 525 |
+
}
|
| 526 |
+
|
| 527 |
+
|
| 528 |
+
/* The cffi_start_python() function makes sure Python is initialized
|
| 529 |
+
and our cffi module is set up. It can be called manually from the
|
| 530 |
+
user C code. The same effect is obtained automatically from any
|
| 531 |
+
dll-exported ``extern "Python"`` function. This function returns
|
| 532 |
+
-1 if initialization failed, 0 if all is OK. */
|
| 533 |
+
_CFFI_UNUSED_FN
|
| 534 |
+
static int cffi_start_python(void)
|
| 535 |
+
{
|
| 536 |
+
if (_cffi_call_python == &_cffi_start_and_call_python) {
|
| 537 |
+
if (_cffi_start_python() == NULL)
|
| 538 |
+
return -1;
|
| 539 |
+
}
|
| 540 |
+
cffi_read_barrier();
|
| 541 |
+
return 0;
|
| 542 |
+
}
|
| 543 |
+
|
| 544 |
+
#undef cffi_compare_and_swap
|
| 545 |
+
#undef cffi_write_barrier
|
| 546 |
+
#undef cffi_read_barrier
|
| 547 |
+
|
| 548 |
+
#ifdef __cplusplus
|
| 549 |
+
}
|
| 550 |
+
#endif
|
.venv/lib/python3.11/site-packages/cffi/backend_ctypes.py
ADDED
|
@@ -0,0 +1,1121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import ctypes, ctypes.util, operator, sys
|
| 2 |
+
from . import model
|
| 3 |
+
|
| 4 |
+
if sys.version_info < (3,):
|
| 5 |
+
bytechr = chr
|
| 6 |
+
else:
|
| 7 |
+
unicode = str
|
| 8 |
+
long = int
|
| 9 |
+
xrange = range
|
| 10 |
+
bytechr = lambda num: bytes([num])
|
| 11 |
+
|
| 12 |
+
class CTypesType(type):
|
| 13 |
+
pass
|
| 14 |
+
|
| 15 |
+
class CTypesData(object):
|
| 16 |
+
__metaclass__ = CTypesType
|
| 17 |
+
__slots__ = ['__weakref__']
|
| 18 |
+
__name__ = '<cdata>'
|
| 19 |
+
|
| 20 |
+
def __init__(self, *args):
|
| 21 |
+
raise TypeError("cannot instantiate %r" % (self.__class__,))
|
| 22 |
+
|
| 23 |
+
@classmethod
|
| 24 |
+
def _newp(cls, init):
|
| 25 |
+
raise TypeError("expected a pointer or array ctype, got '%s'"
|
| 26 |
+
% (cls._get_c_name(),))
|
| 27 |
+
|
| 28 |
+
@staticmethod
|
| 29 |
+
def _to_ctypes(value):
|
| 30 |
+
raise TypeError
|
| 31 |
+
|
| 32 |
+
@classmethod
|
| 33 |
+
def _arg_to_ctypes(cls, *value):
|
| 34 |
+
try:
|
| 35 |
+
ctype = cls._ctype
|
| 36 |
+
except AttributeError:
|
| 37 |
+
raise TypeError("cannot create an instance of %r" % (cls,))
|
| 38 |
+
if value:
|
| 39 |
+
res = cls._to_ctypes(*value)
|
| 40 |
+
if not isinstance(res, ctype):
|
| 41 |
+
res = cls._ctype(res)
|
| 42 |
+
else:
|
| 43 |
+
res = cls._ctype()
|
| 44 |
+
return res
|
| 45 |
+
|
| 46 |
+
@classmethod
|
| 47 |
+
def _create_ctype_obj(cls, init):
|
| 48 |
+
if init is None:
|
| 49 |
+
return cls._arg_to_ctypes()
|
| 50 |
+
else:
|
| 51 |
+
return cls._arg_to_ctypes(init)
|
| 52 |
+
|
| 53 |
+
@staticmethod
|
| 54 |
+
def _from_ctypes(ctypes_value):
|
| 55 |
+
raise TypeError
|
| 56 |
+
|
| 57 |
+
@classmethod
|
| 58 |
+
def _get_c_name(cls, replace_with=''):
|
| 59 |
+
return cls._reftypename.replace(' &', replace_with)
|
| 60 |
+
|
| 61 |
+
@classmethod
|
| 62 |
+
def _fix_class(cls):
|
| 63 |
+
cls.__name__ = 'CData<%s>' % (cls._get_c_name(),)
|
| 64 |
+
cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),)
|
| 65 |
+
cls.__module__ = 'ffi'
|
| 66 |
+
|
| 67 |
+
def _get_own_repr(self):
|
| 68 |
+
raise NotImplementedError
|
| 69 |
+
|
| 70 |
+
def _addr_repr(self, address):
|
| 71 |
+
if address == 0:
|
| 72 |
+
return 'NULL'
|
| 73 |
+
else:
|
| 74 |
+
if address < 0:
|
| 75 |
+
address += 1 << (8*ctypes.sizeof(ctypes.c_void_p))
|
| 76 |
+
return '0x%x' % address
|
| 77 |
+
|
| 78 |
+
def __repr__(self, c_name=None):
|
| 79 |
+
own = self._get_own_repr()
|
| 80 |
+
return '<cdata %r %s>' % (c_name or self._get_c_name(), own)
|
| 81 |
+
|
| 82 |
+
def _convert_to_address(self, BClass):
|
| 83 |
+
if BClass is None:
|
| 84 |
+
raise TypeError("cannot convert %r to an address" % (
|
| 85 |
+
self._get_c_name(),))
|
| 86 |
+
else:
|
| 87 |
+
raise TypeError("cannot convert %r to %r" % (
|
| 88 |
+
self._get_c_name(), BClass._get_c_name()))
|
| 89 |
+
|
| 90 |
+
@classmethod
|
| 91 |
+
def _get_size(cls):
|
| 92 |
+
return ctypes.sizeof(cls._ctype)
|
| 93 |
+
|
| 94 |
+
def _get_size_of_instance(self):
|
| 95 |
+
return ctypes.sizeof(self._ctype)
|
| 96 |
+
|
| 97 |
+
@classmethod
|
| 98 |
+
def _cast_from(cls, source):
|
| 99 |
+
raise TypeError("cannot cast to %r" % (cls._get_c_name(),))
|
| 100 |
+
|
| 101 |
+
def _cast_to_integer(self):
|
| 102 |
+
return self._convert_to_address(None)
|
| 103 |
+
|
| 104 |
+
@classmethod
|
| 105 |
+
def _alignment(cls):
|
| 106 |
+
return ctypes.alignment(cls._ctype)
|
| 107 |
+
|
| 108 |
+
def __iter__(self):
|
| 109 |
+
raise TypeError("cdata %r does not support iteration" % (
|
| 110 |
+
self._get_c_name()),)
|
| 111 |
+
|
| 112 |
+
def _make_cmp(name):
|
| 113 |
+
cmpfunc = getattr(operator, name)
|
| 114 |
+
def cmp(self, other):
|
| 115 |
+
v_is_ptr = not isinstance(self, CTypesGenericPrimitive)
|
| 116 |
+
w_is_ptr = (isinstance(other, CTypesData) and
|
| 117 |
+
not isinstance(other, CTypesGenericPrimitive))
|
| 118 |
+
if v_is_ptr and w_is_ptr:
|
| 119 |
+
return cmpfunc(self._convert_to_address(None),
|
| 120 |
+
other._convert_to_address(None))
|
| 121 |
+
elif v_is_ptr or w_is_ptr:
|
| 122 |
+
return NotImplemented
|
| 123 |
+
else:
|
| 124 |
+
if isinstance(self, CTypesGenericPrimitive):
|
| 125 |
+
self = self._value
|
| 126 |
+
if isinstance(other, CTypesGenericPrimitive):
|
| 127 |
+
other = other._value
|
| 128 |
+
return cmpfunc(self, other)
|
| 129 |
+
cmp.func_name = name
|
| 130 |
+
return cmp
|
| 131 |
+
|
| 132 |
+
__eq__ = _make_cmp('__eq__')
|
| 133 |
+
__ne__ = _make_cmp('__ne__')
|
| 134 |
+
__lt__ = _make_cmp('__lt__')
|
| 135 |
+
__le__ = _make_cmp('__le__')
|
| 136 |
+
__gt__ = _make_cmp('__gt__')
|
| 137 |
+
__ge__ = _make_cmp('__ge__')
|
| 138 |
+
|
| 139 |
+
def __hash__(self):
|
| 140 |
+
return hash(self._convert_to_address(None))
|
| 141 |
+
|
| 142 |
+
def _to_string(self, maxlen):
|
| 143 |
+
raise TypeError("string(): %r" % (self,))
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class CTypesGenericPrimitive(CTypesData):
|
| 147 |
+
__slots__ = []
|
| 148 |
+
|
| 149 |
+
def __hash__(self):
|
| 150 |
+
return hash(self._value)
|
| 151 |
+
|
| 152 |
+
def _get_own_repr(self):
|
| 153 |
+
return repr(self._from_ctypes(self._value))
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class CTypesGenericArray(CTypesData):
|
| 157 |
+
__slots__ = []
|
| 158 |
+
|
| 159 |
+
@classmethod
|
| 160 |
+
def _newp(cls, init):
|
| 161 |
+
return cls(init)
|
| 162 |
+
|
| 163 |
+
def __iter__(self):
|
| 164 |
+
for i in xrange(len(self)):
|
| 165 |
+
yield self[i]
|
| 166 |
+
|
| 167 |
+
def _get_own_repr(self):
|
| 168 |
+
return self._addr_repr(ctypes.addressof(self._blob))
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class CTypesGenericPtr(CTypesData):
|
| 172 |
+
__slots__ = ['_address', '_as_ctype_ptr']
|
| 173 |
+
_automatic_casts = False
|
| 174 |
+
kind = "pointer"
|
| 175 |
+
|
| 176 |
+
@classmethod
|
| 177 |
+
def _newp(cls, init):
|
| 178 |
+
return cls(init)
|
| 179 |
+
|
| 180 |
+
@classmethod
|
| 181 |
+
def _cast_from(cls, source):
|
| 182 |
+
if source is None:
|
| 183 |
+
address = 0
|
| 184 |
+
elif isinstance(source, CTypesData):
|
| 185 |
+
address = source._cast_to_integer()
|
| 186 |
+
elif isinstance(source, (int, long)):
|
| 187 |
+
address = source
|
| 188 |
+
else:
|
| 189 |
+
raise TypeError("bad type for cast to %r: %r" %
|
| 190 |
+
(cls, type(source).__name__))
|
| 191 |
+
return cls._new_pointer_at(address)
|
| 192 |
+
|
| 193 |
+
@classmethod
|
| 194 |
+
def _new_pointer_at(cls, address):
|
| 195 |
+
self = cls.__new__(cls)
|
| 196 |
+
self._address = address
|
| 197 |
+
self._as_ctype_ptr = ctypes.cast(address, cls._ctype)
|
| 198 |
+
return self
|
| 199 |
+
|
| 200 |
+
def _get_own_repr(self):
|
| 201 |
+
try:
|
| 202 |
+
return self._addr_repr(self._address)
|
| 203 |
+
except AttributeError:
|
| 204 |
+
return '???'
|
| 205 |
+
|
| 206 |
+
def _cast_to_integer(self):
|
| 207 |
+
return self._address
|
| 208 |
+
|
| 209 |
+
def __nonzero__(self):
|
| 210 |
+
return bool(self._address)
|
| 211 |
+
__bool__ = __nonzero__
|
| 212 |
+
|
| 213 |
+
@classmethod
|
| 214 |
+
def _to_ctypes(cls, value):
|
| 215 |
+
if not isinstance(value, CTypesData):
|
| 216 |
+
raise TypeError("unexpected %s object" % type(value).__name__)
|
| 217 |
+
address = value._convert_to_address(cls)
|
| 218 |
+
return ctypes.cast(address, cls._ctype)
|
| 219 |
+
|
| 220 |
+
@classmethod
|
| 221 |
+
def _from_ctypes(cls, ctypes_ptr):
|
| 222 |
+
address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0
|
| 223 |
+
return cls._new_pointer_at(address)
|
| 224 |
+
|
| 225 |
+
@classmethod
|
| 226 |
+
def _initialize(cls, ctypes_ptr, value):
|
| 227 |
+
if value:
|
| 228 |
+
ctypes_ptr.contents = cls._to_ctypes(value).contents
|
| 229 |
+
|
| 230 |
+
def _convert_to_address(self, BClass):
|
| 231 |
+
if (BClass in (self.__class__, None) or BClass._automatic_casts
|
| 232 |
+
or self._automatic_casts):
|
| 233 |
+
return self._address
|
| 234 |
+
else:
|
| 235 |
+
return CTypesData._convert_to_address(self, BClass)
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class CTypesBaseStructOrUnion(CTypesData):
|
| 239 |
+
__slots__ = ['_blob']
|
| 240 |
+
|
| 241 |
+
@classmethod
|
| 242 |
+
def _create_ctype_obj(cls, init):
|
| 243 |
+
# may be overridden
|
| 244 |
+
raise TypeError("cannot instantiate opaque type %s" % (cls,))
|
| 245 |
+
|
| 246 |
+
def _get_own_repr(self):
|
| 247 |
+
return self._addr_repr(ctypes.addressof(self._blob))
|
| 248 |
+
|
| 249 |
+
@classmethod
|
| 250 |
+
def _offsetof(cls, fieldname):
|
| 251 |
+
return getattr(cls._ctype, fieldname).offset
|
| 252 |
+
|
| 253 |
+
def _convert_to_address(self, BClass):
|
| 254 |
+
if getattr(BClass, '_BItem', None) is self.__class__:
|
| 255 |
+
return ctypes.addressof(self._blob)
|
| 256 |
+
else:
|
| 257 |
+
return CTypesData._convert_to_address(self, BClass)
|
| 258 |
+
|
| 259 |
+
@classmethod
|
| 260 |
+
def _from_ctypes(cls, ctypes_struct_or_union):
|
| 261 |
+
self = cls.__new__(cls)
|
| 262 |
+
self._blob = ctypes_struct_or_union
|
| 263 |
+
return self
|
| 264 |
+
|
| 265 |
+
@classmethod
|
| 266 |
+
def _to_ctypes(cls, value):
|
| 267 |
+
return value._blob
|
| 268 |
+
|
| 269 |
+
def __repr__(self, c_name=None):
|
| 270 |
+
return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
class CTypesBackend(object):
|
| 274 |
+
|
| 275 |
+
PRIMITIVE_TYPES = {
|
| 276 |
+
'char': ctypes.c_char,
|
| 277 |
+
'short': ctypes.c_short,
|
| 278 |
+
'int': ctypes.c_int,
|
| 279 |
+
'long': ctypes.c_long,
|
| 280 |
+
'long long': ctypes.c_longlong,
|
| 281 |
+
'signed char': ctypes.c_byte,
|
| 282 |
+
'unsigned char': ctypes.c_ubyte,
|
| 283 |
+
'unsigned short': ctypes.c_ushort,
|
| 284 |
+
'unsigned int': ctypes.c_uint,
|
| 285 |
+
'unsigned long': ctypes.c_ulong,
|
| 286 |
+
'unsigned long long': ctypes.c_ulonglong,
|
| 287 |
+
'float': ctypes.c_float,
|
| 288 |
+
'double': ctypes.c_double,
|
| 289 |
+
'_Bool': ctypes.c_bool,
|
| 290 |
+
}
|
| 291 |
+
|
| 292 |
+
for _name in ['unsigned long long', 'unsigned long',
|
| 293 |
+
'unsigned int', 'unsigned short', 'unsigned char']:
|
| 294 |
+
_size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
|
| 295 |
+
PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
|
| 296 |
+
if _size == ctypes.sizeof(ctypes.c_void_p):
|
| 297 |
+
PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name]
|
| 298 |
+
if _size == ctypes.sizeof(ctypes.c_size_t):
|
| 299 |
+
PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name]
|
| 300 |
+
|
| 301 |
+
for _name in ['long long', 'long', 'int', 'short', 'signed char']:
|
| 302 |
+
_size = ctypes.sizeof(PRIMITIVE_TYPES[_name])
|
| 303 |
+
PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name]
|
| 304 |
+
if _size == ctypes.sizeof(ctypes.c_void_p):
|
| 305 |
+
PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name]
|
| 306 |
+
PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name]
|
| 307 |
+
if _size == ctypes.sizeof(ctypes.c_size_t):
|
| 308 |
+
PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name]
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
def __init__(self):
|
| 312 |
+
self.RTLD_LAZY = 0 # not supported anyway by ctypes
|
| 313 |
+
self.RTLD_NOW = 0
|
| 314 |
+
self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL
|
| 315 |
+
self.RTLD_LOCAL = ctypes.RTLD_LOCAL
|
| 316 |
+
|
| 317 |
+
def set_ffi(self, ffi):
|
| 318 |
+
self.ffi = ffi
|
| 319 |
+
|
| 320 |
+
def _get_types(self):
|
| 321 |
+
return CTypesData, CTypesType
|
| 322 |
+
|
| 323 |
+
def load_library(self, path, flags=0):
|
| 324 |
+
cdll = ctypes.CDLL(path, flags)
|
| 325 |
+
return CTypesLibrary(self, cdll)
|
| 326 |
+
|
| 327 |
+
def new_void_type(self):
|
| 328 |
+
class CTypesVoid(CTypesData):
|
| 329 |
+
__slots__ = []
|
| 330 |
+
_reftypename = 'void &'
|
| 331 |
+
@staticmethod
|
| 332 |
+
def _from_ctypes(novalue):
|
| 333 |
+
return None
|
| 334 |
+
@staticmethod
|
| 335 |
+
def _to_ctypes(novalue):
|
| 336 |
+
if novalue is not None:
|
| 337 |
+
raise TypeError("None expected, got %s object" %
|
| 338 |
+
(type(novalue).__name__,))
|
| 339 |
+
return None
|
| 340 |
+
CTypesVoid._fix_class()
|
| 341 |
+
return CTypesVoid
|
| 342 |
+
|
| 343 |
+
def new_primitive_type(self, name):
|
| 344 |
+
if name == 'wchar_t':
|
| 345 |
+
raise NotImplementedError(name)
|
| 346 |
+
ctype = self.PRIMITIVE_TYPES[name]
|
| 347 |
+
if name == 'char':
|
| 348 |
+
kind = 'char'
|
| 349 |
+
elif name in ('float', 'double'):
|
| 350 |
+
kind = 'float'
|
| 351 |
+
else:
|
| 352 |
+
if name in ('signed char', 'unsigned char'):
|
| 353 |
+
kind = 'byte'
|
| 354 |
+
elif name == '_Bool':
|
| 355 |
+
kind = 'bool'
|
| 356 |
+
else:
|
| 357 |
+
kind = 'int'
|
| 358 |
+
is_signed = (ctype(-1).value == -1)
|
| 359 |
+
#
|
| 360 |
+
def _cast_source_to_int(source):
|
| 361 |
+
if isinstance(source, (int, long, float)):
|
| 362 |
+
source = int(source)
|
| 363 |
+
elif isinstance(source, CTypesData):
|
| 364 |
+
source = source._cast_to_integer()
|
| 365 |
+
elif isinstance(source, bytes):
|
| 366 |
+
source = ord(source)
|
| 367 |
+
elif source is None:
|
| 368 |
+
source = 0
|
| 369 |
+
else:
|
| 370 |
+
raise TypeError("bad type for cast to %r: %r" %
|
| 371 |
+
(CTypesPrimitive, type(source).__name__))
|
| 372 |
+
return source
|
| 373 |
+
#
|
| 374 |
+
kind1 = kind
|
| 375 |
+
class CTypesPrimitive(CTypesGenericPrimitive):
|
| 376 |
+
__slots__ = ['_value']
|
| 377 |
+
_ctype = ctype
|
| 378 |
+
_reftypename = '%s &' % name
|
| 379 |
+
kind = kind1
|
| 380 |
+
|
| 381 |
+
def __init__(self, value):
|
| 382 |
+
self._value = value
|
| 383 |
+
|
| 384 |
+
@staticmethod
|
| 385 |
+
def _create_ctype_obj(init):
|
| 386 |
+
if init is None:
|
| 387 |
+
return ctype()
|
| 388 |
+
return ctype(CTypesPrimitive._to_ctypes(init))
|
| 389 |
+
|
| 390 |
+
if kind == 'int' or kind == 'byte':
|
| 391 |
+
@classmethod
|
| 392 |
+
def _cast_from(cls, source):
|
| 393 |
+
source = _cast_source_to_int(source)
|
| 394 |
+
source = ctype(source).value # cast within range
|
| 395 |
+
return cls(source)
|
| 396 |
+
def __int__(self):
|
| 397 |
+
return self._value
|
| 398 |
+
|
| 399 |
+
if kind == 'bool':
|
| 400 |
+
@classmethod
|
| 401 |
+
def _cast_from(cls, source):
|
| 402 |
+
if not isinstance(source, (int, long, float)):
|
| 403 |
+
source = _cast_source_to_int(source)
|
| 404 |
+
return cls(bool(source))
|
| 405 |
+
def __int__(self):
|
| 406 |
+
return int(self._value)
|
| 407 |
+
|
| 408 |
+
if kind == 'char':
|
| 409 |
+
@classmethod
|
| 410 |
+
def _cast_from(cls, source):
|
| 411 |
+
source = _cast_source_to_int(source)
|
| 412 |
+
source = bytechr(source & 0xFF)
|
| 413 |
+
return cls(source)
|
| 414 |
+
def __int__(self):
|
| 415 |
+
return ord(self._value)
|
| 416 |
+
|
| 417 |
+
if kind == 'float':
|
| 418 |
+
@classmethod
|
| 419 |
+
def _cast_from(cls, source):
|
| 420 |
+
if isinstance(source, float):
|
| 421 |
+
pass
|
| 422 |
+
elif isinstance(source, CTypesGenericPrimitive):
|
| 423 |
+
if hasattr(source, '__float__'):
|
| 424 |
+
source = float(source)
|
| 425 |
+
else:
|
| 426 |
+
source = int(source)
|
| 427 |
+
else:
|
| 428 |
+
source = _cast_source_to_int(source)
|
| 429 |
+
source = ctype(source).value # fix precision
|
| 430 |
+
return cls(source)
|
| 431 |
+
def __int__(self):
|
| 432 |
+
return int(self._value)
|
| 433 |
+
def __float__(self):
|
| 434 |
+
return self._value
|
| 435 |
+
|
| 436 |
+
_cast_to_integer = __int__
|
| 437 |
+
|
| 438 |
+
if kind == 'int' or kind == 'byte' or kind == 'bool':
|
| 439 |
+
@staticmethod
|
| 440 |
+
def _to_ctypes(x):
|
| 441 |
+
if not isinstance(x, (int, long)):
|
| 442 |
+
if isinstance(x, CTypesData):
|
| 443 |
+
x = int(x)
|
| 444 |
+
else:
|
| 445 |
+
raise TypeError("integer expected, got %s" %
|
| 446 |
+
type(x).__name__)
|
| 447 |
+
if ctype(x).value != x:
|
| 448 |
+
if not is_signed and x < 0:
|
| 449 |
+
raise OverflowError("%s: negative integer" % name)
|
| 450 |
+
else:
|
| 451 |
+
raise OverflowError("%s: integer out of bounds"
|
| 452 |
+
% name)
|
| 453 |
+
return x
|
| 454 |
+
|
| 455 |
+
if kind == 'char':
|
| 456 |
+
@staticmethod
|
| 457 |
+
def _to_ctypes(x):
|
| 458 |
+
if isinstance(x, bytes) and len(x) == 1:
|
| 459 |
+
return x
|
| 460 |
+
if isinstance(x, CTypesPrimitive): # <CData <char>>
|
| 461 |
+
return x._value
|
| 462 |
+
raise TypeError("character expected, got %s" %
|
| 463 |
+
type(x).__name__)
|
| 464 |
+
def __nonzero__(self):
|
| 465 |
+
return ord(self._value) != 0
|
| 466 |
+
else:
|
| 467 |
+
def __nonzero__(self):
|
| 468 |
+
return self._value != 0
|
| 469 |
+
__bool__ = __nonzero__
|
| 470 |
+
|
| 471 |
+
if kind == 'float':
|
| 472 |
+
@staticmethod
|
| 473 |
+
def _to_ctypes(x):
|
| 474 |
+
if not isinstance(x, (int, long, float, CTypesData)):
|
| 475 |
+
raise TypeError("float expected, got %s" %
|
| 476 |
+
type(x).__name__)
|
| 477 |
+
return ctype(x).value
|
| 478 |
+
|
| 479 |
+
@staticmethod
|
| 480 |
+
def _from_ctypes(value):
|
| 481 |
+
return getattr(value, 'value', value)
|
| 482 |
+
|
| 483 |
+
@staticmethod
|
| 484 |
+
def _initialize(blob, init):
|
| 485 |
+
blob.value = CTypesPrimitive._to_ctypes(init)
|
| 486 |
+
|
| 487 |
+
if kind == 'char':
|
| 488 |
+
def _to_string(self, maxlen):
|
| 489 |
+
return self._value
|
| 490 |
+
if kind == 'byte':
|
| 491 |
+
def _to_string(self, maxlen):
|
| 492 |
+
return chr(self._value & 0xff)
|
| 493 |
+
#
|
| 494 |
+
CTypesPrimitive._fix_class()
|
| 495 |
+
return CTypesPrimitive
|
| 496 |
+
|
| 497 |
+
def new_pointer_type(self, BItem):
|
| 498 |
+
getbtype = self.ffi._get_cached_btype
|
| 499 |
+
if BItem is getbtype(model.PrimitiveType('char')):
|
| 500 |
+
kind = 'charp'
|
| 501 |
+
elif BItem in (getbtype(model.PrimitiveType('signed char')),
|
| 502 |
+
getbtype(model.PrimitiveType('unsigned char'))):
|
| 503 |
+
kind = 'bytep'
|
| 504 |
+
elif BItem is getbtype(model.void_type):
|
| 505 |
+
kind = 'voidp'
|
| 506 |
+
else:
|
| 507 |
+
kind = 'generic'
|
| 508 |
+
#
|
| 509 |
+
class CTypesPtr(CTypesGenericPtr):
|
| 510 |
+
__slots__ = ['_own']
|
| 511 |
+
if kind == 'charp':
|
| 512 |
+
__slots__ += ['__as_strbuf']
|
| 513 |
+
_BItem = BItem
|
| 514 |
+
if hasattr(BItem, '_ctype'):
|
| 515 |
+
_ctype = ctypes.POINTER(BItem._ctype)
|
| 516 |
+
_bitem_size = ctypes.sizeof(BItem._ctype)
|
| 517 |
+
else:
|
| 518 |
+
_ctype = ctypes.c_void_p
|
| 519 |
+
if issubclass(BItem, CTypesGenericArray):
|
| 520 |
+
_reftypename = BItem._get_c_name('(* &)')
|
| 521 |
+
else:
|
| 522 |
+
_reftypename = BItem._get_c_name(' * &')
|
| 523 |
+
|
| 524 |
+
def __init__(self, init):
|
| 525 |
+
ctypeobj = BItem._create_ctype_obj(init)
|
| 526 |
+
if kind == 'charp':
|
| 527 |
+
self.__as_strbuf = ctypes.create_string_buffer(
|
| 528 |
+
ctypeobj.value + b'\x00')
|
| 529 |
+
self._as_ctype_ptr = ctypes.cast(
|
| 530 |
+
self.__as_strbuf, self._ctype)
|
| 531 |
+
else:
|
| 532 |
+
self._as_ctype_ptr = ctypes.pointer(ctypeobj)
|
| 533 |
+
self._address = ctypes.cast(self._as_ctype_ptr,
|
| 534 |
+
ctypes.c_void_p).value
|
| 535 |
+
self._own = True
|
| 536 |
+
|
| 537 |
+
def __add__(self, other):
|
| 538 |
+
if isinstance(other, (int, long)):
|
| 539 |
+
return self._new_pointer_at(self._address +
|
| 540 |
+
other * self._bitem_size)
|
| 541 |
+
else:
|
| 542 |
+
return NotImplemented
|
| 543 |
+
|
| 544 |
+
def __sub__(self, other):
|
| 545 |
+
if isinstance(other, (int, long)):
|
| 546 |
+
return self._new_pointer_at(self._address -
|
| 547 |
+
other * self._bitem_size)
|
| 548 |
+
elif type(self) is type(other):
|
| 549 |
+
return (self._address - other._address) // self._bitem_size
|
| 550 |
+
else:
|
| 551 |
+
return NotImplemented
|
| 552 |
+
|
| 553 |
+
def __getitem__(self, index):
|
| 554 |
+
if getattr(self, '_own', False) and index != 0:
|
| 555 |
+
raise IndexError
|
| 556 |
+
return BItem._from_ctypes(self._as_ctype_ptr[index])
|
| 557 |
+
|
| 558 |
+
def __setitem__(self, index, value):
|
| 559 |
+
self._as_ctype_ptr[index] = BItem._to_ctypes(value)
|
| 560 |
+
|
| 561 |
+
if kind == 'charp' or kind == 'voidp':
|
| 562 |
+
@classmethod
|
| 563 |
+
def _arg_to_ctypes(cls, *value):
|
| 564 |
+
if value and isinstance(value[0], bytes):
|
| 565 |
+
return ctypes.c_char_p(value[0])
|
| 566 |
+
else:
|
| 567 |
+
return super(CTypesPtr, cls)._arg_to_ctypes(*value)
|
| 568 |
+
|
| 569 |
+
if kind == 'charp' or kind == 'bytep':
|
| 570 |
+
def _to_string(self, maxlen):
|
| 571 |
+
if maxlen < 0:
|
| 572 |
+
maxlen = sys.maxsize
|
| 573 |
+
p = ctypes.cast(self._as_ctype_ptr,
|
| 574 |
+
ctypes.POINTER(ctypes.c_char))
|
| 575 |
+
n = 0
|
| 576 |
+
while n < maxlen and p[n] != b'\x00':
|
| 577 |
+
n += 1
|
| 578 |
+
return b''.join([p[i] for i in range(n)])
|
| 579 |
+
|
| 580 |
+
def _get_own_repr(self):
|
| 581 |
+
if getattr(self, '_own', False):
|
| 582 |
+
return 'owning %d bytes' % (
|
| 583 |
+
ctypes.sizeof(self._as_ctype_ptr.contents),)
|
| 584 |
+
return super(CTypesPtr, self)._get_own_repr()
|
| 585 |
+
#
|
| 586 |
+
if (BItem is self.ffi._get_cached_btype(model.void_type) or
|
| 587 |
+
BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))):
|
| 588 |
+
CTypesPtr._automatic_casts = True
|
| 589 |
+
#
|
| 590 |
+
CTypesPtr._fix_class()
|
| 591 |
+
return CTypesPtr
|
| 592 |
+
|
| 593 |
+
def new_array_type(self, CTypesPtr, length):
|
| 594 |
+
if length is None:
|
| 595 |
+
brackets = ' &[]'
|
| 596 |
+
else:
|
| 597 |
+
brackets = ' &[%d]' % length
|
| 598 |
+
BItem = CTypesPtr._BItem
|
| 599 |
+
getbtype = self.ffi._get_cached_btype
|
| 600 |
+
if BItem is getbtype(model.PrimitiveType('char')):
|
| 601 |
+
kind = 'char'
|
| 602 |
+
elif BItem in (getbtype(model.PrimitiveType('signed char')),
|
| 603 |
+
getbtype(model.PrimitiveType('unsigned char'))):
|
| 604 |
+
kind = 'byte'
|
| 605 |
+
else:
|
| 606 |
+
kind = 'generic'
|
| 607 |
+
#
|
| 608 |
+
class CTypesArray(CTypesGenericArray):
|
| 609 |
+
__slots__ = ['_blob', '_own']
|
| 610 |
+
if length is not None:
|
| 611 |
+
_ctype = BItem._ctype * length
|
| 612 |
+
else:
|
| 613 |
+
__slots__.append('_ctype')
|
| 614 |
+
_reftypename = BItem._get_c_name(brackets)
|
| 615 |
+
_declared_length = length
|
| 616 |
+
_CTPtr = CTypesPtr
|
| 617 |
+
|
| 618 |
+
def __init__(self, init):
|
| 619 |
+
if length is None:
|
| 620 |
+
if isinstance(init, (int, long)):
|
| 621 |
+
len1 = init
|
| 622 |
+
init = None
|
| 623 |
+
elif kind == 'char' and isinstance(init, bytes):
|
| 624 |
+
len1 = len(init) + 1 # extra null
|
| 625 |
+
else:
|
| 626 |
+
init = tuple(init)
|
| 627 |
+
len1 = len(init)
|
| 628 |
+
self._ctype = BItem._ctype * len1
|
| 629 |
+
self._blob = self._ctype()
|
| 630 |
+
self._own = True
|
| 631 |
+
if init is not None:
|
| 632 |
+
self._initialize(self._blob, init)
|
| 633 |
+
|
| 634 |
+
@staticmethod
|
| 635 |
+
def _initialize(blob, init):
|
| 636 |
+
if isinstance(init, bytes):
|
| 637 |
+
init = [init[i:i+1] for i in range(len(init))]
|
| 638 |
+
else:
|
| 639 |
+
if isinstance(init, CTypesGenericArray):
|
| 640 |
+
if (len(init) != len(blob) or
|
| 641 |
+
not isinstance(init, CTypesArray)):
|
| 642 |
+
raise TypeError("length/type mismatch: %s" % (init,))
|
| 643 |
+
init = tuple(init)
|
| 644 |
+
if len(init) > len(blob):
|
| 645 |
+
raise IndexError("too many initializers")
|
| 646 |
+
addr = ctypes.cast(blob, ctypes.c_void_p).value
|
| 647 |
+
PTR = ctypes.POINTER(BItem._ctype)
|
| 648 |
+
itemsize = ctypes.sizeof(BItem._ctype)
|
| 649 |
+
for i, value in enumerate(init):
|
| 650 |
+
p = ctypes.cast(addr + i * itemsize, PTR)
|
| 651 |
+
BItem._initialize(p.contents, value)
|
| 652 |
+
|
| 653 |
+
def __len__(self):
|
| 654 |
+
return len(self._blob)
|
| 655 |
+
|
| 656 |
+
def __getitem__(self, index):
|
| 657 |
+
if not (0 <= index < len(self._blob)):
|
| 658 |
+
raise IndexError
|
| 659 |
+
return BItem._from_ctypes(self._blob[index])
|
| 660 |
+
|
| 661 |
+
def __setitem__(self, index, value):
|
| 662 |
+
if not (0 <= index < len(self._blob)):
|
| 663 |
+
raise IndexError
|
| 664 |
+
self._blob[index] = BItem._to_ctypes(value)
|
| 665 |
+
|
| 666 |
+
if kind == 'char' or kind == 'byte':
|
| 667 |
+
def _to_string(self, maxlen):
|
| 668 |
+
if maxlen < 0:
|
| 669 |
+
maxlen = len(self._blob)
|
| 670 |
+
p = ctypes.cast(self._blob,
|
| 671 |
+
ctypes.POINTER(ctypes.c_char))
|
| 672 |
+
n = 0
|
| 673 |
+
while n < maxlen and p[n] != b'\x00':
|
| 674 |
+
n += 1
|
| 675 |
+
return b''.join([p[i] for i in range(n)])
|
| 676 |
+
|
| 677 |
+
def _get_own_repr(self):
|
| 678 |
+
if getattr(self, '_own', False):
|
| 679 |
+
return 'owning %d bytes' % (ctypes.sizeof(self._blob),)
|
| 680 |
+
return super(CTypesArray, self)._get_own_repr()
|
| 681 |
+
|
| 682 |
+
def _convert_to_address(self, BClass):
|
| 683 |
+
if BClass in (CTypesPtr, None) or BClass._automatic_casts:
|
| 684 |
+
return ctypes.addressof(self._blob)
|
| 685 |
+
else:
|
| 686 |
+
return CTypesData._convert_to_address(self, BClass)
|
| 687 |
+
|
| 688 |
+
@staticmethod
|
| 689 |
+
def _from_ctypes(ctypes_array):
|
| 690 |
+
self = CTypesArray.__new__(CTypesArray)
|
| 691 |
+
self._blob = ctypes_array
|
| 692 |
+
return self
|
| 693 |
+
|
| 694 |
+
@staticmethod
|
| 695 |
+
def _arg_to_ctypes(value):
|
| 696 |
+
return CTypesPtr._arg_to_ctypes(value)
|
| 697 |
+
|
| 698 |
+
def __add__(self, other):
|
| 699 |
+
if isinstance(other, (int, long)):
|
| 700 |
+
return CTypesPtr._new_pointer_at(
|
| 701 |
+
ctypes.addressof(self._blob) +
|
| 702 |
+
other * ctypes.sizeof(BItem._ctype))
|
| 703 |
+
else:
|
| 704 |
+
return NotImplemented
|
| 705 |
+
|
| 706 |
+
@classmethod
|
| 707 |
+
def _cast_from(cls, source):
|
| 708 |
+
raise NotImplementedError("casting to %r" % (
|
| 709 |
+
cls._get_c_name(),))
|
| 710 |
+
#
|
| 711 |
+
CTypesArray._fix_class()
|
| 712 |
+
return CTypesArray
|
| 713 |
+
|
| 714 |
+
def _new_struct_or_union(self, kind, name, base_ctypes_class):
|
| 715 |
+
#
|
| 716 |
+
class struct_or_union(base_ctypes_class):
|
| 717 |
+
pass
|
| 718 |
+
struct_or_union.__name__ = '%s_%s' % (kind, name)
|
| 719 |
+
kind1 = kind
|
| 720 |
+
#
|
| 721 |
+
class CTypesStructOrUnion(CTypesBaseStructOrUnion):
|
| 722 |
+
__slots__ = ['_blob']
|
| 723 |
+
_ctype = struct_or_union
|
| 724 |
+
_reftypename = '%s &' % (name,)
|
| 725 |
+
_kind = kind = kind1
|
| 726 |
+
#
|
| 727 |
+
CTypesStructOrUnion._fix_class()
|
| 728 |
+
return CTypesStructOrUnion
|
| 729 |
+
|
| 730 |
+
def new_struct_type(self, name):
|
| 731 |
+
return self._new_struct_or_union('struct', name, ctypes.Structure)
|
| 732 |
+
|
| 733 |
+
def new_union_type(self, name):
|
| 734 |
+
return self._new_struct_or_union('union', name, ctypes.Union)
|
| 735 |
+
|
| 736 |
+
def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp,
|
| 737 |
+
totalsize=-1, totalalignment=-1, sflags=0,
|
| 738 |
+
pack=0):
|
| 739 |
+
if totalsize >= 0 or totalalignment >= 0:
|
| 740 |
+
raise NotImplementedError("the ctypes backend of CFFI does not support "
|
| 741 |
+
"structures completed by verify(); please "
|
| 742 |
+
"compile and install the _cffi_backend module.")
|
| 743 |
+
struct_or_union = CTypesStructOrUnion._ctype
|
| 744 |
+
fnames = [fname for (fname, BField, bitsize) in fields]
|
| 745 |
+
btypes = [BField for (fname, BField, bitsize) in fields]
|
| 746 |
+
bitfields = [bitsize for (fname, BField, bitsize) in fields]
|
| 747 |
+
#
|
| 748 |
+
bfield_types = {}
|
| 749 |
+
cfields = []
|
| 750 |
+
for (fname, BField, bitsize) in fields:
|
| 751 |
+
if bitsize < 0:
|
| 752 |
+
cfields.append((fname, BField._ctype))
|
| 753 |
+
bfield_types[fname] = BField
|
| 754 |
+
else:
|
| 755 |
+
cfields.append((fname, BField._ctype, bitsize))
|
| 756 |
+
bfield_types[fname] = Ellipsis
|
| 757 |
+
if sflags & 8:
|
| 758 |
+
struct_or_union._pack_ = 1
|
| 759 |
+
elif pack:
|
| 760 |
+
struct_or_union._pack_ = pack
|
| 761 |
+
struct_or_union._fields_ = cfields
|
| 762 |
+
CTypesStructOrUnion._bfield_types = bfield_types
|
| 763 |
+
#
|
| 764 |
+
@staticmethod
|
| 765 |
+
def _create_ctype_obj(init):
|
| 766 |
+
result = struct_or_union()
|
| 767 |
+
if init is not None:
|
| 768 |
+
initialize(result, init)
|
| 769 |
+
return result
|
| 770 |
+
CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj
|
| 771 |
+
#
|
| 772 |
+
def initialize(blob, init):
|
| 773 |
+
if is_union:
|
| 774 |
+
if len(init) > 1:
|
| 775 |
+
raise ValueError("union initializer: %d items given, but "
|
| 776 |
+
"only one supported (use a dict if needed)"
|
| 777 |
+
% (len(init),))
|
| 778 |
+
if not isinstance(init, dict):
|
| 779 |
+
if isinstance(init, (bytes, unicode)):
|
| 780 |
+
raise TypeError("union initializer: got a str")
|
| 781 |
+
init = tuple(init)
|
| 782 |
+
if len(init) > len(fnames):
|
| 783 |
+
raise ValueError("too many values for %s initializer" %
|
| 784 |
+
CTypesStructOrUnion._get_c_name())
|
| 785 |
+
init = dict(zip(fnames, init))
|
| 786 |
+
addr = ctypes.addressof(blob)
|
| 787 |
+
for fname, value in init.items():
|
| 788 |
+
BField, bitsize = name2fieldtype[fname]
|
| 789 |
+
assert bitsize < 0, \
|
| 790 |
+
"not implemented: initializer with bit fields"
|
| 791 |
+
offset = CTypesStructOrUnion._offsetof(fname)
|
| 792 |
+
PTR = ctypes.POINTER(BField._ctype)
|
| 793 |
+
p = ctypes.cast(addr + offset, PTR)
|
| 794 |
+
BField._initialize(p.contents, value)
|
| 795 |
+
is_union = CTypesStructOrUnion._kind == 'union'
|
| 796 |
+
name2fieldtype = dict(zip(fnames, zip(btypes, bitfields)))
|
| 797 |
+
#
|
| 798 |
+
for fname, BField, bitsize in fields:
|
| 799 |
+
if fname == '':
|
| 800 |
+
raise NotImplementedError("nested anonymous structs/unions")
|
| 801 |
+
if hasattr(CTypesStructOrUnion, fname):
|
| 802 |
+
raise ValueError("the field name %r conflicts in "
|
| 803 |
+
"the ctypes backend" % fname)
|
| 804 |
+
if bitsize < 0:
|
| 805 |
+
def getter(self, fname=fname, BField=BField,
|
| 806 |
+
offset=CTypesStructOrUnion._offsetof(fname),
|
| 807 |
+
PTR=ctypes.POINTER(BField._ctype)):
|
| 808 |
+
addr = ctypes.addressof(self._blob)
|
| 809 |
+
p = ctypes.cast(addr + offset, PTR)
|
| 810 |
+
return BField._from_ctypes(p.contents)
|
| 811 |
+
def setter(self, value, fname=fname, BField=BField):
|
| 812 |
+
setattr(self._blob, fname, BField._to_ctypes(value))
|
| 813 |
+
#
|
| 814 |
+
if issubclass(BField, CTypesGenericArray):
|
| 815 |
+
setter = None
|
| 816 |
+
if BField._declared_length == 0:
|
| 817 |
+
def getter(self, fname=fname, BFieldPtr=BField._CTPtr,
|
| 818 |
+
offset=CTypesStructOrUnion._offsetof(fname),
|
| 819 |
+
PTR=ctypes.POINTER(BField._ctype)):
|
| 820 |
+
addr = ctypes.addressof(self._blob)
|
| 821 |
+
p = ctypes.cast(addr + offset, PTR)
|
| 822 |
+
return BFieldPtr._from_ctypes(p)
|
| 823 |
+
#
|
| 824 |
+
else:
|
| 825 |
+
def getter(self, fname=fname, BField=BField):
|
| 826 |
+
return BField._from_ctypes(getattr(self._blob, fname))
|
| 827 |
+
def setter(self, value, fname=fname, BField=BField):
|
| 828 |
+
# xxx obscure workaround
|
| 829 |
+
value = BField._to_ctypes(value)
|
| 830 |
+
oldvalue = getattr(self._blob, fname)
|
| 831 |
+
setattr(self._blob, fname, value)
|
| 832 |
+
if value != getattr(self._blob, fname):
|
| 833 |
+
setattr(self._blob, fname, oldvalue)
|
| 834 |
+
raise OverflowError("value too large for bitfield")
|
| 835 |
+
setattr(CTypesStructOrUnion, fname, property(getter, setter))
|
| 836 |
+
#
|
| 837 |
+
CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp))
|
| 838 |
+
for fname in fnames:
|
| 839 |
+
if hasattr(CTypesPtr, fname):
|
| 840 |
+
raise ValueError("the field name %r conflicts in "
|
| 841 |
+
"the ctypes backend" % fname)
|
| 842 |
+
def getter(self, fname=fname):
|
| 843 |
+
return getattr(self[0], fname)
|
| 844 |
+
def setter(self, value, fname=fname):
|
| 845 |
+
setattr(self[0], fname, value)
|
| 846 |
+
setattr(CTypesPtr, fname, property(getter, setter))
|
| 847 |
+
|
| 848 |
+
def new_function_type(self, BArgs, BResult, has_varargs):
|
| 849 |
+
nameargs = [BArg._get_c_name() for BArg in BArgs]
|
| 850 |
+
if has_varargs:
|
| 851 |
+
nameargs.append('...')
|
| 852 |
+
nameargs = ', '.join(nameargs)
|
| 853 |
+
#
|
| 854 |
+
class CTypesFunctionPtr(CTypesGenericPtr):
|
| 855 |
+
__slots__ = ['_own_callback', '_name']
|
| 856 |
+
_ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None),
|
| 857 |
+
*[BArg._ctype for BArg in BArgs],
|
| 858 |
+
use_errno=True)
|
| 859 |
+
_reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,))
|
| 860 |
+
|
| 861 |
+
def __init__(self, init, error=None):
|
| 862 |
+
# create a callback to the Python callable init()
|
| 863 |
+
import traceback
|
| 864 |
+
assert not has_varargs, "varargs not supported for callbacks"
|
| 865 |
+
if getattr(BResult, '_ctype', None) is not None:
|
| 866 |
+
error = BResult._from_ctypes(
|
| 867 |
+
BResult._create_ctype_obj(error))
|
| 868 |
+
else:
|
| 869 |
+
error = None
|
| 870 |
+
def callback(*args):
|
| 871 |
+
args2 = []
|
| 872 |
+
for arg, BArg in zip(args, BArgs):
|
| 873 |
+
args2.append(BArg._from_ctypes(arg))
|
| 874 |
+
try:
|
| 875 |
+
res2 = init(*args2)
|
| 876 |
+
res2 = BResult._to_ctypes(res2)
|
| 877 |
+
except:
|
| 878 |
+
traceback.print_exc()
|
| 879 |
+
res2 = error
|
| 880 |
+
if issubclass(BResult, CTypesGenericPtr):
|
| 881 |
+
if res2:
|
| 882 |
+
res2 = ctypes.cast(res2, ctypes.c_void_p).value
|
| 883 |
+
# .value: http://bugs.python.org/issue1574593
|
| 884 |
+
else:
|
| 885 |
+
res2 = None
|
| 886 |
+
#print repr(res2)
|
| 887 |
+
return res2
|
| 888 |
+
if issubclass(BResult, CTypesGenericPtr):
|
| 889 |
+
# The only pointers callbacks can return are void*s:
|
| 890 |
+
# http://bugs.python.org/issue5710
|
| 891 |
+
callback_ctype = ctypes.CFUNCTYPE(
|
| 892 |
+
ctypes.c_void_p,
|
| 893 |
+
*[BArg._ctype for BArg in BArgs],
|
| 894 |
+
use_errno=True)
|
| 895 |
+
else:
|
| 896 |
+
callback_ctype = CTypesFunctionPtr._ctype
|
| 897 |
+
self._as_ctype_ptr = callback_ctype(callback)
|
| 898 |
+
self._address = ctypes.cast(self._as_ctype_ptr,
|
| 899 |
+
ctypes.c_void_p).value
|
| 900 |
+
self._own_callback = init
|
| 901 |
+
|
| 902 |
+
@staticmethod
|
| 903 |
+
def _initialize(ctypes_ptr, value):
|
| 904 |
+
if value:
|
| 905 |
+
raise NotImplementedError("ctypes backend: not supported: "
|
| 906 |
+
"initializers for function pointers")
|
| 907 |
+
|
| 908 |
+
def __repr__(self):
|
| 909 |
+
c_name = getattr(self, '_name', None)
|
| 910 |
+
if c_name:
|
| 911 |
+
i = self._reftypename.index('(* &)')
|
| 912 |
+
if self._reftypename[i-1] not in ' )*':
|
| 913 |
+
c_name = ' ' + c_name
|
| 914 |
+
c_name = self._reftypename.replace('(* &)', c_name)
|
| 915 |
+
return CTypesData.__repr__(self, c_name)
|
| 916 |
+
|
| 917 |
+
def _get_own_repr(self):
|
| 918 |
+
if getattr(self, '_own_callback', None) is not None:
|
| 919 |
+
return 'calling %r' % (self._own_callback,)
|
| 920 |
+
return super(CTypesFunctionPtr, self)._get_own_repr()
|
| 921 |
+
|
| 922 |
+
def __call__(self, *args):
|
| 923 |
+
if has_varargs:
|
| 924 |
+
assert len(args) >= len(BArgs)
|
| 925 |
+
extraargs = args[len(BArgs):]
|
| 926 |
+
args = args[:len(BArgs)]
|
| 927 |
+
else:
|
| 928 |
+
assert len(args) == len(BArgs)
|
| 929 |
+
ctypes_args = []
|
| 930 |
+
for arg, BArg in zip(args, BArgs):
|
| 931 |
+
ctypes_args.append(BArg._arg_to_ctypes(arg))
|
| 932 |
+
if has_varargs:
|
| 933 |
+
for i, arg in enumerate(extraargs):
|
| 934 |
+
if arg is None:
|
| 935 |
+
ctypes_args.append(ctypes.c_void_p(0)) # NULL
|
| 936 |
+
continue
|
| 937 |
+
if not isinstance(arg, CTypesData):
|
| 938 |
+
raise TypeError(
|
| 939 |
+
"argument %d passed in the variadic part "
|
| 940 |
+
"needs to be a cdata object (got %s)" %
|
| 941 |
+
(1 + len(BArgs) + i, type(arg).__name__))
|
| 942 |
+
ctypes_args.append(arg._arg_to_ctypes(arg))
|
| 943 |
+
result = self._as_ctype_ptr(*ctypes_args)
|
| 944 |
+
return BResult._from_ctypes(result)
|
| 945 |
+
#
|
| 946 |
+
CTypesFunctionPtr._fix_class()
|
| 947 |
+
return CTypesFunctionPtr
|
| 948 |
+
|
| 949 |
+
def new_enum_type(self, name, enumerators, enumvalues, CTypesInt):
|
| 950 |
+
assert isinstance(name, str)
|
| 951 |
+
reverse_mapping = dict(zip(reversed(enumvalues),
|
| 952 |
+
reversed(enumerators)))
|
| 953 |
+
#
|
| 954 |
+
class CTypesEnum(CTypesInt):
|
| 955 |
+
__slots__ = []
|
| 956 |
+
_reftypename = '%s &' % name
|
| 957 |
+
|
| 958 |
+
def _get_own_repr(self):
|
| 959 |
+
value = self._value
|
| 960 |
+
try:
|
| 961 |
+
return '%d: %s' % (value, reverse_mapping[value])
|
| 962 |
+
except KeyError:
|
| 963 |
+
return str(value)
|
| 964 |
+
|
| 965 |
+
def _to_string(self, maxlen):
|
| 966 |
+
value = self._value
|
| 967 |
+
try:
|
| 968 |
+
return reverse_mapping[value]
|
| 969 |
+
except KeyError:
|
| 970 |
+
return str(value)
|
| 971 |
+
#
|
| 972 |
+
CTypesEnum._fix_class()
|
| 973 |
+
return CTypesEnum
|
| 974 |
+
|
| 975 |
+
def get_errno(self):
|
| 976 |
+
return ctypes.get_errno()
|
| 977 |
+
|
| 978 |
+
def set_errno(self, value):
|
| 979 |
+
ctypes.set_errno(value)
|
| 980 |
+
|
| 981 |
+
def string(self, b, maxlen=-1):
|
| 982 |
+
return b._to_string(maxlen)
|
| 983 |
+
|
| 984 |
+
def buffer(self, bptr, size=-1):
|
| 985 |
+
raise NotImplementedError("buffer() with ctypes backend")
|
| 986 |
+
|
| 987 |
+
def sizeof(self, cdata_or_BType):
|
| 988 |
+
if isinstance(cdata_or_BType, CTypesData):
|
| 989 |
+
return cdata_or_BType._get_size_of_instance()
|
| 990 |
+
else:
|
| 991 |
+
assert issubclass(cdata_or_BType, CTypesData)
|
| 992 |
+
return cdata_or_BType._get_size()
|
| 993 |
+
|
| 994 |
+
def alignof(self, BType):
|
| 995 |
+
assert issubclass(BType, CTypesData)
|
| 996 |
+
return BType._alignment()
|
| 997 |
+
|
| 998 |
+
def newp(self, BType, source):
|
| 999 |
+
if not issubclass(BType, CTypesData):
|
| 1000 |
+
raise TypeError
|
| 1001 |
+
return BType._newp(source)
|
| 1002 |
+
|
| 1003 |
+
def cast(self, BType, source):
|
| 1004 |
+
return BType._cast_from(source)
|
| 1005 |
+
|
| 1006 |
+
def callback(self, BType, source, error, onerror):
|
| 1007 |
+
assert onerror is None # XXX not implemented
|
| 1008 |
+
return BType(source, error)
|
| 1009 |
+
|
| 1010 |
+
_weakref_cache_ref = None
|
| 1011 |
+
|
| 1012 |
+
def gcp(self, cdata, destructor, size=0):
|
| 1013 |
+
if self._weakref_cache_ref is None:
|
| 1014 |
+
import weakref
|
| 1015 |
+
class MyRef(weakref.ref):
|
| 1016 |
+
def __eq__(self, other):
|
| 1017 |
+
myref = self()
|
| 1018 |
+
return self is other or (
|
| 1019 |
+
myref is not None and myref is other())
|
| 1020 |
+
def __ne__(self, other):
|
| 1021 |
+
return not (self == other)
|
| 1022 |
+
def __hash__(self):
|
| 1023 |
+
try:
|
| 1024 |
+
return self._hash
|
| 1025 |
+
except AttributeError:
|
| 1026 |
+
self._hash = hash(self())
|
| 1027 |
+
return self._hash
|
| 1028 |
+
self._weakref_cache_ref = {}, MyRef
|
| 1029 |
+
weak_cache, MyRef = self._weakref_cache_ref
|
| 1030 |
+
|
| 1031 |
+
if destructor is None:
|
| 1032 |
+
try:
|
| 1033 |
+
del weak_cache[MyRef(cdata)]
|
| 1034 |
+
except KeyError:
|
| 1035 |
+
raise TypeError("Can remove destructor only on a object "
|
| 1036 |
+
"previously returned by ffi.gc()")
|
| 1037 |
+
return None
|
| 1038 |
+
|
| 1039 |
+
def remove(k):
|
| 1040 |
+
cdata, destructor = weak_cache.pop(k, (None, None))
|
| 1041 |
+
if destructor is not None:
|
| 1042 |
+
destructor(cdata)
|
| 1043 |
+
|
| 1044 |
+
new_cdata = self.cast(self.typeof(cdata), cdata)
|
| 1045 |
+
assert new_cdata is not cdata
|
| 1046 |
+
weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
|
| 1047 |
+
return new_cdata
|
| 1048 |
+
|
| 1049 |
+
typeof = type
|
| 1050 |
+
|
| 1051 |
+
def getcname(self, BType, replace_with):
|
| 1052 |
+
return BType._get_c_name(replace_with)
|
| 1053 |
+
|
| 1054 |
+
def typeoffsetof(self, BType, fieldname, num=0):
|
| 1055 |
+
if isinstance(fieldname, str):
|
| 1056 |
+
if num == 0 and issubclass(BType, CTypesGenericPtr):
|
| 1057 |
+
BType = BType._BItem
|
| 1058 |
+
if not issubclass(BType, CTypesBaseStructOrUnion):
|
| 1059 |
+
raise TypeError("expected a struct or union ctype")
|
| 1060 |
+
BField = BType._bfield_types[fieldname]
|
| 1061 |
+
if BField is Ellipsis:
|
| 1062 |
+
raise TypeError("not supported for bitfields")
|
| 1063 |
+
return (BField, BType._offsetof(fieldname))
|
| 1064 |
+
elif isinstance(fieldname, (int, long)):
|
| 1065 |
+
if issubclass(BType, CTypesGenericArray):
|
| 1066 |
+
BType = BType._CTPtr
|
| 1067 |
+
if not issubclass(BType, CTypesGenericPtr):
|
| 1068 |
+
raise TypeError("expected an array or ptr ctype")
|
| 1069 |
+
BItem = BType._BItem
|
| 1070 |
+
offset = BItem._get_size() * fieldname
|
| 1071 |
+
if offset > sys.maxsize:
|
| 1072 |
+
raise OverflowError
|
| 1073 |
+
return (BItem, offset)
|
| 1074 |
+
else:
|
| 1075 |
+
raise TypeError(type(fieldname))
|
| 1076 |
+
|
| 1077 |
+
def rawaddressof(self, BTypePtr, cdata, offset=None):
|
| 1078 |
+
if isinstance(cdata, CTypesBaseStructOrUnion):
|
| 1079 |
+
ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata))
|
| 1080 |
+
elif isinstance(cdata, CTypesGenericPtr):
|
| 1081 |
+
if offset is None or not issubclass(type(cdata)._BItem,
|
| 1082 |
+
CTypesBaseStructOrUnion):
|
| 1083 |
+
raise TypeError("unexpected cdata type")
|
| 1084 |
+
ptr = type(cdata)._to_ctypes(cdata)
|
| 1085 |
+
elif isinstance(cdata, CTypesGenericArray):
|
| 1086 |
+
ptr = type(cdata)._to_ctypes(cdata)
|
| 1087 |
+
else:
|
| 1088 |
+
raise TypeError("expected a <cdata 'struct-or-union'>")
|
| 1089 |
+
if offset:
|
| 1090 |
+
ptr = ctypes.cast(
|
| 1091 |
+
ctypes.c_void_p(
|
| 1092 |
+
ctypes.cast(ptr, ctypes.c_void_p).value + offset),
|
| 1093 |
+
type(ptr))
|
| 1094 |
+
return BTypePtr._from_ctypes(ptr)
|
| 1095 |
+
|
| 1096 |
+
|
| 1097 |
+
class CTypesLibrary(object):
|
| 1098 |
+
|
| 1099 |
+
def __init__(self, backend, cdll):
|
| 1100 |
+
self.backend = backend
|
| 1101 |
+
self.cdll = cdll
|
| 1102 |
+
|
| 1103 |
+
def load_function(self, BType, name):
|
| 1104 |
+
c_func = getattr(self.cdll, name)
|
| 1105 |
+
funcobj = BType._from_ctypes(c_func)
|
| 1106 |
+
funcobj._name = name
|
| 1107 |
+
return funcobj
|
| 1108 |
+
|
| 1109 |
+
def read_variable(self, BType, name):
|
| 1110 |
+
try:
|
| 1111 |
+
ctypes_obj = BType._ctype.in_dll(self.cdll, name)
|
| 1112 |
+
except AttributeError as e:
|
| 1113 |
+
raise NotImplementedError(e)
|
| 1114 |
+
return BType._from_ctypes(ctypes_obj)
|
| 1115 |
+
|
| 1116 |
+
def write_variable(self, BType, name, value):
|
| 1117 |
+
new_ctypes_obj = BType._to_ctypes(value)
|
| 1118 |
+
ctypes_obj = BType._ctype.in_dll(self.cdll, name)
|
| 1119 |
+
ctypes.memmove(ctypes.addressof(ctypes_obj),
|
| 1120 |
+
ctypes.addressof(new_ctypes_obj),
|
| 1121 |
+
ctypes.sizeof(BType._ctype))
|
.venv/lib/python3.11/site-packages/cffi/cffi_opcode.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .error import VerificationError
|
| 2 |
+
|
| 3 |
+
class CffiOp(object):
|
| 4 |
+
def __init__(self, op, arg):
|
| 5 |
+
self.op = op
|
| 6 |
+
self.arg = arg
|
| 7 |
+
|
| 8 |
+
def as_c_expr(self):
|
| 9 |
+
if self.op is None:
|
| 10 |
+
assert isinstance(self.arg, str)
|
| 11 |
+
return '(_cffi_opcode_t)(%s)' % (self.arg,)
|
| 12 |
+
classname = CLASS_NAME[self.op]
|
| 13 |
+
return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
|
| 14 |
+
|
| 15 |
+
def as_python_bytes(self):
|
| 16 |
+
if self.op is None and self.arg.isdigit():
|
| 17 |
+
value = int(self.arg) # non-negative: '-' not in self.arg
|
| 18 |
+
if value >= 2**31:
|
| 19 |
+
raise OverflowError("cannot emit %r: limited to 2**31-1"
|
| 20 |
+
% (self.arg,))
|
| 21 |
+
return format_four_bytes(value)
|
| 22 |
+
if isinstance(self.arg, str):
|
| 23 |
+
raise VerificationError("cannot emit to Python: %r" % (self.arg,))
|
| 24 |
+
return format_four_bytes((self.arg << 8) | self.op)
|
| 25 |
+
|
| 26 |
+
def __str__(self):
|
| 27 |
+
classname = CLASS_NAME.get(self.op, self.op)
|
| 28 |
+
return '(%s %s)' % (classname, self.arg)
|
| 29 |
+
|
| 30 |
+
def format_four_bytes(num):
|
| 31 |
+
return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
|
| 32 |
+
(num >> 24) & 0xFF,
|
| 33 |
+
(num >> 16) & 0xFF,
|
| 34 |
+
(num >> 8) & 0xFF,
|
| 35 |
+
(num ) & 0xFF)
|
| 36 |
+
|
| 37 |
+
OP_PRIMITIVE = 1
|
| 38 |
+
OP_POINTER = 3
|
| 39 |
+
OP_ARRAY = 5
|
| 40 |
+
OP_OPEN_ARRAY = 7
|
| 41 |
+
OP_STRUCT_UNION = 9
|
| 42 |
+
OP_ENUM = 11
|
| 43 |
+
OP_FUNCTION = 13
|
| 44 |
+
OP_FUNCTION_END = 15
|
| 45 |
+
OP_NOOP = 17
|
| 46 |
+
OP_BITFIELD = 19
|
| 47 |
+
OP_TYPENAME = 21
|
| 48 |
+
OP_CPYTHON_BLTN_V = 23 # varargs
|
| 49 |
+
OP_CPYTHON_BLTN_N = 25 # noargs
|
| 50 |
+
OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
|
| 51 |
+
OP_CONSTANT = 29
|
| 52 |
+
OP_CONSTANT_INT = 31
|
| 53 |
+
OP_GLOBAL_VAR = 33
|
| 54 |
+
OP_DLOPEN_FUNC = 35
|
| 55 |
+
OP_DLOPEN_CONST = 37
|
| 56 |
+
OP_GLOBAL_VAR_F = 39
|
| 57 |
+
OP_EXTERN_PYTHON = 41
|
| 58 |
+
|
| 59 |
+
PRIM_VOID = 0
|
| 60 |
+
PRIM_BOOL = 1
|
| 61 |
+
PRIM_CHAR = 2
|
| 62 |
+
PRIM_SCHAR = 3
|
| 63 |
+
PRIM_UCHAR = 4
|
| 64 |
+
PRIM_SHORT = 5
|
| 65 |
+
PRIM_USHORT = 6
|
| 66 |
+
PRIM_INT = 7
|
| 67 |
+
PRIM_UINT = 8
|
| 68 |
+
PRIM_LONG = 9
|
| 69 |
+
PRIM_ULONG = 10
|
| 70 |
+
PRIM_LONGLONG = 11
|
| 71 |
+
PRIM_ULONGLONG = 12
|
| 72 |
+
PRIM_FLOAT = 13
|
| 73 |
+
PRIM_DOUBLE = 14
|
| 74 |
+
PRIM_LONGDOUBLE = 15
|
| 75 |
+
|
| 76 |
+
PRIM_WCHAR = 16
|
| 77 |
+
PRIM_INT8 = 17
|
| 78 |
+
PRIM_UINT8 = 18
|
| 79 |
+
PRIM_INT16 = 19
|
| 80 |
+
PRIM_UINT16 = 20
|
| 81 |
+
PRIM_INT32 = 21
|
| 82 |
+
PRIM_UINT32 = 22
|
| 83 |
+
PRIM_INT64 = 23
|
| 84 |
+
PRIM_UINT64 = 24
|
| 85 |
+
PRIM_INTPTR = 25
|
| 86 |
+
PRIM_UINTPTR = 26
|
| 87 |
+
PRIM_PTRDIFF = 27
|
| 88 |
+
PRIM_SIZE = 28
|
| 89 |
+
PRIM_SSIZE = 29
|
| 90 |
+
PRIM_INT_LEAST8 = 30
|
| 91 |
+
PRIM_UINT_LEAST8 = 31
|
| 92 |
+
PRIM_INT_LEAST16 = 32
|
| 93 |
+
PRIM_UINT_LEAST16 = 33
|
| 94 |
+
PRIM_INT_LEAST32 = 34
|
| 95 |
+
PRIM_UINT_LEAST32 = 35
|
| 96 |
+
PRIM_INT_LEAST64 = 36
|
| 97 |
+
PRIM_UINT_LEAST64 = 37
|
| 98 |
+
PRIM_INT_FAST8 = 38
|
| 99 |
+
PRIM_UINT_FAST8 = 39
|
| 100 |
+
PRIM_INT_FAST16 = 40
|
| 101 |
+
PRIM_UINT_FAST16 = 41
|
| 102 |
+
PRIM_INT_FAST32 = 42
|
| 103 |
+
PRIM_UINT_FAST32 = 43
|
| 104 |
+
PRIM_INT_FAST64 = 44
|
| 105 |
+
PRIM_UINT_FAST64 = 45
|
| 106 |
+
PRIM_INTMAX = 46
|
| 107 |
+
PRIM_UINTMAX = 47
|
| 108 |
+
PRIM_FLOATCOMPLEX = 48
|
| 109 |
+
PRIM_DOUBLECOMPLEX = 49
|
| 110 |
+
PRIM_CHAR16 = 50
|
| 111 |
+
PRIM_CHAR32 = 51
|
| 112 |
+
|
| 113 |
+
_NUM_PRIM = 52
|
| 114 |
+
_UNKNOWN_PRIM = -1
|
| 115 |
+
_UNKNOWN_FLOAT_PRIM = -2
|
| 116 |
+
_UNKNOWN_LONG_DOUBLE = -3
|
| 117 |
+
|
| 118 |
+
_IO_FILE_STRUCT = -1
|
| 119 |
+
|
| 120 |
+
PRIMITIVE_TO_INDEX = {
|
| 121 |
+
'char': PRIM_CHAR,
|
| 122 |
+
'short': PRIM_SHORT,
|
| 123 |
+
'int': PRIM_INT,
|
| 124 |
+
'long': PRIM_LONG,
|
| 125 |
+
'long long': PRIM_LONGLONG,
|
| 126 |
+
'signed char': PRIM_SCHAR,
|
| 127 |
+
'unsigned char': PRIM_UCHAR,
|
| 128 |
+
'unsigned short': PRIM_USHORT,
|
| 129 |
+
'unsigned int': PRIM_UINT,
|
| 130 |
+
'unsigned long': PRIM_ULONG,
|
| 131 |
+
'unsigned long long': PRIM_ULONGLONG,
|
| 132 |
+
'float': PRIM_FLOAT,
|
| 133 |
+
'double': PRIM_DOUBLE,
|
| 134 |
+
'long double': PRIM_LONGDOUBLE,
|
| 135 |
+
'_cffi_float_complex_t': PRIM_FLOATCOMPLEX,
|
| 136 |
+
'_cffi_double_complex_t': PRIM_DOUBLECOMPLEX,
|
| 137 |
+
'_Bool': PRIM_BOOL,
|
| 138 |
+
'wchar_t': PRIM_WCHAR,
|
| 139 |
+
'char16_t': PRIM_CHAR16,
|
| 140 |
+
'char32_t': PRIM_CHAR32,
|
| 141 |
+
'int8_t': PRIM_INT8,
|
| 142 |
+
'uint8_t': PRIM_UINT8,
|
| 143 |
+
'int16_t': PRIM_INT16,
|
| 144 |
+
'uint16_t': PRIM_UINT16,
|
| 145 |
+
'int32_t': PRIM_INT32,
|
| 146 |
+
'uint32_t': PRIM_UINT32,
|
| 147 |
+
'int64_t': PRIM_INT64,
|
| 148 |
+
'uint64_t': PRIM_UINT64,
|
| 149 |
+
'intptr_t': PRIM_INTPTR,
|
| 150 |
+
'uintptr_t': PRIM_UINTPTR,
|
| 151 |
+
'ptrdiff_t': PRIM_PTRDIFF,
|
| 152 |
+
'size_t': PRIM_SIZE,
|
| 153 |
+
'ssize_t': PRIM_SSIZE,
|
| 154 |
+
'int_least8_t': PRIM_INT_LEAST8,
|
| 155 |
+
'uint_least8_t': PRIM_UINT_LEAST8,
|
| 156 |
+
'int_least16_t': PRIM_INT_LEAST16,
|
| 157 |
+
'uint_least16_t': PRIM_UINT_LEAST16,
|
| 158 |
+
'int_least32_t': PRIM_INT_LEAST32,
|
| 159 |
+
'uint_least32_t': PRIM_UINT_LEAST32,
|
| 160 |
+
'int_least64_t': PRIM_INT_LEAST64,
|
| 161 |
+
'uint_least64_t': PRIM_UINT_LEAST64,
|
| 162 |
+
'int_fast8_t': PRIM_INT_FAST8,
|
| 163 |
+
'uint_fast8_t': PRIM_UINT_FAST8,
|
| 164 |
+
'int_fast16_t': PRIM_INT_FAST16,
|
| 165 |
+
'uint_fast16_t': PRIM_UINT_FAST16,
|
| 166 |
+
'int_fast32_t': PRIM_INT_FAST32,
|
| 167 |
+
'uint_fast32_t': PRIM_UINT_FAST32,
|
| 168 |
+
'int_fast64_t': PRIM_INT_FAST64,
|
| 169 |
+
'uint_fast64_t': PRIM_UINT_FAST64,
|
| 170 |
+
'intmax_t': PRIM_INTMAX,
|
| 171 |
+
'uintmax_t': PRIM_UINTMAX,
|
| 172 |
+
}
|
| 173 |
+
|
| 174 |
+
F_UNION = 0x01
|
| 175 |
+
F_CHECK_FIELDS = 0x02
|
| 176 |
+
F_PACKED = 0x04
|
| 177 |
+
F_EXTERNAL = 0x08
|
| 178 |
+
F_OPAQUE = 0x10
|
| 179 |
+
|
| 180 |
+
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
|
| 181 |
+
for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
|
| 182 |
+
'F_EXTERNAL', 'F_OPAQUE']])
|
| 183 |
+
|
| 184 |
+
CLASS_NAME = {}
|
| 185 |
+
for _name, _value in list(globals().items()):
|
| 186 |
+
if _name.startswith('OP_') and isinstance(_value, int):
|
| 187 |
+
CLASS_NAME[_value] = _name[3:]
|
.venv/lib/python3.11/site-packages/cffi/commontypes.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from . import model
|
| 3 |
+
from .error import FFIError
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
COMMON_TYPES = {}
|
| 7 |
+
|
| 8 |
+
try:
|
| 9 |
+
# fetch "bool" and all simple Windows types
|
| 10 |
+
from _cffi_backend import _get_common_types
|
| 11 |
+
_get_common_types(COMMON_TYPES)
|
| 12 |
+
except ImportError:
|
| 13 |
+
pass
|
| 14 |
+
|
| 15 |
+
COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
|
| 16 |
+
COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
|
| 17 |
+
COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t'
|
| 18 |
+
COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t'
|
| 19 |
+
|
| 20 |
+
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
| 21 |
+
if _type.endswith('_t'):
|
| 22 |
+
COMMON_TYPES[_type] = _type
|
| 23 |
+
del _type
|
| 24 |
+
|
| 25 |
+
_CACHE = {}
|
| 26 |
+
|
| 27 |
+
def resolve_common_type(parser, commontype):
|
| 28 |
+
try:
|
| 29 |
+
return _CACHE[commontype]
|
| 30 |
+
except KeyError:
|
| 31 |
+
cdecl = COMMON_TYPES.get(commontype, commontype)
|
| 32 |
+
if not isinstance(cdecl, str):
|
| 33 |
+
result, quals = cdecl, 0 # cdecl is already a BaseType
|
| 34 |
+
elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
| 35 |
+
result, quals = model.PrimitiveType(cdecl), 0
|
| 36 |
+
elif cdecl == 'set-unicode-needed':
|
| 37 |
+
raise FFIError("The Windows type %r is only available after "
|
| 38 |
+
"you call ffi.set_unicode()" % (commontype,))
|
| 39 |
+
else:
|
| 40 |
+
if commontype == cdecl:
|
| 41 |
+
raise FFIError(
|
| 42 |
+
"Unsupported type: %r. Please look at "
|
| 43 |
+
"http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
|
| 44 |
+
"and file an issue if you think this type should really "
|
| 45 |
+
"be supported." % (commontype,))
|
| 46 |
+
result, quals = parser.parse_type_and_quals(cdecl) # recursive
|
| 47 |
+
|
| 48 |
+
assert isinstance(result, model.BaseTypeByIdentity)
|
| 49 |
+
_CACHE[commontype] = result, quals
|
| 50 |
+
return result, quals
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# ____________________________________________________________
|
| 54 |
+
# extra types for Windows (most of them are in commontypes.c)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def win_common_types():
|
| 58 |
+
return {
|
| 59 |
+
"UNICODE_STRING": model.StructType(
|
| 60 |
+
"_UNICODE_STRING",
|
| 61 |
+
["Length",
|
| 62 |
+
"MaximumLength",
|
| 63 |
+
"Buffer"],
|
| 64 |
+
[model.PrimitiveType("unsigned short"),
|
| 65 |
+
model.PrimitiveType("unsigned short"),
|
| 66 |
+
model.PointerType(model.PrimitiveType("wchar_t"))],
|
| 67 |
+
[-1, -1, -1]),
|
| 68 |
+
"PUNICODE_STRING": "UNICODE_STRING *",
|
| 69 |
+
"PCUNICODE_STRING": "const UNICODE_STRING *",
|
| 70 |
+
|
| 71 |
+
"TBYTE": "set-unicode-needed",
|
| 72 |
+
"TCHAR": "set-unicode-needed",
|
| 73 |
+
"LPCTSTR": "set-unicode-needed",
|
| 74 |
+
"PCTSTR": "set-unicode-needed",
|
| 75 |
+
"LPTSTR": "set-unicode-needed",
|
| 76 |
+
"PTSTR": "set-unicode-needed",
|
| 77 |
+
"PTBYTE": "set-unicode-needed",
|
| 78 |
+
"PTCHAR": "set-unicode-needed",
|
| 79 |
+
}
|
| 80 |
+
|
| 81 |
+
if sys.platform == 'win32':
|
| 82 |
+
COMMON_TYPES.update(win_common_types())
|
.venv/lib/python3.11/site-packages/cffi/error.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
class FFIError(Exception):
|
| 3 |
+
__module__ = 'cffi'
|
| 4 |
+
|
| 5 |
+
class CDefError(Exception):
|
| 6 |
+
__module__ = 'cffi'
|
| 7 |
+
def __str__(self):
|
| 8 |
+
try:
|
| 9 |
+
current_decl = self.args[1]
|
| 10 |
+
filename = current_decl.coord.file
|
| 11 |
+
linenum = current_decl.coord.line
|
| 12 |
+
prefix = '%s:%d: ' % (filename, linenum)
|
| 13 |
+
except (AttributeError, TypeError, IndexError):
|
| 14 |
+
prefix = ''
|
| 15 |
+
return '%s%s' % (prefix, self.args[0])
|
| 16 |
+
|
| 17 |
+
class VerificationError(Exception):
|
| 18 |
+
""" An error raised when verification fails
|
| 19 |
+
"""
|
| 20 |
+
__module__ = 'cffi'
|
| 21 |
+
|
| 22 |
+
class VerificationMissing(Exception):
|
| 23 |
+
""" An error raised when incomplete structures are passed into
|
| 24 |
+
cdef, but no verification has been done
|
| 25 |
+
"""
|
| 26 |
+
__module__ = 'cffi'
|
| 27 |
+
|
| 28 |
+
class PkgConfigError(Exception):
|
| 29 |
+
""" An error raised for missing modules in pkg-config
|
| 30 |
+
"""
|
| 31 |
+
__module__ = 'cffi'
|
.venv/lib/python3.11/site-packages/cffi/lock.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
if sys.version_info < (3,):
|
| 4 |
+
try:
|
| 5 |
+
from thread import allocate_lock
|
| 6 |
+
except ImportError:
|
| 7 |
+
from dummy_thread import allocate_lock
|
| 8 |
+
else:
|
| 9 |
+
try:
|
| 10 |
+
from _thread import allocate_lock
|
| 11 |
+
except ImportError:
|
| 12 |
+
from _dummy_thread import allocate_lock
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
##import sys
|
| 16 |
+
##l1 = allocate_lock
|
| 17 |
+
|
| 18 |
+
##class allocate_lock(object):
|
| 19 |
+
## def __init__(self):
|
| 20 |
+
## self._real = l1()
|
| 21 |
+
## def __enter__(self):
|
| 22 |
+
## for i in range(4, 0, -1):
|
| 23 |
+
## print sys._getframe(i).f_code
|
| 24 |
+
## print
|
| 25 |
+
## return self._real.__enter__()
|
| 26 |
+
## def __exit__(self, *args):
|
| 27 |
+
## return self._real.__exit__(*args)
|
| 28 |
+
## def acquire(self, f):
|
| 29 |
+
## assert f is False
|
| 30 |
+
## return self._real.acquire(f)
|
.venv/lib/python3.11/site-packages/cffi/recompiler.py
ADDED
|
@@ -0,0 +1,1598 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os, sys, io
|
| 2 |
+
from . import ffiplatform, model
|
| 3 |
+
from .error import VerificationError
|
| 4 |
+
from .cffi_opcode import *
|
| 5 |
+
|
| 6 |
+
VERSION_BASE = 0x2601
|
| 7 |
+
VERSION_EMBEDDED = 0x2701
|
| 8 |
+
VERSION_CHAR16CHAR32 = 0x2801
|
| 9 |
+
|
| 10 |
+
USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or
|
| 11 |
+
sys.version_info >= (3, 5))
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class GlobalExpr:
|
| 15 |
+
def __init__(self, name, address, type_op, size=0, check_value=0):
|
| 16 |
+
self.name = name
|
| 17 |
+
self.address = address
|
| 18 |
+
self.type_op = type_op
|
| 19 |
+
self.size = size
|
| 20 |
+
self.check_value = check_value
|
| 21 |
+
|
| 22 |
+
def as_c_expr(self):
|
| 23 |
+
return ' { "%s", (void *)%s, %s, (void *)%s },' % (
|
| 24 |
+
self.name, self.address, self.type_op.as_c_expr(), self.size)
|
| 25 |
+
|
| 26 |
+
def as_python_expr(self):
|
| 27 |
+
return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name,
|
| 28 |
+
self.check_value)
|
| 29 |
+
|
| 30 |
+
class FieldExpr:
|
| 31 |
+
def __init__(self, name, field_offset, field_size, fbitsize, field_type_op):
|
| 32 |
+
self.name = name
|
| 33 |
+
self.field_offset = field_offset
|
| 34 |
+
self.field_size = field_size
|
| 35 |
+
self.fbitsize = fbitsize
|
| 36 |
+
self.field_type_op = field_type_op
|
| 37 |
+
|
| 38 |
+
def as_c_expr(self):
|
| 39 |
+
spaces = " " * len(self.name)
|
| 40 |
+
return (' { "%s", %s,\n' % (self.name, self.field_offset) +
|
| 41 |
+
' %s %s,\n' % (spaces, self.field_size) +
|
| 42 |
+
' %s %s },' % (spaces, self.field_type_op.as_c_expr()))
|
| 43 |
+
|
| 44 |
+
def as_python_expr(self):
|
| 45 |
+
raise NotImplementedError
|
| 46 |
+
|
| 47 |
+
def as_field_python_expr(self):
|
| 48 |
+
if self.field_type_op.op == OP_NOOP:
|
| 49 |
+
size_expr = ''
|
| 50 |
+
elif self.field_type_op.op == OP_BITFIELD:
|
| 51 |
+
size_expr = format_four_bytes(self.fbitsize)
|
| 52 |
+
else:
|
| 53 |
+
raise NotImplementedError
|
| 54 |
+
return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(),
|
| 55 |
+
size_expr,
|
| 56 |
+
self.name)
|
| 57 |
+
|
| 58 |
+
class StructUnionExpr:
|
| 59 |
+
def __init__(self, name, type_index, flags, size, alignment, comment,
|
| 60 |
+
first_field_index, c_fields):
|
| 61 |
+
self.name = name
|
| 62 |
+
self.type_index = type_index
|
| 63 |
+
self.flags = flags
|
| 64 |
+
self.size = size
|
| 65 |
+
self.alignment = alignment
|
| 66 |
+
self.comment = comment
|
| 67 |
+
self.first_field_index = first_field_index
|
| 68 |
+
self.c_fields = c_fields
|
| 69 |
+
|
| 70 |
+
def as_c_expr(self):
|
| 71 |
+
return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags)
|
| 72 |
+
+ '\n %s, %s, ' % (self.size, self.alignment)
|
| 73 |
+
+ '%d, %d ' % (self.first_field_index, len(self.c_fields))
|
| 74 |
+
+ ('/* %s */ ' % self.comment if self.comment else '')
|
| 75 |
+
+ '},')
|
| 76 |
+
|
| 77 |
+
def as_python_expr(self):
|
| 78 |
+
flags = eval(self.flags, G_FLAGS)
|
| 79 |
+
fields_expr = [c_field.as_field_python_expr()
|
| 80 |
+
for c_field in self.c_fields]
|
| 81 |
+
return "(b'%s%s%s',%s)" % (
|
| 82 |
+
format_four_bytes(self.type_index),
|
| 83 |
+
format_four_bytes(flags),
|
| 84 |
+
self.name,
|
| 85 |
+
','.join(fields_expr))
|
| 86 |
+
|
| 87 |
+
class EnumExpr:
|
| 88 |
+
def __init__(self, name, type_index, size, signed, allenums):
|
| 89 |
+
self.name = name
|
| 90 |
+
self.type_index = type_index
|
| 91 |
+
self.size = size
|
| 92 |
+
self.signed = signed
|
| 93 |
+
self.allenums = allenums
|
| 94 |
+
|
| 95 |
+
def as_c_expr(self):
|
| 96 |
+
return (' { "%s", %d, _cffi_prim_int(%s, %s),\n'
|
| 97 |
+
' "%s" },' % (self.name, self.type_index,
|
| 98 |
+
self.size, self.signed, self.allenums))
|
| 99 |
+
|
| 100 |
+
def as_python_expr(self):
|
| 101 |
+
prim_index = {
|
| 102 |
+
(1, 0): PRIM_UINT8, (1, 1): PRIM_INT8,
|
| 103 |
+
(2, 0): PRIM_UINT16, (2, 1): PRIM_INT16,
|
| 104 |
+
(4, 0): PRIM_UINT32, (4, 1): PRIM_INT32,
|
| 105 |
+
(8, 0): PRIM_UINT64, (8, 1): PRIM_INT64,
|
| 106 |
+
}[self.size, self.signed]
|
| 107 |
+
return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index),
|
| 108 |
+
format_four_bytes(prim_index),
|
| 109 |
+
self.name, self.allenums)
|
| 110 |
+
|
| 111 |
+
class TypenameExpr:
|
| 112 |
+
def __init__(self, name, type_index):
|
| 113 |
+
self.name = name
|
| 114 |
+
self.type_index = type_index
|
| 115 |
+
|
| 116 |
+
def as_c_expr(self):
|
| 117 |
+
return ' { "%s", %d },' % (self.name, self.type_index)
|
| 118 |
+
|
| 119 |
+
def as_python_expr(self):
|
| 120 |
+
return "b'%s%s'" % (format_four_bytes(self.type_index), self.name)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
# ____________________________________________________________
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class Recompiler:
|
| 127 |
+
_num_externpy = 0
|
| 128 |
+
|
| 129 |
+
def __init__(self, ffi, module_name, target_is_python=False):
|
| 130 |
+
self.ffi = ffi
|
| 131 |
+
self.module_name = module_name
|
| 132 |
+
self.target_is_python = target_is_python
|
| 133 |
+
self._version = VERSION_BASE
|
| 134 |
+
|
| 135 |
+
def needs_version(self, ver):
|
| 136 |
+
self._version = max(self._version, ver)
|
| 137 |
+
|
| 138 |
+
def collect_type_table(self):
|
| 139 |
+
self._typesdict = {}
|
| 140 |
+
self._generate("collecttype")
|
| 141 |
+
#
|
| 142 |
+
all_decls = sorted(self._typesdict, key=str)
|
| 143 |
+
#
|
| 144 |
+
# prepare all FUNCTION bytecode sequences first
|
| 145 |
+
self.cffi_types = []
|
| 146 |
+
for tp in all_decls:
|
| 147 |
+
if tp.is_raw_function:
|
| 148 |
+
assert self._typesdict[tp] is None
|
| 149 |
+
self._typesdict[tp] = len(self.cffi_types)
|
| 150 |
+
self.cffi_types.append(tp) # placeholder
|
| 151 |
+
for tp1 in tp.args:
|
| 152 |
+
assert isinstance(tp1, (model.VoidType,
|
| 153 |
+
model.BasePrimitiveType,
|
| 154 |
+
model.PointerType,
|
| 155 |
+
model.StructOrUnionOrEnum,
|
| 156 |
+
model.FunctionPtrType))
|
| 157 |
+
if self._typesdict[tp1] is None:
|
| 158 |
+
self._typesdict[tp1] = len(self.cffi_types)
|
| 159 |
+
self.cffi_types.append(tp1) # placeholder
|
| 160 |
+
self.cffi_types.append('END') # placeholder
|
| 161 |
+
#
|
| 162 |
+
# prepare all OTHER bytecode sequences
|
| 163 |
+
for tp in all_decls:
|
| 164 |
+
if not tp.is_raw_function and self._typesdict[tp] is None:
|
| 165 |
+
self._typesdict[tp] = len(self.cffi_types)
|
| 166 |
+
self.cffi_types.append(tp) # placeholder
|
| 167 |
+
if tp.is_array_type and tp.length is not None:
|
| 168 |
+
self.cffi_types.append('LEN') # placeholder
|
| 169 |
+
assert None not in self._typesdict.values()
|
| 170 |
+
#
|
| 171 |
+
# collect all structs and unions and enums
|
| 172 |
+
self._struct_unions = {}
|
| 173 |
+
self._enums = {}
|
| 174 |
+
for tp in all_decls:
|
| 175 |
+
if isinstance(tp, model.StructOrUnion):
|
| 176 |
+
self._struct_unions[tp] = None
|
| 177 |
+
elif isinstance(tp, model.EnumType):
|
| 178 |
+
self._enums[tp] = None
|
| 179 |
+
for i, tp in enumerate(sorted(self._struct_unions,
|
| 180 |
+
key=lambda tp: tp.name)):
|
| 181 |
+
self._struct_unions[tp] = i
|
| 182 |
+
for i, tp in enumerate(sorted(self._enums,
|
| 183 |
+
key=lambda tp: tp.name)):
|
| 184 |
+
self._enums[tp] = i
|
| 185 |
+
#
|
| 186 |
+
# emit all bytecode sequences now
|
| 187 |
+
for tp in all_decls:
|
| 188 |
+
method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__)
|
| 189 |
+
method(tp, self._typesdict[tp])
|
| 190 |
+
#
|
| 191 |
+
# consistency check
|
| 192 |
+
for op in self.cffi_types:
|
| 193 |
+
assert isinstance(op, CffiOp)
|
| 194 |
+
self.cffi_types = tuple(self.cffi_types) # don't change any more
|
| 195 |
+
|
| 196 |
+
def _enum_fields(self, tp):
|
| 197 |
+
# When producing C, expand all anonymous struct/union fields.
|
| 198 |
+
# That's necessary to have C code checking the offsets of the
|
| 199 |
+
# individual fields contained in them. When producing Python,
|
| 200 |
+
# don't do it and instead write it like it is, with the
|
| 201 |
+
# corresponding fields having an empty name. Empty names are
|
| 202 |
+
# recognized at runtime when we import the generated Python
|
| 203 |
+
# file.
|
| 204 |
+
expand_anonymous_struct_union = not self.target_is_python
|
| 205 |
+
return tp.enumfields(expand_anonymous_struct_union)
|
| 206 |
+
|
| 207 |
+
def _do_collect_type(self, tp):
|
| 208 |
+
if not isinstance(tp, model.BaseTypeByIdentity):
|
| 209 |
+
if isinstance(tp, tuple):
|
| 210 |
+
for x in tp:
|
| 211 |
+
self._do_collect_type(x)
|
| 212 |
+
return
|
| 213 |
+
if tp not in self._typesdict:
|
| 214 |
+
self._typesdict[tp] = None
|
| 215 |
+
if isinstance(tp, model.FunctionPtrType):
|
| 216 |
+
self._do_collect_type(tp.as_raw_function())
|
| 217 |
+
elif isinstance(tp, model.StructOrUnion):
|
| 218 |
+
if tp.fldtypes is not None and (
|
| 219 |
+
tp not in self.ffi._parser._included_declarations):
|
| 220 |
+
for name1, tp1, _, _ in self._enum_fields(tp):
|
| 221 |
+
self._do_collect_type(self._field_type(tp, name1, tp1))
|
| 222 |
+
else:
|
| 223 |
+
for _, x in tp._get_items():
|
| 224 |
+
self._do_collect_type(x)
|
| 225 |
+
|
| 226 |
+
def _generate(self, step_name):
|
| 227 |
+
lst = self.ffi._parser._declarations.items()
|
| 228 |
+
for name, (tp, quals) in sorted(lst):
|
| 229 |
+
kind, realname = name.split(' ', 1)
|
| 230 |
+
try:
|
| 231 |
+
method = getattr(self, '_generate_cpy_%s_%s' % (kind,
|
| 232 |
+
step_name))
|
| 233 |
+
except AttributeError:
|
| 234 |
+
raise VerificationError(
|
| 235 |
+
"not implemented in recompile(): %r" % name)
|
| 236 |
+
try:
|
| 237 |
+
self._current_quals = quals
|
| 238 |
+
method(tp, realname)
|
| 239 |
+
except Exception as e:
|
| 240 |
+
model.attach_exception_info(e, name)
|
| 241 |
+
raise
|
| 242 |
+
|
| 243 |
+
# ----------
|
| 244 |
+
|
| 245 |
+
ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"]
|
| 246 |
+
|
| 247 |
+
def collect_step_tables(self):
    """Build self._lsts: one sorted, frozen tuple of entries per step
    in ALL_STEPS, then sanity-check them against the collected
    struct/union and enum dictionaries."""
    # collect the declarations for '_cffi_globals', '_cffi_typenames', etc.
    self._lsts = {}
    for step_name in self.ALL_STEPS:
        self._lsts[step_name] = []
    self._seen_struct_unions = set()
    self._generate("ctx")
    self._add_missing_struct_unions()
    #
    for step_name in self.ALL_STEPS:
        lst = self._lsts[step_name]
        # "field" entries must keep the order their structs appended them
        if step_name != "field":
            lst.sort(key=lambda entry: entry.name)
        self._lsts[step_name] = tuple(lst)    # don't change any more
    #
    # check for a possible internal inconsistency: _cffi_struct_unions
    # should have been generated with exactly self._struct_unions
    lst = self._lsts["struct_union"]
    for tp, i in self._struct_unions.items():
        assert i < len(lst)
        assert lst[i].name == tp.name
    assert len(lst) == len(self._struct_unions)
    # same with enums
    lst = self._lsts["enum"]
    for tp, i in self._enums.items():
        assert i < len(lst)
        assert lst[i].name == tp.name
    assert len(lst) == len(self._enums)
|
| 275 |
+
|
| 276 |
+
# ----------
|
| 277 |
+
|
| 278 |
+
def _prnt(self, what=''):
|
| 279 |
+
self._f.write(what + '\n')
|
| 280 |
+
|
| 281 |
+
def write_source_to_f(self, f, preamble):
    """Write the generated module source to file object *f*.

    In Python (ABI) mode no C *preamble* is involved and must be None;
    in C (API) mode a preamble is required.
    """
    if self.target_is_python:
        assert preamble is None
        self.write_py_source_to_f(f)
    else:
        assert preamble is not None
        self.write_c_source_to_f(f, preamble)
|
| 288 |
+
|
| 289 |
+
def _rel_readlines(self, filename):
    """Return all lines of *filename*, resolved relative to the
    directory containing this module.

    Fix: use a context manager so the file handle is closed even if
    readlines() raises (the original closed it only on success).
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path, 'r') as g:
        return g.readlines()
|
| 294 |
+
|
| 295 |
+
def write_c_source_to_f(self, f, preamble):
    """Write the complete generated C extension source to *f*:
    the inlined cffi headers, optional embedding support, the user
    *preamble* verbatim, the '_cffi_types' table, all per-declaration
    functions, the step tables, '_cffi_type_context', and the module
    init functions for CPython 2/3 and PyPy.

    NOTE(review): the emitted-C strings below are transcribed from a
    whitespace-mangled rendering; internal spacing of the generated C
    is cosmetic but should be confirmed against upstream cffi.
    """
    self._f = f
    prnt = self._prnt
    if self.ffi._embedding is not None:
        prnt('#define _CFFI_USE_EMBEDDING')
    if not USE_LIMITED_API:
        prnt('#define _CFFI_NO_LIMITED_API')
    #
    # first the '#include' (actually done by inlining the file's content)
    lines = self._rel_readlines('_cffi_include.h')
    i = lines.index('#include "parse_c_type.h"\n')
    lines[i:i+1] = self._rel_readlines('parse_c_type.h')
    prnt(''.join(lines))
    #
    # if we have ffi._embedding != None, we give it here as a macro
    # and include an extra file
    base_module_name = self.module_name.split('.')[-1]
    if self.ffi._embedding is not None:
        prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,))
        prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {')
        self._print_string_literal_in_array(self.ffi._embedding)
        prnt('0 };')
        prnt('#ifdef PYPY_VERSION')
        prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % (
            base_module_name,))
        prnt('#elif PY_MAJOR_VERSION >= 3')
        prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % (
            base_module_name,))
        prnt('#else')
        prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % (
            base_module_name,))
        prnt('#endif')
        lines = self._rel_readlines('_embedding.h')
        i = lines.index('#include "_cffi_errors.h"\n')
        lines[i:i+1] = self._rel_readlines('_cffi_errors.h')
        prnt(''.join(lines))
        self.needs_version(VERSION_EMBEDDED)
    #
    # then paste the C source given by the user, verbatim.
    prnt('/************************************************************/')
    prnt()
    prnt(preamble)
    prnt()
    prnt('/************************************************************/')
    prnt()
    #
    # the declaration of '_cffi_types'
    prnt('static void *_cffi_types[] = {')
    typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
    for i, op in enumerate(self.cffi_types):
        comment = ''
        if i in typeindex2type:
            comment = ' // ' + typeindex2type[i]._get_c_name()
        prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment))
    if not self.cffi_types:
        prnt(' 0')
    prnt('};')
    prnt()
    #
    # call generate_cpy_xxx_decl(), for every xxx found from
    # ffi._parser._declarations. This generates all the functions.
    self._seen_constants = set()
    self._generate("decl")
    #
    # the declaration of '_cffi_globals' and '_cffi_typenames'
    nums = {}
    for step_name in self.ALL_STEPS:
        lst = self._lsts[step_name]
        nums[step_name] = len(lst)
        if nums[step_name] > 0:
            prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % (
                step_name, step_name))
            for entry in lst:
                prnt(entry.as_c_expr())
            prnt('};')
            prnt()
    #
    # the declaration of '_cffi_includes'
    if self.ffi._included_ffis:
        prnt('static const char * const _cffi_includes[] = {')
        for ffi_to_include in self.ffi._included_ffis:
            try:
                included_module_name, included_source = (
                    ffi_to_include._assigned_source[:2])
            except AttributeError:
                raise VerificationError(
                    "ffi object %r includes %r, but the latter has not "
                    "been prepared with set_source()" % (
                        self.ffi, ffi_to_include,))
            if included_source is None:
                raise VerificationError(
                    "not implemented yet: ffi.include() of a Python-based "
                    "ffi inside a C-based ffi")
            prnt(' "%s",' % (included_module_name,))
        prnt(' NULL')
        prnt('};')
        prnt()
    #
    # the declaration of '_cffi_type_context'
    prnt('static const struct _cffi_type_context_s _cffi_type_context = {')
    prnt(' _cffi_types,')
    for step_name in self.ALL_STEPS:
        if nums[step_name] > 0:
            prnt(' _cffi_%ss,' % step_name)
        else:
            prnt(' NULL, /* no %ss */' % step_name)
    for step_name in self.ALL_STEPS:
        # fields have no independent count in the context structure
        if step_name != "field":
            prnt(' %d, /* num_%ss */' % (nums[step_name], step_name))
    if self.ffi._included_ffis:
        prnt(' _cffi_includes,')
    else:
        prnt(' NULL, /* no includes */')
    prnt(' %d, /* num_types */' % (len(self.cffi_types),))
    flags = 0
    if self._num_externpy > 0 or self.ffi._embedding is not None:
        flags |= 1     # set to mean that we use extern "Python"
    prnt(' %d, /* flags */' % flags)
    prnt('};')
    prnt()
    #
    # the init function
    prnt('#ifdef __GNUC__')
    prnt('# pragma GCC visibility push(default) /* for -fvisibility= */')
    prnt('#endif')
    prnt()
    prnt('#ifdef PYPY_VERSION')
    prnt('PyMODINIT_FUNC')
    prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,))
    prnt('{')
    if flags & 1:
        prnt(' if (((intptr_t)p[0]) >= 0x0A03) {')
        prnt(' _cffi_call_python_org = '
             '(void(*)(struct _cffi_externpy_s *, char *))p[1];')
        prnt(' }')
    prnt(' p[0] = (const void *)0x%x;' % self._version)
    prnt(' p[1] = &_cffi_type_context;')
    prnt('#if PY_MAJOR_VERSION >= 3')
    prnt(' return NULL;')
    prnt('#endif')
    prnt('}')
    # on Windows, distutils insists on putting init_cffi_xyz in
    # 'export_symbols', so instead of fighting it, just give up and
    # give it one
    prnt('# ifdef _MSC_VER')
    prnt(' PyMODINIT_FUNC')
    prnt('# if PY_MAJOR_VERSION >= 3')
    prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,))
    prnt('# else')
    prnt(' init%s(void) { }' % (base_module_name,))
    prnt('# endif')
    prnt('# endif')
    prnt('#elif PY_MAJOR_VERSION >= 3')
    prnt('PyMODINIT_FUNC')
    prnt('PyInit_%s(void)' % (base_module_name,))
    prnt('{')
    prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
        self.module_name, self._version))
    prnt('}')
    prnt('#else')
    prnt('PyMODINIT_FUNC')
    prnt('init%s(void)' % (base_module_name,))
    prnt('{')
    prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % (
        self.module_name, self._version))
    prnt('}')
    prnt('#endif')
    prnt()
    prnt('#ifdef __GNUC__')
    prnt('# pragma GCC visibility pop')
    prnt('#endif')
    # the version number must not be reused for a second write
    self._version = None
|
| 467 |
+
|
| 468 |
+
def _to_py(self, x):
|
| 469 |
+
if isinstance(x, str):
|
| 470 |
+
return "b'%s'" % (x,)
|
| 471 |
+
if isinstance(x, (list, tuple)):
|
| 472 |
+
rep = [self._to_py(item) for item in x]
|
| 473 |
+
if len(rep) == 1:
|
| 474 |
+
rep.append('')
|
| 475 |
+
return "(%s)" % (','.join(rep),)
|
| 476 |
+
return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp.
|
| 477 |
+
|
| 478 |
+
def write_py_source_to_f(self, f):
    """Write the generated Python (ABI-mode) module source to *f*:
    a header, imports of included ffis, and one _cffi_backend.FFI(...)
    call carrying the serialized type and step tables."""
    self._f = f
    prnt = self._prnt
    #
    # header
    prnt("# auto-generated file")
    prnt("import _cffi_backend")
    #
    # the 'import' of the included ffis
    num_includes = len(self.ffi._included_ffis or ())
    for i in range(num_includes):
        ffi_to_include = self.ffi._included_ffis[i]
        try:
            included_module_name, included_source = (
                ffi_to_include._assigned_source[:2])
        except AttributeError:
            raise VerificationError(
                "ffi object %r includes %r, but the latter has not "
                "been prepared with set_source()" % (
                    self.ffi, ffi_to_include,))
        # a Python-based ffi may only include other Python-based ffis
        if included_source is not None:
            raise VerificationError(
                "not implemented yet: ffi.include() of a C-based "
                "ffi inside a Python-based ffi")
        prnt('from %s import ffi as _ffi%d' % (included_module_name, i))
    prnt()
    prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,))
    prnt(" _version = 0x%x," % (self._version,))
    # the version number must not be reused for a second write
    self._version = None
    #
    # the '_types' keyword argument
    self.cffi_types = tuple(self.cffi_types)    # don't change any more
    types_lst = [op.as_python_bytes() for op in self.cffi_types]
    prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),))
    typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()])
    #
    # the keyword arguments from ALL_STEPS
    for step_name in self.ALL_STEPS:
        lst = self._lsts[step_name]
        if len(lst) > 0 and step_name != "field":
            prnt(' _%ss = %s,' % (step_name, self._to_py(lst)))
    #
    # the '_includes' keyword argument
    if num_includes > 0:
        prnt(' _includes = (%s,),' % (
            ', '.join(['_ffi%d' % i for i in range(num_includes)]),))
    #
    # the footer
    prnt(')')
|
| 527 |
+
|
| 528 |
+
# ----------
|
| 529 |
+
|
| 530 |
+
def _gettypenum(self, type):
    """Return the index of *type* in the '_cffi_types' table."""
    # a KeyError here is a bug. please report it! :-)
    return self._typesdict[type]
|
| 533 |
+
|
| 534 |
+
def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
    """Emit C code converting PyObject *fromvar* into C variable
    *tovar* of type *tp*; on conversion failure, emit *errcode*
    (e.g. 'return NULL')."""
    extraarg = ''
    if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type():
        if tp.is_integer_type() and tp.name != '_Bool':
            converter = '_cffi_to_c_int'
            extraarg = ', %s' % tp.name
        elif isinstance(tp, model.UnknownFloatType):
            # don't check with is_float_type(): it may be a 'long
            # double' here, and _cffi_to_c_double would loose precision
            converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),)
        else:
            cname = tp.get_c_name('')
            converter = '(%s)_cffi_to_c_%s' % (cname,
                                               tp.name.replace(' ', '_'))
            if cname in ('char16_t', 'char32_t'):
                self.needs_version(VERSION_CHAR16CHAR32)
        errvalue = '-1'
    #
    elif isinstance(tp, model.PointerType):
        # pointers/arrays have their own, more involved conversion
        self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
                                                tovar, errcode)
        return
    #
    elif (isinstance(tp, model.StructOrUnionOrEnum) or
          isinstance(tp, model.BasePrimitiveType)):
        # a struct (not a struct pointer) as a function argument;
        # or, a complex (the same code works)
        self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
                   % (tovar, self._gettypenum(tp), fromvar))
        self._prnt(' %s;' % errcode)
        return
    #
    elif isinstance(tp, model.FunctionPtrType):
        converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
        extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
        errvalue = 'NULL'
    #
    else:
        raise NotImplementedError(tp)
    #
    # common tail: call the converter, then check for the error value
    # combined with a pending Python exception
    self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
    self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
        tovar, tp.get_c_name(''), errvalue))
    self._prnt(' %s;' % errcode)
|
| 578 |
+
|
| 579 |
+
def _extra_local_variables(self, tp, localvars, freelines):
    """Add to *localvars*/*freelines* the C declarations and cleanup
    lines needed when an argument of type *tp* is a pointer (buffer
    preparation uses 'datasize' and a possible heap fallback)."""
    if isinstance(tp, model.PointerType):
        localvars.add('Py_ssize_t datasize')
        localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
        freelines.add('if (large_args_free != NULL)'
                      ' _cffi_free_array_arguments(large_args_free);')
|
| 585 |
+
|
| 586 |
+
def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
    """Emit C code converting *fromvar* into pointer/array *tovar*:
    small buffers (<= 640 bytes) are alloca()'d, larger ones fall back
    to a heap allocation tracked in 'large_args_free'."""
    self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
    self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
        self._gettypenum(tp), fromvar, tovar))
    self._prnt(' if (datasize != 0) {')
    self._prnt(' %s = ((size_t)datasize) <= 640 ? '
               '(%s)alloca((size_t)datasize) : NULL;' % (
        tovar, tp.get_c_name('')))
    self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
               '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
    self._prnt(' datasize, &large_args_free) < 0)')
    self._prnt(' %s;' % errcode)
    self._prnt(' }')
|
| 599 |
+
|
| 600 |
+
def _convert_expr_from_c(self, tp, var, context):
    """Return a C expression converting C value *var* of type *tp*
    into a PyObject*; *context* is used only in error messages."""
    if isinstance(tp, model.BasePrimitiveType):
        if tp.is_integer_type() and tp.name != '_Bool':
            return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
        elif isinstance(tp, model.UnknownFloatType):
            return '_cffi_from_c_double(%s)' % (var,)
        elif tp.name != 'long double' and not tp.is_complex_type():
            cname = tp.name.replace(' ', '_')
            if cname in ('char16_t', 'char32_t'):
                self.needs_version(VERSION_CHAR16CHAR32)
            return '_cffi_from_c_%s(%s)' % (cname, var)
        else:
            # 'long double' and complex go through the generic deref
            return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
                var, self._gettypenum(tp))
    elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
        return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    elif isinstance(tp, model.ArrayType):
        # arrays decay: the result is typed as pointer-to-item
        return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
            var, self._gettypenum(model.PointerType(tp.item)))
    elif isinstance(tp, model.StructOrUnion):
        if tp.fldnames is None:
            raise TypeError("'%s' is used as %s, but is opaque" % (
                tp._get_c_name(), context))
        return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    elif isinstance(tp, model.EnumType):
        return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    else:
        raise NotImplementedError(tp)
|
| 631 |
+
|
| 632 |
+
# ----------
|
| 633 |
+
# typedefs
|
| 634 |
+
|
| 635 |
+
def _typedef_type(self, tp, name):
    """Resolve the effective type of typedef *name* via _global_type,
    using a C lvalue expression that dereferences a null pointer of
    the typedef'ed type (never executed, only for sizeof-style use)."""
    return self._global_type(tp, "(*(%s *)0)" % (name,))
|
| 637 |
+
|
| 638 |
+
def _generate_cpy_typedef_collecttype(self, tp, name):
    """Collect the resolved type behind typedef *name*."""
    self._do_collect_type(self._typedef_type(tp, name))
|
| 640 |
+
|
| 641 |
+
def _generate_cpy_typedef_decl(self, tp, name):
    """Typedefs need no C declarations of their own."""
    pass
|
| 643 |
+
|
| 644 |
+
def _typedef_ctx(self, tp, name):
    """Append a 'typename' table entry mapping *name* to the type
    index of *tp*."""
    type_index = self._typesdict[tp]
    self._lsts["typename"].append(TypenameExpr(name, type_index))
|
| 647 |
+
|
| 648 |
+
def _generate_cpy_typedef_ctx(self, tp, name):
    """Emit the 'typename' entry for typedef *name*; if it names an
    unknown struct-like type or a named struct pointer, also emit the
    corresponding struct_union entry."""
    tp = self._typedef_type(tp, name)
    self._typedef_ctx(tp, name)
    if getattr(tp, "origin", None) == "unknown_type":
        self._struct_ctx(tp, tp.name, approxname=None)
    elif isinstance(tp, model.NamedPointerType):
        self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name,
                         named_ptr=tp)
|
| 656 |
+
|
| 657 |
+
# ----------
|
| 658 |
+
# function declarations
|
| 659 |
+
|
| 660 |
+
def _generate_cpy_function_collecttype(self, tp, name):
    """Collect the raw (non-vararg) function type; for a vararg
    function in API mode, also collect the full type."""
    self._do_collect_type(tp.as_raw_function())
    if tp.ellipsis and not self.target_is_python:
        self._do_collect_type(tp)
|
| 664 |
+
|
| 665 |
+
def _generate_cpy_function_decl(self, tp, name):
    """Emit the C wrappers for function *name* of type *tp*:
    a direct '_cffi_d_<name>' forwarder (used by addressof()), a
    CPython '_cffi_f_<name>' METH wrapper, and — under PYPY_VERSION —
    a variant that passes structs/complex values by pointer.
    Vararg functions fall back to a constant function pointer."""
    assert not self.target_is_python
    assert isinstance(tp, model.FunctionPtrType)
    if tp.ellipsis:
        # cannot support vararg functions better than this: check for its
        # exact type (including the fixed arguments), and build it as a
        # constant function pointer (no CPython wrapper)
        self._generate_cpy_constant_decl(tp, name)
        return
    prnt = self._prnt
    numargs = len(tp.args)
    if numargs == 0:
        argname = 'noarg'
    elif numargs == 1:
        argname = 'arg0'
    else:
        argname = 'args'
    #
    # ------------------------------
    # the 'd' version of the function, only for addressof(lib, 'func')
    arguments = []
    call_arguments = []
    context = 'argument of %s' % name
    for i, type in enumerate(tp.args):
        arguments.append(type.get_c_name(' x%d' % i, context))
        call_arguments.append('x%d' % i)
    repr_arguments = ', '.join(arguments)
    repr_arguments = repr_arguments or 'void'
    if tp.abi:
        abi = tp.abi + ' '
    else:
        abi = ''
    name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments)
    prnt('static %s' % (tp.result.get_c_name(name_and_arguments),))
    prnt('{')
    call_arguments = ', '.join(call_arguments)
    result_code = 'return '
    if isinstance(tp.result, model.VoidType):
        result_code = ''
    prnt(' %s%s(%s);' % (result_code, name, call_arguments))
    prnt('}')
    #
    prnt('#ifndef PYPY_VERSION') # ------------------------------
    #
    prnt('static PyObject *')
    prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
    prnt('{')
    #
    # declare one C local per argument
    context = 'argument of %s' % name
    for i, type in enumerate(tp.args):
        arg = type.get_c_name(' x%d' % i, context)
        prnt(' %s;' % arg)
    #
    # extra locals/cleanup needed by pointer arguments
    localvars = set()
    freelines = set()
    for type in tp.args:
        self._extra_local_variables(type, localvars, freelines)
    for decl in sorted(localvars):
        prnt(' %s;' % (decl,))
    #
    if not isinstance(tp.result, model.VoidType):
        result_code = 'result = '
        context = 'result of %s' % name
        result_decl = ' %s;' % tp.result.get_c_name(' result', context)
        prnt(result_decl)
        prnt(' PyObject *pyresult;')
    else:
        result_decl = None
        result_code = ''
    #
    # unpack the argument tuple (only needed for METH_VARARGS)
    if len(tp.args) > 1:
        rng = range(len(tp.args))
        for i in rng:
            prnt(' PyObject *arg%d;' % i)
        prnt()
        prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % (
            name, len(rng), len(rng),
            ', '.join(['&arg%d' % i for i in rng])))
        prnt(' return NULL;')
    prnt()
    #
    # convert every Python argument to its C counterpart
    for i, type in enumerate(tp.args):
        self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
                                   'return NULL')
        prnt()
    #
    # call with the GIL released, preserving errno around the call
    prnt(' Py_BEGIN_ALLOW_THREADS')
    prnt(' _cffi_restore_errno();')
    call_arguments = ['x%d' % i for i in range(len(tp.args))]
    call_arguments = ', '.join(call_arguments)
    prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
    prnt(' _cffi_save_errno();')
    prnt(' Py_END_ALLOW_THREADS')
    prnt()
    #
    prnt(' (void)self; /* unused */')
    if numargs == 0:
        prnt(' (void)noarg; /* unused */')
    if result_code:
        prnt(' pyresult = %s;' %
             self._convert_expr_from_c(tp.result, 'result', 'result type'))
        for freeline in freelines:
            prnt(' ' + freeline)
        prnt(' return pyresult;')
    else:
        for freeline in freelines:
            prnt(' ' + freeline)
        prnt(' Py_INCREF(Py_None);')
        prnt(' return Py_None;')
    prnt('}')
    #
    prnt('#else') # ------------------------------
    #
    # the PyPy version: need to replace struct/union arguments with
    # pointers, and if the result is a struct/union, insert a first
    # arg that is a pointer to the result. We also do that for
    # complex args and return type.
    def need_indirection(type):
        return (isinstance(type, model.StructOrUnion) or
                (isinstance(type, model.PrimitiveType) and
                 type.is_complex_type()))
    difference = False
    arguments = []
    call_arguments = []
    context = 'argument of %s' % name
    for i, type in enumerate(tp.args):
        indirection = ''
        if need_indirection(type):
            indirection = '*'
            difference = True
        arg = type.get_c_name(' %sx%d' % (indirection, i), context)
        arguments.append(arg)
        call_arguments.append('%sx%d' % (indirection, i))
    tp_result = tp.result
    if need_indirection(tp_result):
        context = 'result of %s' % name
        arg = tp_result.get_c_name(' *result', context)
        arguments.insert(0, arg)
        tp_result = model.void_type
        result_decl = None
        result_code = '*result = '
        difference = True
    if difference:
        repr_arguments = ', '.join(arguments)
        repr_arguments = repr_arguments or 'void'
        name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name,
                                                   repr_arguments)
        prnt('static %s' % (tp_result.get_c_name(name_and_arguments),))
        prnt('{')
        if result_decl:
            prnt(result_decl)
        call_arguments = ', '.join(call_arguments)
        prnt(' { %s%s(%s); }' % (result_code, name, call_arguments))
        if result_decl:
            prnt(' return result;')
        prnt('}')
    else:
        # nothing needed indirection: alias the 'd' version
        prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name))
    #
    prnt('#endif') # ------------------------------
    prnt()
|
| 826 |
+
|
| 827 |
+
def _generate_cpy_function_ctx(self, tp, name):
    """Append the 'global' table entry for function *name*, choosing
    the METH kind from the argument count (or the dlopen opcode in
    Python/ABI mode).  Vararg functions in API mode become constants."""
    if tp.ellipsis and not self.target_is_python:
        self._generate_cpy_constant_ctx(tp, name)
        return
    type_index = self._typesdict[tp.as_raw_function()]
    numargs = len(tp.args)
    if self.target_is_python:
        meth_kind = OP_DLOPEN_FUNC
    elif numargs == 0:
        meth_kind = OP_CPYTHON_BLTN_N   # 'METH_NOARGS'
    elif numargs == 1:
        meth_kind = OP_CPYTHON_BLTN_O   # 'METH_O'
    else:
        meth_kind = OP_CPYTHON_BLTN_V   # 'METH_VARARGS'
    self._lsts["global"].append(
        GlobalExpr(name, '_cffi_f_%s' % name,
                   CffiOp(meth_kind, type_index),
                   size='_cffi_d_%s' % name))
|
| 845 |
+
|
| 846 |
+
# ----------
|
| 847 |
+
# named structs or unions
|
| 848 |
+
|
| 849 |
+
def _field_type(self, tp_struct, field_name, tp_field):
    """Return the concrete type of a struct field; arrays declared
    with length '...' get a '_cffi_array_len(...)' C expression as
    their length, and nested array items are resolved recursively."""
    if isinstance(tp_field, model.ArrayType):
        actual_length = tp_field.length
        if actual_length == '...':
            ptr_struct_name = tp_struct.get_c_name('*')
            actual_length = '_cffi_array_len(((%s)0)->%s)' % (
                ptr_struct_name, field_name)
        tp_item = self._field_type(tp_struct, '%s[0]' % field_name,
                                   tp_field.item)
        tp_field = model.ArrayType(tp_item, actual_length)
    return tp_field
|
| 860 |
+
|
| 861 |
+
def _struct_collecttype(self, tp):
    """Collect *tp*; in Python/ABI mode also collect any anonymous
    nested struct/union field types, recursively."""
    self._do_collect_type(tp)
    if self.target_is_python:
        # also requires nested anon struct/unions in ABI mode, recursively
        for fldtype in tp.anonymous_struct_fields():
            self._struct_collecttype(fldtype)
|
| 867 |
+
|
| 868 |
+
def _struct_decl(self, tp, cname, approxname):
    """Emit a never-called C check function '_cffi_checkfld_<approxname>'
    whose body makes the C compiler verify each declared field type,
    plus an alignment-probing 'struct _cffi_align_<approxname>'.
    Opaque structs (fldtypes is None) emit nothing."""
    if tp.fldtypes is None:
        return
    prnt = self._prnt
    checkfuncname = '_cffi_checkfld_%s' % (approxname,)
    prnt('_CFFI_UNUSED_FN')
    prnt('static void %s(%s *p)' % (checkfuncname, cname))
    prnt('{')
    prnt(' /* only to generate compile-time warnings or errors */')
    prnt(' (void)p;')
    for fname, ftype, fbitsize, fqual in self._enum_fields(tp):
        try:
            if ftype.is_integer_type() or fbitsize >= 0:
                # accept all integers, but complain on float or double
                if fname != '':
                    prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is "
                         "an integer */" % (fname, cname, fname))
                continue
            # only accept exactly the type declared, except that '[]'
            # is interpreted as a '*' and so will match any array length.
            # (It would also match '*', but that's harder to detect...)
            while (isinstance(ftype, model.ArrayType)
                   and (ftype.length is None or ftype.length == '...')):
                ftype = ftype.item
                fname = fname + '[0]'
            prnt(' { %s = &p->%s; (void)tmp; }' % (
                ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                fname))
        except VerificationError as e:
            prnt(' /* %s */' % str(e))   # cannot verify it, ignore
    prnt('}')
    prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname))
    prnt()
|
| 901 |
+
|
| 902 |
+
def _struct_ctx(self, tp, cname, approxname, named_ptr=None):
    """Append the 'struct_union' table entry for *tp* (and its field
    entries to the 'field' table).

    *cname* is None only when called from _add_missing_struct_unions()
    for structs with no C name; *named_ptr* is the NamedPointerType
    through which the struct is reachable, if any.
    """
    type_index = self._typesdict[tp]
    reason_for_not_expanding = None
    flags = []
    if isinstance(tp, model.UnionType):
        flags.append("_CFFI_F_UNION")
    if tp.fldtypes is None:
        flags.append("_CFFI_F_OPAQUE")
        reason_for_not_expanding = "opaque"
    if (tp not in self.ffi._parser._included_declarations and
            (named_ptr is None or
             named_ptr not in self.ffi._parser._included_declarations)):
        if tp.fldtypes is None:
            pass    # opaque
        elif tp.partial or any(tp.anonymous_struct_fields()):
            pass    # field layout obtained silently from the C compiler
        else:
            flags.append("_CFFI_F_CHECK_FIELDS")
        if tp.packed:
            if tp.packed > 1:
                raise NotImplementedError(
                    "%r is declared with 'pack=%r'; only 0 or 1 are "
                    "supported in API mode (try to use \"...;\", which "
                    "does not require a 'pack' declaration)" %
                    (tp, tp.packed))
            flags.append("_CFFI_F_PACKED")
    else:
        # declared by an included ffi: do not expand it here
        flags.append("_CFFI_F_EXTERNAL")
        reason_for_not_expanding = "external"
    flags = '|'.join(flags) or '0'
    c_fields = []
    if reason_for_not_expanding is None:
        enumfields = list(self._enum_fields(tp))
        for fldname, fldtype, fbitsize, fqual in enumfields:
            fldtype = self._field_type(tp, fldname, fldtype)
            self._check_not_opaque(fldtype,
                                   "field '%s.%s'" % (tp.name, fldname))
            # cname is None for _add_missing_struct_unions() only
            op = OP_NOOP
            if fbitsize >= 0:
                op = OP_BITFIELD
                size = '%d /* bits */' % fbitsize
            elif cname is None or (
                    isinstance(fldtype, model.ArrayType) and
                    fldtype.length is None):
                size = '(size_t)-1'
            else:
                size = 'sizeof(((%s)0)->%s)' % (
                    tp.get_c_name('*') if named_ptr is None
                    else named_ptr.name,
                    fldname)
            if cname is None or fbitsize >= 0:
                offset = '(size_t)-1'
            elif named_ptr is not None:
                # offsetof-equivalent computed through the typedef'ed
                # pointer name (4096 avoids a literal null deref)
                offset = '((char *)&((%s)4096)->%s) - (char *)4096' % (
                    named_ptr.name, fldname)
            else:
                offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname)
            c_fields.append(
                FieldExpr(fldname, offset, size, fbitsize,
                          CffiOp(op, self._typesdict[fldtype])))
        first_field_index = len(self._lsts["field"])
        self._lsts["field"].extend(c_fields)
        #
        if cname is None: # unknown name, for _add_missing_struct_unions
            size = '(size_t)-2'
            align = -2
            comment = "unnamed"
        else:
            if named_ptr is not None:
                size = 'sizeof(*(%s)0)' % (named_ptr.name,)
                align = '-1 /* unknown alignment */'
            else:
                size = 'sizeof(%s)' % (cname,)
                align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,)
            comment = None
    else:
        size = '(size_t)-1'
        align = -1
        first_field_index = -1
        comment = reason_for_not_expanding
    self._lsts["struct_union"].append(
        StructUnionExpr(tp.name, type_index, flags, size, align, comment,
                        first_field_index, c_fields))
    self._seen_struct_unions.add(tp)
|
| 987 |
+
|
| 988 |
+
def _check_not_opaque(self, tp, location):
    """Raise TypeError if *tp* (after peeling array levels) is an
    opaque struct/union; *location* describes where it was used."""
    while isinstance(tp, model.ArrayType):
        tp = tp.item
    if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None:
        raise TypeError(
            "%s is of an opaque type (not declared in cdef())" % location)
|
| 994 |
+
|
| 995 |
+
def _add_missing_struct_unions(self):
    """Emit ctx entries for struct/unions that were never named.

    Called after the normal generation pass; any struct/union left in
    self._struct_unions but not in self._seen_struct_unions is emitted
    here anonymously.
    """
    # not very nice, but some struct declarations might be missing
    # because they don't have any known C name.  Check that they are
    # not partial (we can't complete or verify them!) and emit them
    # anonymously.
    lst = list(self._struct_unions.items())
    # Sort by the order index so output is deterministic across runs.
    lst.sort(key=lambda tp_order: tp_order[1])
    for tp, order in lst:
        if tp not in self._seen_struct_unions:
            if tp.partial:
                raise NotImplementedError("internal inconsistency: %r is "
                                          "partial but was not seen at "
                                          "this point" % (tp,))
            # '$123'-style names are parser-generated anonymous names.
            if tp.name.startswith('$') and tp.name[1:].isdigit():
                approxname = tp.name[1:]
            elif tp.name == '_IO_FILE' and tp.forcename == 'FILE':
                # Special case: glibc's FILE is really 'struct _IO_FILE'.
                approxname = 'FILE'
                self._typedef_ctx(tp, 'FILE')
            else:
                raise NotImplementedError("internal inconsistency: %r" %
                                          (tp,))
            # cname=None tells _struct_ctx to emit it as "unknown name".
            self._struct_ctx(tp, None, approxname)
|
| 1017 |
+
|
| 1018 |
+
def _generate_cpy_struct_collecttype(self, tp, name):
    """Collect the struct type *tp* into the type table (pass 1)."""
    self._struct_collecttype(tp)
# Unions are collected exactly like structs.
_generate_cpy_union_collecttype = _generate_cpy_struct_collecttype
|
| 1021 |
+
|
| 1022 |
+
def _struct_names(self, tp):
|
| 1023 |
+
cname = tp.get_c_name('')
|
| 1024 |
+
if ' ' in cname:
|
| 1025 |
+
return cname, cname.replace(' ', '_')
|
| 1026 |
+
else:
|
| 1027 |
+
return cname, '_' + cname
|
| 1028 |
+
|
| 1029 |
+
def _generate_cpy_struct_decl(self, tp, name):
    """Emit the C declaration helpers for struct *tp* (pass 2)."""
    self._struct_decl(tp, *self._struct_names(tp))
# Unions are declared exactly like structs.
_generate_cpy_union_decl = _generate_cpy_struct_decl
|
| 1032 |
+
|
| 1033 |
+
def _generate_cpy_struct_ctx(self, tp, name):
    """Record the ctx table entry for struct *tp* (pass 3)."""
    self._struct_ctx(tp, *self._struct_names(tp))
# Unions get their ctx entry exactly like structs.
_generate_cpy_union_ctx = _generate_cpy_struct_ctx
|
| 1036 |
+
|
| 1037 |
+
# ----------
|
| 1038 |
+
# 'anonymous' declarations. These are produced for anonymous structs
|
| 1039 |
+
# or unions; the 'name' is obtained by a typedef.
|
| 1040 |
+
|
| 1041 |
+
def _generate_cpy_anonymous_collecttype(self, tp, name):
    """Collect an anonymous (typedef-named) type: enum or struct/union."""
    if isinstance(tp, model.EnumType):
        self._generate_cpy_enum_collecttype(tp, name)
    else:
        self._struct_collecttype(tp)
|
| 1046 |
+
|
| 1047 |
+
def _generate_cpy_anonymous_decl(self, tp, name):
    """Emit declaration helpers for an anonymous enum or struct/union.

    *name* is the typedef name; struct/unions get the identifier
    'typedef_<name>' in the generated C helpers.
    """
    if isinstance(tp, model.EnumType):
        self._generate_cpy_enum_decl(tp)
    else:
        self._struct_decl(tp, name, 'typedef_' + name)
|
| 1052 |
+
|
| 1053 |
+
def _generate_cpy_anonymous_ctx(self, tp, name):
    """Record the ctx entry for an anonymous enum or struct/union."""
    if isinstance(tp, model.EnumType):
        self._enum_ctx(tp, name)
    else:
        self._struct_ctx(tp, name, 'typedef_' + name)
|
| 1058 |
+
|
| 1059 |
+
# ----------
|
| 1060 |
+
# constants, declared with "static const ..."
|
| 1061 |
+
|
| 1062 |
+
def _generate_cpy_const(self, is_int, name, tp=None, category='const',
                        check_value=None):
    """Emit the C helper function that fetches a constant at runtime.

    For integer constants (is_int=True) the helper writes the value
    into an 'unsigned long long *' and returns flags: bit 0 set if the
    value is negative, bit 2 set if *check_value* does not match.
    For non-integer constants it writes the value through a typed
    pointer.  Raises VerificationError on duplicate declarations.
    """
    if (category, name) in self._seen_constants:
        raise VerificationError(
            "duplicate declaration of %s '%s'" % (category, name))
    self._seen_constants.add((category, name))
    #
    prnt = self._prnt
    funcname = '_cffi_%s_%s' % (category, name)
    if is_int:
        prnt('static int %s(unsigned long long *o)' % funcname)
        prnt('{')
        # n == 1 iff the constant is negative (sign probe).
        prnt('  int n = (%s) <= 0;' % (name,))
        # The '| 0' forces a compile error if NAME is not an integer.
        prnt('  *o = (unsigned long long)((%s) | 0);'
             '  /* check that %s is an integer */' % (name, name))
        if check_value is not None:
            if check_value > 0:
                check_value = '%dU' % (check_value,)
            prnt('  if (!_cffi_check_int(*o, n, %s))' % (check_value,))
            prnt('    n |= 2;')
        prnt('  return n;')
        prnt('}')
    else:
        # Value checking is only supported for integer constants.
        assert check_value is None
        prnt('static void %s(char *o)' % funcname)
        prnt('{')
        prnt('  *(%s)o = %s;' % (tp.get_c_name('*'), name))
        prnt('}')
    prnt()
|
| 1091 |
+
|
| 1092 |
+
def _generate_cpy_constant_collecttype(self, tp, name):
    """Collect the constant's type; integer constants need no type
    entry in API mode (they are inlined as OP_CONSTANT_INT)."""
    is_int = tp.is_integer_type()
    if not is_int or self.target_is_python:
        self._do_collect_type(tp)
|
| 1096 |
+
|
| 1097 |
+
def _generate_cpy_constant_decl(self, tp, name):
    """Emit the C helper for the constant *name* of type *tp*."""
    is_int = tp.is_integer_type()
    self._generate_cpy_const(is_int, name, tp)
|
| 1100 |
+
|
| 1101 |
+
def _generate_cpy_constant_ctx(self, tp, name):
    """Record the ctx 'global' entry for the constant *name*.

    Integer constants in API mode use OP_CONSTANT_INT with no type
    index; everything else references the collected type, with a
    distinct opcode for ABI (dlopen) mode.
    """
    if not self.target_is_python and tp.is_integer_type():
        type_op = CffiOp(OP_CONSTANT_INT, -1)
    else:
        if self.target_is_python:
            const_kind = OP_DLOPEN_CONST
        else:
            const_kind = OP_CONSTANT
        type_index = self._typesdict[tp]
        type_op = CffiOp(const_kind, type_index)
    self._lsts["global"].append(
        GlobalExpr(name, '_cffi_const_%s' % name, type_op))
|
| 1113 |
+
|
| 1114 |
+
# ----------
|
| 1115 |
+
# enums
|
| 1116 |
+
|
| 1117 |
+
def _generate_cpy_enum_collecttype(self, tp, name):
    """Collect the enum type itself into the type table."""
    self._do_collect_type(tp)
|
| 1119 |
+
|
| 1120 |
+
def _generate_cpy_enum_decl(self, tp, name=None):
    """Emit an integer-constant C helper for every enumerator of *tp*."""
    for enumerator in tp.enumerators:
        self._generate_cpy_const(True, enumerator)
|
| 1123 |
+
|
| 1124 |
+
def _enum_ctx(self, tp, cname):
    """Record ctx entries for enum *tp*: one 'global' per enumerator
    plus one 'enum' entry describing size/signedness.

    In API mode size/signedness are emitted as C expressions evaluated
    at compile time; in ABI mode (or for anonymous enums) they are
    computed here from a base integer type.
    """
    type_index = self._typesdict[tp]
    type_op = CffiOp(OP_ENUM, -1)
    if self.target_is_python:
        # ABI mode cannot complete partial enums at compile time.
        tp.check_not_partial()
    for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
        self._lsts["global"].append(
            GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op,
                       check_value=enumvalue))
    #
    if cname is not None and '$' not in cname and not self.target_is_python:
        # API mode with a real C name: let the C compiler tell us.
        # NOTE: 'size' and 'signed' are *strings* here (C expressions).
        size = "sizeof(%s)" % cname
        signed = "((%s)-1) <= 0" % cname
    else:
        # Anonymous enum or ABI mode: compute concrete int values now.
        basetp = tp.build_baseinttype(self.ffi, [])
        size = self.ffi.sizeof(basetp)
        signed = int(int(self.ffi.cast(basetp, -1)) < 0)
    allenums = ",".join(tp.enumerators)
    self._lsts["enum"].append(
        EnumExpr(tp.name, type_index, size, signed, allenums))
|
| 1144 |
+
|
| 1145 |
+
def _generate_cpy_enum_ctx(self, tp, name):
    """Record ctx entries for a named enum (uses its real C name)."""
    self._enum_ctx(tp, tp._get_c_name())
|
| 1147 |
+
|
| 1148 |
+
# ----------
|
| 1149 |
+
# macros: for now only for integers
|
| 1150 |
+
|
| 1151 |
+
def _generate_cpy_macro_collecttype(self, tp, name):
    """Macros are integer-only: no type needs collecting."""
    pass
|
| 1153 |
+
|
| 1154 |
+
def _generate_cpy_macro_decl(self, tp, name):
    """Emit the integer-constant C helper for macro *name*.

    '#define NAME ...' arrives as tp == '...' (value unknown until C
    compile time, so no check); otherwise *tp* is the integer value
    stated in the cdef(), which the helper verifies at runtime.
    """
    check_value = None if tp == '...' else tp   # tp is an integer here
    self._generate_cpy_const(True, name, check_value=check_value)
|
| 1160 |
+
|
| 1161 |
+
def _generate_cpy_macro_ctx(self, tp, name):
    """Record the ctx 'global' entry for macro *name*.

    Rejects '#define NAME ...' in ABI mode, where there is no C
    compiler available to supply the value.
    """
    if tp == '...':
        if self.target_is_python:
            raise VerificationError(
                "cannot use the syntax '...' in '#define %s ...' when "
                "using the ABI mode" % (name,))
        check_value = None
    else:
        check_value = tp     # an integer
    type_op = CffiOp(OP_CONSTANT_INT, -1)
    self._lsts["global"].append(
        GlobalExpr(name, '_cffi_const_%s' % name, type_op,
                   check_value=check_value))
|
| 1174 |
+
|
| 1175 |
+
# ----------
|
| 1176 |
+
# global variables
|
| 1177 |
+
|
| 1178 |
+
def _global_type(self, tp, global_name):
    """Resolve '...' array lengths in the type of a global variable.

    Recursively rewrites ArrayType lengths given as '...' into the C
    expression '_cffi_array_len(<global_name>)', so the real length is
    measured by the C compiler.  Non-array types pass through unchanged.
    """
    if isinstance(tp, model.ArrayType):
        actual_length = tp.length
        if actual_length == '...':
            actual_length = '_cffi_array_len(%s)' % (global_name,)
        # Recurse into the item type for multi-dimensional arrays;
        # '[0]' indexes one level down in the generated expression.
        tp_item = self._global_type(tp.item, '%s[0]' % global_name)
        tp = model.ArrayType(tp_item, actual_length)
    return tp
|
| 1186 |
+
|
| 1187 |
+
def _generate_cpy_variable_collecttype(self, tp, name):
    """Collect the (length-resolved) type of global variable *name*."""
    self._do_collect_type(self._global_type(tp, name))
|
| 1189 |
+
|
| 1190 |
+
def _generate_cpy_variable_decl(self, tp, name):
    """Emit the C accessor function '_cffi_var_<name>' for a global.

    The accessor returns the address of the variable (or the variable
    itself when it is an open-ended array, which already decays to a
    pointer).
    """
    prnt = self._prnt
    tp = self._global_type(tp, name)
    if isinstance(tp, model.ArrayType) and tp.length is None:
        # Open array: 'name' is already a pointer-like expression.
        tp = tp.item
        ampersand = ''
    else:
        ampersand = '&'
    # This code assumes that casts from "tp *" to "void *" is a
    # no-op, i.e. a function that returns a "tp *" can be called
    # as if it returned a "void *".  This should be generally true
    # on any modern machine.  The only exception to that rule (on
    # uncommon architectures, and as far as I can tell) might be
    # if 'tp' were a function type, but that is not possible here.
    # (If 'tp' is a function _pointer_ type, then casts from "fn_t
    # **" to "void *" are again no-ops, as far as I can tell.)
    decl = '*_cffi_var_%s(void)' % (name,)
    prnt('static ' + tp.get_c_name(decl, quals=self._current_quals))
    prnt('{')
    prnt('  return %s(%s);' % (ampersand, name))
    prnt('}')
    prnt()
|
| 1212 |
+
|
| 1213 |
+
def _generate_cpy_variable_ctx(self, tp, name):
    """Record the ctx 'global' entry for global variable *name*.

    ABI mode reads the variable directly (OP_GLOBAL_VAR); API mode
    goes through the generated '_cffi_var_<name>' accessor function
    (OP_GLOBAL_VAR_F).
    """
    resolved_tp = self._global_type(tp, name)
    type_index = self._typesdict[resolved_tp]
    op = OP_GLOBAL_VAR if self.target_is_python else OP_GLOBAL_VAR_F
    self._lsts["global"].append(
        GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index)))
|
| 1222 |
+
|
| 1223 |
+
# ----------
|
| 1224 |
+
# extern "Python"
|
| 1225 |
+
|
| 1226 |
+
def _generate_cpy_extern_python_collecttype(self, tp, name):
    """Collect the function-pointer type of an extern "Python" function."""
    assert isinstance(tp, model.FunctionPtrType)
    self._do_collect_type(tp)
# The dllexport and plus_c variants collect types identically.
_generate_cpy_dllexport_python_collecttype = \
    _generate_cpy_extern_python_plus_c_collecttype = \
    _generate_cpy_extern_python_collecttype
|
| 1232 |
+
|
| 1233 |
+
def _extern_python_decl(self, tp, name, tag_and_space):
    """Emit the C trampoline for an extern "Python" function *name*.

    Generates (1) a static '_cffi_externpy_s' descriptor and (2) a C
    function with the declared signature that packs its arguments into
    a char buffer 'a' (one 8-byte slot per argument), calls
    _cffi_call_python(), and reads the result back from the same
    buffer.  *tag_and_space* is the storage prefix ('static ',
    'CFFI_DLLEXPORT ' or '').
    """
    prnt = self._prnt
    if isinstance(tp.result, model.VoidType):
        size_of_result = '0'
    else:
        context = 'result of %s' % name
        size_of_result = '(int)sizeof(%s)' % (
            tp.result.get_c_name('', context),)
    prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name)
    prnt('  { "%s.%s", %s, 0, 0 };' % (
        self.module_name, name, size_of_result))
    prnt()
    #
    arguments = []
    context = 'argument of %s' % name
    for i, type in enumerate(tp.args):
        arg = type.get_c_name(' a%d' % i, context)
        arguments.append(arg)
    #
    repr_arguments = ', '.join(arguments)
    repr_arguments = repr_arguments or 'void'
    name_and_arguments = '%s(%s)' % (name, repr_arguments)
    if tp.abi == "__stdcall":
        name_and_arguments = '_cffi_stdcall ' + name_and_arguments
    #
    def may_need_128_bits(tp):
        # 'long double' can be 16 bytes; it would not fit in one
        # 8-byte argument slot.
        return (isinstance(tp, model.PrimitiveType) and
                tp.name == 'long double')
    #
    # Buffer size: 8 bytes per argument, at least 8 for the result,
    # enlarged for 128-bit results or large struct/union results.
    size_of_a = max(len(tp.args)*8, 8)
    if may_need_128_bits(tp.result):
        size_of_a = max(size_of_a, 16)
    if isinstance(tp.result, model.StructOrUnion):
        # NOTE: 'size_of_a' becomes a C expression (string) here.
        size_of_a = 'sizeof(%s) > %d ? sizeof(%s) : %d' % (
            tp.result.get_c_name(''), size_of_a,
            tp.result.get_c_name(''), size_of_a)
    prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments)))
    prnt('{')
    prnt('  char a[%s];' % size_of_a)
    prnt('  char *p = a;')
    for i, type in enumerate(tp.args):
        arg = 'a%d' % i
        if (isinstance(type, model.StructOrUnion) or
                may_need_128_bits(type)):
            # Too big for a slot: pass a pointer to it instead.
            arg = '&' + arg
            type = model.PointerType(type)
        prnt('  *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg))
    prnt('  _cffi_call_python(&_cffi_externpy__%s, p);' % name)
    if not isinstance(tp.result, model.VoidType):
        prnt('  return *(%s)p;' % (tp.result.get_c_name('*'),))
    prnt('}')
    prnt()
    self._num_externpy += 1
|
| 1286 |
+
|
| 1287 |
+
def _generate_cpy_extern_python_decl(self, tp, name):
    """extern "Python": emit a file-local (static) trampoline."""
    self._extern_python_decl(tp, name, 'static ')
|
| 1289 |
+
|
| 1290 |
+
def _generate_cpy_dllexport_python_decl(self, tp, name):
    """extern "Python+C" dllexport: emit an exported trampoline."""
    self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ')
|
| 1292 |
+
|
| 1293 |
+
def _generate_cpy_extern_python_plus_c_decl(self, tp, name):
    """extern "Python+C": emit a trampoline with external linkage."""
    self._extern_python_decl(tp, name, '')
|
| 1295 |
+
|
| 1296 |
+
def _generate_cpy_extern_python_ctx(self, tp, name):
    """Record the ctx 'global' entry for an extern "Python" function.

    Only valid in API mode and for non-variadic functions.
    """
    if self.target_is_python:
        raise VerificationError(
            "cannot use 'extern \"Python\"' in the ABI mode")
    if tp.ellipsis:
        raise NotImplementedError("a vararg function is extern \"Python\"")
    type_index = self._typesdict[tp]
    type_op = CffiOp(OP_EXTERN_PYTHON, type_index)
    self._lsts["global"].append(
        GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name))

# All three extern-Python variants share the same ctx entry shape.
_generate_cpy_dllexport_python_ctx = \
    _generate_cpy_extern_python_plus_c_ctx = \
    _generate_cpy_extern_python_ctx
|
| 1310 |
+
|
| 1311 |
+
def _print_string_literal_in_array(self, s):
    """Emit *s* as comma-separated byte values inside a C array.

    MSVC limits string-literal length, so the text is written as
    integers, one line per original line, each preceded by a '//'
    comment showing the (escaped) source text for readability.
    """
    prnt = self._prnt
    prnt('// # NB. this is not a string because of a size limit in MSVC')
    if not isinstance(s, bytes):    # unicode
        s = s.encode('utf-8')     # -> bytes
    else:
        s.decode('utf-8')         # got bytes, check for valid utf-8
    try:
        s.decode('ascii')
    except UnicodeDecodeError:
        # Non-ASCII content: prepend a coding cookie for the benefit
        # of tools reading the embedded Python source back.
        s = b'# -*- encoding: utf8 -*-\n' + s
    for line in s.splitlines(True):
        comment = line
        if type('//') is bytes:     # python2
            line = map(ord, line)    #  make a list of integers
        else:                       # python3
            # type(line) is bytes, which enumerates like a list of integers
            comment = ascii(comment)[1:-1]
        prnt(('// ' + comment).rstrip())
        printed_line = ''
        for c in line:
            if len(printed_line) >= 76:
                # Wrap generated lines to keep them readable.
                prnt(printed_line)
                printed_line = ''
            printed_line += '%d,' % (c,)
        prnt(printed_line)
|
| 1337 |
+
|
| 1338 |
+
# ----------
|
| 1339 |
+
# emitting the opcodes for individual types
|
| 1340 |
+
|
| 1341 |
+
def _emit_bytecode_VoidType(self, tp, index):
    """Opcode for 'void': a primitive with the PRIM_VOID code."""
    self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID)
|
| 1343 |
+
|
| 1344 |
+
def _emit_bytecode_PrimitiveType(self, tp, index):
    """Opcode for a known primitive type, looked up by name."""
    prim_index = PRIMITIVE_TO_INDEX[tp.name]
    self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index)
|
| 1347 |
+
|
| 1348 |
+
def _emit_bytecode_UnknownIntegerType(self, tp, index):
    """Opcode for an integer type of unknown size/signedness.

    The argument is a C expression evaluated at compile time; the
    '| 0' also forces a compile error if the name is not an integer
    type.
    """
    s = ('_cffi_prim_int(sizeof(%s), (\n'
         '           ((%s)-1) | 0 /* check that %s is an integer type */\n'
         '         ) <= 0)' % (tp.name, tp.name, tp.name))
    self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
|
| 1353 |
+
|
| 1354 |
+
def _emit_bytecode_UnknownFloatType(self, tp, index):
    """Opcode for a float type of unknown size.

    '(((T)1) / 2) * 2' is 0 for integer types and 1 for float types,
    so the size argument collapses to 0 if T is not a float, which
    _cffi_prim_float rejects at import time.
    """
    s = ('_cffi_prim_float(sizeof(%s) *\n'
         '           (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n'
         '         )' % (tp.name, tp.name))
    self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s)
|
| 1359 |
+
|
| 1360 |
+
def _emit_bytecode_RawFunctionType(self, tp, index):
    """Emit the opcode sequence for a function type.

    Layout in cffi_types: OP_FUNCTION(result) at *index*, then one
    slot per argument (either the primitive itself or an OP_NOOP
    redirect to the argument's real entry), terminated by
    OP_FUNCTION_END whose flags encode ellipsis (bit 0) and
    __stdcall (bit 1).
    """
    self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result])
    index += 1
    for tp1 in tp.args:
        realindex = self._typesdict[tp1]
        if index != realindex:
            if isinstance(tp1, model.PrimitiveType):
                # Cheap enough to duplicate inline.
                self._emit_bytecode_PrimitiveType(tp1, index)
            else:
                # Redirect to the argument type's own entry.
                self.cffi_types[index] = CffiOp(OP_NOOP, realindex)
        index += 1
    flags = int(tp.ellipsis)
    if tp.abi is not None:
        if tp.abi == '__stdcall':
            flags |= 2
        else:
            raise NotImplementedError("abi=%r" % (tp.abi,))
    self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags)
|
| 1378 |
+
|
| 1379 |
+
def _emit_bytecode_PointerType(self, tp, index):
    """Opcode for a pointer: OP_POINTER referencing the pointee's entry."""
    self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype])

# Const and named pointers use the same opcode as plain pointers.
_emit_bytecode_ConstPointerType = _emit_bytecode_PointerType
_emit_bytecode_NamedPointerType = _emit_bytecode_PointerType
|
| 1384 |
+
|
| 1385 |
+
def _emit_bytecode_FunctionPtrType(self, tp, index):
    """Opcode for a function pointer: a pointer to the raw function type."""
    raw = tp.as_raw_function()
    self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw])
|
| 1388 |
+
|
| 1389 |
+
def _emit_bytecode_ArrayType(self, tp, index):
    """Emit the opcode(s) for an array type.

    Open arrays ('x[]') take one OP_OPEN_ARRAY slot; fixed-length
    arrays take OP_ARRAY plus a second slot holding the length (the
    collect pass reserved it with the 'LEN' placeholder).  A '...'
    length is only legal on globals and struct fields, which are
    rewritten before reaching here.
    """
    item_index = self._typesdict[tp.item]
    if tp.length is None:
        self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index)
    elif tp.length == '...':
        raise VerificationError(
            "type %s badly placed: the '...' array length can only be "
            "used on global arrays or on fields of structures" % (
                str(tp).replace('/*...*/', '...'),))
    else:
        assert self.cffi_types[index + 1] == 'LEN'
        self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
        self.cffi_types[index + 1] = CffiOp(None, str(tp.length))
|
| 1402 |
+
|
| 1403 |
+
def _emit_bytecode_StructType(self, tp, index):
    """Opcode for a struct/union: index into the struct_union table."""
    struct_index = self._struct_unions[tp]
    self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index)
# Unions are encoded identically to structs.
_emit_bytecode_UnionType = _emit_bytecode_StructType
|
| 1407 |
+
|
| 1408 |
+
def _emit_bytecode_EnumType(self, tp, index):
    """Opcode for an enum: index into the enum table."""
    enum_index = self._enums[tp]
    self.cffi_types[index] = CffiOp(OP_ENUM, enum_index)
|
| 1411 |
+
|
| 1412 |
+
|
| 1413 |
+
# NativeIO: an in-memory stream accepting the 'native' str type of the
# running interpreter (unicode on Python 3, bytes-or-ascii-unicode on 2).
if sys.version_info >= (3,):
    NativeIO = io.StringIO
else:
    class NativeIO(io.BytesIO):
        def write(self, s):
            # On Python 2, transparently encode unicode to ASCII bytes.
            if isinstance(s, unicode):
                s = s.encode('ascii')
            super(NativeIO, self).write(s)
|
| 1421 |
+
|
| 1422 |
+
def _is_file_like(maybefile):
|
| 1423 |
+
# compare to xml.etree.ElementTree._get_writer
|
| 1424 |
+
return hasattr(maybefile, 'write')
|
| 1425 |
+
|
| 1426 |
+
def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose):
    """Generate C (preamble given) or Python (preamble None) source.

    Writes to *target_file*, which may be a path or a file-like
    object.  For paths, the file is rewritten atomically (via a temp
    file + rename) and only if its content actually changed.  Returns
    True if the file was (re)written, False if already up-to-date.
    """
    if verbose:
        print("generating %s" % (target_file,))
    recompiler = Recompiler(ffi, module_name,
                            target_is_python=(preamble is None))
    recompiler.collect_type_table()
    recompiler.collect_step_tables()
    if _is_file_like(target_file):
        recompiler.write_source_to_f(target_file, preamble)
        return True
    f = NativeIO()
    recompiler.write_source_to_f(f, preamble)
    output = f.getvalue()
    try:
        # Compare against the existing file; the deliberate bare
        # 'raise IOError' funnels the mismatch case into the rewrite
        # path below (same path as "file does not exist").
        with open(target_file, 'r') as f1:
            if f1.read(len(output) + 1) != output:
                raise IOError
        if verbose:
            print("(already up-to-date)")
        return False     # already up-to-date
    except IOError:
        tmp_file = '%s.~%d' % (target_file, os.getpid())
        with open(tmp_file, 'w') as f1:
            f1.write(output)
        try:
            os.rename(tmp_file, target_file)
        except OSError:
            # Windows: rename fails if the target exists; remove first.
            os.unlink(target_file)
            os.rename(tmp_file, target_file)
        return True
|
| 1456 |
+
|
| 1457 |
+
def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False):
    """Generate the C extension source for *ffi* into *target_c_file*.

    Returns True if the file was written, False if already up-to-date.
    """
    assert preamble is not None
    return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
                                verbose)
|
| 1461 |
+
|
| 1462 |
+
def make_py_source(ffi, module_name, target_py_file, verbose=False):
    """Generate the ABI-mode Python source for *ffi* into *target_py_file*.

    Returns True if the file was written, False if already up-to-date.
    """
    return _make_c_or_py_source(ffi, module_name, None, target_py_file,
                                verbose)
|
| 1465 |
+
|
| 1466 |
+
def _modname_to_file(outputdir, modname, extension):
|
| 1467 |
+
parts = modname.split('.')
|
| 1468 |
+
try:
|
| 1469 |
+
os.makedirs(os.path.join(outputdir, *parts[:-1]))
|
| 1470 |
+
except OSError:
|
| 1471 |
+
pass
|
| 1472 |
+
parts[-1] += extension
|
| 1473 |
+
return os.path.join(outputdir, *parts), parts
|
| 1474 |
+
|
| 1475 |
+
|
| 1476 |
+
# Aaargh. Distutils is not tested at all for the purpose of compiling
|
| 1477 |
+
# DLLs that are not extension modules. Here are some hacks to work
|
| 1478 |
+
# around that, in the _patch_for_*() functions...
|
| 1479 |
+
|
| 1480 |
+
def _patch_meth(patchlist, cls, name, new_meth):
|
| 1481 |
+
old = getattr(cls, name)
|
| 1482 |
+
patchlist.append((cls, name, old))
|
| 1483 |
+
setattr(cls, name, new_meth)
|
| 1484 |
+
return old
|
| 1485 |
+
|
| 1486 |
+
def _unpatch_meths(patchlist):
|
| 1487 |
+
for cls, name, old_meth in reversed(patchlist):
|
| 1488 |
+
setattr(cls, name, old_meth)
|
| 1489 |
+
|
| 1490 |
+
def _patch_for_embedding(patchlist):
    """Patch the compiler classes so the build produces an embeddable DLL.

    All patches are recorded in *patchlist* for later undo via
    _unpatch_meths().
    """
    if sys.platform == 'win32':
        # we must not remove the manifest when building for embedding!
        # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed,
        # since the toolchain it supports (VS2005-2008) is also long dead.
        from cffi._shimmed_dist_utils import MSVCCompiler
        if MSVCCompiler is not None:
            _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref',
                        lambda self, manifest_file: manifest_file)

    if sys.platform == 'darwin':
        # we must not make a '-bundle', but a '-dynamiclib' instead
        from cffi._shimmed_dist_utils import CCompiler
        def my_link_shared_object(self, *args, **kwds):
            if '-bundle' in self.linker_so:
                # Copy before mutating: linker_so may be shared state.
                self.linker_so = list(self.linker_so)
                i = self.linker_so.index('-bundle')
                self.linker_so[i] = '-dynamiclib'
            return old_link_shared_object(self, *args, **kwds)
        old_link_shared_object = _patch_meth(patchlist, CCompiler,
                                             'link_shared_object',
                                             my_link_shared_object)
|
| 1512 |
+
|
| 1513 |
+
def _patch_for_target(patchlist, target):
    """Force build_ext to emit *target* as the output filename.

    A trailing '.*' in *target* is replaced with the platform's shared
    library suffix.  The patch is recorded in *patchlist*.
    """
    from cffi._shimmed_dist_utils import build_ext
    # if 'target' is different from '*', we need to patch some internal
    # method to just return this 'target' value, instead of having it
    # built from module_name
    if target.endswith('.*'):
        target = target[:-2]
        if sys.platform == 'win32':
            target += '.dll'
        elif sys.platform == 'darwin':
            target += '.dylib'
        else:
            target += '.so'
    _patch_meth(patchlist, build_ext, 'get_ext_filename',
                lambda self, ext_name: target)
|
| 1528 |
+
|
| 1529 |
+
|
| 1530 |
+
def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True,
              c_file=None, source_extension='.c', extradir=None,
              compiler_verbose=1, target=None, debug=None,
              uses_ffiplatform=True, **kwds):
    """Generate the source for *ffi* and optionally compile it.

    With a preamble: writes the C source and, if *call_c_compiler*,
    builds the extension (returning the output filename); otherwise
    returns ``(ext, updated)``.  Without a preamble: writes the
    ABI-mode .py source, returning the filename or ``(None, updated)``.
    """
    if not isinstance(module_name, str):
        module_name = module_name.encode('ascii')
    if ffi._windows_unicode:
        ffi._apply_windows_unicode(kwds)
    if preamble is not None:
        # API mode: generate C and (maybe) compile it.
        if call_c_compiler and _is_file_like(c_file):
            raise TypeError("Writing to file-like objects is not supported "
                            "with call_c_compiler=True")
        embedding = (ffi._embedding is not None)
        if embedding:
            ffi._apply_embedding_fix(kwds)
        if c_file is None:
            c_file, parts = _modname_to_file(tmpdir, module_name,
                                             source_extension)
            if extradir:
                parts = [extradir] + parts
            ext_c_file = os.path.join(*parts)
        else:
            ext_c_file = c_file
        #
        if target is None:
            if embedding:
                # Embedding builds a plain DLL named after the module.
                target = '%s.*' % module_name
            else:
                target = '*'
        #
        if uses_ffiplatform:
            ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds)
        else:
            ext = None
        updated = make_c_source(ffi, module_name, preamble, c_file,
                                verbose=compiler_verbose)
        if call_c_compiler:
            patchlist = []
            cwd = os.getcwd()
            try:
                if embedding:
                    _patch_for_embedding(patchlist)
                if target != '*':
                    _patch_for_target(patchlist, target)
                if compiler_verbose:
                    if tmpdir == '.':
                        msg = 'the current directory is'
                    else:
                        msg = 'setting the current directory to'
                    print('%s %r' % (msg, os.path.abspath(tmpdir)))
                os.chdir(tmpdir)
                outputfilename = ffiplatform.compile('.', ext,
                                                     compiler_verbose, debug)
            finally:
                # Always restore cwd and undo the distutils patches.
                os.chdir(cwd)
                _unpatch_meths(patchlist)
            return outputfilename
        else:
            return ext, updated
    else:
        # ABI mode: generate a pure Python module; nothing to compile.
        if c_file is None:
            c_file, _ = _modname_to_file(tmpdir, module_name, '.py')
        updated = make_py_source(ffi, module_name, c_file,
                                 verbose=compiler_verbose)
        if call_c_compiler:
            return c_file
        else:
            return None, updated
|
| 1598 |
+
|
.venv/lib/python3.11/site-packages/cffi/setuptools_ext.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
# Python 2/3 compatibility shim: on Python 3 there is no 'basestring',
# so alias it to 'str' for the isinstance() checks below.
try:
    basestring
except NameError:
    # Python 3.x
    basestring = str
|
| 9 |
+
|
| 10 |
+
def error(msg):
    """Raise DistutilsSetupError with *msg* (never returns).

    Imported lazily so merely importing this module does not pull in
    the distutils/setuptools shim.
    """
    from cffi._shimmed_dist_utils import DistutilsSetupError
    raise DistutilsSetupError(msg)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def execfile(filename, glob):
    """Execute the Python file *filename* in the namespace *glob*.

    A Python-3 rewrite of the old builtin execfile().  We use it
    instead of __import__() to load the build script: with a normal
    import, an intermediate package __init__.py might already try to
    import the very module we are about to generate.
    """
    with open(filename) as f:
        source = f.read() + '\n'   # trailing newline: Python 2.6 compat
    code_obj = compile(source, filename, 'exec')
    exec(code_obj, glob, glob)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def add_cffi_module(dist, mod_spec):
    """Process one 'cffi_modules' entry and register it on *dist*.

    *mod_spec* must be a string of the form 'path/build.py:ffi_var'.
    The build script is executed, the FFI object extracted, and the
    resulting module added to the distribution as either a generated
    .py module (ABI mode) or a C extension (API mode).
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        # Common mistake: a dotted module name instead of a file path;
        # suggest the corrected spelling if that file exists.
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()    # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # Copy before mutating: kwds belongs to the user's FFI object.
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    if source is None:
        # ABI mode: emit a pure Python module.
        _add_py_module(dist, ffi, module_name)
    else:
        # API mode: emit and register a C extension.
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
|
| 72 |
+
|
| 73 |
+
def _set_py_limited_api(Extension, kwds):
    """
    Add py_limited_api to kwds if setuptools >= 26 is in use.
    Do not alter the setting if it already exists.
    Setuptools takes care of ignoring the flag on Python 2 and PyPy.

    CPython itself should ignore the flag in a debugging version
    (by not listing .abi3.so in the extensions it supports), but
    it doesn't so far, creating troubles.  That's why we check
    for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
    of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)

    On Windows, with CPython <= 3.4, it's better not to use py_limited_api
    because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
    Recently (2020) we started shipping only >= 3.5 wheels, though.  So
    we'll give it another try and set py_limited_api on Windows >= 3.5.
    """
    from cffi import recompiler

    # gettotalrefcount only exists on debug builds; skip those.
    if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
            and recompiler.USE_LIMITED_API):
        import setuptools
        try:
            setuptools_major_version = int(setuptools.__version__.partition('.')[0])
            if setuptools_major_version >= 26:
                kwds['py_limited_api'] = True
        except ValueError:  # certain development versions of setuptools
            # If we don't know the version number of setuptools, we
            # try to set 'py_limited_api' anyway.  At worst, we get a
            # warning.
            kwds['py_limited_api'] = True
    return kwds
|
| 105 |
+
|
| 106 |
+
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
|
| 107 |
+
# We are a setuptools extension. Need this build_ext for py_limited_api.
|
| 108 |
+
from setuptools.command.build_ext import build_ext
|
| 109 |
+
from cffi._shimmed_dist_utils import Extension, log, mkpath
|
| 110 |
+
from cffi import recompiler
|
| 111 |
+
|
| 112 |
+
allsources = ['$PLACEHOLDER']
|
| 113 |
+
allsources.extend(kwds.pop('sources', []))
|
| 114 |
+
kwds = _set_py_limited_api(Extension, kwds)
|
| 115 |
+
ext = Extension(name=module_name, sources=allsources, **kwds)
|
| 116 |
+
|
| 117 |
+
def make_mod(tmpdir, pre_run=None):
|
| 118 |
+
c_file = os.path.join(tmpdir, module_name + source_extension)
|
| 119 |
+
log.info("generating cffi module %r" % c_file)
|
| 120 |
+
mkpath(tmpdir)
|
| 121 |
+
# a setuptools-only, API-only hook: called with the "ext" and "ffi"
|
| 122 |
+
# arguments just before we turn the ffi into C code. To use it,
|
| 123 |
+
# subclass the 'distutils.command.build_ext.build_ext' class and
|
| 124 |
+
# add a method 'def pre_run(self, ext, ffi)'.
|
| 125 |
+
if pre_run is not None:
|
| 126 |
+
pre_run(ext, ffi)
|
| 127 |
+
updated = recompiler.make_c_source(ffi, module_name, source, c_file)
|
| 128 |
+
if not updated:
|
| 129 |
+
log.info("already up-to-date")
|
| 130 |
+
return c_file
|
| 131 |
+
|
| 132 |
+
if dist.ext_modules is None:
|
| 133 |
+
dist.ext_modules = []
|
| 134 |
+
dist.ext_modules.append(ext)
|
| 135 |
+
|
| 136 |
+
base_class = dist.cmdclass.get('build_ext', build_ext)
|
| 137 |
+
class build_ext_make_mod(base_class):
|
| 138 |
+
def run(self):
|
| 139 |
+
if ext.sources[0] == '$PLACEHOLDER':
|
| 140 |
+
pre_run = getattr(self, 'pre_run', None)
|
| 141 |
+
ext.sources[0] = make_mod(self.build_temp, pre_run)
|
| 142 |
+
base_class.run(self)
|
| 143 |
+
dist.cmdclass['build_ext'] = build_ext_make_mod
|
| 144 |
+
# NB. multiple runs here will create multiple 'build_ext_make_mod'
|
| 145 |
+
# classes. Even in this case the 'build_ext' command should be
|
| 146 |
+
# run once; but just in case, the logic above does nothing if
|
| 147 |
+
# called again.
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def _add_py_module(dist, ffi, module_name):
|
| 151 |
+
from setuptools.command.build_py import build_py
|
| 152 |
+
from setuptools.command.build_ext import build_ext
|
| 153 |
+
from cffi._shimmed_dist_utils import log, mkpath
|
| 154 |
+
from cffi import recompiler
|
| 155 |
+
|
| 156 |
+
def generate_mod(py_file):
|
| 157 |
+
log.info("generating cffi module %r" % py_file)
|
| 158 |
+
mkpath(os.path.dirname(py_file))
|
| 159 |
+
updated = recompiler.make_py_source(ffi, module_name, py_file)
|
| 160 |
+
if not updated:
|
| 161 |
+
log.info("already up-to-date")
|
| 162 |
+
|
| 163 |
+
base_class = dist.cmdclass.get('build_py', build_py)
|
| 164 |
+
class build_py_make_mod(base_class):
|
| 165 |
+
def run(self):
|
| 166 |
+
base_class.run(self)
|
| 167 |
+
module_path = module_name.split('.')
|
| 168 |
+
module_path[-1] += '.py'
|
| 169 |
+
generate_mod(os.path.join(self.build_lib, *module_path))
|
| 170 |
+
def get_source_files(self):
|
| 171 |
+
# This is called from 'setup.py sdist' only. Exclude
|
| 172 |
+
# the generate .py module in this case.
|
| 173 |
+
saved_py_modules = self.py_modules
|
| 174 |
+
try:
|
| 175 |
+
if saved_py_modules:
|
| 176 |
+
self.py_modules = [m for m in saved_py_modules
|
| 177 |
+
if m != module_name]
|
| 178 |
+
return base_class.get_source_files(self)
|
| 179 |
+
finally:
|
| 180 |
+
self.py_modules = saved_py_modules
|
| 181 |
+
dist.cmdclass['build_py'] = build_py_make_mod
|
| 182 |
+
|
| 183 |
+
# distutils and setuptools have no notion I could find of a
|
| 184 |
+
# generated python module. If we don't add module_name to
|
| 185 |
+
# dist.py_modules, then things mostly work but there are some
|
| 186 |
+
# combination of options (--root and --record) that will miss
|
| 187 |
+
# the module. So we add it here, which gives a few apparently
|
| 188 |
+
# harmless warnings about not finding the file outside the
|
| 189 |
+
# build directory.
|
| 190 |
+
# Then we need to hack more in get_source_files(); see above.
|
| 191 |
+
if dist.py_modules is None:
|
| 192 |
+
dist.py_modules = []
|
| 193 |
+
dist.py_modules.append(module_name)
|
| 194 |
+
|
| 195 |
+
# the following is only for "build_ext -i"
|
| 196 |
+
base_class_2 = dist.cmdclass.get('build_ext', build_ext)
|
| 197 |
+
class build_ext_make_mod(base_class_2):
|
| 198 |
+
def run(self):
|
| 199 |
+
base_class_2.run(self)
|
| 200 |
+
if self.inplace:
|
| 201 |
+
# from get_ext_fullpath() in distutils/command/build_ext.py
|
| 202 |
+
module_path = module_name.split('.')
|
| 203 |
+
package = '.'.join(module_path[:-1])
|
| 204 |
+
build_py = self.get_finalized_command('build_py')
|
| 205 |
+
package_dir = build_py.get_package_dir(package)
|
| 206 |
+
file_name = module_path[-1] + '.py'
|
| 207 |
+
generate_mod(os.path.join(package_dir, file_name))
|
| 208 |
+
dist.cmdclass['build_ext'] = build_ext_make_mod
|
| 209 |
+
|
| 210 |
+
def cffi_modules(dist, attr, value):
|
| 211 |
+
assert attr == 'cffi_modules'
|
| 212 |
+
if isinstance(value, basestring):
|
| 213 |
+
value = [value]
|
| 214 |
+
|
| 215 |
+
for cffi_module in value:
|
| 216 |
+
add_cffi_module(dist, cffi_module)
|
.venv/lib/python3.11/site-packages/cffi/vengine_gen.py
ADDED
|
@@ -0,0 +1,679 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# DEPRECATED: implementation for ffi.verify()
|
| 3 |
+
#
|
| 4 |
+
import sys, os
|
| 5 |
+
import types
|
| 6 |
+
|
| 7 |
+
from . import model
|
| 8 |
+
from .error import VerificationError
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class VGenericEngine(object):
|
| 12 |
+
_class_key = 'g'
|
| 13 |
+
_gen_python_module = False
|
| 14 |
+
|
| 15 |
+
def __init__(self, verifier):
|
| 16 |
+
self.verifier = verifier
|
| 17 |
+
self.ffi = verifier.ffi
|
| 18 |
+
self.export_symbols = []
|
| 19 |
+
self._struct_pending_verification = {}
|
| 20 |
+
|
| 21 |
+
def patch_extension_kwds(self, kwds):
|
| 22 |
+
# add 'export_symbols' to the dictionary. Note that we add the
|
| 23 |
+
# list before filling it. When we fill it, it will thus also show
|
| 24 |
+
# up in kwds['export_symbols'].
|
| 25 |
+
kwds.setdefault('export_symbols', self.export_symbols)
|
| 26 |
+
|
| 27 |
+
def find_module(self, module_name, path, so_suffixes):
|
| 28 |
+
for so_suffix in so_suffixes:
|
| 29 |
+
basename = module_name + so_suffix
|
| 30 |
+
if path is None:
|
| 31 |
+
path = sys.path
|
| 32 |
+
for dirname in path:
|
| 33 |
+
filename = os.path.join(dirname, basename)
|
| 34 |
+
if os.path.isfile(filename):
|
| 35 |
+
return filename
|
| 36 |
+
|
| 37 |
+
def collect_types(self):
|
| 38 |
+
pass # not needed in the generic engine
|
| 39 |
+
|
| 40 |
+
def _prnt(self, what=''):
|
| 41 |
+
self._f.write(what + '\n')
|
| 42 |
+
|
| 43 |
+
def write_source_to_f(self):
|
| 44 |
+
prnt = self._prnt
|
| 45 |
+
# first paste some standard set of lines that are mostly '#include'
|
| 46 |
+
prnt(cffimod_header)
|
| 47 |
+
# then paste the C source given by the user, verbatim.
|
| 48 |
+
prnt(self.verifier.preamble)
|
| 49 |
+
#
|
| 50 |
+
# call generate_gen_xxx_decl(), for every xxx found from
|
| 51 |
+
# ffi._parser._declarations. This generates all the functions.
|
| 52 |
+
self._generate('decl')
|
| 53 |
+
#
|
| 54 |
+
# on Windows, distutils insists on putting init_cffi_xyz in
|
| 55 |
+
# 'export_symbols', so instead of fighting it, just give up and
|
| 56 |
+
# give it one
|
| 57 |
+
if sys.platform == 'win32':
|
| 58 |
+
if sys.version_info >= (3,):
|
| 59 |
+
prefix = 'PyInit_'
|
| 60 |
+
else:
|
| 61 |
+
prefix = 'init'
|
| 62 |
+
modname = self.verifier.get_module_name()
|
| 63 |
+
prnt("void %s%s(void) { }\n" % (prefix, modname))
|
| 64 |
+
|
| 65 |
+
def load_library(self, flags=0):
|
| 66 |
+
# import it with the CFFI backend
|
| 67 |
+
backend = self.ffi._backend
|
| 68 |
+
# needs to make a path that contains '/', on Posix
|
| 69 |
+
filename = os.path.join(os.curdir, self.verifier.modulefilename)
|
| 70 |
+
module = backend.load_library(filename, flags)
|
| 71 |
+
#
|
| 72 |
+
# call loading_gen_struct() to get the struct layout inferred by
|
| 73 |
+
# the C compiler
|
| 74 |
+
self._load(module, 'loading')
|
| 75 |
+
|
| 76 |
+
# build the FFILibrary class and instance, this is a module subclass
|
| 77 |
+
# because modules are expected to have usually-constant-attributes and
|
| 78 |
+
# in PyPy this means the JIT is able to treat attributes as constant,
|
| 79 |
+
# which we want.
|
| 80 |
+
class FFILibrary(types.ModuleType):
|
| 81 |
+
_cffi_generic_module = module
|
| 82 |
+
_cffi_ffi = self.ffi
|
| 83 |
+
_cffi_dir = []
|
| 84 |
+
def __dir__(self):
|
| 85 |
+
return FFILibrary._cffi_dir
|
| 86 |
+
library = FFILibrary("")
|
| 87 |
+
#
|
| 88 |
+
# finally, call the loaded_gen_xxx() functions. This will set
|
| 89 |
+
# up the 'library' object.
|
| 90 |
+
self._load(module, 'loaded', library=library)
|
| 91 |
+
return library
|
| 92 |
+
|
| 93 |
+
def _get_declarations(self):
|
| 94 |
+
lst = [(key, tp) for (key, (tp, qual)) in
|
| 95 |
+
self.ffi._parser._declarations.items()]
|
| 96 |
+
lst.sort()
|
| 97 |
+
return lst
|
| 98 |
+
|
| 99 |
+
def _generate(self, step_name):
|
| 100 |
+
for name, tp in self._get_declarations():
|
| 101 |
+
kind, realname = name.split(' ', 1)
|
| 102 |
+
try:
|
| 103 |
+
method = getattr(self, '_generate_gen_%s_%s' % (kind,
|
| 104 |
+
step_name))
|
| 105 |
+
except AttributeError:
|
| 106 |
+
raise VerificationError(
|
| 107 |
+
"not implemented in verify(): %r" % name)
|
| 108 |
+
try:
|
| 109 |
+
method(tp, realname)
|
| 110 |
+
except Exception as e:
|
| 111 |
+
model.attach_exception_info(e, name)
|
| 112 |
+
raise
|
| 113 |
+
|
| 114 |
+
def _load(self, module, step_name, **kwds):
|
| 115 |
+
for name, tp in self._get_declarations():
|
| 116 |
+
kind, realname = name.split(' ', 1)
|
| 117 |
+
method = getattr(self, '_%s_gen_%s' % (step_name, kind))
|
| 118 |
+
try:
|
| 119 |
+
method(tp, realname, module, **kwds)
|
| 120 |
+
except Exception as e:
|
| 121 |
+
model.attach_exception_info(e, name)
|
| 122 |
+
raise
|
| 123 |
+
|
| 124 |
+
def _generate_nothing(self, tp, name):
|
| 125 |
+
pass
|
| 126 |
+
|
| 127 |
+
def _loaded_noop(self, tp, name, module, **kwds):
|
| 128 |
+
pass
|
| 129 |
+
|
| 130 |
+
# ----------
|
| 131 |
+
# typedefs: generates no code so far
|
| 132 |
+
|
| 133 |
+
_generate_gen_typedef_decl = _generate_nothing
|
| 134 |
+
_loading_gen_typedef = _loaded_noop
|
| 135 |
+
_loaded_gen_typedef = _loaded_noop
|
| 136 |
+
|
| 137 |
+
# ----------
|
| 138 |
+
# function declarations
|
| 139 |
+
|
| 140 |
+
def _generate_gen_function_decl(self, tp, name):
|
| 141 |
+
assert isinstance(tp, model.FunctionPtrType)
|
| 142 |
+
if tp.ellipsis:
|
| 143 |
+
# cannot support vararg functions better than this: check for its
|
| 144 |
+
# exact type (including the fixed arguments), and build it as a
|
| 145 |
+
# constant function pointer (no _cffi_f_%s wrapper)
|
| 146 |
+
self._generate_gen_const(False, name, tp)
|
| 147 |
+
return
|
| 148 |
+
prnt = self._prnt
|
| 149 |
+
numargs = len(tp.args)
|
| 150 |
+
argnames = []
|
| 151 |
+
for i, type in enumerate(tp.args):
|
| 152 |
+
indirection = ''
|
| 153 |
+
if isinstance(type, model.StructOrUnion):
|
| 154 |
+
indirection = '*'
|
| 155 |
+
argnames.append('%sx%d' % (indirection, i))
|
| 156 |
+
context = 'argument of %s' % name
|
| 157 |
+
arglist = [type.get_c_name(' %s' % arg, context)
|
| 158 |
+
for type, arg in zip(tp.args, argnames)]
|
| 159 |
+
tpresult = tp.result
|
| 160 |
+
if isinstance(tpresult, model.StructOrUnion):
|
| 161 |
+
arglist.insert(0, tpresult.get_c_name(' *r', context))
|
| 162 |
+
tpresult = model.void_type
|
| 163 |
+
arglist = ', '.join(arglist) or 'void'
|
| 164 |
+
wrappername = '_cffi_f_%s' % name
|
| 165 |
+
self.export_symbols.append(wrappername)
|
| 166 |
+
if tp.abi:
|
| 167 |
+
abi = tp.abi + ' '
|
| 168 |
+
else:
|
| 169 |
+
abi = ''
|
| 170 |
+
funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
|
| 171 |
+
context = 'result of %s' % name
|
| 172 |
+
prnt(tpresult.get_c_name(funcdecl, context))
|
| 173 |
+
prnt('{')
|
| 174 |
+
#
|
| 175 |
+
if isinstance(tp.result, model.StructOrUnion):
|
| 176 |
+
result_code = '*r = '
|
| 177 |
+
elif not isinstance(tp.result, model.VoidType):
|
| 178 |
+
result_code = 'return '
|
| 179 |
+
else:
|
| 180 |
+
result_code = ''
|
| 181 |
+
prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
|
| 182 |
+
prnt('}')
|
| 183 |
+
prnt()
|
| 184 |
+
|
| 185 |
+
_loading_gen_function = _loaded_noop
|
| 186 |
+
|
| 187 |
+
def _loaded_gen_function(self, tp, name, module, library):
|
| 188 |
+
assert isinstance(tp, model.FunctionPtrType)
|
| 189 |
+
if tp.ellipsis:
|
| 190 |
+
newfunction = self._load_constant(False, tp, name, module)
|
| 191 |
+
else:
|
| 192 |
+
indirections = []
|
| 193 |
+
base_tp = tp
|
| 194 |
+
if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
|
| 195 |
+
or isinstance(tp.result, model.StructOrUnion)):
|
| 196 |
+
indirect_args = []
|
| 197 |
+
for i, typ in enumerate(tp.args):
|
| 198 |
+
if isinstance(typ, model.StructOrUnion):
|
| 199 |
+
typ = model.PointerType(typ)
|
| 200 |
+
indirections.append((i, typ))
|
| 201 |
+
indirect_args.append(typ)
|
| 202 |
+
indirect_result = tp.result
|
| 203 |
+
if isinstance(indirect_result, model.StructOrUnion):
|
| 204 |
+
if indirect_result.fldtypes is None:
|
| 205 |
+
raise TypeError("'%s' is used as result type, "
|
| 206 |
+
"but is opaque" % (
|
| 207 |
+
indirect_result._get_c_name(),))
|
| 208 |
+
indirect_result = model.PointerType(indirect_result)
|
| 209 |
+
indirect_args.insert(0, indirect_result)
|
| 210 |
+
indirections.insert(0, ("result", indirect_result))
|
| 211 |
+
indirect_result = model.void_type
|
| 212 |
+
tp = model.FunctionPtrType(tuple(indirect_args),
|
| 213 |
+
indirect_result, tp.ellipsis)
|
| 214 |
+
BFunc = self.ffi._get_cached_btype(tp)
|
| 215 |
+
wrappername = '_cffi_f_%s' % name
|
| 216 |
+
newfunction = module.load_function(BFunc, wrappername)
|
| 217 |
+
for i, typ in indirections:
|
| 218 |
+
newfunction = self._make_struct_wrapper(newfunction, i, typ,
|
| 219 |
+
base_tp)
|
| 220 |
+
setattr(library, name, newfunction)
|
| 221 |
+
type(library)._cffi_dir.append(name)
|
| 222 |
+
|
| 223 |
+
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
|
| 224 |
+
backend = self.ffi._backend
|
| 225 |
+
BType = self.ffi._get_cached_btype(tp)
|
| 226 |
+
if i == "result":
|
| 227 |
+
ffi = self.ffi
|
| 228 |
+
def newfunc(*args):
|
| 229 |
+
res = ffi.new(BType)
|
| 230 |
+
oldfunc(res, *args)
|
| 231 |
+
return res[0]
|
| 232 |
+
else:
|
| 233 |
+
def newfunc(*args):
|
| 234 |
+
args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
|
| 235 |
+
return oldfunc(*args)
|
| 236 |
+
newfunc._cffi_base_type = base_tp
|
| 237 |
+
return newfunc
|
| 238 |
+
|
| 239 |
+
# ----------
|
| 240 |
+
# named structs
|
| 241 |
+
|
| 242 |
+
def _generate_gen_struct_decl(self, tp, name):
|
| 243 |
+
assert name == tp.name
|
| 244 |
+
self._generate_struct_or_union_decl(tp, 'struct', name)
|
| 245 |
+
|
| 246 |
+
def _loading_gen_struct(self, tp, name, module):
|
| 247 |
+
self._loading_struct_or_union(tp, 'struct', name, module)
|
| 248 |
+
|
| 249 |
+
def _loaded_gen_struct(self, tp, name, module, **kwds):
|
| 250 |
+
self._loaded_struct_or_union(tp)
|
| 251 |
+
|
| 252 |
+
def _generate_gen_union_decl(self, tp, name):
|
| 253 |
+
assert name == tp.name
|
| 254 |
+
self._generate_struct_or_union_decl(tp, 'union', name)
|
| 255 |
+
|
| 256 |
+
def _loading_gen_union(self, tp, name, module):
|
| 257 |
+
self._loading_struct_or_union(tp, 'union', name, module)
|
| 258 |
+
|
| 259 |
+
def _loaded_gen_union(self, tp, name, module, **kwds):
|
| 260 |
+
self._loaded_struct_or_union(tp)
|
| 261 |
+
|
| 262 |
+
def _generate_struct_or_union_decl(self, tp, prefix, name):
|
| 263 |
+
if tp.fldnames is None:
|
| 264 |
+
return # nothing to do with opaque structs
|
| 265 |
+
checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
|
| 266 |
+
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
| 267 |
+
cname = ('%s %s' % (prefix, name)).strip()
|
| 268 |
+
#
|
| 269 |
+
prnt = self._prnt
|
| 270 |
+
prnt('static void %s(%s *p)' % (checkfuncname, cname))
|
| 271 |
+
prnt('{')
|
| 272 |
+
prnt(' /* only to generate compile-time warnings or errors */')
|
| 273 |
+
prnt(' (void)p;')
|
| 274 |
+
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
| 275 |
+
if (isinstance(ftype, model.PrimitiveType)
|
| 276 |
+
and ftype.is_integer_type()) or fbitsize >= 0:
|
| 277 |
+
# accept all integers, but complain on float or double
|
| 278 |
+
prnt(' (void)((p->%s) << 1);' % fname)
|
| 279 |
+
else:
|
| 280 |
+
# only accept exactly the type declared.
|
| 281 |
+
try:
|
| 282 |
+
prnt(' { %s = &p->%s; (void)tmp; }' % (
|
| 283 |
+
ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
|
| 284 |
+
fname))
|
| 285 |
+
except VerificationError as e:
|
| 286 |
+
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
|
| 287 |
+
prnt('}')
|
| 288 |
+
self.export_symbols.append(layoutfuncname)
|
| 289 |
+
prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
|
| 290 |
+
prnt('{')
|
| 291 |
+
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
|
| 292 |
+
prnt(' static intptr_t nums[] = {')
|
| 293 |
+
prnt(' sizeof(%s),' % cname)
|
| 294 |
+
prnt(' offsetof(struct _cffi_aligncheck, y),')
|
| 295 |
+
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
| 296 |
+
if fbitsize >= 0:
|
| 297 |
+
continue # xxx ignore fbitsize for now
|
| 298 |
+
prnt(' offsetof(%s, %s),' % (cname, fname))
|
| 299 |
+
if isinstance(ftype, model.ArrayType) and ftype.length is None:
|
| 300 |
+
prnt(' 0, /* %s */' % ftype._get_c_name())
|
| 301 |
+
else:
|
| 302 |
+
prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
|
| 303 |
+
prnt(' -1')
|
| 304 |
+
prnt(' };')
|
| 305 |
+
prnt(' return nums[i];')
|
| 306 |
+
prnt(' /* the next line is not executed, but compiled */')
|
| 307 |
+
prnt(' %s(0);' % (checkfuncname,))
|
| 308 |
+
prnt('}')
|
| 309 |
+
prnt()
|
| 310 |
+
|
| 311 |
+
def _loading_struct_or_union(self, tp, prefix, name, module):
|
| 312 |
+
if tp.fldnames is None:
|
| 313 |
+
return # nothing to do with opaque structs
|
| 314 |
+
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
| 315 |
+
#
|
| 316 |
+
BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
|
| 317 |
+
function = module.load_function(BFunc, layoutfuncname)
|
| 318 |
+
layout = []
|
| 319 |
+
num = 0
|
| 320 |
+
while True:
|
| 321 |
+
x = function(num)
|
| 322 |
+
if x < 0: break
|
| 323 |
+
layout.append(x)
|
| 324 |
+
num += 1
|
| 325 |
+
if isinstance(tp, model.StructOrUnion) and tp.partial:
|
| 326 |
+
# use the function()'s sizes and offsets to guide the
|
| 327 |
+
# layout of the struct
|
| 328 |
+
totalsize = layout[0]
|
| 329 |
+
totalalignment = layout[1]
|
| 330 |
+
fieldofs = layout[2::2]
|
| 331 |
+
fieldsize = layout[3::2]
|
| 332 |
+
tp.force_flatten()
|
| 333 |
+
assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
|
| 334 |
+
tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
|
| 335 |
+
else:
|
| 336 |
+
cname = ('%s %s' % (prefix, name)).strip()
|
| 337 |
+
self._struct_pending_verification[tp] = layout, cname
|
| 338 |
+
|
| 339 |
+
def _loaded_struct_or_union(self, tp):
|
| 340 |
+
if tp.fldnames is None:
|
| 341 |
+
return # nothing to do with opaque structs
|
| 342 |
+
self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
|
| 343 |
+
|
| 344 |
+
if tp in self._struct_pending_verification:
|
| 345 |
+
# check that the layout sizes and offsets match the real ones
|
| 346 |
+
def check(realvalue, expectedvalue, msg):
|
| 347 |
+
if realvalue != expectedvalue:
|
| 348 |
+
raise VerificationError(
|
| 349 |
+
"%s (we have %d, but C compiler says %d)"
|
| 350 |
+
% (msg, expectedvalue, realvalue))
|
| 351 |
+
ffi = self.ffi
|
| 352 |
+
BStruct = ffi._get_cached_btype(tp)
|
| 353 |
+
layout, cname = self._struct_pending_verification.pop(tp)
|
| 354 |
+
check(layout[0], ffi.sizeof(BStruct), "wrong total size")
|
| 355 |
+
check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
|
| 356 |
+
i = 2
|
| 357 |
+
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
| 358 |
+
if fbitsize >= 0:
|
| 359 |
+
continue # xxx ignore fbitsize for now
|
| 360 |
+
check(layout[i], ffi.offsetof(BStruct, fname),
|
| 361 |
+
"wrong offset for field %r" % (fname,))
|
| 362 |
+
if layout[i+1] != 0:
|
| 363 |
+
BField = ffi._get_cached_btype(ftype)
|
| 364 |
+
check(layout[i+1], ffi.sizeof(BField),
|
| 365 |
+
"wrong size for field %r" % (fname,))
|
| 366 |
+
i += 2
|
| 367 |
+
assert i == len(layout)
|
| 368 |
+
|
| 369 |
+
# ----------
|
| 370 |
+
# 'anonymous' declarations. These are produced for anonymous structs
|
| 371 |
+
# or unions; the 'name' is obtained by a typedef.
|
| 372 |
+
|
| 373 |
+
def _generate_gen_anonymous_decl(self, tp, name):
|
| 374 |
+
if isinstance(tp, model.EnumType):
|
| 375 |
+
self._generate_gen_enum_decl(tp, name, '')
|
| 376 |
+
else:
|
| 377 |
+
self._generate_struct_or_union_decl(tp, '', name)
|
| 378 |
+
|
| 379 |
+
def _loading_gen_anonymous(self, tp, name, module):
|
| 380 |
+
if isinstance(tp, model.EnumType):
|
| 381 |
+
self._loading_gen_enum(tp, name, module, '')
|
| 382 |
+
else:
|
| 383 |
+
self._loading_struct_or_union(tp, '', name, module)
|
| 384 |
+
|
| 385 |
+
def _loaded_gen_anonymous(self, tp, name, module, **kwds):
|
| 386 |
+
if isinstance(tp, model.EnumType):
|
| 387 |
+
self._loaded_gen_enum(tp, name, module, **kwds)
|
| 388 |
+
else:
|
| 389 |
+
self._loaded_struct_or_union(tp)
|
| 390 |
+
|
| 391 |
+
# ----------
|
| 392 |
+
# constants, likely declared with '#define'
|
| 393 |
+
|
| 394 |
+
def _generate_gen_const(self, is_int, name, tp=None, category='const',
|
| 395 |
+
check_value=None):
|
| 396 |
+
prnt = self._prnt
|
| 397 |
+
funcname = '_cffi_%s_%s' % (category, name)
|
| 398 |
+
self.export_symbols.append(funcname)
|
| 399 |
+
if check_value is not None:
|
| 400 |
+
assert is_int
|
| 401 |
+
assert category == 'const'
|
| 402 |
+
prnt('int %s(char *out_error)' % funcname)
|
| 403 |
+
prnt('{')
|
| 404 |
+
self._check_int_constant_value(name, check_value)
|
| 405 |
+
prnt(' return 0;')
|
| 406 |
+
prnt('}')
|
| 407 |
+
elif is_int:
|
| 408 |
+
assert category == 'const'
|
| 409 |
+
prnt('int %s(long long *out_value)' % funcname)
|
| 410 |
+
prnt('{')
|
| 411 |
+
prnt(' *out_value = (long long)(%s);' % (name,))
|
| 412 |
+
prnt(' return (%s) <= 0;' % (name,))
|
| 413 |
+
prnt('}')
|
| 414 |
+
else:
|
| 415 |
+
assert tp is not None
|
| 416 |
+
assert check_value is None
|
| 417 |
+
if category == 'var':
|
| 418 |
+
ampersand = '&'
|
| 419 |
+
else:
|
| 420 |
+
ampersand = ''
|
| 421 |
+
extra = ''
|
| 422 |
+
if category == 'const' and isinstance(tp, model.StructOrUnion):
|
| 423 |
+
extra = 'const *'
|
| 424 |
+
ampersand = '&'
|
| 425 |
+
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
|
| 426 |
+
prnt('{')
|
| 427 |
+
prnt(' return (%s%s);' % (ampersand, name))
|
| 428 |
+
prnt('}')
|
| 429 |
+
prnt()
|
| 430 |
+
|
| 431 |
+
def _generate_gen_constant_decl(self, tp, name):
|
| 432 |
+
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
| 433 |
+
self._generate_gen_const(is_int, name, tp)
|
| 434 |
+
|
| 435 |
+
_loading_gen_constant = _loaded_noop
|
| 436 |
+
|
| 437 |
+
def _load_constant(self, is_int, tp, name, module, check_value=None):
|
| 438 |
+
funcname = '_cffi_const_%s' % name
|
| 439 |
+
if check_value is not None:
|
| 440 |
+
assert is_int
|
| 441 |
+
self._load_known_int_constant(module, funcname)
|
| 442 |
+
value = check_value
|
| 443 |
+
elif is_int:
|
| 444 |
+
BType = self.ffi._typeof_locked("long long*")[0]
|
| 445 |
+
BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
|
| 446 |
+
function = module.load_function(BFunc, funcname)
|
| 447 |
+
p = self.ffi.new(BType)
|
| 448 |
+
negative = function(p)
|
| 449 |
+
value = int(p[0])
|
| 450 |
+
if value < 0 and not negative:
|
| 451 |
+
BLongLong = self.ffi._typeof_locked("long long")[0]
|
| 452 |
+
value += (1 << (8*self.ffi.sizeof(BLongLong)))
|
| 453 |
+
else:
|
| 454 |
+
assert check_value is None
|
| 455 |
+
fntypeextra = '(*)(void)'
|
| 456 |
+
if isinstance(tp, model.StructOrUnion):
|
| 457 |
+
fntypeextra = '*' + fntypeextra
|
| 458 |
+
BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
|
| 459 |
+
function = module.load_function(BFunc, funcname)
|
| 460 |
+
value = function()
|
| 461 |
+
if isinstance(tp, model.StructOrUnion):
|
| 462 |
+
value = value[0]
|
| 463 |
+
return value
|
| 464 |
+
|
| 465 |
+
def _loaded_gen_constant(self, tp, name, module, library):
|
| 466 |
+
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
| 467 |
+
value = self._load_constant(is_int, tp, name, module)
|
| 468 |
+
setattr(library, name, value)
|
| 469 |
+
type(library)._cffi_dir.append(name)
|
| 470 |
+
|
| 471 |
+
# ----------
|
| 472 |
+
# enums
|
| 473 |
+
|
| 474 |
+
def _check_int_constant_value(self, name, value):
|
| 475 |
+
prnt = self._prnt
|
| 476 |
+
if value <= 0:
|
| 477 |
+
prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
|
| 478 |
+
name, name, value))
|
| 479 |
+
else:
|
| 480 |
+
prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
|
| 481 |
+
name, name, value))
|
| 482 |
+
prnt(' char buf[64];')
|
| 483 |
+
prnt(' if ((%s) <= 0)' % name)
|
| 484 |
+
prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
|
| 485 |
+
prnt(' else')
|
| 486 |
+
prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
|
| 487 |
+
name)
|
| 488 |
+
prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
|
| 489 |
+
prnt(' "%s", buf, "%d");' % (name[:100], value))
|
| 490 |
+
prnt(' return -1;')
|
| 491 |
+
prnt(' }')
|
| 492 |
+
|
| 493 |
+
def _load_known_int_constant(self, module, funcname):
|
| 494 |
+
BType = self.ffi._typeof_locked("char[]")[0]
|
| 495 |
+
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
|
| 496 |
+
function = module.load_function(BFunc, funcname)
|
| 497 |
+
p = self.ffi.new(BType, 256)
|
| 498 |
+
if function(p) < 0:
|
| 499 |
+
error = self.ffi.string(p)
|
| 500 |
+
if sys.version_info >= (3,):
|
| 501 |
+
error = str(error, 'utf-8')
|
| 502 |
+
raise VerificationError(error)
|
| 503 |
+
|
| 504 |
+
def _enum_funcname(self, prefix, name):
|
| 505 |
+
# "$enum_$1" => "___D_enum____D_1"
|
| 506 |
+
name = name.replace('$', '___D_')
|
| 507 |
+
return '_cffi_e_%s_%s' % (prefix, name)
|
| 508 |
+
|
| 509 |
+
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
|
| 510 |
+
if tp.partial:
|
| 511 |
+
for enumerator in tp.enumerators:
|
| 512 |
+
self._generate_gen_const(True, enumerator)
|
| 513 |
+
return
|
| 514 |
+
#
|
| 515 |
+
funcname = self._enum_funcname(prefix, name)
|
| 516 |
+
self.export_symbols.append(funcname)
|
| 517 |
+
prnt = self._prnt
|
| 518 |
+
prnt('int %s(char *out_error)' % funcname)
|
| 519 |
+
prnt('{')
|
| 520 |
+
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
| 521 |
+
self._check_int_constant_value(enumerator, enumvalue)
|
| 522 |
+
prnt(' return 0;')
|
| 523 |
+
prnt('}')
|
| 524 |
+
prnt()
|
| 525 |
+
|
| 526 |
+
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
    """Resolve enum `tp` against the compiled `module`.

    Partial enums get their values loaded one constant at a time;
    complete enums are verified via the generated checker function.
    """
    if not tp.partial:
        funcname = self._enum_funcname(prefix, name)
        self._load_known_int_constant(module, funcname)
        return
    values = []
    for enumerator in tp.enumerators:
        values.append(self._load_constant(True, tp, enumerator, module))
    tp.enumvalues = tuple(values)
    tp.partial_resolved = True
|
| 535 |
+
|
| 536 |
+
def _loaded_gen_enum(self, tp, name, module, library):
|
| 537 |
+
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
| 538 |
+
setattr(library, enumerator, enumvalue)
|
| 539 |
+
type(library)._cffi_dir.append(enumerator)
|
| 540 |
+
|
| 541 |
+
# ----------
|
| 542 |
+
# macros: for now only for integers
|
| 543 |
+
|
| 544 |
+
def _generate_gen_macro_decl(self, tp, name):
|
| 545 |
+
if tp == '...':
|
| 546 |
+
check_value = None
|
| 547 |
+
else:
|
| 548 |
+
check_value = tp # an integer
|
| 549 |
+
self._generate_gen_const(True, name, check_value=check_value)
|
| 550 |
+
|
| 551 |
+
_loading_gen_macro = _loaded_noop
|
| 552 |
+
|
| 553 |
+
def _loaded_gen_macro(self, tp, name, module, library):
|
| 554 |
+
if tp == '...':
|
| 555 |
+
check_value = None
|
| 556 |
+
else:
|
| 557 |
+
check_value = tp # an integer
|
| 558 |
+
value = self._load_constant(True, tp, name, module,
|
| 559 |
+
check_value=check_value)
|
| 560 |
+
setattr(library, name, value)
|
| 561 |
+
type(library)._cffi_dir.append(name)
|
| 562 |
+
|
| 563 |
+
# ----------
|
| 564 |
+
# global variables
|
| 565 |
+
|
| 566 |
+
def _generate_gen_variable_decl(self, tp, name):
    # Emit C accessors for the global variable `name` of type `tp`.
    if isinstance(tp, model.ArrayType):
        if tp.length_is_unknown():
            # Unknown-length array: emit a sizeof() helper so the real
            # length can be recovered at load time.
            prnt = self._prnt
            funcname = '_cffi_sizeof_%s' % (name,)
            self.export_symbols.append(funcname)
            prnt("size_t %s(void)" % funcname)
            prnt("{")
            prnt(" return sizeof(%s);" % (name,))
            prnt("}")
        # Arrays are exposed as a pointer to their item type.
        tp_ptr = model.PointerType(tp.item)
        self._generate_gen_const(False, name, tp_ptr)
    else:
        # Scalars: expose a pointer so reads/writes reach the variable.
        tp_ptr = model.PointerType(tp)
        self._generate_gen_const(False, name, tp_ptr, category='var')
|
| 581 |
+
|
| 582 |
+
_loading_gen_variable = _loaded_noop
|
| 583 |
+
|
| 584 |
+
def _loaded_gen_variable(self, tp, name, module, library):
    # Bind the global variable `name` from the compiled `module` onto
    # `library`, either as an array cdata or as a read/write property.
    if isinstance(tp, model.ArrayType):  # int a[5] is "constant" in the
        # sense that "a=..." is forbidden
        if tp.length_is_unknown():
            # Recover the real length from the generated sizeof helper.
            funcname = '_cffi_sizeof_%s' % (name,)
            BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
            function = module.load_function(BFunc, funcname)
            size = function()
            BItemType = self.ffi._get_cached_btype(tp.item)
            length, rest = divmod(size, self.ffi.sizeof(BItemType))
            if rest != 0:
                raise VerificationError(
                    "bad size: %r does not seem to be an array of %s" %
                    (name, tp.item))
            tp = tp.resolve_length(length)
        tp_ptr = model.PointerType(tp.item)
        value = self._load_constant(False, tp_ptr, name, module)
        # 'value' is a <cdata 'type *'> which we have to replace with
        # a <cdata 'type[N]'> if the N is actually known
        if tp.length is not None:
            BArray = self.ffi._get_cached_btype(tp)
            value = self.ffi.cast(BArray, value)
        setattr(library, name, value)
        type(library)._cffi_dir.append(name)
        return
    # remove ptr=<cdata 'int *'> from the library instance, and replace
    # it by a property on the class, which reads/writes into ptr[0].
    funcname = '_cffi_var_%s' % name
    BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
    function = module.load_function(BFunc, funcname)
    ptr = function()

    def getter(library):
        return ptr[0]

    def setter(library, value):
        ptr[0] = value

    setattr(type(library), name, property(getter, setter))
    type(library)._cffi_dir.append(name)
|
| 621 |
+
|
| 622 |
+
cffimod_header = r'''
|
| 623 |
+
#include <stdio.h>
|
| 624 |
+
#include <stddef.h>
|
| 625 |
+
#include <stdarg.h>
|
| 626 |
+
#include <errno.h>
|
| 627 |
+
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
| 628 |
+
|
| 629 |
+
/* this block of #ifs should be kept exactly identical between
|
| 630 |
+
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
| 631 |
+
and cffi/_cffi_include.h */
|
| 632 |
+
#if defined(_MSC_VER)
|
| 633 |
+
# include <malloc.h> /* for alloca() */
|
| 634 |
+
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
| 635 |
+
typedef __int8 int8_t;
|
| 636 |
+
typedef __int16 int16_t;
|
| 637 |
+
typedef __int32 int32_t;
|
| 638 |
+
typedef __int64 int64_t;
|
| 639 |
+
typedef unsigned __int8 uint8_t;
|
| 640 |
+
typedef unsigned __int16 uint16_t;
|
| 641 |
+
typedef unsigned __int32 uint32_t;
|
| 642 |
+
typedef unsigned __int64 uint64_t;
|
| 643 |
+
typedef __int8 int_least8_t;
|
| 644 |
+
typedef __int16 int_least16_t;
|
| 645 |
+
typedef __int32 int_least32_t;
|
| 646 |
+
typedef __int64 int_least64_t;
|
| 647 |
+
typedef unsigned __int8 uint_least8_t;
|
| 648 |
+
typedef unsigned __int16 uint_least16_t;
|
| 649 |
+
typedef unsigned __int32 uint_least32_t;
|
| 650 |
+
typedef unsigned __int64 uint_least64_t;
|
| 651 |
+
typedef __int8 int_fast8_t;
|
| 652 |
+
typedef __int16 int_fast16_t;
|
| 653 |
+
typedef __int32 int_fast32_t;
|
| 654 |
+
typedef __int64 int_fast64_t;
|
| 655 |
+
typedef unsigned __int8 uint_fast8_t;
|
| 656 |
+
typedef unsigned __int16 uint_fast16_t;
|
| 657 |
+
typedef unsigned __int32 uint_fast32_t;
|
| 658 |
+
typedef unsigned __int64 uint_fast64_t;
|
| 659 |
+
typedef __int64 intmax_t;
|
| 660 |
+
typedef unsigned __int64 uintmax_t;
|
| 661 |
+
# else
|
| 662 |
+
# include <stdint.h>
|
| 663 |
+
# endif
|
| 664 |
+
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
| 665 |
+
# ifndef __cplusplus
|
| 666 |
+
typedef unsigned char _Bool;
|
| 667 |
+
# endif
|
| 668 |
+
# endif
|
| 669 |
+
# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
|
| 670 |
+
# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
|
| 671 |
+
#else
|
| 672 |
+
# include <stdint.h>
|
| 673 |
+
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
| 674 |
+
# include <alloca.h>
|
| 675 |
+
# endif
|
| 676 |
+
# define _cffi_float_complex_t float _Complex
|
| 677 |
+
# define _cffi_double_complex_t double _Complex
|
| 678 |
+
#endif
|
| 679 |
+
'''
|
.venv/lib/python3.11/site-packages/diskcache/__init__.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
DiskCache API Reference
|
| 3 |
+
=======================
|
| 4 |
+
|
| 5 |
+
The :doc:`tutorial` provides a helpful walkthrough of most methods.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from .core import (
|
| 9 |
+
DEFAULT_SETTINGS,
|
| 10 |
+
ENOVAL,
|
| 11 |
+
EVICTION_POLICY,
|
| 12 |
+
UNKNOWN,
|
| 13 |
+
Cache,
|
| 14 |
+
Disk,
|
| 15 |
+
EmptyDirWarning,
|
| 16 |
+
JSONDisk,
|
| 17 |
+
Timeout,
|
| 18 |
+
UnknownFileWarning,
|
| 19 |
+
)
|
| 20 |
+
from .fanout import FanoutCache
|
| 21 |
+
from .persistent import Deque, Index
|
| 22 |
+
from .recipes import (
|
| 23 |
+
Averager,
|
| 24 |
+
BoundedSemaphore,
|
| 25 |
+
Lock,
|
| 26 |
+
RLock,
|
| 27 |
+
barrier,
|
| 28 |
+
memoize_stampede,
|
| 29 |
+
throttle,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
__all__ = [
|
| 33 |
+
'Averager',
|
| 34 |
+
'BoundedSemaphore',
|
| 35 |
+
'Cache',
|
| 36 |
+
'DEFAULT_SETTINGS',
|
| 37 |
+
'Deque',
|
| 38 |
+
'Disk',
|
| 39 |
+
'ENOVAL',
|
| 40 |
+
'EVICTION_POLICY',
|
| 41 |
+
'EmptyDirWarning',
|
| 42 |
+
'FanoutCache',
|
| 43 |
+
'Index',
|
| 44 |
+
'JSONDisk',
|
| 45 |
+
'Lock',
|
| 46 |
+
'RLock',
|
| 47 |
+
'Timeout',
|
| 48 |
+
'UNKNOWN',
|
| 49 |
+
'UnknownFileWarning',
|
| 50 |
+
'barrier',
|
| 51 |
+
'memoize_stampede',
|
| 52 |
+
'throttle',
|
| 53 |
+
]
|
| 54 |
+
|
| 55 |
+
try:
|
| 56 |
+
from .djangocache import DjangoCache # noqa
|
| 57 |
+
|
| 58 |
+
__all__.append('DjangoCache')
|
| 59 |
+
except Exception: # pylint: disable=broad-except # pragma: no cover
|
| 60 |
+
# Django not installed or not setup so ignore.
|
| 61 |
+
pass
|
| 62 |
+
|
| 63 |
+
__title__ = 'diskcache'
|
| 64 |
+
__version__ = '5.6.3'
|
| 65 |
+
__build__ = 0x050603
|
| 66 |
+
__author__ = 'Grant Jenks'
|
| 67 |
+
__license__ = 'Apache 2.0'
|
| 68 |
+
__copyright__ = 'Copyright 2016-2023 Grant Jenks'
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (1.64 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/cli.cpython-311.pyc
ADDED
|
Binary file (228 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/djangocache.cpython-311.pyc
ADDED
|
Binary file (19.7 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/fanout.cpython-311.pyc
ADDED
|
Binary file (31.8 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/persistent.cpython-311.pyc
ADDED
|
Binary file (46 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/__pycache__/recipes.cpython-311.pyc
ADDED
|
Binary file (22 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/diskcache/cli.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Command line interface to disk cache."""
|
.venv/lib/python3.11/site-packages/diskcache/core.py
ADDED
|
@@ -0,0 +1,2452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Core disk and file backed cache API.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import codecs
|
| 5 |
+
import contextlib as cl
|
| 6 |
+
import errno
|
| 7 |
+
import functools as ft
|
| 8 |
+
import io
|
| 9 |
+
import json
|
| 10 |
+
import os
|
| 11 |
+
import os.path as op
|
| 12 |
+
import pickle
|
| 13 |
+
import pickletools
|
| 14 |
+
import sqlite3
|
| 15 |
+
import struct
|
| 16 |
+
import tempfile
|
| 17 |
+
import threading
|
| 18 |
+
import time
|
| 19 |
+
import warnings
|
| 20 |
+
import zlib
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def full_name(func):
|
| 24 |
+
"""Return full name of `func` by adding the module and function name."""
|
| 25 |
+
return func.__module__ + '.' + func.__qualname__
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class Constant(tuple):
    """Pretty display of immutable constant."""

    def __new__(cls, name):
        # A one-element tuple keeps the constant hashable and immutable.
        return super().__new__(cls, (name,))

    def __repr__(self):
        # Show the bare name (e.g. "ENOVAL") rather than tuple syntax.
        return str(self[0])
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
DBNAME = 'cache.db'
|
| 39 |
+
ENOVAL = Constant('ENOVAL')
|
| 40 |
+
UNKNOWN = Constant('UNKNOWN')
|
| 41 |
+
|
| 42 |
+
MODE_NONE = 0
|
| 43 |
+
MODE_RAW = 1
|
| 44 |
+
MODE_BINARY = 2
|
| 45 |
+
MODE_TEXT = 3
|
| 46 |
+
MODE_PICKLE = 4
|
| 47 |
+
|
| 48 |
+
DEFAULT_SETTINGS = {
|
| 49 |
+
'statistics': 0, # False
|
| 50 |
+
'tag_index': 0, # False
|
| 51 |
+
'eviction_policy': 'least-recently-stored',
|
| 52 |
+
'size_limit': 2**30, # 1gb
|
| 53 |
+
'cull_limit': 10,
|
| 54 |
+
'sqlite_auto_vacuum': 1, # FULL
|
| 55 |
+
'sqlite_cache_size': 2**13, # 8,192 pages
|
| 56 |
+
'sqlite_journal_mode': 'wal',
|
| 57 |
+
'sqlite_mmap_size': 2**26, # 64mb
|
| 58 |
+
'sqlite_synchronous': 1, # NORMAL
|
| 59 |
+
'disk_min_file_size': 2**15, # 32kb
|
| 60 |
+
'disk_pickle_protocol': pickle.HIGHEST_PROTOCOL,
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
METADATA = {
|
| 64 |
+
'count': 0,
|
| 65 |
+
'size': 0,
|
| 66 |
+
'hits': 0,
|
| 67 |
+
'misses': 0,
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
EVICTION_POLICY = {
|
| 71 |
+
'none': {
|
| 72 |
+
'init': None,
|
| 73 |
+
'get': None,
|
| 74 |
+
'cull': None,
|
| 75 |
+
},
|
| 76 |
+
'least-recently-stored': {
|
| 77 |
+
'init': (
|
| 78 |
+
'CREATE INDEX IF NOT EXISTS Cache_store_time ON'
|
| 79 |
+
' Cache (store_time)'
|
| 80 |
+
),
|
| 81 |
+
'get': None,
|
| 82 |
+
'cull': 'SELECT {fields} FROM Cache ORDER BY store_time LIMIT ?',
|
| 83 |
+
},
|
| 84 |
+
'least-recently-used': {
|
| 85 |
+
'init': (
|
| 86 |
+
'CREATE INDEX IF NOT EXISTS Cache_access_time ON'
|
| 87 |
+
' Cache (access_time)'
|
| 88 |
+
),
|
| 89 |
+
'get': 'access_time = {now}',
|
| 90 |
+
'cull': 'SELECT {fields} FROM Cache ORDER BY access_time LIMIT ?',
|
| 91 |
+
},
|
| 92 |
+
'least-frequently-used': {
|
| 93 |
+
'init': (
|
| 94 |
+
'CREATE INDEX IF NOT EXISTS Cache_access_count ON'
|
| 95 |
+
' Cache (access_count)'
|
| 96 |
+
),
|
| 97 |
+
'get': 'access_count = access_count + 1',
|
| 98 |
+
'cull': 'SELECT {fields} FROM Cache ORDER BY access_count LIMIT ?',
|
| 99 |
+
},
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Disk:
|
| 104 |
+
"""Cache key and value serialization for SQLite database and files."""
|
| 105 |
+
|
| 106 |
+
def __init__(self, directory, min_file_size=0, pickle_protocol=0):
    """Initialize disk instance.

    :param str directory: directory path for file-backed values
    :param int min_file_size: minimum value size (bytes) before a
        separate file is used instead of inline database storage
    :param int pickle_protocol: pickle protocol for serialization

    """
    self._directory = directory
    self.min_file_size = min_file_size
    self.pickle_protocol = pickle_protocol
|
| 117 |
+
|
| 118 |
+
def hash(self, key):
    """Compute portable hash for `key`.

    The hash is derived from the database representation of the key
    (via :meth:`put`) so it is stable across processes and platforms.

    :param key: key to hash
    :return: 32-bit hash value

    """
    mask = 0xFFFFFFFF
    disk_key, _ = self.put(key)
    kind = type(disk_key)

    if kind is sqlite3.Binary:
        return zlib.adler32(disk_key) & mask
    if kind is str:
        return zlib.adler32(disk_key.encode('utf-8')) & mask  # noqa
    if kind is int:
        return disk_key % mask
    # Only four database key types exist; the remaining one is float.
    assert kind is float
    return zlib.adler32(struct.pack('!d', disk_key)) & mask
|
| 138 |
+
|
| 139 |
+
def put(self, key):
    """Convert `key` to fields key and raw for Cache table.

    :param key: key to convert
    :return: (database key, raw boolean) pair

    """
    # pylint: disable=unidiomatic-typecheck
    kind = type(key)

    if kind is bytes:
        return sqlite3.Binary(key), True

    # Strings, floats, and ints that fit in a signed 64-bit SQLite
    # integer are stored directly.
    fits_in_sqlite_int = (
        kind is int
        and -9223372036854775808 <= key <= 9223372036854775807
    )
    if kind is str or fits_in_sqlite_int or kind is float:
        return key, True

    # Everything else is pickled; optimize() shrinks the payload.
    data = pickle.dumps(key, protocol=self.pickle_protocol)
    return sqlite3.Binary(pickletools.optimize(data)), False
|
| 164 |
+
|
| 165 |
+
def get(self, key, raw):
    """Convert fields `key` and `raw` from Cache table to key.

    :param key: database key to convert
    :param bool raw: flag indicating raw database storage
    :return: corresponding Python key

    """
    # pylint: disable=unidiomatic-typecheck
    if not raw:
        # Stored pickled: rebuild the original Python object.
        return pickle.load(io.BytesIO(key))
    if type(key) is sqlite3.Binary:
        return bytes(key)
    return key
|
| 178 |
+
|
| 179 |
+
def store(self, value, read, key=UNKNOWN):
    """Convert `value` to fields size, mode, filename, and value for Cache
    table.

    :param value: value to convert
    :param bool read: True when value is file-like object
    :param key: key for item (default UNKNOWN)
    :return: (size, mode, filename, value) tuple for Cache table

    """
    # pylint: disable=unidiomatic-typecheck
    type_value = type(value)
    min_file_size = self.min_file_size

    if (
        (type_value is str and len(value) < min_file_size)
        or (
            type_value is int
            and -9223372036854775808 <= value <= 9223372036854775807
        )
        or (type_value is float)
    ):
        # Small strings, 64-bit ints, and floats are stored inline.
        return 0, MODE_RAW, None, value
    elif type_value is bytes:
        if len(value) < min_file_size:
            return 0, MODE_RAW, None, sqlite3.Binary(value)
        else:
            # Large bytes values go to a separate file.
            filename, full_path = self.filename(key, value)
            self._write(full_path, io.BytesIO(value), 'xb')
            return len(value), MODE_BINARY, filename, None
    elif type_value is str:
        # Large text: write UTF-8 encoded; size measured after encoding.
        filename, full_path = self.filename(key, value)
        self._write(full_path, io.StringIO(value), 'x', 'UTF-8')
        size = op.getsize(full_path)
        return size, MODE_TEXT, filename, None
    elif read:
        # File-like object: stream to disk in 4 MiB chunks.
        reader = ft.partial(value.read, 2**22)
        filename, full_path = self.filename(key, value)
        iterator = iter(reader, b'')
        size = self._write(full_path, iterator, 'xb')
        return size, MODE_BINARY, filename, None
    else:
        # Arbitrary objects are pickled; small pickles stay inline.
        result = pickle.dumps(value, protocol=self.pickle_protocol)

        if len(result) < min_file_size:
            return 0, MODE_PICKLE, None, sqlite3.Binary(result)
        else:
            filename, full_path = self.filename(key, value)
            self._write(full_path, io.BytesIO(result), 'xb')
            return len(result), MODE_PICKLE, filename, None
|
| 229 |
+
|
| 230 |
+
def _write(self, full_path, iterator, mode, encoding=None):
    """Write chunks from `iterator` to a new file at `full_path`.

    Missing parent directories are created on demand.  Creating the file
    races with other cache processes that may delete the directory between
    ``makedirs`` and ``open``, so both steps are retried up to 10 times.

    :param str full_path: absolute path of file to create
    :param iterator: iterable of str or bytes chunks to write
    :param str mode: open mode (exclusive-create, e.g. 'x' or 'xb')
    :param str encoding: text encoding, or None for binary mode
    :return: total length of all chunks written
    :raises OSError: if the file still cannot be opened after 10 tries

    """
    full_dir, _ = op.split(full_path)

    for count in range(1, 11):
        # Directory may already exist; that OSError is harmless.
        with cl.suppress(OSError):
            os.makedirs(full_dir)

        try:
            # Another cache may have deleted the directory before
            # the file could be opened.
            writer = open(full_path, mode, encoding=encoding)
        except OSError:
            if count == 10:
                # Give up after 10 tries to open the file.
                raise
            continue

        with writer:
            # Sum chunk lengths while streaming so large values are never
            # held in memory all at once.
            size = 0
            for chunk in iterator:
                size += len(chunk)
                writer.write(chunk)
            return size
|
| 253 |
+
|
| 254 |
+
def fetch(self, mode, filename, value, read):
    """Convert fields `mode`, `filename`, and `value` from Cache table to
    value.

    :param int mode: value mode raw, binary, text, or pickle
    :param str filename: filename of corresponding value
    :param value: database value
    :param bool read: when True, return an open file handle
    :return: corresponding Python value
    :raises: IOError if the value cannot be read

    """
    # pylint: disable=unidiomatic-typecheck,consider-using-with
    if mode == MODE_RAW:
        # Raw values round-trip through SQLite; BLOBs come back as
        # sqlite3.Binary and are converted back to bytes.
        return bytes(value) if type(value) is sqlite3.Binary else value
    elif mode == MODE_BINARY:
        if read:
            # Caller takes ownership of the open handle.
            return open(op.join(self._directory, filename), 'rb')
        else:
            with open(op.join(self._directory, filename), 'rb') as reader:
                return reader.read()
    elif mode == MODE_TEXT:
        # Text values are stored UTF-8 encoded (see Disk.store).
        full_path = op.join(self._directory, filename)
        with open(full_path, 'r', encoding='UTF-8') as reader:
            return reader.read()
    elif mode == MODE_PICKLE:
        # Small pickles live in the value column; large ones in a file.
        if value is None:
            with open(op.join(self._directory, filename), 'rb') as reader:
                return pickle.load(reader)
        else:
            return pickle.load(io.BytesIO(value))
|
| 285 |
+
|
| 286 |
+
def filename(self, key=UNKNOWN, value=UNKNOWN):
    """Return filename and full-path tuple for file storage.

    Filename will be a randomly generated 28 character hexadecimal string
    with ".val" suffixed. Two levels of sub-directories will be used to
    reduce the size of directories. On older filesystems, lookups in
    directories with many files may be slow.

    The default implementation ignores the `key` and `value` parameters.

    In some scenarios, for example :meth:`Cache.push
    <diskcache.Cache.push>`, the `key` or `value` may not be known when the
    item is stored in the cache.

    :param key: key for item (default UNKNOWN)
    :param value: value for item (default UNKNOWN)

    """
    # pylint: disable=unused-argument
    # 16 random bytes -> 32 hex digits: 2 + 2 for the sub-directories and
    # the remaining 28 for the file name itself.
    hex_name = os.urandom(16).hex()
    sub_dir = op.join(hex_name[:2], hex_name[2:4])
    filename = op.join(sub_dir, hex_name[4:] + '.val')
    return filename, op.join(self._directory, filename)
|
| 311 |
+
|
| 312 |
+
def remove(self, file_path):
    """Remove a file given by `file_path`.

    This method is cross-thread and cross-process safe. If an OSError
    occurs, it is suppressed.

    :param str file_path: relative path to file

    """
    full_path = op.join(self._directory, file_path)
    parent_dir = op.dirname(full_path)

    # Two caches may race to delete the same file or prune the same
    # directory tree; either loser's OSError is safe to ignore.
    with cl.suppress(OSError):
        os.remove(full_path)

    with cl.suppress(OSError):
        os.removedirs(parent_dir)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
class JSONDisk(Disk):
    """Cache key and value using JSON serialization with zlib compression."""

    def __init__(self, directory, compress_level=1, **kwargs):
        """Initialize JSON disk instance.

        Keys and values are compressed using the zlib library. The
        `compress_level` is an integer from 0 to 9 controlling the level of
        compression; 1 is fastest and produces the least compression, 9 is
        slowest and produces the most compression, and 0 is no compression.

        :param str directory: directory path
        :param int compress_level: zlib compression level (default 1)
        :param kwargs: super class arguments

        """
        self.compress_level = compress_level
        super().__init__(directory, **kwargs)

    def _dump(self, obj):
        # Shared encode path: JSON-serialize then zlib-compress.
        payload = json.dumps(obj).encode('utf-8')
        return zlib.compress(payload, self.compress_level)

    def _load(self, blob):
        # Inverse of _dump: zlib-decompress then JSON-parse.
        return json.loads(zlib.decompress(blob).decode('utf-8'))

    def put(self, key):
        return super().put(self._dump(key))

    def get(self, key, raw):
        return self._load(super().get(key, raw))

    def store(self, value, read, key=UNKNOWN):
        if not read:
            value = self._dump(value)
        return super().store(value, read, key=key)

    def fetch(self, mode, filename, value, read):
        data = super().fetch(mode, filename, value, read)
        if not read:
            data = self._load(data)
        return data
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
class Timeout(Exception):
    """Database timeout expired.

    Raised when a database transaction cannot begin within the configured
    timeout and the operation was not asked to retry.
    """
|
| 377 |
+
|
| 378 |
+
|
| 379 |
+
class UnknownFileWarning(UserWarning):
    """Warning used by Cache.check for unknown files."""
    # Plain warning category; carries no state beyond UserWarning and can
    # be filtered by callers via the `warnings` module.
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
class EmptyDirWarning(UserWarning):
    """Warning used by Cache.check for empty directories."""
    # Plain warning category; carries no state beyond UserWarning and can
    # be filtered by callers via the `warnings` module.
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def args_to_key(base, args, kwargs, typed, ignore):
    """Create cache key out of function arguments.

    :param tuple base: base of key
    :param tuple args: function arguments
    :param dict kwargs: function keyword arguments
    :param bool typed: include types in cache key
    :param set ignore: positional or keyword args to ignore
    :return: cache key tuple

    """
    kept_args = tuple(
        value for position, value in enumerate(args) if position not in ignore
    )
    # `None` separates positional arguments from the keyword section.
    key = base + kept_args + (None,)

    sorted_pairs = ()
    if kwargs:
        filtered = {
            name: value for name, value in kwargs.items() if name not in ignore
        }
        # Sort by name so keyword order never changes the key.
        sorted_pairs = tuple(sorted(filtered.items()))
        for pair in sorted_pairs:
            key += pair

    if typed:
        key += tuple(type(value) for value in kept_args)
        key += tuple(type(value) for _, value in sorted_pairs)

    return key
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
class Cache:
|
| 418 |
+
"""Disk and file backed cache."""
|
| 419 |
+
|
| 420 |
+
def __init__(self, directory=None, timeout=60, disk=Disk, **settings):
    """Initialize cache instance.

    :param str directory: cache directory
    :param float timeout: SQLite connection timeout
    :param disk: Disk type or subclass for serialization
    :param settings: any of DEFAULT_SETTINGS

    """
    # `issubclass` raises TypeError for non-classes; either failure means
    # the caller passed something unusable as a Disk.
    try:
        assert issubclass(disk, Disk)
    except (TypeError, AssertionError):
        raise ValueError('disk must subclass diskcache.Disk') from None

    if directory is None:
        directory = tempfile.mkdtemp(prefix='diskcache-')
    directory = str(directory)
    directory = op.expanduser(directory)
    directory = op.expandvars(directory)

    self._directory = directory
    self._timeout = 0  # Manually handle retries during initialization.
    self._local = threading.local()  # per-thread connection storage
    self._txn_id = None  # thread id holding the open transaction, if any

    if not op.isdir(directory):
        try:
            os.makedirs(directory, 0o755)
        except OSError as error:
            # Another process may have created the directory first.
            if error.errno != errno.EEXIST:
                raise EnvironmentError(
                    error.errno,
                    'Cache directory "%s" does not exist'
                    ' and could not be created' % self._directory,
                ) from None

    sql = self._sql_retry

    # Setup Settings table.

    try:
        current_settings = dict(
            sql('SELECT key, value FROM Settings').fetchall()
        )
    except sqlite3.OperationalError:
        # Table does not exist yet; fall back to defaults only.
        current_settings = {}

    # Precedence: caller-supplied settings > stored settings > defaults.
    sets = DEFAULT_SETTINGS.copy()
    sets.update(current_settings)
    sets.update(settings)

    for key in METADATA:
        sets.pop(key, None)

    # Chance to set pragmas before any tables are created.

    for key, value in sorted(sets.items()):
        if key.startswith('sqlite_'):
            self.reset(key, value, update=False)

    sql(
        'CREATE TABLE IF NOT EXISTS Settings ('
        ' key TEXT NOT NULL UNIQUE,'
        ' value)'
    )

    # Setup Disk object (must happen after settings initialized).

    kwargs = {
        key[5:]: value
        for key, value in sets.items()
        if key.startswith('disk_')
    }
    self._disk = disk(directory, **kwargs)

    # Set cached attributes: updates settings and sets pragmas.

    for key, value in sets.items():
        query = 'INSERT OR REPLACE INTO Settings VALUES (?, ?)'
        sql(query, (key, value))
        self.reset(key, value)

    for key, value in METADATA.items():
        query = 'INSERT OR IGNORE INTO Settings VALUES (?, ?)'
        sql(query, (key, value))
        self.reset(key)

    ((self._page_size,),) = sql('PRAGMA page_size').fetchall()

    # Setup Cache table.

    sql(
        'CREATE TABLE IF NOT EXISTS Cache ('
        ' rowid INTEGER PRIMARY KEY,'
        ' key BLOB,'
        ' raw INTEGER,'
        ' store_time REAL,'
        ' expire_time REAL,'
        ' access_time REAL,'
        ' access_count INTEGER DEFAULT 0,'
        ' tag BLOB,'
        ' size INTEGER DEFAULT 0,'
        ' mode INTEGER DEFAULT 0,'
        ' filename TEXT,'
        ' value BLOB)'
    )

    sql(
        'CREATE UNIQUE INDEX IF NOT EXISTS Cache_key_raw ON'
        ' Cache(key, raw)'
    )

    sql(
        'CREATE INDEX IF NOT EXISTS Cache_expire_time ON'
        ' Cache (expire_time)'
    )

    query = EVICTION_POLICY[self.eviction_policy]['init']

    if query is not None:
        sql(query)

    # Use triggers to keep Metadata updated.

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_count_insert'
        ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value + 1'
        ' WHERE key = "count"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_count_delete'
        ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value - 1'
        ' WHERE key = "count"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_insert'
        ' AFTER INSERT ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value + NEW.size'
        ' WHERE key = "size"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_update'
        ' AFTER UPDATE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings'
        ' SET value = value + NEW.size - OLD.size'
        ' WHERE key = "size"; END'
    )

    sql(
        'CREATE TRIGGER IF NOT EXISTS Settings_size_delete'
        ' AFTER DELETE ON Cache FOR EACH ROW BEGIN'
        ' UPDATE Settings SET value = value - OLD.size'
        ' WHERE key = "size"; END'
    )

    # Create tag index if requested.

    if self.tag_index:  # pylint: disable=no-member
        self.create_tag_index()
    else:
        self.drop_tag_index()

    # Close and re-open database connection with given timeout.

    self.close()
    self._timeout = timeout
    self._sql  # pylint: disable=pointless-statement
|
| 592 |
+
|
| 593 |
+
@property
def directory(self):
    """Cache directory path as a string."""
    return self._directory
|
| 597 |
+
|
| 598 |
+
@property
def timeout(self):
    """SQLite connection timeout value in seconds."""
    # Zero during __init__ (retries handled manually there); set to the
    # constructor's `timeout` argument once initialization completes.
    return self._timeout
|
| 602 |
+
|
| 603 |
+
@property
def disk(self):
    """Disk instance used for key/value serialization."""
    return self._disk
|
| 607 |
+
|
| 608 |
+
@property
def _con(self):
    """Per-thread SQLite connection, created lazily.

    Connections live in thread-local storage and are re-created after a
    fork, detected by a change in process ID.
    """
    # Check process ID to support process forking. If the process
    # ID changes, close the connection and update the process ID.

    local_pid = getattr(self._local, 'pid', None)
    pid = os.getpid()

    if local_pid != pid:
        self.close()
        self._local.pid = pid

    con = getattr(self._local, 'con', None)

    if con is None:
        # isolation_level=None -> autocommit; transactions are managed
        # explicitly with BEGIN/COMMIT in _transact.
        con = self._local.con = sqlite3.connect(
            op.join(self._directory, DBNAME),
            timeout=self._timeout,
            isolation_level=None,
        )

        # Some SQLite pragmas work on a per-connection basis so
        # query the Settings table and reset the pragmas. The
        # Settings table may not exist so catch and ignore the
        # OperationalError that may occur.

        try:
            select = 'SELECT key, value FROM Settings'
            settings = con.execute(select).fetchall()
        except sqlite3.OperationalError:
            pass
        else:
            for key, value in settings:
                if key.startswith('sqlite_'):
                    self.reset(key, value, update=False)

    return con
|
| 645 |
+
|
| 646 |
+
@property
def _sql(self):
    """Shortcut to the current thread's ``Connection.execute``."""
    return self._con.execute
|
| 649 |
+
|
| 650 |
+
@property
def _sql_retry(self):
    """Like :attr:`_sql` but retries "database is locked" errors.

    :return: callable with the same signature as ``Connection.execute``
    :raises sqlite3.OperationalError: re-raised for non-lock errors or
        after the 60 second retry budget is exhausted
    """
    sql = self._sql

    # 2018-11-01 GrantJ - Some SQLite builds/versions handle
    # the SQLITE_BUSY return value and connection parameter
    # "timeout" differently. For a more reliable duration,
    # manually retry the statement for 60 seconds. Only used
    # by statements which modify the database and do not use
    # a transaction (like those in ``__init__`` or ``reset``).
    # See Issue #85 for and tests/issue_85.py for more details.

    def _execute_with_retry(statement, *args, **kwargs):
        # Measure elapsed time with a monotonic clock so the 60 second
        # retry budget is immune to system wall-clock adjustments
        # (time.time() can jump forward or backward under NTP/DST).
        start = time.monotonic()
        while True:
            try:
                return sql(statement, *args, **kwargs)
            except sqlite3.OperationalError as exc:
                if str(exc) != 'database is locked':
                    raise
                diff = time.monotonic() - start
                if diff > 60:
                    raise
                time.sleep(0.001)

    return _execute_with_retry
|
| 676 |
+
|
| 677 |
+
@cl.contextmanager
def transact(self, retry=False):
    """Context manager to perform a transaction by locking the cache.

    While the cache is locked, no other write operation is permitted.
    Transactions should therefore be as short as possible. Read and write
    operations performed in a transaction are atomic. Read operations may
    occur concurrent to a transaction.

    Transactions may be nested and may not be shared between threads.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    >>> cache = Cache()
    >>> with cache.transact():  # Atomically increment two keys.
    ...     _ = cache.incr('total', 123.4)
    ...     _ = cache.incr('count', 1)
    >>> with cache.transact():  # Atomically calculate average.
    ...     average = cache['total'] / cache['count']
    >>> average
    123.4

    :param bool retry: retry if database timeout occurs (default False)
    :return: context manager for use in `with` statement
    :raises Timeout: if database timeout occurs

    """
    # Delegate to _transact; the (sql, cleanup) pair it yields is an
    # internal detail and is not exposed to callers.
    with self._transact(retry=retry):
        yield
|
| 707 |
+
|
| 708 |
+
@cl.contextmanager
def _transact(self, retry=False, filename=None):
    """Internal transaction context manager.

    Yields a ``(sql, cleanup)`` pair: ``sql`` executes statements inside
    the transaction and ``cleanup`` registers a filename to delete after
    a successful COMMIT.

    :param bool retry: retry BEGIN forever instead of raising Timeout
    :param str filename: value file to remove if BEGIN fails
    :raises Timeout: if BEGIN fails and `retry` is False
    """
    sql = self._sql
    filenames = []
    _disk_remove = self._disk.remove
    tid = threading.get_ident()
    txn_id = self._txn_id

    if tid == txn_id:
        # Nested transaction in the same thread: reuse the outer BEGIN.
        begin = False
    else:
        while True:
            try:
                sql('BEGIN IMMEDIATE')
                begin = True
                self._txn_id = tid
                break
            except sqlite3.OperationalError:
                if retry:
                    continue
                if filename is not None:
                    # Caller already wrote a value file; remove it since
                    # this transaction will never commit.
                    _disk_remove(filename)
                raise Timeout from None

    try:
        yield sql, filenames.append
    except BaseException:
        if begin:
            assert self._txn_id == tid
            self._txn_id = None
            sql('ROLLBACK')
        raise
    else:
        if begin:
            assert self._txn_id == tid
            self._txn_id = None
            sql('COMMIT')
        # Remove files only after COMMIT so concurrent readers never see
        # a row whose backing file is already gone.
        for name in filenames:
            if name is not None:
                _disk_remove(name)
|
| 748 |
+
|
| 749 |
+
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Set `key` and `value` item in cache.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until item expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was set
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    # Serialize key and value BEFORE taking the transaction lock so slow
    # serialization does not block other writers.
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire
    size, mode, filename, db_value = self._disk.store(value, read, key=key)
    columns = (expire_time, tag, size, mode, filename, db_value)

    # The order of SELECT, UPDATE, and INSERT is important below.
    #
    # Typical cache usage pattern is:
    #
    # value = cache.get(key)
    # if value is None:
    #     value = expensive_calculation()
    #     cache.set(key, value)
    #
    # Cache.get does not evict expired keys to avoid writes during lookups.
    # Commonly used/expired keys will therefore remain in the cache making
    # an UPDATE the preferred path.
    #
    # The alternative is to assume the key is not present by first trying
    # to INSERT and then handling the IntegrityError that occurs from
    # violating the UNIQUE constraint. This optimistic approach was
    # rejected based on the common cache usage pattern.
    #
    # INSERT OR REPLACE aka UPSERT is not used because the old filename may
    # need cleanup.

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(
            'SELECT rowid, filename FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if rows:
            ((rowid, old_filename),) = rows
            # Old value file is deleted only after a successful COMMIT.
            cleanup(old_filename)
            self._row_update(rowid, now, columns)
        else:
            self._row_insert(db_key, raw, now, columns)

        self._cull(now, sql, cleanup)

        return True
|
| 813 |
+
|
| 814 |
+
def __setitem__(self, key, value):
    """Set corresponding `value` for `key` in cache.

    Equivalent to ``set(key, value, retry=True)``; the return value of
    :meth:`set` is discarded.

    :param key: key for item
    :param value: value for item

    """
    self.set(key, value, retry=True)
|
| 824 |
+
|
| 825 |
+
def _row_update(self, rowid, now, columns):
    """Overwrite an existing Cache row in place.

    Resets ``store_time``/``access_time`` to `now` and ``access_count``
    to zero, so an updated item behaves like a freshly stored one.

    :param int rowid: Cache table rowid to update
    :param float now: current timestamp
    :param tuple columns: (expire_time, tag, size, mode, filename, value)
    """
    sql = self._sql
    expire_time, tag, size, mode, filename, value = columns
    sql(
        'UPDATE Cache SET'
        ' store_time = ?,'
        ' expire_time = ?,'
        ' access_time = ?,'
        ' access_count = ?,'
        ' tag = ?,'
        ' size = ?,'
        ' mode = ?,'
        ' filename = ?,'
        ' value = ?'
        ' WHERE rowid = ?',
        (
            now,  # store_time
            expire_time,
            now,  # access_time
            0,  # access_count
            tag,
            size,
            mode,
            filename,
            value,
            rowid,
        ),
    )
|
| 853 |
+
|
| 854 |
+
def _row_insert(self, key, raw, now, columns):
    """Insert a new Cache row.

    :param key: serialized key (from Disk.put)
    :param int raw: whether the key is stored raw
    :param float now: current timestamp used for store/access times
    :param tuple columns: (expire_time, tag, size, mode, filename, value)
    """
    sql = self._sql
    expire_time, tag, size, mode, filename, value = columns
    sql(
        'INSERT INTO Cache('
        ' key, raw, store_time, expire_time, access_time,'
        ' access_count, tag, size, mode, filename, value'
        ') VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
        (
            key,
            raw,
            now,  # store_time
            expire_time,
            now,  # access_time
            0,  # access_count
            tag,
            size,
            mode,
            filename,
            value,
        ),
    )
|
| 876 |
+
|
| 877 |
+
def _cull(self, now, sql, cleanup, limit=None):
    """Evict rows: expired keys first, then by eviction policy.

    Must be called inside an open transaction; `sql` and `cleanup` come
    from :meth:`_transact`.

    :param float now: current timestamp
    :param sql: statement executor from the enclosing transaction
    :param cleanup: callable registering value files for post-commit delete
    :param int limit: max rows to evict (default None, use cull_limit)
    """
    cull_limit = self.cull_limit if limit is None else limit

    if cull_limit == 0:
        return

    # Evict expired keys.

    select_expired_template = (
        'SELECT %s FROM Cache'
        ' WHERE expire_time IS NOT NULL AND expire_time < ?'
        ' ORDER BY expire_time LIMIT ?'
    )

    # Same template selects filenames (for cleanup) and rowids (for the
    # DELETE) so both statements target the same rows.
    select_expired = select_expired_template % 'filename'
    rows = sql(select_expired, (now, cull_limit)).fetchall()

    if rows:
        delete_expired = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
            select_expired_template % 'rowid'
        )
        sql(delete_expired, (now, cull_limit))

        for (filename,) in rows:
            cleanup(filename)

        cull_limit -= len(rows)

        if cull_limit == 0:
            return

    # Evict keys by policy.

    select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

    if select_policy is None or self.volume() < self.size_limit:
        return

    select_filename = select_policy.format(fields='filename', now=now)
    rows = sql(select_filename, (cull_limit,)).fetchall()

    if rows:
        delete = 'DELETE FROM Cache WHERE rowid IN (%s)' % (
            select_policy.format(fields='rowid', now=now)
        )
        sql(delete, (cull_limit,))

        for (filename,) in rows:
            cleanup(filename)
|
| 926 |
+
|
| 927 |
+
def touch(self, key, expire=None, retry=False):
    """Touch `key` in cache and update `expire` time.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param float expire: seconds until item expires
        (default None, no expiry)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if key was touched
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire

    with self._transact(retry) as (sql, _):
        rows = sql(
            'SELECT rowid, expire_time FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if rows:
            ((rowid, old_expire_time),) = rows

            # Only touch rows that have not already expired.
            if old_expire_time is None or old_expire_time > now:
                sql(
                    'UPDATE Cache SET expire_time = ? WHERE rowid = ?',
                    (expire_time, rowid),
                )
                return True

    return False
|
| 963 |
+
|
| 964 |
+
def add(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Add `key` and `value` item to cache.

    Similar to `set`, but only add to cache if key not present.

    Operation is atomic. Only one concurrent add operation for a given key
    will succeed.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was added
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    expire_time = None if expire is None else now + expire
    # Value is serialized (and possibly written to a file) before the
    # transaction; the file is discarded if the add loses the race.
    size, mode, filename, db_value = self._disk.store(value, read, key=key)
    columns = (expire_time, tag, size, mode, filename, db_value)

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(
            'SELECT rowid, filename, expire_time FROM Cache'
            ' WHERE key = ? AND raw = ?',
            (db_key, raw),
        ).fetchall()

        if rows:
            ((rowid, old_filename, old_expire_time),) = rows

            if old_expire_time is None or old_expire_time > now:
                # Key already present and not expired: discard the newly
                # written value file and report failure.
                cleanup(filename)
                return False

            # Expired row: overwrite it and drop its old value file.
            cleanup(old_filename)
            self._row_update(rowid, now, columns)
        else:
            self._row_insert(db_key, raw, now, columns)

        self._cull(now, sql, cleanup)

        return True
|
| 1017 |
+
|
| 1018 |
+
def incr(self, key, delta=1, default=0, retry=False):
    """Increment value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent increment operations will be
    counted individually.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param int delta: amount to increment (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item
    :raises KeyError: if key is not found and default is None
    :raises Timeout: if database timeout occurs

    """
    now = time.time()
    db_key, raw = self._disk.put(key)
    select = (
        'SELECT rowid, expire_time, filename, value FROM Cache'
        ' WHERE key = ? AND raw = ?'
    )

    with self._transact(retry) as (sql, cleanup):
        rows = sql(select, (db_key, raw)).fetchall()

        if not rows:
            # Missing key: insert default + delta as a fresh row.
            if default is None:
                raise KeyError(key)

            value = default + delta
            columns = (None, None) + self._disk.store(
                value, False, key=key
            )
            self._row_insert(db_key, raw, now, columns)
            self._cull(now, sql, cleanup)
            return value

        ((rowid, expire_time, filename, value),) = rows

        if expire_time is not None and expire_time < now:
            # Expired row: treat as missing, overwrite it in place and
            # schedule its old value file for deletion.
            if default is None:
                raise KeyError(key)

            value = default + delta
            columns = (None, None) + self._disk.store(
                value, False, key=key
            )
            self._row_update(rowid, now, columns)
            self._cull(now, sql, cleanup)
            cleanup(filename)
            return value

        value += delta

        # Update the value and, depending on the eviction policy, also
        # the access bookkeeping columns.
        columns = 'store_time = ?, value = ?'
        update_column = EVICTION_POLICY[self.eviction_policy]['get']

        if update_column is not None:
            columns += ', ' + update_column.format(now=now)

        update = 'UPDATE Cache SET %s WHERE rowid = ?' % columns
        sql(update, (now, value, rowid))

        return value
|
| 1092 |
+
|
| 1093 |
+
def decr(self, key, delta=1, default=0, retry=False):
    """Decrement value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent decrement operations will be
    counted individually.

    Unlike Memcached, negative values are supported. Value may be
    decremented below zero.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param int delta: amount to decrement (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item
    :raises KeyError: if key is not found and default is None
    :raises Timeout: if database timeout occurs

    """
    # A decrement is an increment by the negated delta; delegating to incr
    # keeps both operations backed by a single atomic implementation.
    return self.incr(key, -delta, default, retry)
|
| 1122 |
+
|
| 1123 |
+
def get(
    self,
    key,
    default=None,
    read=False,
    expire_time=False,
    tag=False,
    retry=False,
):
    """Retrieve value from cache. If `key` is missing, return `default`.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param default: value to return if key is missing (default None)
    :param bool read: if True, return file handle to value
        (default False)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item or default if key not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)
    update_column = EVICTION_POLICY[self.eviction_policy]['get']
    # Expired rows are excluded by the WHERE clause rather than deleted
    # here; actual removal is left to culling/eviction.
    select = (
        'SELECT rowid, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )

    # Reshape `default` to match the tuple shape of a hit so callers get a
    # consistent structure whether or not the key is found.
    if expire_time and tag:
        default = (default, None, None)
    elif expire_time or tag:
        default = (default, None)

    if not self.statistics and update_column is None:
        # Fast path, no transaction necessary.

        rows = self._sql(select, (db_key, raw, time.time())).fetchall()

        if not rows:
            return default

        ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

        try:
            value = self._disk.fetch(mode, filename, db_value, read)
        except IOError:
            # Key was deleted before we could retrieve result.
            return default

    else:  # Slow path, transaction required.
        cache_hit = (
            'UPDATE Settings SET value = value + 1 WHERE key = "hits"'
        )
        cache_miss = (
            'UPDATE Settings SET value = value + 1 WHERE key = "misses"'
        )

        with self._transact(retry) as (sql, _):
            rows = sql(select, (db_key, raw, time.time())).fetchall()

            if not rows:
                if self.statistics:
                    sql(cache_miss)
                return default

            (
                (rowid, db_expire_time, db_tag, mode, filename, db_value),
            ) = rows  # noqa: E127

            try:
                value = self._disk.fetch(mode, filename, db_value, read)
            except IOError:
                # Key was deleted before we could retrieve result.
                # A vanished file counts as a miss for statistics.
                if self.statistics:
                    sql(cache_miss)
                return default

            if self.statistics:
                sql(cache_hit)

            now = time.time()
            update = 'UPDATE Cache SET %s WHERE rowid = ?'

            # Eviction-policy bookkeeping (e.g. access time/count) only
            # when the active policy tracks reads.
            if update_column is not None:
                sql(update % update_column.format(now=now), (rowid,))

    if expire_time and tag:
        return (value, db_expire_time, db_tag)
    elif expire_time:
        return (value, db_expire_time)
    elif tag:
        return (value, db_tag)
    else:
        return value
|
| 1223 |
+
|
| 1224 |
+
def __getitem__(self, key):
    """Return corresponding value for `key` from cache.

    :param key: key matching item
    :return: corresponding value
    :raises KeyError: if key is not found

    """
    # ENOVAL is a sentinel distinguishing "missing" from a stored None.
    result = self.get(key, default=ENOVAL, retry=True)
    if result is not ENOVAL:
        return result
    raise KeyError(key)
|
| 1236 |
+
|
| 1237 |
+
def read(self, key, retry=False):
    """Return file handle value corresponding to `key` from cache.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default False)
    :return: file open for reading in binary mode
    :raises KeyError: if key is not found
    :raises Timeout: if database timeout occurs

    """
    # Delegate to get() with read=True so a file handle is returned; the
    # ENOVAL sentinel distinguishes a missing key from a stored None.
    reader = self.get(key, default=ENOVAL, read=True, retry=retry)
    if reader is not ENOVAL:
        return reader
    raise KeyError(key)
|
| 1254 |
+
|
| 1255 |
+
def __contains__(self, key):
    """Return `True` if `key` matching item is found in cache.

    :param key: key matching item
    :return: True if key matching item

    """
    db_key, raw = self._disk.put(key)
    # Expired rows are filtered out by the query, so an expired key
    # reports as absent even before it is physically removed.
    query = (
        'SELECT rowid FROM Cache'
        ' WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )
    found = self._sql(query, (db_key, raw, time.time())).fetchall()
    return bool(found)
|
| 1273 |
+
|
| 1274 |
+
def pop(
    self, key, default=None, expire_time=False, tag=False, retry=False
):  # noqa: E501
    """Remove corresponding item for `key` from cache and return value.

    If `key` is missing, return `default`.

    Operation is atomic. Concurrent operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key for item
    :param default: value to return if key is missing (default None)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item or default if key not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)
    select = (
        'SELECT rowid, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE key = ? AND raw = ?'
        ' AND (expire_time IS NULL OR expire_time > ?)'
    )

    # Reshape `default` to match the tuple shape of a successful pop.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    with self._transact(retry) as (sql, _):
        rows = sql(select, (db_key, raw, time.time())).fetchall()

        if not rows:
            return default

        ((rowid, db_expire_time, db_tag, mode, filename, db_value),) = rows

        # Row is deleted inside the transaction; the value/file fetch
        # happens after commit, below.
        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

    try:
        value = self._disk.fetch(mode, filename, db_value, False)
    except IOError:
        # Key was deleted before we could retrieve result.
        return default
    finally:
        # The row is already gone, so the backing file (if any) must be
        # removed whether or not the fetch succeeded.
        if filename is not None:
            self._disk.remove(filename)

    if expire_time and tag:
        return value, db_expire_time, db_tag
    elif expire_time:
        return value, db_expire_time
    elif tag:
        return value, db_tag
    else:
        return value
|
| 1335 |
+
|
| 1336 |
+
def __delitem__(self, key, retry=True):
    """Delete corresponding item for `key` from cache.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default `True`).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default True)
    :raises KeyError: if key is not found
    :raises Timeout: if database timeout occurs

    """
    db_key, raw = self._disk.put(key)

    with self._transact(retry) as (sql, cleanup):
        # Only live (non-expired) rows qualify; an expired key raises
        # KeyError just like a missing one.
        matches = sql(
            'SELECT rowid, filename FROM Cache'
            ' WHERE key = ? AND raw = ?'
            ' AND (expire_time IS NULL OR expire_time > ?)',
            (db_key, raw, time.time()),
        ).fetchall()

        if not matches:
            raise KeyError(key)

        ((rowid, filename),) = matches
        sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
        # Schedule removal of the backing file once the transaction ends.
        cleanup(filename)

    return True
|
| 1366 |
+
|
| 1367 |
+
def delete(self, key, retry=False):
    """Delete corresponding item for `key` from cache.

    Missing keys are ignored.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param key: key matching item
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was deleted
    :raises Timeout: if database timeout occurs

    """
    # pylint: disable=unnecessary-dunder-call
    # __delitem__ raises KeyError for missing keys; translate that into a
    # False return so callers get the memcached-style boolean contract.
    try:
        deleted = self.__delitem__(key, retry=retry)
    except KeyError:
        return False
    return deleted
|
| 1386 |
+
|
| 1387 |
+
def push(
    self,
    value,
    prefix=None,
    side='back',
    expire=None,
    read=False,
    tag=None,
    retry=False,
):
    """Push `value` onto `side` of queue identified by `prefix` in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    Defaults to pushing value on back of queue. Set side to 'front' to push
    value on front of queue. Side must be one of 'back' or 'front'.

    Operation is atomic. Concurrent operations will be serialized.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.pull`.

    >>> cache = Cache()
    >>> print(cache.push('first value'))
    500000000000000
    >>> cache.get(500000000000000)
    'first value'
    >>> print(cache.push('second value'))
    500000000000001
    >>> print(cache.push('third value', side='front'))
    499999999999999
    >>> cache.push(1234, prefix='userids')
    'userids-500000000000000'

    :param value: value for item
    :param str prefix: key prefix (default None, key is integer)
    :param str side: either 'back' or 'front' (default 'back')
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key for item in cache
    :raises Timeout: if database timeout occurs

    """
    # Bound the key space so the ORDER BY query below only sees keys that
    # belong to this queue. Prefixed keys are zero-padded to 15 digits so
    # lexicographic order matches numeric order.
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    now = time.time()
    raw = True
    expire_time = None if expire is None else now + expire
    size, mode, filename, db_value = self._disk.store(value, read)
    columns = (expire_time, tag, size, mode, filename, db_value)
    # Pushing on the back means taking the current maximum key; on the
    # front, the current minimum.
    order = {'back': 'DESC', 'front': 'ASC'}
    select = (
        'SELECT key FROM Cache'
        ' WHERE ? < key AND key < ? AND raw = ?'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    with self._transact(retry, filename) as (sql, cleanup):
        rows = sql(select, (min_key, max_key, raw)).fetchall()

        if rows:
            ((key,),) = rows

            # Extract the numeric suffix from a prefixed key.
            if prefix is not None:
                num = int(key[(key.rfind('-') + 1) :])
            else:
                num = key

            if side == 'back':
                num += 1
            else:
                assert side == 'front'
                num -= 1
        else:
            # Empty queue: start at the 500-trillion midpoint so there is
            # room to push on both the front and the back.
            num = 500000000000000

        if prefix is not None:
            db_key = '{0}-{1:015d}'.format(prefix, num)
        else:
            db_key = num

        self._row_insert(db_key, raw, now, columns)
        self._cull(now, sql, cleanup)

        return db_key
|
| 1486 |
+
|
| 1487 |
+
def pull(
    self,
    prefix=None,
    default=(None, None),
    side='front',
    expire_time=False,
    tag=False,
    retry=False,
):
    """Pull key and value item pair from `side` of queue in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    If queue is empty, return default.

    Defaults to pulling key and value item pairs from front of queue. Set
    side to 'back' to pull from back of queue. Side must be one of 'front'
    or 'back'.

    Operation is atomic. Concurrent operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.push` and `Cache.get`.

    >>> cache = Cache()
    >>> cache.pull()
    (None, None)
    >>> for letter in 'abc':
    ...     print(cache.push(letter))
    500000000000000
    500000000000001
    500000000000002
    >>> key, value = cache.pull()
    >>> print(key)
    500000000000000
    >>> value
    'a'
    >>> _, value = cache.pull(side='back')
    >>> value
    'c'
    >>> cache.push(1234, 'userids')
    'userids-500000000000000'
    >>> _, value = cache.pull('userids')
    >>> value
    1234

    :param str prefix: key prefix (default None, key is integer)
    :param default: value to return if key is missing
        (default (None, None))
    :param str side: either 'front' or 'back' (default 'front')
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair or default if queue is empty
    :raises Timeout: if database timeout occurs

    """
    # Caution: Nearly identical code exists in Cache.peek
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    order = {'front': 'ASC', 'back': 'DESC'}
    select = (
        'SELECT rowid, key, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    # Reshape `default` to match the tuple shape of a successful pull.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    # Outer loop: retry the whole pull if the value's file vanished before
    # it could be read. Inner loop: keep deleting expired items until a
    # live one (or an empty queue) is found.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, (min_key, max_key)).fetchall()

                if not rows:
                    return default

                (
                    (rowid, key, db_expire, db_tag, mode, name, db_value),
                ) = rows

                sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

                if db_expire is not None and db_expire < time.time():
                    # Expired: discard its file and try the next item.
                    cleanup(name)
                else:
                    break

        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        finally:
            # Row already deleted above, so always remove the file.
            if name is not None:
                self._disk.remove(name)
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1605 |
+
|
| 1606 |
+
def peek(
    self,
    prefix=None,
    default=(None, None),
    side='front',
    expire_time=False,
    tag=False,
    retry=False,
):
    """Peek at key and value item pair from `side` of queue in cache.

    When prefix is None, integer keys are used. Otherwise, string keys are
    used in the format "prefix-integer". Integer starts at 500 trillion.

    If queue is empty, return default.

    Defaults to peeking at key and value item pairs from front of queue.
    Set side to 'back' to pull from back of queue. Side must be one of
    'front' or 'back'.

    Expired items are deleted from cache. Operation is atomic. Concurrent
    operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    See also `Cache.pull` and `Cache.push`.

    >>> cache = Cache()
    >>> for letter in 'abc':
    ...     print(cache.push(letter))
    500000000000000
    500000000000001
    500000000000002
    >>> key, value = cache.peek()
    >>> print(key)
    500000000000000
    >>> value
    'a'
    >>> key, value = cache.peek(side='back')
    >>> print(key)
    500000000000002
    >>> value
    'c'

    :param str prefix: key prefix (default None, key is integer)
    :param default: value to return if key is missing
        (default (None, None))
    :param str side: either 'front' or 'back' (default 'front')
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair or default if queue is empty
    :raises Timeout: if database timeout occurs

    """
    # Caution: Nearly identical code exists in Cache.pull
    if prefix is None:
        min_key = 0
        max_key = 999999999999999
    else:
        min_key = prefix + '-000000000000000'
        max_key = prefix + '-999999999999999'

    order = {'front': 'ASC', 'back': 'DESC'}
    select = (
        'SELECT rowid, key, expire_time, tag, mode, filename, value'
        ' FROM Cache WHERE ? < key AND key < ? AND raw = 1'
        ' ORDER BY key %s LIMIT 1'
    ) % order[side]

    # Reshape `default` to match the tuple shape of a successful peek.
    if expire_time and tag:
        default = default, None, None
    elif expire_time or tag:
        default = default, None

    # Outer loop: retry if the value's file vanished before it could be
    # read. Inner loop: delete expired items until a live one (or an
    # empty queue) is found. Unlike pull, the live row is NOT deleted.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, (min_key, max_key)).fetchall()

                if not rows:
                    return default

                (
                    (rowid, key, db_expire, db_tag, mode, name, db_value),
                ) = rows

                if db_expire is not None and db_expire < time.time():
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                    cleanup(name)
                else:
                    break

        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1716 |
+
|
| 1717 |
+
def peekitem(self, last=True, expire_time=False, tag=False, retry=False):
    """Peek at key and value item pair in cache based on iteration order.

    Expired items are deleted from cache. Operation is atomic. Concurrent
    operations will be serialized.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    >>> cache = Cache()
    >>> for num, letter in enumerate('abc'):
    ...     cache[letter] = num
    >>> cache.peekitem()
    ('c', 2)
    >>> cache.peekitem(last=False)
    ('a', 0)

    :param bool last: last item in iteration order (default True)
    :param bool expire_time: if True, return expire_time in tuple
        (default False)
    :param bool tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: key and value item pair
    :raises KeyError: if cache is empty
    :raises Timeout: if database timeout occurs

    """
    # Iteration order is rowid order; `last` indexes into (ASC, DESC).
    order = ('ASC', 'DESC')
    select = (
        'SELECT rowid, key, raw, expire_time, tag, mode, filename, value'
        ' FROM Cache ORDER BY rowid %s LIMIT 1'
    ) % order[last]

    # Outer loop: retry if the value's file vanished before it could be
    # read. Inner loop: delete expired items until a live one is found.
    while True:
        while True:
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select).fetchall()

                if not rows:
                    raise KeyError('dictionary is empty')

                (
                    (
                        rowid,
                        db_key,
                        raw,
                        db_expire,
                        db_tag,
                        mode,
                        name,
                        db_value,
                    ),
                ) = rows

                if db_expire is not None and db_expire < time.time():
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))
                    cleanup(name)
                else:
                    break

        # Convert the stored database key back to the caller's key form.
        key = self._disk.get(db_key, raw)

        try:
            value = self._disk.fetch(mode, name, db_value, False)
        except IOError:
            # Key was deleted before we could retrieve result.
            continue
        break

    if expire_time and tag:
        return (key, value), db_expire, db_tag
    elif expire_time:
        return (key, value), db_expire
    elif tag:
        return (key, value), db_tag
    else:
        return key, value
|
| 1794 |
+
|
| 1795 |
+
def memoize(
    self, name=None, typed=False, expire=None, tag=None, ignore=()
):
    """Memoizing cache decorator.

    Decorator to wrap callable with memoizing function using cache.
    Repeated calls with the same arguments will lookup result in cache and
    avoid function evaluation.

    If name is set to None (default), the callable name will be determined
    automatically.

    When expire is set to zero, function results will not be set in the
    cache. Cache lookups still occur, however. Read
    :doc:`case-study-landing-page-caching` for example usage.

    If typed is set to True, function arguments of different types will be
    cached separately. For example, f(3) and f(3.0) will be treated as
    distinct calls with distinct results.

    The original underlying function is accessible through the __wrapped__
    attribute. This is useful for introspection, for bypassing the cache,
    or for rewrapping the function with a different cache.

    >>> from diskcache import Cache
    >>> cache = Cache()
    >>> @cache.memoize(expire=1, tag='fib')
    ... def fibonacci(number):
    ...     if number == 0:
    ...         return 0
    ...     elif number == 1:
    ...         return 1
    ...     else:
    ...         return fibonacci(number - 1) + fibonacci(number - 2)
    >>> print(fibonacci(100))
    354224848179261915075

    An additional `__cache_key__` attribute can be used to generate the
    cache key used for the given arguments.

    >>> key = fibonacci.__cache_key__(100)
    >>> print(cache[key])
    354224848179261915075

    Remember to call memoize when decorating a callable. If you forget,
    then a TypeError will occur. Note the lack of parenthenses after
    memoize below:

    >>> @cache.memoize
    ... def test():
    ...     pass
    Traceback (most recent call last):
        ...
    TypeError: name cannot be callable

    :param cache: cache to store callable arguments and return values
    :param str name: name given for callable (default None, automatic)
    :param bool typed: cache different types separately (default False)
    :param float expire: seconds until arguments expire
        (default None, no expiry)
    :param str tag: text to associate with arguments (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: callable decorator
    """
    # Caution: Nearly identical code exists in DjangoCache.memoize
    # A callable `name` means the user wrote @cache.memoize without
    # parentheses; fail loudly rather than silently mis-wrapping.
    if callable(name):
        raise TypeError('name cannot be callable')

    def decorator(func):
        """Decorator created by memoize() for callable `func`."""
        # `base` seeds every cache key so different callables (or an
        # explicit name override) never collide.
        base = (full_name(func),) if name is None else (name,)

        @ft.wraps(func)
        def wrapper(*args, **kwargs):
            """Wrapper for callable to cache arguments and return values."""
            key = wrapper.__cache_key__(*args, **kwargs)
            result = self.get(key, default=ENOVAL, retry=True)

            if result is ENOVAL:
                result = func(*args, **kwargs)
                # expire == 0 means: look up but never store (see
                # docstring); only store for None (no expiry) or > 0.
                if expire is None or expire > 0:
                    self.set(key, result, expire, tag=tag, retry=True)

            return result

        def __cache_key__(*args, **kwargs):
            """Make key for cache given function arguments."""
            return args_to_key(base, args, kwargs, typed, ignore)

        # Expose key generation so callers can inspect/invalidate entries.
        wrapper.__cache_key__ = __cache_key__
        return wrapper

    return decorator
|
| 1889 |
+
|
| 1890 |
+
def check(self, fix=False, retry=False):
    """Check database and file system consistency.

    Intended for use in testing and post-mortem error analysis.

    While checking the Cache table for consistency, a writer lock is held
    on the database. The lock blocks other cache clients from writing to
    the database. For caches with many file references, the lock may be
    held for a long time. For example, local benchmarking shows that a
    cache with 1,000 file references takes ~60ms to check.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param bool fix: correct inconsistencies
    :param bool retry: retry if database timeout occurs (default False)
    :return: list of warnings
    :raises Timeout: if database timeout occurs

    """
    # pylint: disable=access-member-before-definition,W0201
    # Collect every warning raised below and return them to the caller
    # instead of emitting them to the global warning machinery.
    with warnings.catch_warnings(record=True) as warns:
        sql = self._sql

        # Check integrity of database.

        rows = sql('PRAGMA integrity_check').fetchall()

        # A healthy database returns a single row containing 'ok';
        # anything else is one message per detected problem.
        if len(rows) != 1 or rows[0][0] != 'ok':
            for (message,) in rows:
                warnings.warn(message)

        if fix:
            sql('VACUUM')

        # Remaining checks run under a write transaction so the table and
        # file system are compared against a consistent snapshot.
        with self._transact(retry) as (sql, _):

            # Check Cache.filename against file system.

            filenames = set()
            select = (
                'SELECT rowid, size, filename FROM Cache'
                ' WHERE filename IS NOT NULL'
            )

            rows = sql(select).fetchall()

            for rowid, size, filename in rows:
                full_path = op.join(self._directory, filename)
                filenames.add(full_path)

                if op.exists(full_path):
                    real_size = op.getsize(full_path)

                    if size != real_size:
                        message = 'wrong file size: %s, %d != %d'
                        args = full_path, real_size, size
                        warnings.warn(message % args)

                        if fix:
                            sql(
                                'UPDATE Cache SET size = ?'
                                ' WHERE rowid = ?',
                                (real_size, rowid),
                            )

                    continue

                # Row references a file that no longer exists.
                warnings.warn('file not found: %s' % full_path)

                if fix:
                    sql('DELETE FROM Cache WHERE rowid = ?', (rowid,))

            # Check file system against Cache.filename.

            for dirpath, _, files in os.walk(self._directory):
                paths = [op.join(dirpath, filename) for filename in files]
                # Files on disk that no Cache row references.
                error = set(paths) - filenames

                for full_path in error:
                    # Skip the SQLite database files themselves.
                    if DBNAME in full_path:
                        continue

                    message = 'unknown file: %s' % full_path
                    warnings.warn(message, UnknownFileWarning)

                    if fix:
                        os.remove(full_path)

            # Check for empty directories.

            for dirpath, dirs, files in os.walk(self._directory):
                if not (dirs or files):
                    message = 'empty directory: %s' % dirpath
                    warnings.warn(message, EmptyDirWarning)

                    if fix:
                        os.rmdir(dirpath)

            # Check Settings.count against count of Cache rows.

            self.reset('count')
            ((count,),) = sql('SELECT COUNT(key) FROM Cache').fetchall()

            if self.count != count:
                message = 'Settings.count != COUNT(Cache.key); %d != %d'
                warnings.warn(message % (self.count, count))

                if fix:
                    sql(
                        'UPDATE Settings SET value = ? WHERE key = ?',
                        (count, 'count'),
                    )

            # Check Settings.size against sum of Cache.size column.

            self.reset('size')
            select_size = 'SELECT COALESCE(SUM(size), 0) FROM Cache'
            ((size,),) = sql(select_size).fetchall()

            if self.size != size:
                message = 'Settings.size != SUM(Cache.size); %d != %d'
                warnings.warn(message % (self.size, size))

                if fix:
                    sql(
                        'UPDATE Settings SET value = ? WHERE key =?',
                        (size, 'size'),
                    )

    return warns
|
| 2021 |
+
|
| 2022 |
+
def create_tag_index(self):
    """Create tag index on cache database.

    It is better to initialize cache with `tag_index=True` than use this.

    :raises Timeout: if database timeout occurs

    """
    # Index (tag, rowid) so tag-based operations avoid full table scans.
    statement = (
        'CREATE INDEX IF NOT EXISTS Cache_tag_rowid ON Cache(tag, rowid)'
    )
    self._sql(statement)
    self.reset('tag_index', 1)
|
| 2033 |
+
|
| 2034 |
+
def drop_tag_index(self):
    """Drop tag index on cache database.

    :raises Timeout: if database timeout occurs

    """
    # Remove the index and record its absence in Settings.
    statement = 'DROP INDEX IF EXISTS Cache_tag_rowid'
    self._sql(statement)
    self.reset('tag_index', 0)
|
| 2043 |
+
|
| 2044 |
+
def evict(self, tag, retry=False):
    """Remove items with matching `tag` from cache.

    Removing items is an iterative process. In each iteration, a subset of
    items is removed. Concurrent writes may occur between iterations.

    If a :exc:`Timeout` occurs, the first element of the exception's
    `args` attribute will be the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param str tag: tag identifying items
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs

    """
    statement = (
        'SELECT rowid, filename FROM Cache'
        ' WHERE tag = ? AND rowid > ?'
        ' ORDER BY rowid LIMIT ?'
    )
    # args[1] (the rowid cursor) is advanced by _select_delete between
    # batches of 100 rows; hence arg_index=1.
    parameters = [tag, 0, 100]
    return self._select_delete(
        statement, parameters, arg_index=1, retry=retry
    )
|
| 2070 |
+
|
| 2071 |
+
def expire(self, now=None, retry=False):
    """Remove expired items from cache.

    Removing items is an iterative process. In each iteration, a subset of
    items is removed. Concurrent writes may occur between iterations.

    If a :exc:`Timeout` occurs, the first element of the exception's
    `args` attribute will be the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param float now: current time (default None, ``time.time()`` used)
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed
    :raises Timeout: if database timeout occurs

    """
    select = (
        'SELECT rowid, expire_time, filename FROM Cache'
        ' WHERE ? < expire_time AND expire_time < ?'
        ' ORDER BY expire_time LIMIT ?'
    )
    # Fix: the previous `now or time.time()` treated an explicit `now=0`
    # (the epoch) as "no value" and substituted the current time. Compare
    # against None so only the documented default triggers time.time().
    if now is None:
        now = time.time()
    # args[1] is the expire_time cursor advanced between batches of 100
    # rows by _select_delete... except arg_index defaults to 0 here; the
    # row_index=1 selects expire_time as the resume value.
    args = [0, now, 100]
    return self._select_delete(select, args, row_index=1, retry=retry)
|
| 2097 |
+
|
| 2098 |
+
def cull(self, retry=False):
    """Cull items from cache until volume is less than size limit.

    Removing items is an iterative process. In each iteration, a subset of
    items is removed. Concurrent writes may occur between iterations.

    If a :exc:`Timeout` occurs, the first element of the exception's
    `args` attribute will be the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed
    :raises Timeout: if database timeout occurs

    """
    now = time.time()

    # Remove expired items.

    count = self.expire(now)

    # Remove items by policy.

    select_policy = EVICTION_POLICY[self.eviction_policy]['cull']

    # A policy of 'none' has no cull query; nothing more to do.
    if select_policy is None:
        return 0

    select_filename = select_policy.format(fields='filename', now=now)

    try:
        while self.volume() > self.size_limit:
            # Each transaction removes at most 10 rows so writer locks
            # stay short; concurrent writers interleave between loops.
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select_filename, (10,)).fetchall()

                if not rows:
                    break

                count += len(rows)
                delete = (
                    'DELETE FROM Cache WHERE rowid IN (%s)'
                    % select_policy.format(fields='rowid', now=now)
                )
                sql(delete, (10,))

                # Queue file deletions; cleanup defers them until after
                # the transaction commits.
                for (filename,) in rows:
                    cleanup(filename)
    except Timeout:
        # Report how many items were removed before the timeout.
        raise Timeout(count) from None

    return count
|
| 2152 |
+
|
| 2153 |
+
def clear(self, retry=False):
    """Remove all items from cache.

    Removing items is an iterative process. In each iteration, a subset of
    items is removed. Concurrent writes may occur between iterations.

    If a :exc:`Timeout` occurs, the first element of the exception's
    `args` attribute will be the number of items removed before the
    exception occurred.

    Raises :exc:`Timeout` error when database timeout occurs and `retry` is
    `False` (default).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs

    """
    statement = (
        'SELECT rowid, filename FROM Cache'
        ' WHERE rowid > ?'
        ' ORDER BY rowid LIMIT ?'
    )
    # The rowid cursor (args[0]) is advanced by _select_delete between
    # batches of 100 rows.
    parameters = [0, 100]
    return self._select_delete(statement, parameters, retry=retry)
|
| 2178 |
+
|
| 2179 |
+
def _select_delete(
    self, select, args, row_index=0, arg_index=0, retry=False
):
    """Repeatedly select and delete batches of rows until none match.

    `select` must return (..., filename) rows; the last column of each
    row is passed to the transaction's cleanup callable. After each
    batch, ``args[arg_index]`` is advanced to ``row[row_index]`` of the
    final row so the next query resumes past the rows just deleted.

    :param str select: paginated SELECT statement
    :param list args: mutable query parameters (cursor is updated in place)
    :param int row_index: column used as the resume cursor (default 0)
    :param int arg_index: position in `args` holding the cursor (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: count of rows removed
    :raises Timeout: if database timeout occurs; args[0] is the count so far
    """
    count = 0
    delete = 'DELETE FROM Cache WHERE rowid IN (%s)'

    try:
        while True:
            # One short transaction per batch keeps writer locks brief.
            with self._transact(retry) as (sql, cleanup):
                rows = sql(select, args).fetchall()

                if not rows:
                    break

                count += len(rows)
                sql(delete % ','.join(str(row[0]) for row in rows))

                for row in rows:
                    # Advance the pagination cursor and queue file
                    # deletion (performed after commit).
                    args[arg_index] = row[row_index]
                    cleanup(row[-1])

    except Timeout:
        raise Timeout(count) from None

    return count
|
| 2204 |
+
|
| 2205 |
+
def iterkeys(self, reverse=False):
    """Iterate Cache keys in database sort order.

    >>> cache = Cache()
    >>> for key in [4, 1, 3, 0, 2]:
    ...     cache[key] = key
    >>> list(cache.iterkeys())
    [0, 1, 2, 3, 4]
    >>> list(cache.iterkeys(reverse=True))
    [4, 3, 2, 1, 0]

    :param bool reverse: reverse sort order (default False)
    :return: iterator of Cache keys

    """
    sql = self._sql
    limit = 100
    _disk_get = self._disk.get

    # Keyset pagination: fetch the first (key, raw) pair, then page
    # forward from the last pair seen. Rows are ordered by (key, raw)
    # so the WHERE clause below resumes exactly after the cursor.
    if reverse:
        select = (
            'SELECT key, raw FROM Cache'
            ' ORDER BY key DESC, raw DESC LIMIT 1'
        )
        iterate = (
            'SELECT key, raw FROM Cache'
            ' WHERE key = ? AND raw < ? OR key < ?'
            ' ORDER BY key DESC, raw DESC LIMIT ?'
        )
    else:
        select = (
            'SELECT key, raw FROM Cache'
            ' ORDER BY key ASC, raw ASC LIMIT 1'
        )
        iterate = (
            'SELECT key, raw FROM Cache'
            ' WHERE key = ? AND raw > ? OR key > ?'
            ' ORDER BY key ASC, raw ASC LIMIT ?'
        )

    row = sql(select).fetchall()

    if row:
        ((key, raw),) = row
    else:
        # Empty cache: nothing to yield.
        return

    # Convert the stored (key, raw) pair back into the Python key.
    yield _disk_get(key, raw)

    while True:
        rows = sql(iterate, (key, raw, key, limit)).fetchall()

        if not rows:
            break

        # Loop variables double as the cursor for the next page.
        for key, raw in rows:
            yield _disk_get(key, raw)
|
| 2262 |
+
|
| 2263 |
+
def _iter(self, ascending=True):
    """Yield cache keys in rowid order after an initial ready signal.

    The first ``yield`` emits ``None`` before any rows are read so the
    caller (``__iter__`` / ``__reversed__``) can prime the generator —
    forcing the MAX(rowid) snapshot to be taken — before iteration
    begins. Rows added after the snapshot (rowid >= bound) are not
    visited.

    :param bool ascending: iterate in increasing rowid order (default True)
    """
    sql = self._sql
    rows = sql('SELECT MAX(rowid) FROM Cache').fetchall()
    ((max_rowid,),) = rows
    yield  # Signal ready.

    # MAX(rowid) is NULL for an empty table.
    if max_rowid is None:
        return

    bound = max_rowid + 1
    limit = 100
    _disk_get = self._disk.get
    # Cursor starts just outside the range on the side we begin from.
    rowid = 0 if ascending else bound
    select = (
        'SELECT rowid, key, raw FROM Cache'
        ' WHERE ? < rowid AND rowid < ?'
        ' ORDER BY rowid %s LIMIT ?'
    ) % ('ASC' if ascending else 'DESC')

    while True:
        # The moving rowid bound pages through the table in batches.
        if ascending:
            args = (rowid, bound, limit)
        else:
            args = (0, rowid, limit)

        rows = sql(select, args).fetchall()

        if not rows:
            break

        # `rowid` from the last row becomes the cursor for the next page.
        for rowid, key, raw in rows:
            yield _disk_get(key, raw)
|
| 2295 |
+
|
| 2296 |
+
def __iter__(self):
    """Iterate keys in cache including expired items."""
    keys = self._iter()
    next(keys)  # Consume the ready signal before handing out the iterator.
    return keys
|
| 2301 |
+
|
| 2302 |
+
def __reversed__(self):
    """Reverse iterate keys in cache including expired items."""
    keys = self._iter(ascending=False)
    next(keys)  # Consume the ready signal before handing out the iterator.
    return keys
|
| 2307 |
+
|
| 2308 |
+
def stats(self, enable=True, reset=False):
    """Return cache statistics hits and misses.

    :param bool enable: enable collecting statistics (default True)
    :param bool reset: reset hits and misses to 0 (default False)
    :return: (hits, misses)

    """
    # pylint: disable=E0203,W0201
    # Read the counters before optionally zeroing them.
    hits = self.reset('hits')
    misses = self.reset('misses')

    if reset:
        self.reset('hits', 0)
        self.reset('misses', 0)

    self.reset('statistics', enable)

    return (hits, misses)
|
| 2326 |
+
|
| 2327 |
+
def volume(self):
    """Return estimated total size of cache on disk.

    :return: size in bytes

    """
    # Database footprint (pages * page size) plus tracked value-file bytes.
    ((page_count,),) = self._sql('PRAGMA page_count').fetchall()
    return self._page_size * page_count + self.reset('size')
|
| 2336 |
+
|
| 2337 |
+
def close(self):
    """Close database connection."""
    connection = getattr(self._local, 'con', None)

    # No thread-local connection was ever created; nothing to do.
    if connection is None:
        return

    connection.close()

    # Another thread may race us deleting the attribute; ignore that.
    try:
        delattr(self._local, 'con')
    except AttributeError:
        pass
|
| 2350 |
+
|
| 2351 |
+
def __enter__(self):
    """Enter context manager; eagerly create the thread-local connection."""
    # Create connection in thread.
    # pylint: disable=unused-variable
    connection = self._con  # noqa
    return self
|
| 2356 |
+
|
| 2357 |
+
def __exit__(self, *exception):
    """Exit context manager; close the database connection."""
    self.close()
|
| 2359 |
+
|
| 2360 |
+
def __len__(self):
    """Count of items in cache including expired items."""
    # 'count' is maintained in the Settings table; reset() reloads it.
    return self.reset('count')
|
| 2363 |
+
|
| 2364 |
+
def __getstate__(self):
    """Pickle support: state is (directory, timeout, disk type).

    The live SQLite connection is not pickled; __setstate__ re-opens
    the cache from these constructor arguments.
    """
    return (self.directory, self.timeout, type(self.disk))
|
| 2366 |
+
|
| 2367 |
+
def __setstate__(self, state):
    """Pickle support: re-initialize from (directory, timeout, disk type)."""
    self.__init__(*state)
|
| 2369 |
+
|
| 2370 |
+
def reset(self, key, value=ENOVAL, update=True):
    """Reset `key` and `value` item from Settings table.

    Use `reset` to update the value of Cache settings correctly. Cache
    settings are stored in the Settings table of the SQLite database. If
    `update` is ``False`` then no attempt is made to update the database.

    If `value` is not given, it is reloaded from the Settings
    table. Otherwise, the Settings table is updated.

    Settings with the ``disk_`` prefix correspond to Disk
    attributes. Updating the value will change the unprefixed attribute on
    the associated Disk instance.

    Settings with the ``sqlite_`` prefix correspond to SQLite
    pragmas. Updating the value will execute the corresponding PRAGMA
    statement.

    SQLite PRAGMA statements may be executed before the Settings table
    exists in the database by setting `update` to ``False``.

    :param str key: Settings key for item
    :param value: value for item (optional)
    :param bool update: update database Settings table (default True)
    :return: updated value for item
    :raises Timeout: if database timeout occurs

    """
    sql = self._sql
    sql_retry = self._sql_retry

    # No value given: reload it from the Settings table and mirror it
    # onto the instance attribute.
    if value is ENOVAL:
        select = 'SELECT value FROM Settings WHERE key = ?'
        ((value,),) = sql_retry(select, (key,)).fetchall()
        setattr(self, key, value)
        return value

    if update:
        statement = 'UPDATE Settings SET value = ? WHERE key = ?'
        sql_retry(statement, (value, key))

    if key.startswith('sqlite_'):
        pragma = key[7:]

        # 2016-02-17 GrantJ - PRAGMA and isolation_level=None
        # don't always play nicely together. Retry setting the
        # PRAGMA. I think some PRAGMA statements expect to
        # immediately take an EXCLUSIVE lock on the database. I
        # can't find any documentation for this but without the
        # retry, stress will intermittently fail with multiple
        # processes.

        # 2018-11-05 GrantJ - Avoid setting pragma values that
        # are already set. Pragma settings like auto_vacuum and
        # journal_mode can take a long time or may not work after
        # tables have been created.

        start = time.time()
        while True:
            try:
                try:
                    ((old_value,),) = sql(
                        'PRAGMA %s' % (pragma)
                    ).fetchall()
                    update = old_value != value
                except ValueError:
                    # PRAGMA returned no single row to unpack;
                    # fall back to setting it unconditionally.
                    update = True
                if update:
                    sql('PRAGMA %s = %s' % (pragma, value)).fetchall()
                break
            except sqlite3.OperationalError as exc:
                # Only retry lock contention; re-raise anything else
                # and give up entirely after 60 seconds.
                if str(exc) != 'database is locked':
                    raise
                diff = time.time() - start
                if diff > 60:
                    raise
                time.sleep(0.001)
    elif key.startswith('disk_'):
        # Mirror the setting onto the unprefixed Disk attribute.
        attr = key[5:]
        setattr(self._disk, attr, value)

    setattr(self, key, value)
    return value
|
.venv/lib/python3.11/site-packages/diskcache/djangocache.py
ADDED
|
@@ -0,0 +1,456 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Django-compatible disk and file backed cache."""
|
| 2 |
+
|
| 3 |
+
from functools import wraps
|
| 4 |
+
|
| 5 |
+
from django.core.cache.backends.base import BaseCache
|
| 6 |
+
|
| 7 |
+
try:
|
| 8 |
+
from django.core.cache.backends.base import DEFAULT_TIMEOUT
|
| 9 |
+
except ImportError: # pragma: no cover
|
| 10 |
+
# For older versions of Django simply use 300 seconds.
|
| 11 |
+
DEFAULT_TIMEOUT = 300
|
| 12 |
+
|
| 13 |
+
from .core import ENOVAL, args_to_key, full_name
|
| 14 |
+
from .fanout import FanoutCache
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class DjangoCache(BaseCache):
|
| 18 |
+
"""Django-compatible disk and file backed cache."""
|
| 19 |
+
|
| 20 |
+
def __init__(self, directory, params):
    """Initialize DjangoCache instance.

    :param str directory: cache directory
    :param dict params: cache parameters

    """
    super().__init__(params)
    # Backend-specific parameters with library defaults.
    shard_count = params.get('SHARDS', 8)
    db_timeout = params.get('DATABASE_TIMEOUT', 0.010)
    extra_options = params.get('OPTIONS', {})
    self._cache = FanoutCache(
        directory, shard_count, db_timeout, **extra_options
    )
|
| 32 |
+
|
| 33 |
+
@property
def directory(self):
    """Cache directory."""
    # Delegate to the underlying FanoutCache.
    fanout = self._cache
    return fanout.directory
|
| 37 |
+
|
| 38 |
+
def cache(self, name):
    """Return Cache with given `name` in subdirectory.

    :param str name: subdirectory name for Cache
    :return: Cache with given name

    """
    fanout = self._cache
    return fanout.cache(name)
|
| 46 |
+
|
| 47 |
+
def deque(self, name, maxlen=None):
    """Return Deque with given `name` in subdirectory.

    :param str name: subdirectory name for Deque
    :param maxlen: max length (default None, no max)
    :return: Deque with given name

    """
    fanout = self._cache
    return fanout.deque(name, maxlen=maxlen)
|
| 56 |
+
|
| 57 |
+
def index(self, name):
    """Return Index with given `name` in subdirectory.

    :param str name: subdirectory name for Index
    :return: Index with given name

    """
    fanout = self._cache
    return fanout.index(name)
|
| 65 |
+
|
| 66 |
+
def add(
    self,
    key,
    value,
    timeout=DEFAULT_TIMEOUT,
    version=None,
    read=False,
    tag=None,
    retry=True,
):
    """Set a value in the cache if the key does not already exist. If
    timeout is given, that timeout will be used for the key; otherwise the
    default cache timeout will be used.

    Return True if the value was stored, False otherwise.

    :param key: key for item
    :param value: value for item
    :param float timeout: seconds until the item expires
        (default 300 seconds)
    :param int version: key version number (default None, cache parameter)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default True)
    :return: True if item was added

    """
    # pylint: disable=arguments-differ
    # Translate Django key/timeout conventions before delegating.
    versioned_key = self.make_key(key, version=version)
    expire = self.get_backend_timeout(timeout=timeout)
    return self._cache.add(versioned_key, value, expire, read, tag, retry)
|
| 97 |
+
|
| 98 |
+
def get(
    self,
    key,
    default=None,
    version=None,
    read=False,
    expire_time=False,
    tag=False,
    retry=False,
):
    """Fetch a given key from the cache. If the key does not exist, return
    default, which itself defaults to None.

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param int version: key version number (default None, cache parameter)
    :param bool read: if True, return file handle to value
        (default False)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item if key is found else default

    """
    # pylint: disable=arguments-differ
    versioned_key = self.make_key(key, version=version)
    return self._cache.get(
        versioned_key, default, read, expire_time, tag, retry
    )
|
| 126 |
+
|
| 127 |
+
def read(self, key, version=None):
    """Return file handle corresponding to `key` from Cache.

    :param key: Python key to retrieve
    :param int version: key version number (default None, cache parameter)
    :return: file open for reading in binary mode
    :raises KeyError: if key is not found

    """
    versioned_key = self.make_key(key, version=version)
    return self._cache.read(versioned_key)
|
| 138 |
+
|
| 139 |
+
def set(
    self,
    key,
    value,
    timeout=DEFAULT_TIMEOUT,
    version=None,
    read=False,
    tag=None,
    retry=True,
):
    """Set a value in the cache. If timeout is given, that timeout will be
    used for the key; otherwise the default cache timeout will be used.

    :param key: key for item
    :param value: value for item
    :param float timeout: seconds until the item expires
        (default 300 seconds)
    :param int version: key version number (default None, cache parameter)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default True)
    :return: True if item was set

    """
    # pylint: disable=arguments-differ
    # Translate Django key/timeout conventions before delegating.
    versioned_key = self.make_key(key, version=version)
    expire = self.get_backend_timeout(timeout=timeout)
    return self._cache.set(versioned_key, value, expire, read, tag, retry)
|
| 167 |
+
|
| 168 |
+
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None, retry=True):
    """Touch a key in the cache. If timeout is given, that timeout will be
    used for the key; otherwise the default cache timeout will be used.

    :param key: key for item
    :param float timeout: seconds until the item expires
        (default 300 seconds)
    :param int version: key version number (default None, cache parameter)
    :param bool retry: retry if database timeout occurs (default True)
    :return: True if key was touched

    """
    # pylint: disable=arguments-differ
    versioned_key = self.make_key(key, version=version)
    expire = self.get_backend_timeout(timeout=timeout)
    return self._cache.touch(versioned_key, expire, retry)
|
| 184 |
+
|
| 185 |
+
def pop(
    self,
    key,
    default=None,
    version=None,
    expire_time=False,
    tag=False,
    retry=True,
):
    """Remove corresponding item for `key` from cache and return value.

    If `key` is missing, return `default`.

    Operation is atomic. Concurrent operations will be serialized.

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param int version: key version number (default None, cache parameter)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default True)
    :return: value for item if key is found else default

    """
    versioned_key = self.make_key(key, version=version)
    return self._cache.pop(versioned_key, default, expire_time, tag, retry)
|
| 212 |
+
|
| 213 |
+
def delete(self, key, version=None, retry=True):
    """Delete a key from the cache, failing silently.

    :param key: key for item
    :param int version: key version number (default None, cache parameter)
    :param bool retry: retry if database timeout occurs (default True)
    :return: True if item was deleted

    """
    # pylint: disable=arguments-differ
    return self._cache.delete(self.make_key(key, version=version), retry)
|
| 225 |
+
|
| 226 |
+
def incr(self, key, delta=1, version=None, default=None, retry=True):
    """Increment value by delta for item with key.

    If key is missing and default is None then raise ValueError. Else if
    key is missing and default is not None then use default for value.

    Operation is atomic. All concurrent increment operations will be
    counted individually.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    :param key: key for item
    :param int delta: amount to increment (default 1)
    :param int version: key version number (default None, cache parameter)
    :param int default: value if key is missing (default None)
    :param bool retry: retry if database timeout occurs (default True)
    :return: new value for item on success else None
    :raises ValueError: if key is not found and default is None

    """
    # pylint: disable=arguments-differ
    full_key = self.make_key(key, version=version)
    try:
        return self._cache.incr(full_key, delta, default, retry)
    except KeyError:
        # Translate the backend's KeyError into Django's expected
        # ValueError; suppress the original context.
        raise ValueError("Key '%s' not found" % full_key) from None
|
| 254 |
+
|
| 255 |
+
def decr(self, key, delta=1, version=None, default=None, retry=True):
    """Decrement value by delta for item with key.

    If key is missing and default is None then raise ValueError. Else if
    key is missing and default is not None then use default for value.

    Operation is atomic. All concurrent decrement operations will be
    counted individually.

    Unlike Memcached, negative values are supported. Value may be
    decremented below zero.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    :param key: key for item
    :param int delta: amount to decrement (default 1)
    :param int version: key version number (default None, cache parameter)
    :param int default: value if key is missing (default None)
    :param bool retry: retry if database timeout occurs (default True)
    :return: new value for item on success else None
    :raises ValueError: if key is not found and default is None

    """
    # pylint: disable=arguments-differ
    # A decrement is an increment by the negated delta.
    return self.incr(key, -delta, version, default, retry)
|
| 282 |
+
|
| 283 |
+
def has_key(self, key, version=None):
    """Return True if the key is in the cache and has not expired.

    :param key: key for item
    :param int version: key version number (default None, cache parameter)
    :return: True if key is found

    """
    return self.make_key(key, version=version) in self._cache
|
| 293 |
+
|
| 294 |
+
def expire(self):
    """Remove expired items from the underlying cache.

    :return: count of items removed

    """
    return self._cache.expire()
|
| 301 |
+
|
| 302 |
+
def stats(self, enable=True, reset=False):
    """Return cache statistics as a (hits, misses) pair.

    :param bool enable: enable collecting statistics (default True)
    :param bool reset: reset hits and misses to 0 (default False)
    :return: (hits, misses)

    """
    return self._cache.stats(enable=enable, reset=reset)
|
| 311 |
+
|
| 312 |
+
def create_tag_index(self):
    """Create tag index on the cache database.

    Better to initialize the cache with `tag_index=True` than to use this.

    :raises Timeout: if database timeout occurs

    """
    self._cache.create_tag_index()
|
| 321 |
+
|
| 322 |
+
def drop_tag_index(self):
    """Drop tag index on the cache database.

    :raises Timeout: if database timeout occurs

    """
    self._cache.drop_tag_index()
|
| 329 |
+
|
| 330 |
+
def evict(self, tag):
    """Remove items with matching `tag` from the cache.

    :param str tag: tag identifying items
    :return: count of items removed

    """
    return self._cache.evict(tag)
|
| 338 |
+
|
| 339 |
+
def cull(self):
    """Cull items from the cache until volume is below the size limit.

    :return: count of items removed

    """
    return self._cache.cull()
|
| 346 |
+
|
| 347 |
+
def clear(self):
    """Remove *all* values from the cache at once."""
    return self._cache.clear()
|
| 350 |
+
|
| 351 |
+
def close(self, **kwargs):
    """Close the cache connection.

    Extra keyword arguments are accepted for interface compatibility and
    ignored.
    """
    # pylint: disable=unused-argument
    self._cache.close()
|
| 355 |
+
|
| 356 |
+
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
    """Return seconds to expiration for the backend.

    :param float timeout: seconds until the item expires
        (default 300 seconds)

    """
    if timeout == DEFAULT_TIMEOUT:
        # Sentinel default: substitute the configured cache timeout.
        return self.default_timeout
    if timeout == 0:
        # ticket 21147 - avoid time.time() related precision issues
        return -1
    # May be None (no expiry) or a positive number of seconds.
    return timeout
|
| 369 |
+
|
| 370 |
+
def memoize(
    self,
    name=None,
    timeout=DEFAULT_TIMEOUT,
    version=None,
    typed=False,
    tag=None,
    ignore=(),
):
    """Memoizing cache decorator.

    Decorator to wrap callable with memoizing function using cache.
    Repeated calls with the same arguments will lookup result in cache and
    avoid function evaluation.

    If name is set to None (default), the callable name will be determined
    automatically.

    When timeout is set to zero, function results will not be set in the
    cache. Cache lookups still occur, however. Read
    :doc:`case-study-landing-page-caching` for example usage.

    If typed is set to True, function arguments of different types will be
    cached separately. For example, f(3) and f(3.0) will be treated as
    distinct calls with distinct results.

    The original underlying function is accessible through the __wrapped__
    attribute. This is useful for introspection, for bypassing the cache,
    or for rewrapping the function with a different cache.

    An additional `__cache_key__` attribute can be used to generate the
    cache key used for the given arguments.

    Remember to call memoize when decorating a callable. If you forget,
    then a TypeError will occur.

    :param str name: name given for callable (default None, automatic)
    :param float timeout: seconds until the item expires
        (default 300 seconds)
    :param int version: key version number (default None, cache parameter)
    :param bool typed: cache different types separately (default False)
    :param str tag: text to associate with arguments (default None)
    :param set ignore: positional or keyword args to ignore (default ())
    :return: callable decorator

    """
    # Caution: Nearly identical code exists in Cache.memoize
    if callable(name):
        # A bare ``@memoize`` (no parentheses) would pass the function
        # itself as `name`; reject it with a clear error.
        raise TypeError('name cannot be callable')

    def decorator(func):
        """Decorator created by memoize() for callable `func`."""
        base = (full_name(func),) if name is None else (name,)

        @wraps(func)
        def wrapper(*args, **kwargs):
            """Wrapper for callable to cache arguments and return values."""
            key = wrapper.__cache_key__(*args, **kwargs)
            result = self.get(key, ENOVAL, version, retry=True)

            if result is ENOVAL:
                # Cache miss: evaluate the function and store the result
                # unless timeout is zero (store disabled).
                result = func(*args, **kwargs)
                valid_timeout = (
                    timeout is None
                    or timeout == DEFAULT_TIMEOUT
                    or timeout > 0
                )
                if valid_timeout:
                    self.set(
                        key,
                        result,
                        timeout,
                        version,
                        tag=tag,
                        retry=True,
                    )

            return result

        def __cache_key__(*args, **kwargs):
            """Make key for cache given function arguments."""
            return args_to_key(base, args, kwargs, typed, ignore)

        wrapper.__cache_key__ = __cache_key__
        return wrapper

    return decorator
|
.venv/lib/python3.11/site-packages/diskcache/fanout.py
ADDED
|
@@ -0,0 +1,687 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Fanout cache automatically shards keys and values."""
|
| 2 |
+
|
| 3 |
+
import contextlib as cl
|
| 4 |
+
import functools
|
| 5 |
+
import itertools as it
|
| 6 |
+
import operator
|
| 7 |
+
import os.path as op
|
| 8 |
+
import sqlite3
|
| 9 |
+
import tempfile
|
| 10 |
+
import time
|
| 11 |
+
|
| 12 |
+
from .core import DEFAULT_SETTINGS, ENOVAL, Cache, Disk, Timeout
|
| 13 |
+
from .persistent import Deque, Index
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class FanoutCache:
|
| 17 |
+
"""Cache that shards keys and values."""
|
| 18 |
+
|
| 19 |
+
def __init__(
    self, directory=None, shards=8, timeout=0.010, disk=Disk, **settings
):
    """Initialize cache instance.

    :param str directory: cache directory
    :param int shards: number of shards to distribute writes
    :param float timeout: SQLite connection timeout
    :param disk: `Disk` instance for serialization
    :param settings: any of `DEFAULT_SETTINGS`

    """
    if directory is None:
        # No directory given: use a fresh temporary one.
        directory = tempfile.mkdtemp(prefix='diskcache-')
    directory = op.expandvars(op.expanduser(str(directory)))

    # Split the total size limit evenly across shards.
    default_size_limit = DEFAULT_SETTINGS['size_limit']
    size_limit = settings.pop('size_limit', default_size_limit) / shards

    self._count = shards
    self._directory = directory
    self._disk = disk
    self._shards = tuple(
        Cache(
            directory=op.join(directory, '%03d' % num),
            timeout=timeout,
            disk=disk,
            size_limit=size_limit,
            **settings,
        )
        for num in range(shards)
    )
    # All shards share one hash function for key-to-shard routing.
    self._hash = self._shards[0].disk.hash
    self._caches = {}
    self._deques = {}
    self._indexes = {}
|
| 57 |
+
|
| 58 |
+
@property
def directory(self):
    """Directory path where the cache shards are stored."""
    return self._directory
|
| 62 |
+
|
| 63 |
+
def __getattr__(self, name):
    """Delegate setting lookups to the first shard.

    Only names in `DEFAULT_SETTINGS` plus ``timeout`` and ``disk`` are
    permitted; anything else is a programming error.
    """
    permitted = name in DEFAULT_SETTINGS or name in {'timeout', 'disk'}
    assert permitted, 'cannot access {} in cache shard'.format(name)
    return getattr(self._shards[0], name)
|
| 68 |
+
|
| 69 |
+
@cl.contextmanager
def transact(self, retry=True):
    """Context manager to perform a transaction by locking the cache.

    While the cache is locked, no other write operation is permitted.
    Transactions should therefore be as short as possible. Read and write
    operations performed in a transaction are atomic. Read operations may
    occur concurrent to a transaction.

    Transactions may be nested and may not be shared between threads.

    Blocks until transactions are held on all cache shards by retrying as
    necessary.

    :return: context manager for use in `with` statement

    """
    assert retry, 'retry must be True in FanoutCache'
    with cl.ExitStack() as stack:
        # Acquire every shard's transaction before yielding control.
        for shard in self._shards:
            stack.enter_context(shard.transact(retry=True))
        yield
|
| 101 |
+
|
| 102 |
+
def set(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Set `key` and `value` item in cache.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as raw bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was set

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.set(key, value, expire, read, tag, retry)
    except Timeout:
        return False
|
| 127 |
+
|
| 128 |
+
def __setitem__(self, key, value):
|
| 129 |
+
"""Set `key` and `value` item in cache.
|
| 130 |
+
|
| 131 |
+
Calls :func:`FanoutCache.set` internally with `retry` set to `True`.
|
| 132 |
+
|
| 133 |
+
:param key: key for item
|
| 134 |
+
:param value: value for item
|
| 135 |
+
|
| 136 |
+
"""
|
| 137 |
+
index = self._hash(key) % self._count
|
| 138 |
+
shard = self._shards[index]
|
| 139 |
+
shard[key] = value
|
| 140 |
+
|
| 141 |
+
def touch(self, key, expire=None, retry=False):
    """Touch `key` in cache and update its `expire` time.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if key was touched

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.touch(key, expire, retry)
    except Timeout:
        return False
|
| 160 |
+
|
| 161 |
+
def add(self, key, value, expire=None, read=False, tag=None, retry=False):
    """Add `key` and `value` item to cache.

    Similar to `set`, but only add to cache if key not present.

    This operation is atomic. Only one concurrent add operation for given
    key from separate threads or processes will succeed.

    When `read` is `True`, `value` should be a file-like object opened
    for reading in binary mode.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param value: value for item
    :param float expire: seconds until the key expires
        (default None, no expiry)
    :param bool read: read value as bytes from file (default False)
    :param str tag: text to associate with key (default None)
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was added

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.add(key, value, expire, read, tag, retry)
    except Timeout:
        return False
|
| 191 |
+
|
| 192 |
+
def incr(self, key, delta=1, default=0, retry=False):
    """Increment value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent increment operations will be
    counted individually.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param int delta: amount to increment (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item on success else None
    :raises KeyError: if key is not found and default is None

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.incr(key, delta, default, retry)
    except Timeout:
        return None
|
| 222 |
+
|
| 223 |
+
def decr(self, key, delta=1, default=0, retry=False):
    """Decrement value by delta for item with key.

    If key is missing and default is None then raise KeyError. Else if key
    is missing and default is not None then use default for value.

    Operation is atomic. All concurrent decrement operations will be
    counted individually.

    Unlike Memcached, negative values are supported. Value may be
    decremented below zero.

    Assumes value may be stored in a SQLite column. Most builds that target
    machines with 64-bit pointer widths will support 64-bit signed
    integers.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param int delta: amount to decrement (default 1)
    :param int default: value if key is missing (default 0)
    :param bool retry: retry if database timeout occurs (default False)
    :return: new value for item on success else None
    :raises KeyError: if key is not found and default is None

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.decr(key, delta, default, retry)
    except Timeout:
        return None
|
| 256 |
+
|
| 257 |
+
def get(
    self,
    key,
    default=None,
    read=False,
    expire_time=False,
    tag=False,
    retry=False,
):
    """Retrieve value from cache. If `key` is missing, return `default`.

    If database timeout occurs then returns `default` unless `retry` is
    set to `True` (default `False`).

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param bool read: if True, return file handle to value
        (default False)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item if key is found else default

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.get(key, default, read, expire_time, tag, retry)
    except (Timeout, sqlite3.OperationalError):
        # NOTE(review): OperationalError is also treated as a miss here —
        # presumably to tolerate disk-level database errors; confirm.
        return default
|
| 288 |
+
|
| 289 |
+
def __getitem__(self, key):
|
| 290 |
+
"""Return corresponding value for `key` from cache.
|
| 291 |
+
|
| 292 |
+
Calls :func:`FanoutCache.get` internally with `retry` set to `True`.
|
| 293 |
+
|
| 294 |
+
:param key: key for item
|
| 295 |
+
:return: value for item
|
| 296 |
+
:raises KeyError: if key is not found
|
| 297 |
+
|
| 298 |
+
"""
|
| 299 |
+
index = self._hash(key) % self._count
|
| 300 |
+
shard = self._shards[index]
|
| 301 |
+
return shard[key]
|
| 302 |
+
|
| 303 |
+
def read(self, key):
    """Return file handle corresponding to `key` from cache.

    :param key: key for item
    :return: file open for reading in binary mode
    :raises KeyError: if key is not found

    """
    value = self.get(key, default=ENOVAL, read=True, retry=True)
    if value is ENOVAL:
        raise KeyError(key)
    return value
|
| 315 |
+
|
| 316 |
+
def __contains__(self, key):
|
| 317 |
+
"""Return `True` if `key` matching item is found in cache.
|
| 318 |
+
|
| 319 |
+
:param key: key for item
|
| 320 |
+
:return: True if key is found
|
| 321 |
+
|
| 322 |
+
"""
|
| 323 |
+
index = self._hash(key) % self._count
|
| 324 |
+
shard = self._shards[index]
|
| 325 |
+
return key in shard
|
| 326 |
+
|
| 327 |
+
def pop(
    self, key, default=None, expire_time=False, tag=False, retry=False
):  # noqa: E501
    """Remove corresponding item for `key` from cache and return value.

    If `key` is missing, return `default`.

    Operation is atomic. Concurrent operations will be serialized.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param default: return value if key is missing (default None)
    :param float expire_time: if True, return expire_time in tuple
        (default False)
    :param tag: if True, return tag in tuple (default False)
    :param bool retry: retry if database timeout occurs (default False)
    :return: value for item if key is found else default

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.pop(key, default, expire_time, tag, retry)
    except Timeout:
        return default
|
| 354 |
+
|
| 355 |
+
def delete(self, key, retry=False):
    """Delete corresponding item for `key` from cache.

    Missing keys are ignored.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param key: key for item
    :param bool retry: retry if database timeout occurs (default False)
    :return: True if item was deleted

    """
    shard = self._shards[self._hash(key) % self._count]
    try:
        return shard.delete(key, retry)
    except Timeout:
        return False
|
| 374 |
+
|
| 375 |
+
def __delitem__(self, key):
|
| 376 |
+
"""Delete corresponding item for `key` from cache.
|
| 377 |
+
|
| 378 |
+
Calls :func:`FanoutCache.delete` internally with `retry` set to `True`.
|
| 379 |
+
|
| 380 |
+
:param key: key for item
|
| 381 |
+
:raises KeyError: if key is not found
|
| 382 |
+
|
| 383 |
+
"""
|
| 384 |
+
index = self._hash(key) % self._count
|
| 385 |
+
shard = self._shards[index]
|
| 386 |
+
del shard[key]
|
| 387 |
+
|
| 388 |
+
def check(self, fix=False, retry=False):
    """Check database and file system consistency.

    Intended for use in testing and post-mortem error analysis.

    While checking the cache table for consistency, a writer lock is held
    on the database. The lock blocks other cache clients from writing to
    the database. For caches with many file references, the lock may be
    held for a long time. For example, local benchmarking shows that a
    cache with 1,000 file references takes ~60ms to check.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param bool fix: correct inconsistencies
    :param bool retry: retry if database timeout occurs (default False)
    :return: list of warnings
    :raises Timeout: if database timeout occurs

    """
    # Concatenate every shard's warnings into one flat list.
    results = []
    for shard in self._shards:
        results += shard.check(fix, retry)
    return results
|
| 410 |
+
|
| 411 |
+
def expire(self, retry=False):
    """Remove expired items from cache.

    If database timeout occurs then fails silently unless `retry` is set
    to `True` (default `False`).

    :param bool retry: retry if database timeout occurs (default False)
    :return: count of items removed

    """
    return self._remove('expire', args=(time.time(),), retry=retry)
|
| 422 |
+
|
| 423 |
+
def create_tag_index(self):
    """Create tag index on every cache shard's database.

    Better to initialize the cache with `tag_index=True` than to use this.

    :raises Timeout: if database timeout occurs

    """
    for shard in self._shards:
        shard.create_tag_index()
|
| 433 |
+
|
| 434 |
+
def drop_tag_index(self):
    """Drop tag index on every cache shard's database.

    :raises Timeout: if database timeout occurs

    """
    for shard in self._shards:
        shard.drop_tag_index()
|
| 442 |
+
|
| 443 |
+
def evict(self, tag, retry=False):
|
| 444 |
+
"""Remove items with matching `tag` from cache.
|
| 445 |
+
|
| 446 |
+
If database timeout occurs then fails silently unless `retry` is set to
|
| 447 |
+
`True` (default `False`).
|
| 448 |
+
|
| 449 |
+
:param str tag: tag identifying items
|
| 450 |
+
:param bool retry: retry if database timeout occurs (default False)
|
| 451 |
+
:return: count of items removed
|
| 452 |
+
|
| 453 |
+
"""
|
| 454 |
+
return self._remove('evict', args=(tag,), retry=retry)
|
| 455 |
+
|
| 456 |
+
def cull(self, retry=False):
|
| 457 |
+
"""Cull items from cache until volume is less than size limit.
|
| 458 |
+
|
| 459 |
+
If database timeout occurs then fails silently unless `retry` is set to
|
| 460 |
+
`True` (default `False`).
|
| 461 |
+
|
| 462 |
+
:param bool retry: retry if database timeout occurs (default False)
|
| 463 |
+
:return: count of items removed
|
| 464 |
+
|
| 465 |
+
"""
|
| 466 |
+
return self._remove('cull', retry=retry)
|
| 467 |
+
|
| 468 |
+
def clear(self, retry=False):
|
| 469 |
+
"""Remove all items from cache.
|
| 470 |
+
|
| 471 |
+
If database timeout occurs then fails silently unless `retry` is set to
|
| 472 |
+
`True` (default `False`).
|
| 473 |
+
|
| 474 |
+
:param bool retry: retry if database timeout occurs (default False)
|
| 475 |
+
:return: count of items removed
|
| 476 |
+
|
| 477 |
+
"""
|
| 478 |
+
return self._remove('clear', retry=retry)
|
| 479 |
+
|
| 480 |
+
    def _remove(self, name, args=(), retry=False):
        """Call removal method `name` on every shard and sum the results.

        When a shard raises Timeout, the exception's first argument carries
        the partial count of items removed before the timeout; that partial
        count is accumulated and the shard is retried until it completes
        without timing out.

        :param str name: shard method to call ('expire', 'evict', 'cull',
            or 'clear')
        :param tuple args: positional arguments for the shard method
        :param bool retry: passed through to the shard method
        :return: total count of items removed across all shards

        """
        total = 0
        for shard in self._shards:
            method = getattr(shard, name)
            while True:
                try:
                    count = method(*args, retry=retry)
                    total += count
                except Timeout as timeout:
                    # Keep the partial count and loop to retry this shard.
                    total += timeout.args[0]
                else:
                    break
        return total
|
| 493 |
+
|
| 494 |
+
def stats(self, enable=True, reset=False):
|
| 495 |
+
"""Return cache statistics hits and misses.
|
| 496 |
+
|
| 497 |
+
:param bool enable: enable collecting statistics (default True)
|
| 498 |
+
:param bool reset: reset hits and misses to 0 (default False)
|
| 499 |
+
:return: (hits, misses)
|
| 500 |
+
|
| 501 |
+
"""
|
| 502 |
+
results = [shard.stats(enable, reset) for shard in self._shards]
|
| 503 |
+
total_hits = sum(hits for hits, _ in results)
|
| 504 |
+
total_misses = sum(misses for _, misses in results)
|
| 505 |
+
return total_hits, total_misses
|
| 506 |
+
|
| 507 |
+
def volume(self):
|
| 508 |
+
"""Return estimated total size of cache on disk.
|
| 509 |
+
|
| 510 |
+
:return: size in bytes
|
| 511 |
+
|
| 512 |
+
"""
|
| 513 |
+
return sum(shard.volume() for shard in self._shards)
|
| 514 |
+
|
| 515 |
+
def close(self):
|
| 516 |
+
"""Close database connection."""
|
| 517 |
+
for shard in self._shards:
|
| 518 |
+
shard.close()
|
| 519 |
+
self._caches.clear()
|
| 520 |
+
self._deques.clear()
|
| 521 |
+
self._indexes.clear()
|
| 522 |
+
|
| 523 |
+
    def __enter__(self):
        """Enter context manager; the cache itself is the managed resource."""
        return self

    def __exit__(self, *exception):
        """Exit context manager and close all shard connections."""
        self.close()

    def __getstate__(self):
        # Pickle only the constructor arguments; database connections are
        # rebuilt on unpickling via __setstate__.
        return (self._directory, self._count, self.timeout, type(self.disk))

    def __setstate__(self, state):
        # Rebuild the cache by re-running __init__ with the pickled args.
        self.__init__(*state)
|
| 534 |
+
|
| 535 |
+
def __iter__(self):
|
| 536 |
+
"""Iterate keys in cache including expired items."""
|
| 537 |
+
iterators = (iter(shard) for shard in self._shards)
|
| 538 |
+
return it.chain.from_iterable(iterators)
|
| 539 |
+
|
| 540 |
+
def __reversed__(self):
|
| 541 |
+
"""Reverse iterate keys in cache including expired items."""
|
| 542 |
+
iterators = (reversed(shard) for shard in reversed(self._shards))
|
| 543 |
+
return it.chain.from_iterable(iterators)
|
| 544 |
+
|
| 545 |
+
def __len__(self):
|
| 546 |
+
"""Count of items in cache including expired items."""
|
| 547 |
+
return sum(len(shard) for shard in self._shards)
|
| 548 |
+
|
| 549 |
+
    def reset(self, key, value=ENOVAL):
        """Reset `key` and `value` item from Settings table.

        If `value` is not given, it is reloaded from the Settings
        table. Otherwise, the Settings table is updated.

        Settings attributes on cache objects are lazy-loaded and
        read-only. Use `reset` to update the value.

        Settings with the ``sqlite_`` prefix correspond to SQLite
        pragmas. Updating the value will execute the corresponding PRAGMA
        statement.

        :param str key: Settings key for item
        :param value: value for item (optional)
        :return: updated value for item

        """
        # Apply the setting to every shard, retrying each shard until it
        # succeeds without a database timeout. The last shard's result is
        # returned; all shards receive the same key/value so results agree.
        # NOTE(review): if self._shards were empty, `result` would be unbound
        # here -- assumes at least one shard, which __init__ presumably
        # guarantees.
        for shard in self._shards:
            while True:
                try:
                    result = shard.reset(key, value)
                except Timeout:
                    pass
                else:
                    break
        return result
|
| 576 |
+
|
| 577 |
+
def cache(self, name, timeout=60, disk=None, **settings):
|
| 578 |
+
"""Return Cache with given `name` in subdirectory.
|
| 579 |
+
|
| 580 |
+
If disk is none (default), uses the fanout cache disk.
|
| 581 |
+
|
| 582 |
+
>>> fanout_cache = FanoutCache()
|
| 583 |
+
>>> cache = fanout_cache.cache('test')
|
| 584 |
+
>>> cache.set('abc', 123)
|
| 585 |
+
True
|
| 586 |
+
>>> cache.get('abc')
|
| 587 |
+
123
|
| 588 |
+
>>> len(cache)
|
| 589 |
+
1
|
| 590 |
+
>>> cache.delete('abc')
|
| 591 |
+
True
|
| 592 |
+
|
| 593 |
+
:param str name: subdirectory name for Cache
|
| 594 |
+
:param float timeout: SQLite connection timeout
|
| 595 |
+
:param disk: Disk type or subclass for serialization
|
| 596 |
+
:param settings: any of DEFAULT_SETTINGS
|
| 597 |
+
:return: Cache with given name
|
| 598 |
+
|
| 599 |
+
"""
|
| 600 |
+
_caches = self._caches
|
| 601 |
+
|
| 602 |
+
try:
|
| 603 |
+
return _caches[name]
|
| 604 |
+
except KeyError:
|
| 605 |
+
parts = name.split('/')
|
| 606 |
+
directory = op.join(self._directory, 'cache', *parts)
|
| 607 |
+
temp = Cache(
|
| 608 |
+
directory=directory,
|
| 609 |
+
timeout=timeout,
|
| 610 |
+
disk=self._disk if disk is None else Disk,
|
| 611 |
+
**settings,
|
| 612 |
+
)
|
| 613 |
+
_caches[name] = temp
|
| 614 |
+
return temp
|
| 615 |
+
|
| 616 |
+
def deque(self, name, maxlen=None):
|
| 617 |
+
"""Return Deque with given `name` in subdirectory.
|
| 618 |
+
|
| 619 |
+
>>> cache = FanoutCache()
|
| 620 |
+
>>> deque = cache.deque('test')
|
| 621 |
+
>>> deque.extend('abc')
|
| 622 |
+
>>> deque.popleft()
|
| 623 |
+
'a'
|
| 624 |
+
>>> deque.pop()
|
| 625 |
+
'c'
|
| 626 |
+
>>> len(deque)
|
| 627 |
+
1
|
| 628 |
+
|
| 629 |
+
:param str name: subdirectory name for Deque
|
| 630 |
+
:param maxlen: max length (default None, no max)
|
| 631 |
+
:return: Deque with given name
|
| 632 |
+
|
| 633 |
+
"""
|
| 634 |
+
_deques = self._deques
|
| 635 |
+
|
| 636 |
+
try:
|
| 637 |
+
return _deques[name]
|
| 638 |
+
except KeyError:
|
| 639 |
+
parts = name.split('/')
|
| 640 |
+
directory = op.join(self._directory, 'deque', *parts)
|
| 641 |
+
cache = Cache(
|
| 642 |
+
directory=directory,
|
| 643 |
+
disk=self._disk,
|
| 644 |
+
eviction_policy='none',
|
| 645 |
+
)
|
| 646 |
+
deque = Deque.fromcache(cache, maxlen=maxlen)
|
| 647 |
+
_deques[name] = deque
|
| 648 |
+
return deque
|
| 649 |
+
|
| 650 |
+
def index(self, name):
|
| 651 |
+
"""Return Index with given `name` in subdirectory.
|
| 652 |
+
|
| 653 |
+
>>> cache = FanoutCache()
|
| 654 |
+
>>> index = cache.index('test')
|
| 655 |
+
>>> index['abc'] = 123
|
| 656 |
+
>>> index['def'] = 456
|
| 657 |
+
>>> index['ghi'] = 789
|
| 658 |
+
>>> index.popitem()
|
| 659 |
+
('ghi', 789)
|
| 660 |
+
>>> del index['abc']
|
| 661 |
+
>>> len(index)
|
| 662 |
+
1
|
| 663 |
+
>>> index['def']
|
| 664 |
+
456
|
| 665 |
+
|
| 666 |
+
:param str name: subdirectory name for Index
|
| 667 |
+
:return: Index with given name
|
| 668 |
+
|
| 669 |
+
"""
|
| 670 |
+
_indexes = self._indexes
|
| 671 |
+
|
| 672 |
+
try:
|
| 673 |
+
return _indexes[name]
|
| 674 |
+
except KeyError:
|
| 675 |
+
parts = name.split('/')
|
| 676 |
+
directory = op.join(self._directory, 'index', *parts)
|
| 677 |
+
cache = Cache(
|
| 678 |
+
directory=directory,
|
| 679 |
+
disk=self._disk,
|
| 680 |
+
eviction_policy='none',
|
| 681 |
+
)
|
| 682 |
+
index = Index.fromcache(cache)
|
| 683 |
+
_indexes[name] = index
|
| 684 |
+
return index
|
| 685 |
+
|
| 686 |
+
|
| 687 |
+
# Reuse Cache.memoize unchanged; FanoutCache exposes the get/set interface
# the decorator relies on, so no sharding-specific wrapper is needed.
FanoutCache.memoize = Cache.memoize  # type: ignore
|
.venv/lib/python3.11/site-packages/diskcache/persistent.py
ADDED
|
@@ -0,0 +1,1245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Persistent Data Types
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import operator as op
|
| 5 |
+
from collections import OrderedDict
|
| 6 |
+
from collections.abc import (
|
| 7 |
+
ItemsView,
|
| 8 |
+
KeysView,
|
| 9 |
+
MutableMapping,
|
| 10 |
+
Sequence,
|
| 11 |
+
ValuesView,
|
| 12 |
+
)
|
| 13 |
+
from contextlib import contextmanager
|
| 14 |
+
from shutil import rmtree
|
| 15 |
+
|
| 16 |
+
from .core import ENOVAL, Cache
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def _make_compare(seq_op, doc):
    """Make compare method with Sequence semantics."""

    def compare(self, that):
        """Compare method for deque and sequence."""
        if not isinstance(that, Sequence):
            return NotImplemented

        len_self = len(self)
        len_that = len(that)

        # Unequal lengths decide (in)equality immediately; ordering
        # comparisons fall through to element-wise comparison below.
        if len_self != len_that:
            if seq_op is op.eq:
                return False
            if seq_op is op.ne:
                return True

        # First differing pair decides ordering; otherwise compare lengths,
        # matching list/tuple comparison semantics.
        for alpha, beta in zip(self, that):
            if alpha != beta:
                return seq_op(alpha, beta)

        return seq_op(len_self, len_that)

    compare.__name__ = '__{0}__'.format(seq_op.__name__)
    compare.__doc__ = 'Return True if and only if deque is {0} `that`.'.format(doc)

    return compare
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Deque(Sequence):
|
| 50 |
+
"""Persistent sequence with double-ended queue semantics.
|
| 51 |
+
|
| 52 |
+
Double-ended queue is an ordered collection with optimized access at its
|
| 53 |
+
endpoints.
|
| 54 |
+
|
| 55 |
+
Items are serialized to disk. Deque may be initialized from directory path
|
| 56 |
+
where items are stored.
|
| 57 |
+
|
| 58 |
+
>>> deque = Deque()
|
| 59 |
+
>>> deque += range(5)
|
| 60 |
+
>>> list(deque)
|
| 61 |
+
[0, 1, 2, 3, 4]
|
| 62 |
+
>>> for value in range(5):
|
| 63 |
+
... deque.appendleft(-value)
|
| 64 |
+
>>> len(deque)
|
| 65 |
+
10
|
| 66 |
+
>>> list(deque)
|
| 67 |
+
[-4, -3, -2, -1, 0, 0, 1, 2, 3, 4]
|
| 68 |
+
>>> deque.pop()
|
| 69 |
+
4
|
| 70 |
+
>>> deque.popleft()
|
| 71 |
+
-4
|
| 72 |
+
>>> deque.reverse()
|
| 73 |
+
>>> list(deque)
|
| 74 |
+
[3, 2, 1, 0, 0, -1, -2, -3]
|
| 75 |
+
|
| 76 |
+
"""
|
| 77 |
+
|
| 78 |
+
    def __init__(self, iterable=(), directory=None, maxlen=None):
        """Initialize deque instance.

        If directory is None then temporary directory created. The directory
        will *not* be automatically removed.

        :param iterable: iterable of items to append to deque
        :param directory: deque directory (default None)
        :param maxlen: max deque length; None means unbounded

        """
        # Eviction is disabled so only explicit deque operations remove
        # items.
        self._cache = Cache(directory, eviction_policy='none')
        # Unbounded deques use infinity so length checks need no None tests.
        self._maxlen = float('inf') if maxlen is None else maxlen
        self._extend(iterable)
|
| 91 |
+
|
| 92 |
+
    @classmethod
    def fromcache(cls, cache, iterable=(), maxlen=None):
        """Initialize deque using `cache`.

        >>> cache = Cache()
        >>> deque = Deque.fromcache(cache, [5, 6, 7, 8])
        >>> deque.cache is cache
        True
        >>> len(deque)
        4
        >>> 7 in deque
        True
        >>> deque.popleft()
        5

        :param Cache cache: cache to use
        :param iterable: iterable of items
        :param maxlen: max deque length; None means unbounded
        :return: initialized Deque

        """
        # pylint: disable=no-member,protected-access
        # Bypass __init__ so the provided cache is used instead of creating
        # a new one.
        self = cls.__new__(cls)
        self._cache = cache
        self._maxlen = float('inf') if maxlen is None else maxlen
        self._extend(iterable)
        return self
|
| 118 |
+
|
| 119 |
+
    @property
    def cache(self):
        """Cache used by deque."""
        # Exposed read-only; the deque owns its backing cache.
        return self._cache

    @property
    def directory(self):
        """Directory path where deque is stored."""
        # Delegates to the backing cache's on-disk location.
        return self._cache.directory

    @property
    def maxlen(self):
        """Max length of the deque."""
        # float('inf') means unbounded; see the setter for trimming behavior.
        return self._maxlen
|
| 133 |
+
|
| 134 |
+
    @maxlen.setter
    def maxlen(self, value):
        """Set max length of the deque.

        Pops items from left while length greater than max.

        >>> deque = Deque()
        >>> deque.extendleft('abcde')
        >>> deque.maxlen = 3
        >>> list(deque)
        ['c', 'd', 'e']

        :param value: max length

        """
        self._maxlen = value
        # Trim inside a transaction so concurrent writers see a consistent
        # length while excess items are popped from the front.
        with self._cache.transact(retry=True):
            while len(self._cache) > self._maxlen:
                self._popleft()
|
| 153 |
+
|
| 154 |
+
    def _index(self, index, func):
        """Apply `func` to the cache key at position `index`.

        Positive indexes walk keys front-to-back; negative indexes walk
        back-to-front. Keys that vanish concurrently (KeyError from `func`)
        are skipped and the walk continues.

        :param int index: position in deque (may be negative)
        :param func: callable applied to the located cache key
        :return: result of ``func(key)``
        :raises IndexError: if index out of range

        """
        len_self = len(self)

        if index >= 0:
            if index >= len_self:
                raise IndexError('deque index out of range')

            for key in self._cache.iterkeys():
                if index == 0:
                    try:
                        return func(key)
                    except KeyError:
                        # Key removed concurrently; next key takes this
                        # position.
                        continue
                index -= 1
        else:
            if index < -len_self:
                raise IndexError('deque index out of range')

            # Shift so the reverse walk counts -1 as 0, -2 as -1, etc.
            index += 1

            for key in self._cache.iterkeys(reverse=True):
                if index == 0:
                    try:
                        return func(key)
                    except KeyError:
                        continue
                index += 1

        # Deque shrank while iterating; report the same out-of-range error.
        raise IndexError('deque index out of range')
|
| 183 |
+
|
| 184 |
+
    def __getitem__(self, index):
        """deque.__getitem__(index) <==> deque[index]

        Return corresponding item for `index` in deque.

        See also `Deque.peekleft` and `Deque.peek` for indexing deque at index
        ``0`` or ``-1``.

        >>> deque = Deque()
        >>> deque.extend('abcde')
        >>> deque[1]
        'b'
        >>> deque[-2]
        'd'

        :param int index: index of item
        :return: corresponding item
        :raises IndexError: if index out of range

        """
        # _index locates the cache key at `index` and applies the getter.
        return self._index(index, self._cache.__getitem__)

    def __setitem__(self, index, value):
        """deque.__setitem__(index, value) <==> deque[index] = value

        Store `value` in deque at `index`.

        >>> deque = Deque()
        >>> deque.extend([None] * 3)
        >>> deque[0] = 'a'
        >>> deque[1] = 'b'
        >>> deque[-1] = 'c'
        >>> ''.join(deque)
        'abc'

        :param int index: index of value
        :param value: value to store
        :raises IndexError: if index out of range

        """

        def _set_value(key):
            # Closure binds `value`; _index supplies the located key.
            return self._cache.__setitem__(key, value)

        self._index(index, _set_value)

    def __delitem__(self, index):
        """deque.__delitem__(index) <==> del deque[index]

        Delete item in deque at `index`.

        >>> deque = Deque()
        >>> deque.extend([None] * 3)
        >>> del deque[0]
        >>> del deque[1]
        >>> del deque[-1]
        >>> len(deque)
        0

        :param int index: index of item
        :raises IndexError: if index out of range

        """
        # _index locates the cache key at `index` and applies the deleter.
        self._index(index, self._cache.__delitem__)
|
| 248 |
+
|
| 249 |
+
def __repr__(self):
|
| 250 |
+
"""deque.__repr__() <==> repr(deque)
|
| 251 |
+
|
| 252 |
+
Return string with printable representation of deque.
|
| 253 |
+
|
| 254 |
+
"""
|
| 255 |
+
name = type(self).__name__
|
| 256 |
+
return '{0}(directory={1!r})'.format(name, self.directory)
|
| 257 |
+
|
| 258 |
+
    # Rich comparisons use Sequence semantics (element-wise, then by length)
    # as built by the _make_compare factory above.
    __eq__ = _make_compare(op.eq, 'equal to')
    __ne__ = _make_compare(op.ne, 'not equal to')
    __lt__ = _make_compare(op.lt, 'less than')
    __gt__ = _make_compare(op.gt, 'greater than')
    __le__ = _make_compare(op.le, 'less than or equal to')
    __ge__ = _make_compare(op.ge, 'greater than or equal to')
|
| 264 |
+
|
| 265 |
+
    def __iadd__(self, iterable):
        """deque.__iadd__(iterable) <==> deque += iterable

        Extend back side of deque with items from iterable.

        :param iterable: iterable of items to append to deque
        :return: deque with added items

        """
        # Uses the private alias so overriding `extend` in a subclass does
        # not change `+=` semantics.
        self._extend(iterable)
        return self
|
| 276 |
+
|
| 277 |
+
def __iter__(self):
|
| 278 |
+
"""deque.__iter__() <==> iter(deque)
|
| 279 |
+
|
| 280 |
+
Return iterator of deque from front to back.
|
| 281 |
+
|
| 282 |
+
"""
|
| 283 |
+
_cache = self._cache
|
| 284 |
+
|
| 285 |
+
for key in _cache.iterkeys():
|
| 286 |
+
try:
|
| 287 |
+
yield _cache[key]
|
| 288 |
+
except KeyError:
|
| 289 |
+
pass
|
| 290 |
+
|
| 291 |
+
    def __len__(self):
        """deque.__len__() <==> len(deque)

        Return length of deque.

        """
        # Length is tracked by the backing cache.
        return len(self._cache)
|
| 298 |
+
|
| 299 |
+
def __reversed__(self):
|
| 300 |
+
"""deque.__reversed__() <==> reversed(deque)
|
| 301 |
+
|
| 302 |
+
Return iterator of deque from back to front.
|
| 303 |
+
|
| 304 |
+
>>> deque = Deque()
|
| 305 |
+
>>> deque.extend('abcd')
|
| 306 |
+
>>> iterator = reversed(deque)
|
| 307 |
+
>>> next(iterator)
|
| 308 |
+
'd'
|
| 309 |
+
>>> list(iterator)
|
| 310 |
+
['c', 'b', 'a']
|
| 311 |
+
|
| 312 |
+
"""
|
| 313 |
+
_cache = self._cache
|
| 314 |
+
|
| 315 |
+
for key in _cache.iterkeys(reverse=True):
|
| 316 |
+
try:
|
| 317 |
+
yield _cache[key]
|
| 318 |
+
except KeyError:
|
| 319 |
+
pass
|
| 320 |
+
|
| 321 |
+
    def __getstate__(self):
        # Pickle by directory and max length; the items themselves live on
        # disk and are not serialized.
        return self.directory, self.maxlen

    def __setstate__(self, state):
        directory, maxlen = state
        # Re-run __init__ to reopen the backing cache at the pickled
        # directory.
        self.__init__(directory=directory, maxlen=maxlen)
|
| 327 |
+
|
| 328 |
+
def append(self, value):
|
| 329 |
+
"""Add `value` to back of deque.
|
| 330 |
+
|
| 331 |
+
>>> deque = Deque()
|
| 332 |
+
>>> deque.append('a')
|
| 333 |
+
>>> deque.append('b')
|
| 334 |
+
>>> deque.append('c')
|
| 335 |
+
>>> list(deque)
|
| 336 |
+
['a', 'b', 'c']
|
| 337 |
+
|
| 338 |
+
:param value: value to add to back of deque
|
| 339 |
+
|
| 340 |
+
"""
|
| 341 |
+
with self._cache.transact(retry=True):
|
| 342 |
+
self._cache.push(value, retry=True)
|
| 343 |
+
if len(self._cache) > self._maxlen:
|
| 344 |
+
self._popleft()
|
| 345 |
+
|
| 346 |
+
_append = append
|
| 347 |
+
|
| 348 |
+
def appendleft(self, value):
|
| 349 |
+
"""Add `value` to front of deque.
|
| 350 |
+
|
| 351 |
+
>>> deque = Deque()
|
| 352 |
+
>>> deque.appendleft('a')
|
| 353 |
+
>>> deque.appendleft('b')
|
| 354 |
+
>>> deque.appendleft('c')
|
| 355 |
+
>>> list(deque)
|
| 356 |
+
['c', 'b', 'a']
|
| 357 |
+
|
| 358 |
+
:param value: value to add to front of deque
|
| 359 |
+
|
| 360 |
+
"""
|
| 361 |
+
with self._cache.transact(retry=True):
|
| 362 |
+
self._cache.push(value, side='front', retry=True)
|
| 363 |
+
if len(self._cache) > self._maxlen:
|
| 364 |
+
self._pop()
|
| 365 |
+
|
| 366 |
+
_appendleft = appendleft
|
| 367 |
+
|
| 368 |
+
    def clear(self):
        """Remove all elements from deque.

        >>> deque = Deque('abc')
        >>> len(deque)
        3
        >>> deque.clear()
        >>> list(deque)
        []

        """
        # Retry on database timeout so clear always completes.
        self._cache.clear(retry=True)

    _clear = clear
|
| 382 |
+
|
| 383 |
+
def copy(self):
|
| 384 |
+
"""Copy deque with same directory and max length."""
|
| 385 |
+
TypeSelf = type(self)
|
| 386 |
+
return TypeSelf(directory=self.directory, maxlen=self.maxlen)
|
| 387 |
+
|
| 388 |
+
def count(self, value):
|
| 389 |
+
"""Return number of occurrences of `value` in deque.
|
| 390 |
+
|
| 391 |
+
>>> deque = Deque()
|
| 392 |
+
>>> deque += [num for num in range(1, 5) for _ in range(num)]
|
| 393 |
+
>>> deque.count(0)
|
| 394 |
+
0
|
| 395 |
+
>>> deque.count(1)
|
| 396 |
+
1
|
| 397 |
+
>>> deque.count(4)
|
| 398 |
+
4
|
| 399 |
+
|
| 400 |
+
:param value: value to count in deque
|
| 401 |
+
:return: count of items equal to value in deque
|
| 402 |
+
|
| 403 |
+
"""
|
| 404 |
+
return sum(1 for item in self if value == item)
|
| 405 |
+
|
| 406 |
+
def extend(self, iterable):
|
| 407 |
+
"""Extend back side of deque with values from `iterable`.
|
| 408 |
+
|
| 409 |
+
:param iterable: iterable of values
|
| 410 |
+
|
| 411 |
+
"""
|
| 412 |
+
for value in iterable:
|
| 413 |
+
self._append(value)
|
| 414 |
+
|
| 415 |
+
_extend = extend
|
| 416 |
+
|
| 417 |
+
def extendleft(self, iterable):
|
| 418 |
+
"""Extend front side of deque with value from `iterable`.
|
| 419 |
+
|
| 420 |
+
>>> deque = Deque()
|
| 421 |
+
>>> deque.extendleft('abc')
|
| 422 |
+
>>> list(deque)
|
| 423 |
+
['c', 'b', 'a']
|
| 424 |
+
|
| 425 |
+
:param iterable: iterable of values
|
| 426 |
+
|
| 427 |
+
"""
|
| 428 |
+
for value in iterable:
|
| 429 |
+
self._appendleft(value)
|
| 430 |
+
|
| 431 |
+
def peek(self):
|
| 432 |
+
"""Peek at value at back of deque.
|
| 433 |
+
|
| 434 |
+
Faster than indexing deque at -1.
|
| 435 |
+
|
| 436 |
+
If deque is empty then raise IndexError.
|
| 437 |
+
|
| 438 |
+
>>> deque = Deque()
|
| 439 |
+
>>> deque.peek()
|
| 440 |
+
Traceback (most recent call last):
|
| 441 |
+
...
|
| 442 |
+
IndexError: peek from an empty deque
|
| 443 |
+
>>> deque += 'abc'
|
| 444 |
+
>>> deque.peek()
|
| 445 |
+
'c'
|
| 446 |
+
|
| 447 |
+
:return: value at back of deque
|
| 448 |
+
:raises IndexError: if deque is empty
|
| 449 |
+
|
| 450 |
+
"""
|
| 451 |
+
default = None, ENOVAL
|
| 452 |
+
_, value = self._cache.peek(default=default, side='back', retry=True)
|
| 453 |
+
if value is ENOVAL:
|
| 454 |
+
raise IndexError('peek from an empty deque')
|
| 455 |
+
return value
|
| 456 |
+
|
| 457 |
+
def peekleft(self):
|
| 458 |
+
"""Peek at value at front of deque.
|
| 459 |
+
|
| 460 |
+
Faster than indexing deque at 0.
|
| 461 |
+
|
| 462 |
+
If deque is empty then raise IndexError.
|
| 463 |
+
|
| 464 |
+
>>> deque = Deque()
|
| 465 |
+
>>> deque.peekleft()
|
| 466 |
+
Traceback (most recent call last):
|
| 467 |
+
...
|
| 468 |
+
IndexError: peek from an empty deque
|
| 469 |
+
>>> deque += 'abc'
|
| 470 |
+
>>> deque.peekleft()
|
| 471 |
+
'a'
|
| 472 |
+
|
| 473 |
+
:return: value at front of deque
|
| 474 |
+
:raises IndexError: if deque is empty
|
| 475 |
+
|
| 476 |
+
"""
|
| 477 |
+
default = None, ENOVAL
|
| 478 |
+
_, value = self._cache.peek(default=default, side='front', retry=True)
|
| 479 |
+
if value is ENOVAL:
|
| 480 |
+
raise IndexError('peek from an empty deque')
|
| 481 |
+
return value
|
| 482 |
+
|
| 483 |
+
def pop(self):
|
| 484 |
+
"""Remove and return value at back of deque.
|
| 485 |
+
|
| 486 |
+
If deque is empty then raise IndexError.
|
| 487 |
+
|
| 488 |
+
>>> deque = Deque()
|
| 489 |
+
>>> deque += 'ab'
|
| 490 |
+
>>> deque.pop()
|
| 491 |
+
'b'
|
| 492 |
+
>>> deque.pop()
|
| 493 |
+
'a'
|
| 494 |
+
>>> deque.pop()
|
| 495 |
+
Traceback (most recent call last):
|
| 496 |
+
...
|
| 497 |
+
IndexError: pop from an empty deque
|
| 498 |
+
|
| 499 |
+
:return: value at back of deque
|
| 500 |
+
:raises IndexError: if deque is empty
|
| 501 |
+
|
| 502 |
+
"""
|
| 503 |
+
default = None, ENOVAL
|
| 504 |
+
_, value = self._cache.pull(default=default, side='back', retry=True)
|
| 505 |
+
if value is ENOVAL:
|
| 506 |
+
raise IndexError('pop from an empty deque')
|
| 507 |
+
return value
|
| 508 |
+
|
| 509 |
+
_pop = pop
|
| 510 |
+
|
| 511 |
+
def popleft(self):
|
| 512 |
+
"""Remove and return value at front of deque.
|
| 513 |
+
|
| 514 |
+
>>> deque = Deque()
|
| 515 |
+
>>> deque += 'ab'
|
| 516 |
+
>>> deque.popleft()
|
| 517 |
+
'a'
|
| 518 |
+
>>> deque.popleft()
|
| 519 |
+
'b'
|
| 520 |
+
>>> deque.popleft()
|
| 521 |
+
Traceback (most recent call last):
|
| 522 |
+
...
|
| 523 |
+
IndexError: pop from an empty deque
|
| 524 |
+
|
| 525 |
+
:return: value at front of deque
|
| 526 |
+
:raises IndexError: if deque is empty
|
| 527 |
+
|
| 528 |
+
"""
|
| 529 |
+
default = None, ENOVAL
|
| 530 |
+
_, value = self._cache.pull(default=default, retry=True)
|
| 531 |
+
if value is ENOVAL:
|
| 532 |
+
raise IndexError('pop from an empty deque')
|
| 533 |
+
return value
|
| 534 |
+
|
| 535 |
+
_popleft = popleft
|
| 536 |
+
|
| 537 |
+
def remove(self, value):
|
| 538 |
+
"""Remove first occurrence of `value` in deque.
|
| 539 |
+
|
| 540 |
+
>>> deque = Deque()
|
| 541 |
+
>>> deque += 'aab'
|
| 542 |
+
>>> deque.remove('a')
|
| 543 |
+
>>> list(deque)
|
| 544 |
+
['a', 'b']
|
| 545 |
+
>>> deque.remove('b')
|
| 546 |
+
>>> list(deque)
|
| 547 |
+
['a']
|
| 548 |
+
>>> deque.remove('c')
|
| 549 |
+
Traceback (most recent call last):
|
| 550 |
+
...
|
| 551 |
+
ValueError: deque.remove(value): value not in deque
|
| 552 |
+
|
| 553 |
+
:param value: value to remove
|
| 554 |
+
:raises ValueError: if value not in deque
|
| 555 |
+
|
| 556 |
+
"""
|
| 557 |
+
_cache = self._cache
|
| 558 |
+
|
| 559 |
+
for key in _cache.iterkeys():
|
| 560 |
+
try:
|
| 561 |
+
item = _cache[key]
|
| 562 |
+
except KeyError:
|
| 563 |
+
continue
|
| 564 |
+
else:
|
| 565 |
+
if value == item:
|
| 566 |
+
try:
|
| 567 |
+
del _cache[key]
|
| 568 |
+
except KeyError:
|
| 569 |
+
continue
|
| 570 |
+
return
|
| 571 |
+
|
| 572 |
+
raise ValueError('deque.remove(value): value not in deque')
|
| 573 |
+
|
| 574 |
+
def reverse(self):
|
| 575 |
+
"""Reverse deque in place.
|
| 576 |
+
|
| 577 |
+
>>> deque = Deque()
|
| 578 |
+
>>> deque += 'abc'
|
| 579 |
+
>>> deque.reverse()
|
| 580 |
+
>>> list(deque)
|
| 581 |
+
['c', 'b', 'a']
|
| 582 |
+
|
| 583 |
+
"""
|
| 584 |
+
# pylint: disable=protected-access
|
| 585 |
+
# GrantJ 2019-03-22 Consider using an algorithm that swaps the values
|
| 586 |
+
# at two keys. Like self._cache.swap(key1, key2, retry=True) The swap
|
| 587 |
+
# method would exchange the values at two given keys. Then, using a
|
| 588 |
+
# forward iterator and a reverse iterator, the reverse method could
|
| 589 |
+
# avoid making copies of the values.
|
| 590 |
+
temp = Deque(iterable=reversed(self))
|
| 591 |
+
self._clear()
|
| 592 |
+
self._extend(temp)
|
| 593 |
+
directory = temp.directory
|
| 594 |
+
temp._cache.close()
|
| 595 |
+
del temp
|
| 596 |
+
rmtree(directory)
|
| 597 |
+
|
| 598 |
+
def rotate(self, steps=1):
|
| 599 |
+
"""Rotate deque right by `steps`.
|
| 600 |
+
|
| 601 |
+
If steps is negative then rotate left.
|
| 602 |
+
|
| 603 |
+
>>> deque = Deque()
|
| 604 |
+
>>> deque += range(5)
|
| 605 |
+
>>> deque.rotate(2)
|
| 606 |
+
>>> list(deque)
|
| 607 |
+
[3, 4, 0, 1, 2]
|
| 608 |
+
>>> deque.rotate(-1)
|
| 609 |
+
>>> list(deque)
|
| 610 |
+
[4, 0, 1, 2, 3]
|
| 611 |
+
|
| 612 |
+
:param int steps: number of steps to rotate (default 1)
|
| 613 |
+
|
| 614 |
+
"""
|
| 615 |
+
if not isinstance(steps, int):
|
| 616 |
+
type_name = type(steps).__name__
|
| 617 |
+
raise TypeError('integer argument expected, got %s' % type_name)
|
| 618 |
+
|
| 619 |
+
len_self = len(self)
|
| 620 |
+
|
| 621 |
+
if not len_self:
|
| 622 |
+
return
|
| 623 |
+
|
| 624 |
+
if steps >= 0:
|
| 625 |
+
steps %= len_self
|
| 626 |
+
|
| 627 |
+
for _ in range(steps):
|
| 628 |
+
try:
|
| 629 |
+
value = self._pop()
|
| 630 |
+
except IndexError:
|
| 631 |
+
return
|
| 632 |
+
else:
|
| 633 |
+
self._appendleft(value)
|
| 634 |
+
else:
|
| 635 |
+
steps *= -1
|
| 636 |
+
steps %= len_self
|
| 637 |
+
|
| 638 |
+
for _ in range(steps):
|
| 639 |
+
try:
|
| 640 |
+
value = self._popleft()
|
| 641 |
+
except IndexError:
|
| 642 |
+
return
|
| 643 |
+
else:
|
| 644 |
+
self._append(value)
|
| 645 |
+
|
| 646 |
+
__hash__ = None # type: ignore
|
| 647 |
+
|
| 648 |
+
@contextmanager
|
| 649 |
+
def transact(self):
|
| 650 |
+
"""Context manager to perform a transaction by locking the deque.
|
| 651 |
+
|
| 652 |
+
While the deque is locked, no other write operation is permitted.
|
| 653 |
+
Transactions should therefore be as short as possible. Read and write
|
| 654 |
+
operations performed in a transaction are atomic. Read operations may
|
| 655 |
+
occur concurrent to a transaction.
|
| 656 |
+
|
| 657 |
+
Transactions may be nested and may not be shared between threads.
|
| 658 |
+
|
| 659 |
+
>>> from diskcache import Deque
|
| 660 |
+
>>> deque = Deque()
|
| 661 |
+
>>> deque += range(5)
|
| 662 |
+
>>> with deque.transact(): # Atomically rotate elements.
|
| 663 |
+
... value = deque.pop()
|
| 664 |
+
... deque.appendleft(value)
|
| 665 |
+
>>> list(deque)
|
| 666 |
+
[4, 0, 1, 2, 3]
|
| 667 |
+
|
| 668 |
+
:return: context manager for use in `with` statement
|
| 669 |
+
|
| 670 |
+
"""
|
| 671 |
+
with self._cache.transact(retry=True):
|
| 672 |
+
yield
|
| 673 |
+
|
| 674 |
+
|
| 675 |
+
class Index(MutableMapping):
|
| 676 |
+
"""Persistent mutable mapping with insertion order iteration.
|
| 677 |
+
|
| 678 |
+
Items are serialized to disk. Index may be initialized from directory path
|
| 679 |
+
where items are stored.
|
| 680 |
+
|
| 681 |
+
Hashing protocol is not used. Keys are looked up by their serialized
|
| 682 |
+
format. See ``diskcache.Disk`` for details.
|
| 683 |
+
|
| 684 |
+
>>> index = Index()
|
| 685 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 686 |
+
>>> index['a']
|
| 687 |
+
1
|
| 688 |
+
>>> list(index)
|
| 689 |
+
['a', 'b', 'c']
|
| 690 |
+
>>> len(index)
|
| 691 |
+
3
|
| 692 |
+
>>> del index['b']
|
| 693 |
+
>>> index.popitem()
|
| 694 |
+
('c', 3)
|
| 695 |
+
|
| 696 |
+
"""
|
| 697 |
+
|
| 698 |
+
def __init__(self, *args, **kwargs):
|
| 699 |
+
"""Initialize index in directory and update items.
|
| 700 |
+
|
| 701 |
+
Optional first argument may be string specifying directory where items
|
| 702 |
+
are stored. When None or not given, temporary directory is created.
|
| 703 |
+
|
| 704 |
+
>>> index = Index({'a': 1, 'b': 2, 'c': 3})
|
| 705 |
+
>>> len(index)
|
| 706 |
+
3
|
| 707 |
+
>>> directory = index.directory
|
| 708 |
+
>>> inventory = Index(directory, d=4)
|
| 709 |
+
>>> inventory['b']
|
| 710 |
+
2
|
| 711 |
+
>>> len(inventory)
|
| 712 |
+
4
|
| 713 |
+
|
| 714 |
+
"""
|
| 715 |
+
if args and isinstance(args[0], (bytes, str)):
|
| 716 |
+
directory = args[0]
|
| 717 |
+
args = args[1:]
|
| 718 |
+
else:
|
| 719 |
+
if args and args[0] is None:
|
| 720 |
+
args = args[1:]
|
| 721 |
+
directory = None
|
| 722 |
+
self._cache = Cache(directory, eviction_policy='none')
|
| 723 |
+
self._update(*args, **kwargs)
|
| 724 |
+
|
| 725 |
+
_update = MutableMapping.update
|
| 726 |
+
|
| 727 |
+
@classmethod
|
| 728 |
+
def fromcache(cls, cache, *args, **kwargs):
|
| 729 |
+
"""Initialize index using `cache` and update items.
|
| 730 |
+
|
| 731 |
+
>>> cache = Cache()
|
| 732 |
+
>>> index = Index.fromcache(cache, {'a': 1, 'b': 2, 'c': 3})
|
| 733 |
+
>>> index.cache is cache
|
| 734 |
+
True
|
| 735 |
+
>>> len(index)
|
| 736 |
+
3
|
| 737 |
+
>>> 'b' in index
|
| 738 |
+
True
|
| 739 |
+
>>> index['c']
|
| 740 |
+
3
|
| 741 |
+
|
| 742 |
+
:param Cache cache: cache to use
|
| 743 |
+
:param args: mapping or sequence of items
|
| 744 |
+
:param kwargs: mapping of items
|
| 745 |
+
:return: initialized Index
|
| 746 |
+
|
| 747 |
+
"""
|
| 748 |
+
# pylint: disable=no-member,protected-access
|
| 749 |
+
self = cls.__new__(cls)
|
| 750 |
+
self._cache = cache
|
| 751 |
+
self._update(*args, **kwargs)
|
| 752 |
+
return self
|
| 753 |
+
|
| 754 |
+
    @property
    def cache(self):
        """Cache used by index (the underlying storage object)."""
        return self._cache
|
| 758 |
+
|
| 759 |
+
    @property
    def directory(self):
        """Directory path where items are stored (delegates to the cache)."""
        return self._cache.directory
|
| 763 |
+
|
| 764 |
+
def __getitem__(self, key):
|
| 765 |
+
"""index.__getitem__(key) <==> index[key]
|
| 766 |
+
|
| 767 |
+
Return corresponding value for `key` in index.
|
| 768 |
+
|
| 769 |
+
>>> index = Index()
|
| 770 |
+
>>> index.update({'a': 1, 'b': 2})
|
| 771 |
+
>>> index['a']
|
| 772 |
+
1
|
| 773 |
+
>>> index['b']
|
| 774 |
+
2
|
| 775 |
+
>>> index['c']
|
| 776 |
+
Traceback (most recent call last):
|
| 777 |
+
...
|
| 778 |
+
KeyError: 'c'
|
| 779 |
+
|
| 780 |
+
:param key: key for item
|
| 781 |
+
:return: value for item in index with given key
|
| 782 |
+
:raises KeyError: if key is not found
|
| 783 |
+
|
| 784 |
+
"""
|
| 785 |
+
return self._cache[key]
|
| 786 |
+
|
| 787 |
+
def __setitem__(self, key, value):
|
| 788 |
+
"""index.__setitem__(key, value) <==> index[key] = value
|
| 789 |
+
|
| 790 |
+
Set `key` and `value` item in index.
|
| 791 |
+
|
| 792 |
+
>>> index = Index()
|
| 793 |
+
>>> index['a'] = 1
|
| 794 |
+
>>> index[0] = None
|
| 795 |
+
>>> len(index)
|
| 796 |
+
2
|
| 797 |
+
|
| 798 |
+
:param key: key for item
|
| 799 |
+
:param value: value for item
|
| 800 |
+
|
| 801 |
+
"""
|
| 802 |
+
self._cache[key] = value
|
| 803 |
+
|
| 804 |
+
def __delitem__(self, key):
|
| 805 |
+
"""index.__delitem__(key) <==> del index[key]
|
| 806 |
+
|
| 807 |
+
Delete corresponding item for `key` from index.
|
| 808 |
+
|
| 809 |
+
>>> index = Index()
|
| 810 |
+
>>> index.update({'a': 1, 'b': 2})
|
| 811 |
+
>>> del index['a']
|
| 812 |
+
>>> del index['b']
|
| 813 |
+
>>> len(index)
|
| 814 |
+
0
|
| 815 |
+
>>> del index['c']
|
| 816 |
+
Traceback (most recent call last):
|
| 817 |
+
...
|
| 818 |
+
KeyError: 'c'
|
| 819 |
+
|
| 820 |
+
:param key: key for item
|
| 821 |
+
:raises KeyError: if key is not found
|
| 822 |
+
|
| 823 |
+
"""
|
| 824 |
+
del self._cache[key]
|
| 825 |
+
|
| 826 |
+
def setdefault(self, key, default=None):
|
| 827 |
+
"""Set and get value for `key` in index using `default`.
|
| 828 |
+
|
| 829 |
+
If `key` is not in index then set corresponding value to `default`. If
|
| 830 |
+
`key` is in index then ignore `default` and return existing value.
|
| 831 |
+
|
| 832 |
+
>>> index = Index()
|
| 833 |
+
>>> index.setdefault('a', 0)
|
| 834 |
+
0
|
| 835 |
+
>>> index.setdefault('a', 1)
|
| 836 |
+
0
|
| 837 |
+
|
| 838 |
+
:param key: key for item
|
| 839 |
+
:param default: value if key is missing (default None)
|
| 840 |
+
:return: value for item in index with given key
|
| 841 |
+
|
| 842 |
+
"""
|
| 843 |
+
_cache = self._cache
|
| 844 |
+
while True:
|
| 845 |
+
try:
|
| 846 |
+
return _cache[key]
|
| 847 |
+
except KeyError:
|
| 848 |
+
_cache.add(key, default, retry=True)
|
| 849 |
+
|
| 850 |
+
def peekitem(self, last=True):
|
| 851 |
+
"""Peek at key and value item pair in index based on iteration order.
|
| 852 |
+
|
| 853 |
+
>>> index = Index()
|
| 854 |
+
>>> for num, letter in enumerate('xyz'):
|
| 855 |
+
... index[letter] = num
|
| 856 |
+
>>> index.peekitem()
|
| 857 |
+
('z', 2)
|
| 858 |
+
>>> index.peekitem(last=False)
|
| 859 |
+
('x', 0)
|
| 860 |
+
|
| 861 |
+
:param bool last: last item in iteration order (default True)
|
| 862 |
+
:return: key and value item pair
|
| 863 |
+
:raises KeyError: if cache is empty
|
| 864 |
+
|
| 865 |
+
"""
|
| 866 |
+
return self._cache.peekitem(last, retry=True)
|
| 867 |
+
|
| 868 |
+
def pop(self, key, default=ENOVAL):
|
| 869 |
+
"""Remove corresponding item for `key` from index and return value.
|
| 870 |
+
|
| 871 |
+
If `key` is missing then return `default`. If `default` is `ENOVAL`
|
| 872 |
+
then raise KeyError.
|
| 873 |
+
|
| 874 |
+
>>> index = Index({'a': 1, 'b': 2})
|
| 875 |
+
>>> index.pop('a')
|
| 876 |
+
1
|
| 877 |
+
>>> index.pop('b')
|
| 878 |
+
2
|
| 879 |
+
>>> index.pop('c', default=3)
|
| 880 |
+
3
|
| 881 |
+
>>> index.pop('d')
|
| 882 |
+
Traceback (most recent call last):
|
| 883 |
+
...
|
| 884 |
+
KeyError: 'd'
|
| 885 |
+
|
| 886 |
+
:param key: key for item
|
| 887 |
+
:param default: return value if key is missing (default ENOVAL)
|
| 888 |
+
:return: value for item if key is found else default
|
| 889 |
+
:raises KeyError: if key is not found and default is ENOVAL
|
| 890 |
+
|
| 891 |
+
"""
|
| 892 |
+
_cache = self._cache
|
| 893 |
+
value = _cache.pop(key, default=default, retry=True)
|
| 894 |
+
if value is ENOVAL:
|
| 895 |
+
raise KeyError(key)
|
| 896 |
+
return value
|
| 897 |
+
|
| 898 |
+
def popitem(self, last=True):
|
| 899 |
+
"""Remove and return item pair.
|
| 900 |
+
|
| 901 |
+
Item pairs are returned in last-in-first-out (LIFO) order if last is
|
| 902 |
+
True else first-in-first-out (FIFO) order. LIFO order imitates a stack
|
| 903 |
+
and FIFO order imitates a queue.
|
| 904 |
+
|
| 905 |
+
>>> index = Index()
|
| 906 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 907 |
+
>>> index.popitem()
|
| 908 |
+
('c', 3)
|
| 909 |
+
>>> index.popitem(last=False)
|
| 910 |
+
('a', 1)
|
| 911 |
+
>>> index.popitem()
|
| 912 |
+
('b', 2)
|
| 913 |
+
>>> index.popitem()
|
| 914 |
+
Traceback (most recent call last):
|
| 915 |
+
...
|
| 916 |
+
KeyError: 'dictionary is empty'
|
| 917 |
+
|
| 918 |
+
:param bool last: pop last item pair (default True)
|
| 919 |
+
:return: key and value item pair
|
| 920 |
+
:raises KeyError: if index is empty
|
| 921 |
+
|
| 922 |
+
"""
|
| 923 |
+
# pylint: disable=arguments-differ,unbalanced-tuple-unpacking
|
| 924 |
+
_cache = self._cache
|
| 925 |
+
|
| 926 |
+
with _cache.transact(retry=True):
|
| 927 |
+
key, value = _cache.peekitem(last=last)
|
| 928 |
+
del _cache[key]
|
| 929 |
+
|
| 930 |
+
return key, value
|
| 931 |
+
|
| 932 |
+
def push(self, value, prefix=None, side='back'):
|
| 933 |
+
"""Push `value` onto `side` of queue in index identified by `prefix`.
|
| 934 |
+
|
| 935 |
+
When prefix is None, integer keys are used. Otherwise, string keys are
|
| 936 |
+
used in the format "prefix-integer". Integer starts at 500 trillion.
|
| 937 |
+
|
| 938 |
+
Defaults to pushing value on back of queue. Set side to 'front' to push
|
| 939 |
+
value on front of queue. Side must be one of 'back' or 'front'.
|
| 940 |
+
|
| 941 |
+
See also `Index.pull`.
|
| 942 |
+
|
| 943 |
+
>>> index = Index()
|
| 944 |
+
>>> print(index.push('apples'))
|
| 945 |
+
500000000000000
|
| 946 |
+
>>> print(index.push('beans'))
|
| 947 |
+
500000000000001
|
| 948 |
+
>>> print(index.push('cherries', side='front'))
|
| 949 |
+
499999999999999
|
| 950 |
+
>>> index[500000000000001]
|
| 951 |
+
'beans'
|
| 952 |
+
>>> index.push('dates', prefix='fruit')
|
| 953 |
+
'fruit-500000000000000'
|
| 954 |
+
|
| 955 |
+
:param value: value for item
|
| 956 |
+
:param str prefix: key prefix (default None, key is integer)
|
| 957 |
+
:param str side: either 'back' or 'front' (default 'back')
|
| 958 |
+
:return: key for item in cache
|
| 959 |
+
|
| 960 |
+
"""
|
| 961 |
+
return self._cache.push(value, prefix, side, retry=True)
|
| 962 |
+
|
| 963 |
+
def pull(self, prefix=None, default=(None, None), side='front'):
|
| 964 |
+
"""Pull key and value item pair from `side` of queue in index.
|
| 965 |
+
|
| 966 |
+
When prefix is None, integer keys are used. Otherwise, string keys are
|
| 967 |
+
used in the format "prefix-integer". Integer starts at 500 trillion.
|
| 968 |
+
|
| 969 |
+
If queue is empty, return default.
|
| 970 |
+
|
| 971 |
+
Defaults to pulling key and value item pairs from front of queue. Set
|
| 972 |
+
side to 'back' to pull from back of queue. Side must be one of 'front'
|
| 973 |
+
or 'back'.
|
| 974 |
+
|
| 975 |
+
See also `Index.push`.
|
| 976 |
+
|
| 977 |
+
>>> index = Index()
|
| 978 |
+
>>> for letter in 'abc':
|
| 979 |
+
... print(index.push(letter))
|
| 980 |
+
500000000000000
|
| 981 |
+
500000000000001
|
| 982 |
+
500000000000002
|
| 983 |
+
>>> key, value = index.pull()
|
| 984 |
+
>>> print(key)
|
| 985 |
+
500000000000000
|
| 986 |
+
>>> value
|
| 987 |
+
'a'
|
| 988 |
+
>>> _, value = index.pull(side='back')
|
| 989 |
+
>>> value
|
| 990 |
+
'c'
|
| 991 |
+
>>> index.pull(prefix='fruit')
|
| 992 |
+
(None, None)
|
| 993 |
+
|
| 994 |
+
:param str prefix: key prefix (default None, key is integer)
|
| 995 |
+
:param default: value to return if key is missing
|
| 996 |
+
(default (None, None))
|
| 997 |
+
:param str side: either 'front' or 'back' (default 'front')
|
| 998 |
+
:return: key and value item pair or default if queue is empty
|
| 999 |
+
|
| 1000 |
+
"""
|
| 1001 |
+
return self._cache.pull(prefix, default, side, retry=True)
|
| 1002 |
+
|
| 1003 |
+
def clear(self):
|
| 1004 |
+
"""Remove all items from index.
|
| 1005 |
+
|
| 1006 |
+
>>> index = Index({'a': 0, 'b': 1, 'c': 2})
|
| 1007 |
+
>>> len(index)
|
| 1008 |
+
3
|
| 1009 |
+
>>> index.clear()
|
| 1010 |
+
>>> dict(index)
|
| 1011 |
+
{}
|
| 1012 |
+
|
| 1013 |
+
"""
|
| 1014 |
+
self._cache.clear(retry=True)
|
| 1015 |
+
|
| 1016 |
+
def __iter__(self):
|
| 1017 |
+
"""index.__iter__() <==> iter(index)
|
| 1018 |
+
|
| 1019 |
+
Return iterator of index keys in insertion order.
|
| 1020 |
+
|
| 1021 |
+
"""
|
| 1022 |
+
return iter(self._cache)
|
| 1023 |
+
|
| 1024 |
+
def __reversed__(self):
|
| 1025 |
+
"""index.__reversed__() <==> reversed(index)
|
| 1026 |
+
|
| 1027 |
+
Return iterator of index keys in reversed insertion order.
|
| 1028 |
+
|
| 1029 |
+
>>> index = Index()
|
| 1030 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 1031 |
+
>>> iterator = reversed(index)
|
| 1032 |
+
>>> next(iterator)
|
| 1033 |
+
'c'
|
| 1034 |
+
>>> list(iterator)
|
| 1035 |
+
['b', 'a']
|
| 1036 |
+
|
| 1037 |
+
"""
|
| 1038 |
+
return reversed(self._cache)
|
| 1039 |
+
|
| 1040 |
+
def __len__(self):
|
| 1041 |
+
"""index.__len__() <==> len(index)
|
| 1042 |
+
|
| 1043 |
+
Return length of index.
|
| 1044 |
+
|
| 1045 |
+
"""
|
| 1046 |
+
return len(self._cache)
|
| 1047 |
+
|
| 1048 |
+
def keys(self):
|
| 1049 |
+
"""Set-like object providing a view of index keys.
|
| 1050 |
+
|
| 1051 |
+
>>> index = Index()
|
| 1052 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1053 |
+
>>> keys_view = index.keys()
|
| 1054 |
+
>>> 'b' in keys_view
|
| 1055 |
+
True
|
| 1056 |
+
|
| 1057 |
+
:return: keys view
|
| 1058 |
+
|
| 1059 |
+
"""
|
| 1060 |
+
return KeysView(self)
|
| 1061 |
+
|
| 1062 |
+
def values(self):
|
| 1063 |
+
"""Set-like object providing a view of index values.
|
| 1064 |
+
|
| 1065 |
+
>>> index = Index()
|
| 1066 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1067 |
+
>>> values_view = index.values()
|
| 1068 |
+
>>> 2 in values_view
|
| 1069 |
+
True
|
| 1070 |
+
|
| 1071 |
+
:return: values view
|
| 1072 |
+
|
| 1073 |
+
"""
|
| 1074 |
+
return ValuesView(self)
|
| 1075 |
+
|
| 1076 |
+
def items(self):
|
| 1077 |
+
"""Set-like object providing a view of index items.
|
| 1078 |
+
|
| 1079 |
+
>>> index = Index()
|
| 1080 |
+
>>> index.update({'a': 1, 'b': 2, 'c': 3})
|
| 1081 |
+
>>> items_view = index.items()
|
| 1082 |
+
>>> ('b', 2) in items_view
|
| 1083 |
+
True
|
| 1084 |
+
|
| 1085 |
+
:return: items view
|
| 1086 |
+
|
| 1087 |
+
"""
|
| 1088 |
+
return ItemsView(self)
|
| 1089 |
+
|
| 1090 |
+
__hash__ = None # type: ignore
|
| 1091 |
+
|
| 1092 |
+
    def __getstate__(self):
        # Pickle only the directory path; the items themselves live on disk.
        return self.directory
|
| 1094 |
+
|
| 1095 |
+
    def __setstate__(self, state):
        # Re-open the index at the pickled directory path.
        self.__init__(state)
|
| 1097 |
+
|
| 1098 |
+
def __eq__(self, other):
|
| 1099 |
+
"""index.__eq__(other) <==> index == other
|
| 1100 |
+
|
| 1101 |
+
Compare equality for index and `other`.
|
| 1102 |
+
|
| 1103 |
+
Comparison to another index or ordered dictionary is
|
| 1104 |
+
order-sensitive. Comparison to all other mappings is order-insensitive.
|
| 1105 |
+
|
| 1106 |
+
>>> index = Index()
|
| 1107 |
+
>>> pairs = [('a', 1), ('b', 2), ('c', 3)]
|
| 1108 |
+
>>> index.update(pairs)
|
| 1109 |
+
>>> from collections import OrderedDict
|
| 1110 |
+
>>> od = OrderedDict(pairs)
|
| 1111 |
+
>>> index == od
|
| 1112 |
+
True
|
| 1113 |
+
>>> index == {'c': 3, 'b': 2, 'a': 1}
|
| 1114 |
+
True
|
| 1115 |
+
|
| 1116 |
+
:param other: other mapping in equality comparison
|
| 1117 |
+
:return: True if index equals other
|
| 1118 |
+
|
| 1119 |
+
"""
|
| 1120 |
+
if len(self) != len(other):
|
| 1121 |
+
return False
|
| 1122 |
+
|
| 1123 |
+
if isinstance(other, (Index, OrderedDict)):
|
| 1124 |
+
alpha = ((key, self[key]) for key in self)
|
| 1125 |
+
beta = ((key, other[key]) for key in other)
|
| 1126 |
+
pairs = zip(alpha, beta)
|
| 1127 |
+
return not any(a != x or b != y for (a, b), (x, y) in pairs)
|
| 1128 |
+
else:
|
| 1129 |
+
return all(self[key] == other.get(key, ENOVAL) for key in self)
|
| 1130 |
+
|
| 1131 |
+
def __ne__(self, other):
|
| 1132 |
+
"""index.__ne__(other) <==> index != other
|
| 1133 |
+
|
| 1134 |
+
Compare inequality for index and `other`.
|
| 1135 |
+
|
| 1136 |
+
Comparison to another index or ordered dictionary is
|
| 1137 |
+
order-sensitive. Comparison to all other mappings is order-insensitive.
|
| 1138 |
+
|
| 1139 |
+
>>> index = Index()
|
| 1140 |
+
>>> index.update([('a', 1), ('b', 2), ('c', 3)])
|
| 1141 |
+
>>> from collections import OrderedDict
|
| 1142 |
+
>>> od = OrderedDict([('c', 3), ('b', 2), ('a', 1)])
|
| 1143 |
+
>>> index != od
|
| 1144 |
+
True
|
| 1145 |
+
>>> index != {'a': 1, 'b': 2}
|
| 1146 |
+
True
|
| 1147 |
+
|
| 1148 |
+
:param other: other mapping in inequality comparison
|
| 1149 |
+
:return: True if index does not equal other
|
| 1150 |
+
|
| 1151 |
+
"""
|
| 1152 |
+
return not self == other
|
| 1153 |
+
|
| 1154 |
+
def memoize(self, name=None, typed=False, ignore=()):
|
| 1155 |
+
"""Memoizing cache decorator.
|
| 1156 |
+
|
| 1157 |
+
Decorator to wrap callable with memoizing function using cache.
|
| 1158 |
+
Repeated calls with the same arguments will lookup result in cache and
|
| 1159 |
+
avoid function evaluation.
|
| 1160 |
+
|
| 1161 |
+
If name is set to None (default), the callable name will be determined
|
| 1162 |
+
automatically.
|
| 1163 |
+
|
| 1164 |
+
If typed is set to True, function arguments of different types will be
|
| 1165 |
+
cached separately. For example, f(3) and f(3.0) will be treated as
|
| 1166 |
+
distinct calls with distinct results.
|
| 1167 |
+
|
| 1168 |
+
The original underlying function is accessible through the __wrapped__
|
| 1169 |
+
attribute. This is useful for introspection, for bypassing the cache,
|
| 1170 |
+
or for rewrapping the function with a different cache.
|
| 1171 |
+
|
| 1172 |
+
>>> from diskcache import Index
|
| 1173 |
+
>>> mapping = Index()
|
| 1174 |
+
>>> @mapping.memoize()
|
| 1175 |
+
... def fibonacci(number):
|
| 1176 |
+
... if number == 0:
|
| 1177 |
+
... return 0
|
| 1178 |
+
... elif number == 1:
|
| 1179 |
+
... return 1
|
| 1180 |
+
... else:
|
| 1181 |
+
... return fibonacci(number - 1) + fibonacci(number - 2)
|
| 1182 |
+
>>> print(fibonacci(100))
|
| 1183 |
+
354224848179261915075
|
| 1184 |
+
|
| 1185 |
+
An additional `__cache_key__` attribute can be used to generate the
|
| 1186 |
+
cache key used for the given arguments.
|
| 1187 |
+
|
| 1188 |
+
>>> key = fibonacci.__cache_key__(100)
|
| 1189 |
+
>>> print(mapping[key])
|
| 1190 |
+
354224848179261915075
|
| 1191 |
+
|
| 1192 |
+
Remember to call memoize when decorating a callable. If you forget,
|
| 1193 |
+
then a TypeError will occur. Note the lack of parenthenses after
|
| 1194 |
+
memoize below:
|
| 1195 |
+
|
| 1196 |
+
>>> @mapping.memoize
|
| 1197 |
+
... def test():
|
| 1198 |
+
... pass
|
| 1199 |
+
Traceback (most recent call last):
|
| 1200 |
+
...
|
| 1201 |
+
TypeError: name cannot be callable
|
| 1202 |
+
|
| 1203 |
+
:param str name: name given for callable (default None, automatic)
|
| 1204 |
+
:param bool typed: cache different types separately (default False)
|
| 1205 |
+
:param set ignore: positional or keyword args to ignore (default ())
|
| 1206 |
+
:return: callable decorator
|
| 1207 |
+
|
| 1208 |
+
"""
|
| 1209 |
+
return self._cache.memoize(name, typed, ignore=ignore)
|
| 1210 |
+
|
| 1211 |
+
@contextmanager
|
| 1212 |
+
def transact(self):
|
| 1213 |
+
"""Context manager to perform a transaction by locking the index.
|
| 1214 |
+
|
| 1215 |
+
While the index is locked, no other write operation is permitted.
|
| 1216 |
+
Transactions should therefore be as short as possible. Read and write
|
| 1217 |
+
operations performed in a transaction are atomic. Read operations may
|
| 1218 |
+
occur concurrent to a transaction.
|
| 1219 |
+
|
| 1220 |
+
Transactions may be nested and may not be shared between threads.
|
| 1221 |
+
|
| 1222 |
+
>>> from diskcache import Index
|
| 1223 |
+
>>> mapping = Index()
|
| 1224 |
+
>>> with mapping.transact(): # Atomically increment two keys.
|
| 1225 |
+
... mapping['total'] = mapping.get('total', 0) + 123.4
|
| 1226 |
+
... mapping['count'] = mapping.get('count', 0) + 1
|
| 1227 |
+
>>> with mapping.transact(): # Atomically calculate average.
|
| 1228 |
+
... average = mapping['total'] / mapping['count']
|
| 1229 |
+
>>> average
|
| 1230 |
+
123.4
|
| 1231 |
+
|
| 1232 |
+
:return: context manager for use in `with` statement
|
| 1233 |
+
|
| 1234 |
+
"""
|
| 1235 |
+
with self._cache.transact(retry=True):
|
| 1236 |
+
yield
|
| 1237 |
+
|
| 1238 |
+
def __repr__(self):
|
| 1239 |
+
"""index.__repr__() <==> repr(index)
|
| 1240 |
+
|
| 1241 |
+
Return string with printable representation of index.
|
| 1242 |
+
|
| 1243 |
+
"""
|
| 1244 |
+
name = type(self).__name__
|
| 1245 |
+
return '{0}({1!r})'.format(name, self.directory)
|
.venv/lib/python3.11/site-packages/diskcache/recipes.py
ADDED
|
@@ -0,0 +1,488 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Disk Cache Recipes
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import functools
|
| 5 |
+
import math
|
| 6 |
+
import os
|
| 7 |
+
import random
|
| 8 |
+
import threading
|
| 9 |
+
import time
|
| 10 |
+
|
| 11 |
+
from .core import ENOVAL, args_to_key, full_name
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Averager:
    """Recipe for calculating a running average.

    Sometimes known as "online statistics," the recipe stores a
    ``(total, count)`` pair under a single key and computes the mean on
    demand.

    Assumes the key will not be evicted. Set the eviction policy to 'none'
    on the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.FanoutCache()
    >>> ave = Averager(cache, 'latency')
    >>> ave.add(0.080)
    >>> ave.add(0.120)
    >>> ave.get()
    0.1
    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache  # backing cache (Cache or FanoutCache)
        self._key = key  # key holding the (total, count) pair
        self._expire = expire  # seconds until key expiry, or None
        self._tag = tag  # tag stored with the key, or None

    def add(self, value):
        """Add `value` to average."""
        # Read-modify-write under a transaction so concurrent adders
        # cannot lose updates.
        with self._cache.transact(retry=True):
            total, count = self._cache.get(self._key, default=(0.0, 0))
            self._cache.set(
                self._key,
                (total + value, count + 1),
                expire=self._expire,
                tag=self._tag,
            )

    def get(self):
        """Get current average or return `None` if count equals zero."""
        total, count = self._cache.get(self._key, default=(0.0, 0), retry=True)
        return total / count if count else None

    def pop(self):
        """Return current average and delete key."""
        total, count = self._cache.pop(self._key, default=(0.0, 0), retry=True)
        return total / count if count else None
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class Lock:
    """Recipe for cross-process and cross-thread lock.

    Implemented as a spin lock over the cache's atomic ``add`` operation.

    Assumes the key will not be evicted. Set the eviction policy to 'none'
    on the cache to guarantee the key is not evicted.

    >>> import diskcache
    >>> cache = diskcache.Cache()
    >>> lock = Lock(cache, 'report-123')
    >>> lock.acquire()
    >>> lock.release()
    >>> with lock:
    ...     pass
    """

    def __init__(self, cache, key, expire=None, tag=None):
        self._cache = cache  # backing cache
        self._key = key  # key whose presence marks the lock as held
        self._expire = expire  # seconds until lock expiry, or None
        self._tag = tag  # tag stored with the key, or None

    def acquire(self):
        """Acquire lock using spin-lock algorithm."""
        # `add` succeeds only when the key is absent, so exactly one caller
        # wins each round; losers back off briefly and retry.
        while not self._cache.add(
            self._key,
            None,
            expire=self._expire,
            tag=self._tag,
            retry=True,
        ):
            time.sleep(0.001)

    def release(self):
        """Release lock by deleting key."""
        self._cache.delete(self._key, retry=True)

    def locked(self):
        """Return true if the lock is acquired."""
        return self._key in self._cache

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc_info):
        self.release()
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class RLock:
|
| 120 |
+
"""Recipe for cross-process and cross-thread re-entrant lock.
|
| 121 |
+
|
| 122 |
+
Assumes the key will not be evicted. Set the eviction policy to 'none' on
|
| 123 |
+
the cache to guarantee the key is not evicted.
|
| 124 |
+
|
| 125 |
+
>>> import diskcache
|
| 126 |
+
>>> cache = diskcache.Cache()
|
| 127 |
+
>>> rlock = RLock(cache, 'user-123')
|
| 128 |
+
>>> rlock.acquire()
|
| 129 |
+
>>> rlock.acquire()
|
| 130 |
+
>>> rlock.release()
|
| 131 |
+
>>> with rlock:
|
| 132 |
+
... pass
|
| 133 |
+
>>> rlock.release()
|
| 134 |
+
>>> rlock.release()
|
| 135 |
+
Traceback (most recent call last):
|
| 136 |
+
...
|
| 137 |
+
AssertionError: cannot release un-acquired lock
|
| 138 |
+
|
| 139 |
+
"""
|
| 140 |
+
|
| 141 |
+
def __init__(self, cache, key, expire=None, tag=None):
|
| 142 |
+
self._cache = cache
|
| 143 |
+
self._key = key
|
| 144 |
+
self._expire = expire
|
| 145 |
+
self._tag = tag
|
| 146 |
+
|
| 147 |
+
def acquire(self):
|
| 148 |
+
"""Acquire lock by incrementing count using spin-lock algorithm."""
|
| 149 |
+
pid = os.getpid()
|
| 150 |
+
tid = threading.get_ident()
|
| 151 |
+
pid_tid = '{}-{}'.format(pid, tid)
|
| 152 |
+
|
| 153 |
+
while True:
|
| 154 |
+
with self._cache.transact(retry=True):
|
| 155 |
+
value, count = self._cache.get(self._key, default=(None, 0))
|
| 156 |
+
if pid_tid == value or count == 0:
|
| 157 |
+
self._cache.set(
|
| 158 |
+
self._key,
|
| 159 |
+
(pid_tid, count + 1),
|
| 160 |
+
expire=self._expire,
|
| 161 |
+
tag=self._tag,
|
| 162 |
+
)
|
| 163 |
+
return
|
| 164 |
+
time.sleep(0.001)
|
| 165 |
+
|
| 166 |
+
def release(self):
|
| 167 |
+
"""Release lock by decrementing count."""
|
| 168 |
+
pid = os.getpid()
|
| 169 |
+
tid = threading.get_ident()
|
| 170 |
+
pid_tid = '{}-{}'.format(pid, tid)
|
| 171 |
+
|
| 172 |
+
with self._cache.transact(retry=True):
|
| 173 |
+
value, count = self._cache.get(self._key, default=(None, 0))
|
| 174 |
+
is_owned = pid_tid == value and count > 0
|
| 175 |
+
assert is_owned, 'cannot release un-acquired lock'
|
| 176 |
+
self._cache.set(
|
| 177 |
+
self._key,
|
| 178 |
+
(value, count - 1),
|
| 179 |
+
expire=self._expire,
|
| 180 |
+
tag=self._tag,
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
def __enter__(self):
|
| 184 |
+
self.acquire()
|
| 185 |
+
|
| 186 |
+
def __exit__(self, *exc_info):
|
| 187 |
+
self.release()
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class BoundedSemaphore:
|
| 191 |
+
"""Recipe for cross-process and cross-thread bounded semaphore.
|
| 192 |
+
|
| 193 |
+
Assumes the key will not be evicted. Set the eviction policy to 'none' on
|
| 194 |
+
the cache to guarantee the key is not evicted.
|
| 195 |
+
|
| 196 |
+
>>> import diskcache
|
| 197 |
+
>>> cache = diskcache.Cache()
|
| 198 |
+
>>> semaphore = BoundedSemaphore(cache, 'max-cons', value=2)
|
| 199 |
+
>>> semaphore.acquire()
|
| 200 |
+
>>> semaphore.acquire()
|
| 201 |
+
>>> semaphore.release()
|
| 202 |
+
>>> with semaphore:
|
| 203 |
+
... pass
|
| 204 |
+
>>> semaphore.release()
|
| 205 |
+
>>> semaphore.release()
|
| 206 |
+
Traceback (most recent call last):
|
| 207 |
+
...
|
| 208 |
+
AssertionError: cannot release un-acquired semaphore
|
| 209 |
+
|
| 210 |
+
"""
|
| 211 |
+
|
| 212 |
+
def __init__(self, cache, key, value=1, expire=None, tag=None):
|
| 213 |
+
self._cache = cache
|
| 214 |
+
self._key = key
|
| 215 |
+
self._value = value
|
| 216 |
+
self._expire = expire
|
| 217 |
+
self._tag = tag
|
| 218 |
+
|
| 219 |
+
def acquire(self):
|
| 220 |
+
"""Acquire semaphore by decrementing value using spin-lock algorithm."""
|
| 221 |
+
while True:
|
| 222 |
+
with self._cache.transact(retry=True):
|
| 223 |
+
value = self._cache.get(self._key, default=self._value)
|
| 224 |
+
if value > 0:
|
| 225 |
+
self._cache.set(
|
| 226 |
+
self._key,
|
| 227 |
+
value - 1,
|
| 228 |
+
expire=self._expire,
|
| 229 |
+
tag=self._tag,
|
| 230 |
+
)
|
| 231 |
+
return
|
| 232 |
+
time.sleep(0.001)
|
| 233 |
+
|
| 234 |
+
def release(self):
|
| 235 |
+
"""Release semaphore by incrementing value."""
|
| 236 |
+
with self._cache.transact(retry=True):
|
| 237 |
+
value = self._cache.get(self._key, default=self._value)
|
| 238 |
+
assert self._value > value, 'cannot release un-acquired semaphore'
|
| 239 |
+
value += 1
|
| 240 |
+
self._cache.set(
|
| 241 |
+
self._key,
|
| 242 |
+
value,
|
| 243 |
+
expire=self._expire,
|
| 244 |
+
tag=self._tag,
|
| 245 |
+
)
|
| 246 |
+
|
| 247 |
+
def __enter__(self):
|
| 248 |
+
self.acquire()
|
| 249 |
+
|
| 250 |
+
def __exit__(self, *exc_info):
|
| 251 |
+
self.release()
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def throttle(
|
| 255 |
+
cache,
|
| 256 |
+
count,
|
| 257 |
+
seconds,
|
| 258 |
+
name=None,
|
| 259 |
+
expire=None,
|
| 260 |
+
tag=None,
|
| 261 |
+
time_func=time.time,
|
| 262 |
+
sleep_func=time.sleep,
|
| 263 |
+
):
|
| 264 |
+
"""Decorator to throttle calls to function.
|
| 265 |
+
|
| 266 |
+
Assumes keys will not be evicted. Set the eviction policy to 'none' on the
|
| 267 |
+
cache to guarantee the keys are not evicted.
|
| 268 |
+
|
| 269 |
+
>>> import diskcache, time
|
| 270 |
+
>>> cache = diskcache.Cache()
|
| 271 |
+
>>> count = 0
|
| 272 |
+
>>> @throttle(cache, 2, 1) # 2 calls per 1 second
|
| 273 |
+
... def increment():
|
| 274 |
+
... global count
|
| 275 |
+
... count += 1
|
| 276 |
+
>>> start = time.time()
|
| 277 |
+
>>> while (time.time() - start) <= 2:
|
| 278 |
+
... increment()
|
| 279 |
+
>>> count in (6, 7) # 6 or 7 calls depending on CPU load
|
| 280 |
+
True
|
| 281 |
+
|
| 282 |
+
"""
|
| 283 |
+
|
| 284 |
+
def decorator(func):
|
| 285 |
+
rate = count / float(seconds)
|
| 286 |
+
key = full_name(func) if name is None else name
|
| 287 |
+
now = time_func()
|
| 288 |
+
cache.set(key, (now, count), expire=expire, tag=tag, retry=True)
|
| 289 |
+
|
| 290 |
+
@functools.wraps(func)
|
| 291 |
+
def wrapper(*args, **kwargs):
|
| 292 |
+
while True:
|
| 293 |
+
with cache.transact(retry=True):
|
| 294 |
+
last, tally = cache.get(key)
|
| 295 |
+
now = time_func()
|
| 296 |
+
tally += (now - last) * rate
|
| 297 |
+
delay = 0
|
| 298 |
+
|
| 299 |
+
if tally > count:
|
| 300 |
+
cache.set(key, (now, count - 1), expire)
|
| 301 |
+
elif tally >= 1:
|
| 302 |
+
cache.set(key, (now, tally - 1), expire)
|
| 303 |
+
else:
|
| 304 |
+
delay = (1 - tally) / rate
|
| 305 |
+
|
| 306 |
+
if delay:
|
| 307 |
+
sleep_func(delay)
|
| 308 |
+
else:
|
| 309 |
+
break
|
| 310 |
+
|
| 311 |
+
return func(*args, **kwargs)
|
| 312 |
+
|
| 313 |
+
return wrapper
|
| 314 |
+
|
| 315 |
+
return decorator
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def barrier(cache, lock_factory, name=None, expire=None, tag=None):
|
| 319 |
+
"""Barrier to calling decorated function.
|
| 320 |
+
|
| 321 |
+
Supports different kinds of locks: Lock, RLock, BoundedSemaphore.
|
| 322 |
+
|
| 323 |
+
Assumes keys will not be evicted. Set the eviction policy to 'none' on the
|
| 324 |
+
cache to guarantee the keys are not evicted.
|
| 325 |
+
|
| 326 |
+
>>> import diskcache, time
|
| 327 |
+
>>> cache = diskcache.Cache()
|
| 328 |
+
>>> @barrier(cache, Lock)
|
| 329 |
+
... def work(num):
|
| 330 |
+
... print('worker started')
|
| 331 |
+
... time.sleep(1)
|
| 332 |
+
... print('worker finished')
|
| 333 |
+
>>> import multiprocessing.pool
|
| 334 |
+
>>> pool = multiprocessing.pool.ThreadPool(2)
|
| 335 |
+
>>> _ = pool.map(work, range(2))
|
| 336 |
+
worker started
|
| 337 |
+
worker finished
|
| 338 |
+
worker started
|
| 339 |
+
worker finished
|
| 340 |
+
>>> pool.terminate()
|
| 341 |
+
|
| 342 |
+
"""
|
| 343 |
+
|
| 344 |
+
def decorator(func):
|
| 345 |
+
key = full_name(func) if name is None else name
|
| 346 |
+
lock = lock_factory(cache, key, expire=expire, tag=tag)
|
| 347 |
+
|
| 348 |
+
@functools.wraps(func)
|
| 349 |
+
def wrapper(*args, **kwargs):
|
| 350 |
+
with lock:
|
| 351 |
+
return func(*args, **kwargs)
|
| 352 |
+
|
| 353 |
+
return wrapper
|
| 354 |
+
|
| 355 |
+
return decorator
|
| 356 |
+
|
| 357 |
+
|
| 358 |
+
def memoize_stampede(
|
| 359 |
+
cache, expire, name=None, typed=False, tag=None, beta=1, ignore=()
|
| 360 |
+
):
|
| 361 |
+
"""Memoizing cache decorator with cache stampede protection.
|
| 362 |
+
|
| 363 |
+
Cache stampedes are a type of system overload that can occur when parallel
|
| 364 |
+
computing systems using memoization come under heavy load. This behaviour
|
| 365 |
+
is sometimes also called dog-piling, cache miss storm, cache choking, or
|
| 366 |
+
the thundering herd problem.
|
| 367 |
+
|
| 368 |
+
The memoization decorator implements cache stampede protection through
|
| 369 |
+
early recomputation. Early recomputation of function results will occur
|
| 370 |
+
probabilistically before expiration in a background thread of
|
| 371 |
+
execution. Early probabilistic recomputation is based on research by
|
| 372 |
+
Vattani, A.; Chierichetti, F.; Lowenstein, K. (2015), Optimal Probabilistic
|
| 373 |
+
Cache Stampede Prevention, VLDB, pp. 886-897, ISSN 2150-8097
|
| 374 |
+
|
| 375 |
+
If name is set to None (default), the callable name will be determined
|
| 376 |
+
automatically.
|
| 377 |
+
|
| 378 |
+
If typed is set to True, function arguments of different types will be
|
| 379 |
+
cached separately. For example, f(3) and f(3.0) will be treated as distinct
|
| 380 |
+
calls with distinct results.
|
| 381 |
+
|
| 382 |
+
The original underlying function is accessible through the `__wrapped__`
|
| 383 |
+
attribute. This is useful for introspection, for bypassing the cache, or
|
| 384 |
+
for rewrapping the function with a different cache.
|
| 385 |
+
|
| 386 |
+
>>> from diskcache import Cache
|
| 387 |
+
>>> cache = Cache()
|
| 388 |
+
>>> @memoize_stampede(cache, expire=1)
|
| 389 |
+
... def fib(number):
|
| 390 |
+
... if number == 0:
|
| 391 |
+
... return 0
|
| 392 |
+
... elif number == 1:
|
| 393 |
+
... return 1
|
| 394 |
+
... else:
|
| 395 |
+
... return fib(number - 1) + fib(number - 2)
|
| 396 |
+
>>> print(fib(100))
|
| 397 |
+
354224848179261915075
|
| 398 |
+
|
| 399 |
+
An additional `__cache_key__` attribute can be used to generate the cache
|
| 400 |
+
key used for the given arguments.
|
| 401 |
+
|
| 402 |
+
>>> key = fib.__cache_key__(100)
|
| 403 |
+
>>> del cache[key]
|
| 404 |
+
|
| 405 |
+
Remember to call memoize when decorating a callable. If you forget, then a
|
| 406 |
+
TypeError will occur.
|
| 407 |
+
|
| 408 |
+
:param cache: cache to store callable arguments and return values
|
| 409 |
+
:param float expire: seconds until arguments expire
|
| 410 |
+
:param str name: name given for callable (default None, automatic)
|
| 411 |
+
:param bool typed: cache different types separately (default False)
|
| 412 |
+
:param str tag: text to associate with arguments (default None)
|
| 413 |
+
:param set ignore: positional or keyword args to ignore (default ())
|
| 414 |
+
:return: callable decorator
|
| 415 |
+
|
| 416 |
+
"""
|
| 417 |
+
# Caution: Nearly identical code exists in Cache.memoize
|
| 418 |
+
def decorator(func):
|
| 419 |
+
"""Decorator created by memoize call for callable."""
|
| 420 |
+
base = (full_name(func),) if name is None else (name,)
|
| 421 |
+
|
| 422 |
+
def timer(*args, **kwargs):
|
| 423 |
+
"""Time execution of `func` and return result and time delta."""
|
| 424 |
+
start = time.time()
|
| 425 |
+
result = func(*args, **kwargs)
|
| 426 |
+
delta = time.time() - start
|
| 427 |
+
return result, delta
|
| 428 |
+
|
| 429 |
+
@functools.wraps(func)
|
| 430 |
+
def wrapper(*args, **kwargs):
|
| 431 |
+
"""Wrapper for callable to cache arguments and return values."""
|
| 432 |
+
key = wrapper.__cache_key__(*args, **kwargs)
|
| 433 |
+
pair, expire_time = cache.get(
|
| 434 |
+
key,
|
| 435 |
+
default=ENOVAL,
|
| 436 |
+
expire_time=True,
|
| 437 |
+
retry=True,
|
| 438 |
+
)
|
| 439 |
+
|
| 440 |
+
if pair is not ENOVAL:
|
| 441 |
+
result, delta = pair
|
| 442 |
+
now = time.time()
|
| 443 |
+
ttl = expire_time - now
|
| 444 |
+
|
| 445 |
+
if (-delta * beta * math.log(random.random())) < ttl:
|
| 446 |
+
return result # Cache hit.
|
| 447 |
+
|
| 448 |
+
# Check whether a thread has started for early recomputation.
|
| 449 |
+
|
| 450 |
+
thread_key = key + (ENOVAL,)
|
| 451 |
+
thread_added = cache.add(
|
| 452 |
+
thread_key,
|
| 453 |
+
None,
|
| 454 |
+
expire=delta,
|
| 455 |
+
retry=True,
|
| 456 |
+
)
|
| 457 |
+
|
| 458 |
+
if thread_added:
|
| 459 |
+
# Start thread for early recomputation.
|
| 460 |
+
def recompute():
|
| 461 |
+
with cache:
|
| 462 |
+
pair = timer(*args, **kwargs)
|
| 463 |
+
cache.set(
|
| 464 |
+
key,
|
| 465 |
+
pair,
|
| 466 |
+
expire=expire,
|
| 467 |
+
tag=tag,
|
| 468 |
+
retry=True,
|
| 469 |
+
)
|
| 470 |
+
|
| 471 |
+
thread = threading.Thread(target=recompute)
|
| 472 |
+
thread.daemon = True
|
| 473 |
+
thread.start()
|
| 474 |
+
|
| 475 |
+
return result
|
| 476 |
+
|
| 477 |
+
pair = timer(*args, **kwargs)
|
| 478 |
+
cache.set(key, pair, expire=expire, tag=tag, retry=True)
|
| 479 |
+
return pair[0]
|
| 480 |
+
|
| 481 |
+
def __cache_key__(*args, **kwargs):
|
| 482 |
+
"""Make key for cache given function arguments."""
|
| 483 |
+
return args_to_key(base, args, kwargs, typed, ignore)
|
| 484 |
+
|
| 485 |
+
wrapper.__cache_key__ = __cache_key__
|
| 486 |
+
return wrapper
|
| 487 |
+
|
| 488 |
+
return decorator
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/METADATA
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: httpx
|
| 3 |
+
Version: 0.28.1
|
| 4 |
+
Summary: The next generation HTTP client.
|
| 5 |
+
Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md
|
| 6 |
+
Project-URL: Documentation, https://www.python-httpx.org
|
| 7 |
+
Project-URL: Homepage, https://github.com/encode/httpx
|
| 8 |
+
Project-URL: Source, https://github.com/encode/httpx
|
| 9 |
+
Author-email: Tom Christie <tom@tomchristie.com>
|
| 10 |
+
License: BSD-3-Clause
|
| 11 |
+
Classifier: Development Status :: 4 - Beta
|
| 12 |
+
Classifier: Environment :: Web Environment
|
| 13 |
+
Classifier: Framework :: AsyncIO
|
| 14 |
+
Classifier: Framework :: Trio
|
| 15 |
+
Classifier: Intended Audience :: Developers
|
| 16 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 17 |
+
Classifier: Operating System :: OS Independent
|
| 18 |
+
Classifier: Programming Language :: Python :: 3
|
| 19 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 22 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 25 |
+
Classifier: Topic :: Internet :: WWW/HTTP
|
| 26 |
+
Requires-Python: >=3.8
|
| 27 |
+
Requires-Dist: anyio
|
| 28 |
+
Requires-Dist: certifi
|
| 29 |
+
Requires-Dist: httpcore==1.*
|
| 30 |
+
Requires-Dist: idna
|
| 31 |
+
Provides-Extra: brotli
|
| 32 |
+
Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli'
|
| 33 |
+
Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli'
|
| 34 |
+
Provides-Extra: cli
|
| 35 |
+
Requires-Dist: click==8.*; extra == 'cli'
|
| 36 |
+
Requires-Dist: pygments==2.*; extra == 'cli'
|
| 37 |
+
Requires-Dist: rich<14,>=10; extra == 'cli'
|
| 38 |
+
Provides-Extra: http2
|
| 39 |
+
Requires-Dist: h2<5,>=3; extra == 'http2'
|
| 40 |
+
Provides-Extra: socks
|
| 41 |
+
Requires-Dist: socksio==1.*; extra == 'socks'
|
| 42 |
+
Provides-Extra: zstd
|
| 43 |
+
Requires-Dist: zstandard>=0.18.0; extra == 'zstd'
|
| 44 |
+
Description-Content-Type: text/markdown
|
| 45 |
+
|
| 46 |
+
<p align="center">
|
| 47 |
+
<a href="https://www.python-httpx.org/"><img width="350" height="208" src="https://raw.githubusercontent.com/encode/httpx/master/docs/img/butterfly.png" alt='HTTPX'></a>
|
| 48 |
+
</p>
|
| 49 |
+
|
| 50 |
+
<p align="center"><strong>HTTPX</strong> <em>- A next-generation HTTP client for Python.</em></p>
|
| 51 |
+
|
| 52 |
+
<p align="center">
|
| 53 |
+
<a href="https://github.com/encode/httpx/actions">
|
| 54 |
+
<img src="https://github.com/encode/httpx/workflows/Test%20Suite/badge.svg" alt="Test Suite">
|
| 55 |
+
</a>
|
| 56 |
+
<a href="https://pypi.org/project/httpx/">
|
| 57 |
+
<img src="https://badge.fury.io/py/httpx.svg" alt="Package version">
|
| 58 |
+
</a>
|
| 59 |
+
</p>
|
| 60 |
+
|
| 61 |
+
HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync and async APIs**.
|
| 62 |
+
|
| 63 |
+
---
|
| 64 |
+
|
| 65 |
+
Install HTTPX using pip:
|
| 66 |
+
|
| 67 |
+
```shell
|
| 68 |
+
$ pip install httpx
|
| 69 |
+
```
|
| 70 |
+
|
| 71 |
+
Now, let's get started:
|
| 72 |
+
|
| 73 |
+
```pycon
|
| 74 |
+
>>> import httpx
|
| 75 |
+
>>> r = httpx.get('https://www.example.org/')
|
| 76 |
+
>>> r
|
| 77 |
+
<Response [200 OK]>
|
| 78 |
+
>>> r.status_code
|
| 79 |
+
200
|
| 80 |
+
>>> r.headers['content-type']
|
| 81 |
+
'text/html; charset=UTF-8'
|
| 82 |
+
>>> r.text
|
| 83 |
+
'<!doctype html>\n<html>\n<head>\n<title>Example Domain</title>...'
|
| 84 |
+
```
|
| 85 |
+
|
| 86 |
+
Or, using the command-line client.
|
| 87 |
+
|
| 88 |
+
```shell
|
| 89 |
+
$ pip install 'httpx[cli]' # The command line client is an optional dependency.
|
| 90 |
+
```
|
| 91 |
+
|
| 92 |
+
Which now allows us to use HTTPX directly from the command-line...
|
| 93 |
+
|
| 94 |
+
<p align="center">
|
| 95 |
+
<img width="700" src="https://raw.githubusercontent.com/encode/httpx/master/docs/img/httpx-help.png" alt='httpx --help'>
|
| 96 |
+
</p>
|
| 97 |
+
|
| 98 |
+
Sending a request...
|
| 99 |
+
|
| 100 |
+
<p align="center">
|
| 101 |
+
<img width="700" src="https://raw.githubusercontent.com/encode/httpx/master/docs/img/httpx-request.png" alt='httpx http://httpbin.org/json'>
|
| 102 |
+
</p>
|
| 103 |
+
|
| 104 |
+
## Features
|
| 105 |
+
|
| 106 |
+
HTTPX builds on the well-established usability of `requests`, and gives you:
|
| 107 |
+
|
| 108 |
+
* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/).
|
| 109 |
+
* An integrated command-line client.
|
| 110 |
+
* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
|
| 111 |
+
* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/).
|
| 112 |
+
* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport).
|
| 113 |
+
* Strict timeouts everywhere.
|
| 114 |
+
* Fully type annotated.
|
| 115 |
+
* 100% test coverage.
|
| 116 |
+
|
| 117 |
+
Plus all the standard features of `requests`...
|
| 118 |
+
|
| 119 |
+
* International Domains and URLs
|
| 120 |
+
* Keep-Alive & Connection Pooling
|
| 121 |
+
* Sessions with Cookie Persistence
|
| 122 |
+
* Browser-style SSL Verification
|
| 123 |
+
* Basic/Digest Authentication
|
| 124 |
+
* Elegant Key/Value Cookies
|
| 125 |
+
* Automatic Decompression
|
| 126 |
+
* Automatic Content Decoding
|
| 127 |
+
* Unicode Response Bodies
|
| 128 |
+
* Multipart File Uploads
|
| 129 |
+
* HTTP(S) Proxy Support
|
| 130 |
+
* Connection Timeouts
|
| 131 |
+
* Streaming Downloads
|
| 132 |
+
* .netrc Support
|
| 133 |
+
* Chunked Requests
|
| 134 |
+
|
| 135 |
+
## Installation
|
| 136 |
+
|
| 137 |
+
Install with pip:
|
| 138 |
+
|
| 139 |
+
```shell
|
| 140 |
+
$ pip install httpx
|
| 141 |
+
```
|
| 142 |
+
|
| 143 |
+
Or, to include the optional HTTP/2 support, use:
|
| 144 |
+
|
| 145 |
+
```shell
|
| 146 |
+
$ pip install httpx[http2]
|
| 147 |
+
```
|
| 148 |
+
|
| 149 |
+
HTTPX requires Python 3.8+.
|
| 150 |
+
|
| 151 |
+
## Documentation
|
| 152 |
+
|
| 153 |
+
Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/).
|
| 154 |
+
|
| 155 |
+
For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/).
|
| 156 |
+
|
| 157 |
+
For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section.
|
| 158 |
+
|
| 159 |
+
The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference.
|
| 160 |
+
|
| 161 |
+
To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/).
|
| 162 |
+
|
| 163 |
+
## Contribute
|
| 164 |
+
|
| 165 |
+
If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start.
|
| 166 |
+
|
| 167 |
+
## Dependencies
|
| 168 |
+
|
| 169 |
+
The HTTPX project relies on these excellent libraries:
|
| 170 |
+
|
| 171 |
+
* `httpcore` - The underlying transport implementation for `httpx`.
|
| 172 |
+
* `h11` - HTTP/1.1 support.
|
| 173 |
+
* `certifi` - SSL certificates.
|
| 174 |
+
* `idna` - Internationalized domain name support.
|
| 175 |
+
* `sniffio` - Async library autodetection.
|
| 176 |
+
|
| 177 |
+
As well as these optional installs:
|
| 178 |
+
|
| 179 |
+
* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)*
|
| 180 |
+
* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)*
|
| 181 |
+
* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)*
|
| 182 |
+
* `click` - Command line client support. *(Optional, with `httpx[cli]`)*
|
| 183 |
+
* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)*
|
| 184 |
+
* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)*
|
| 185 |
+
|
| 186 |
+
A huge amount of credit is due to `requests` for the API layout that
|
| 187 |
+
much of this work follows, as well as to `urllib3` for plenty of design
|
| 188 |
+
inspiration around the lower-level networking details.
|
| 189 |
+
|
| 190 |
+
---
|
| 191 |
+
|
| 192 |
+
<p align="center"><i>HTTPX is <a href="https://github.com/encode/httpx/blob/master/LICENSE.md">BSD licensed</a> code.<br/>Designed & crafted with care.</i><br/>— 🦋 —</p>
|
| 193 |
+
|
| 194 |
+
## Release Information
|
| 195 |
+
|
| 196 |
+
### Fixed
|
| 197 |
+
|
| 198 |
+
* Reintroduced supposedly-private `URLTypes` shortcut. (#2673)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
---
|
| 202 |
+
|
| 203 |
+
[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md)
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/RECORD
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/httpx,sha256=-O1w-cp46ZdDYeVBnP1ONQmtxcUS42Cdb-Qadrs4fbs,222
|
| 2 |
+
httpx-0.28.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
httpx-0.28.1.dist-info/METADATA,sha256=_rubD48-gNV8gZnDBPNcQzboWB0dGNeYPJJ2a4J5OyU,7052
|
| 4 |
+
httpx-0.28.1.dist-info/RECORD,,
|
| 5 |
+
httpx-0.28.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
|
| 6 |
+
httpx-0.28.1.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37
|
| 7 |
+
httpx-0.28.1.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508
|
| 8 |
+
httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171
|
| 9 |
+
httpx/__pycache__/__init__.cpython-311.pyc,,
|
| 10 |
+
httpx/__pycache__/__version__.cpython-311.pyc,,
|
| 11 |
+
httpx/__pycache__/_api.cpython-311.pyc,,
|
| 12 |
+
httpx/__pycache__/_auth.cpython-311.pyc,,
|
| 13 |
+
httpx/__pycache__/_client.cpython-311.pyc,,
|
| 14 |
+
httpx/__pycache__/_config.cpython-311.pyc,,
|
| 15 |
+
httpx/__pycache__/_content.cpython-311.pyc,,
|
| 16 |
+
httpx/__pycache__/_decoders.cpython-311.pyc,,
|
| 17 |
+
httpx/__pycache__/_exceptions.cpython-311.pyc,,
|
| 18 |
+
httpx/__pycache__/_main.cpython-311.pyc,,
|
| 19 |
+
httpx/__pycache__/_models.cpython-311.pyc,,
|
| 20 |
+
httpx/__pycache__/_multipart.cpython-311.pyc,,
|
| 21 |
+
httpx/__pycache__/_status_codes.cpython-311.pyc,,
|
| 22 |
+
httpx/__pycache__/_types.cpython-311.pyc,,
|
| 23 |
+
httpx/__pycache__/_urlparse.cpython-311.pyc,,
|
| 24 |
+
httpx/__pycache__/_urls.cpython-311.pyc,,
|
| 25 |
+
httpx/__pycache__/_utils.cpython-311.pyc,,
|
| 26 |
+
httpx/__version__.py,sha256=LoUyYeOXTieGzuP_64UL0wxdtxjuu_QbOvE7NOg-IqU,108
|
| 27 |
+
httpx/_api.py,sha256=r_Zgs4jIpcPJLqK5dbbSayqo_iVMKFaxZCd-oOHxLEs,11743
|
| 28 |
+
httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891
|
| 29 |
+
httpx/_client.py,sha256=xD-UG67-WMkeltAAOeGGj-cZ2RRTAm19sWRxlFY7_40,65714
|
| 30 |
+
httpx/_config.py,sha256=pPp2U-wicfcKsF-KYRE1LYdt3e6ERGeIoXZ8Gjo3LWc,8547
|
| 31 |
+
httpx/_content.py,sha256=LGGzrJTR3OvN4Mb1GVVNLXkXJH-6oKlwAttO9p5w_yg,8161
|
| 32 |
+
httpx/_decoders.py,sha256=p0dX8I0NEHexs3UGp4SsZutiMhsXrrWl6-GnqVb0iKM,12041
|
| 33 |
+
httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527
|
| 34 |
+
httpx/_main.py,sha256=Cg9GMabiTT_swaDfUgIRitSwxLRMSwUDOm7LdSGqlA4,15626
|
| 35 |
+
httpx/_models.py,sha256=4__Guyv1gLxuZChwim8kfQNiIOcJ9acreFOSurvZfms,44700
|
| 36 |
+
httpx/_multipart.py,sha256=KOHEZZl6oohg9mPaKyyu345qq1rJLg35TUG3YAzXB3Y,9843
|
| 37 |
+
httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639
|
| 38 |
+
httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275
|
| 39 |
+
httpx/_transports/__pycache__/__init__.cpython-311.pyc,,
|
| 40 |
+
httpx/_transports/__pycache__/asgi.cpython-311.pyc,,
|
| 41 |
+
httpx/_transports/__pycache__/base.cpython-311.pyc,,
|
| 42 |
+
httpx/_transports/__pycache__/default.cpython-311.pyc,,
|
| 43 |
+
httpx/_transports/__pycache__/mock.cpython-311.pyc,,
|
| 44 |
+
httpx/_transports/__pycache__/wsgi.cpython-311.pyc,,
|
| 45 |
+
httpx/_transports/asgi.py,sha256=HRfiDYMPt4wQH2gFgHZg4c-i3sblo6bL5GTqcET-xz8,5501
|
| 46 |
+
httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523
|
| 47 |
+
httpx/_transports/default.py,sha256=AzeaRUyVwCccTyyNJexDf0n1dFfzzydpdIQgvw7PLnk,13983
|
| 48 |
+
httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232
|
| 49 |
+
httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825
|
| 50 |
+
httpx/_types.py,sha256=Jyh41GQq7AOev8IOWKDAg7zCbvHAfufmW5g_PiTtErY,2965
|
| 51 |
+
httpx/_urlparse.py,sha256=ZAmH47ONfkxrrj-PPYhGeiHjb6AjKCS-ANWIN4OL_KY,18546
|
| 52 |
+
httpx/_urls.py,sha256=dX99VR1DSOHpgo9Aq7PzYO4FKdxqKjwyNp8grf8dHN0,21550
|
| 53 |
+
httpx/_utils.py,sha256=_TVeqAKvxJkKHdz7dFeb4s0LZqQXgeFkXSgfiHBK_1o,8285
|
| 54 |
+
httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: hatchling 1.26.3
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
httpx = httpx:main
|
.venv/lib/python3.11/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/).
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
| 5 |
+
|
| 6 |
+
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
| 7 |
+
|
| 8 |
+
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
| 9 |
+
|
| 10 |
+
* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
| 11 |
+
|
| 12 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Apache License
|
| 2 |
+
Version 2.0, January 2004
|
| 3 |
+
http://www.apache.org/licenses/
|
| 4 |
+
|
| 5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 6 |
+
|
| 7 |
+
1. Definitions.
|
| 8 |
+
|
| 9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 11 |
+
|
| 12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 13 |
+
the copyright owner that is granting the License.
|
| 14 |
+
|
| 15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 16 |
+
other entities that control, are controlled by, or are under common
|
| 17 |
+
control with that entity. For the purposes of this definition,
|
| 18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 19 |
+
direction or management of such entity, whether by contract or
|
| 20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 22 |
+
|
| 23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 24 |
+
exercising permissions granted by this License.
|
| 25 |
+
|
| 26 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 27 |
+
including but not limited to software source code, documentation
|
| 28 |
+
source, and configuration files.
|
| 29 |
+
|
| 30 |
+
"Object" form shall mean any form resulting from mechanical
|
| 31 |
+
transformation or translation of a Source form, including but
|
| 32 |
+
not limited to compiled object code, generated documentation,
|
| 33 |
+
and conversions to other media types.
|
| 34 |
+
|
| 35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 36 |
+
Object form, made available under the License, as indicated by a
|
| 37 |
+
copyright notice that is included in or attached to the work
|
| 38 |
+
(an example is provided in the Appendix below).
|
| 39 |
+
|
| 40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 41 |
+
form, that is based on (or derived from) the Work and for which the
|
| 42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 44 |
+
of this License, Derivative Works shall not include works that remain
|
| 45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 46 |
+
the Work and Derivative Works thereof.
|
| 47 |
+
|
| 48 |
+
"Contribution" shall mean any work of authorship, including
|
| 49 |
+
the original version of the Work and any modifications or additions
|
| 50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 54 |
+
means any form of electronic, verbal, or written communication sent
|
| 55 |
+
to the Licensor or its representatives, including but not limited to
|
| 56 |
+
communication on electronic mailing lists, source code control systems,
|
| 57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 59 |
+
excluding communication that is conspicuously marked or otherwise
|
| 60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 61 |
+
|
| 62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 64 |
+
subsequently incorporated within the Work.
|
| 65 |
+
|
| 66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 71 |
+
Work and such Derivative Works in Source or Object form.
|
| 72 |
+
|
| 73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 76 |
+
(except as stated in this section) patent license to make, have made,
|
| 77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 78 |
+
where such license applies only to those patent claims licensable
|
| 79 |
+
by such Contributor that are necessarily infringed by their
|
| 80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 82 |
+
institute patent litigation against any entity (including a
|
| 83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 84 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 85 |
+
or contributory patent infringement, then any patent licenses
|
| 86 |
+
granted to You under this License for that Work shall terminate
|
| 87 |
+
as of the date such litigation is filed.
|
| 88 |
+
|
| 89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 90 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 91 |
+
modifications, and in Source or Object form, provided that You
|
| 92 |
+
meet the following conditions:
|
| 93 |
+
|
| 94 |
+
(a) You must give any other recipients of the Work or
|
| 95 |
+
Derivative Works a copy of this License; and
|
| 96 |
+
|
| 97 |
+
(b) You must cause any modified files to carry prominent notices
|
| 98 |
+
stating that You changed the files; and
|
| 99 |
+
|
| 100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 101 |
+
that You distribute, all copyright, patent, trademark, and
|
| 102 |
+
attribution notices from the Source form of the Work,
|
| 103 |
+
excluding those notices that do not pertain to any part of
|
| 104 |
+
the Derivative Works; and
|
| 105 |
+
|
| 106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 107 |
+
distribution, then any Derivative Works that You distribute must
|
| 108 |
+
include a readable copy of the attribution notices contained
|
| 109 |
+
within such NOTICE file, excluding those notices that do not
|
| 110 |
+
pertain to any part of the Derivative Works, in at least one
|
| 111 |
+
of the following places: within a NOTICE text file distributed
|
| 112 |
+
as part of the Derivative Works; within the Source form or
|
| 113 |
+
documentation, if provided along with the Derivative Works; or,
|
| 114 |
+
within a display generated by the Derivative Works, if and
|
| 115 |
+
wherever such third-party notices normally appear. The contents
|
| 116 |
+
of the NOTICE file are for informational purposes only and
|
| 117 |
+
do not modify the License. You may add Your own attribution
|
| 118 |
+
notices within Derivative Works that You distribute, alongside
|
| 119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 120 |
+
that such additional attribution notices cannot be construed
|
| 121 |
+
as modifying the License.
|
| 122 |
+
|
| 123 |
+
You may add Your own copyright statement to Your modifications and
|
| 124 |
+
may provide additional or different license terms and conditions
|
| 125 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 126 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 127 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 128 |
+
the conditions stated in this License.
|
| 129 |
+
|
| 130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 132 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 133 |
+
this License, without any additional terms or conditions.
|
| 134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 135 |
+
the terms of any separate license agreement you may have executed
|
| 136 |
+
with Licensor regarding such Contributions.
|
| 137 |
+
|
| 138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 140 |
+
except as required for reasonable and customary use in describing the
|
| 141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 142 |
+
|
| 143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 144 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 147 |
+
implied, including, without limitation, any warranties or conditions
|
| 148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 150 |
+
appropriateness of using or redistributing the Work and assume any
|
| 151 |
+
risks associated with Your exercise of permissions under this License.
|
| 152 |
+
|
| 153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 154 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 155 |
+
unless required by applicable law (such as deliberate and grossly
|
| 156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 157 |
+
liable to You for damages, including any direct, indirect, special,
|
| 158 |
+
incidental, or consequential damages of any character arising as a
|
| 159 |
+
result of this License or out of the use or inability to use the
|
| 160 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 161 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 162 |
+
other commercial damages or losses), even if such Contributor
|
| 163 |
+
has been advised of the possibility of such damages.
|
| 164 |
+
|
| 165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 168 |
+
or other liability obligations and/or rights consistent with this
|
| 169 |
+
License. However, in accepting such obligations, You may act only
|
| 170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 171 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 172 |
+
defend, and hold each Contributor harmless for any liability
|
| 173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 174 |
+
of your accepting any such warranty or additional liability.
|
| 175 |
+
|
| 176 |
+
END OF TERMS AND CONDITIONS
|
| 177 |
+
|
| 178 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 179 |
+
|
| 180 |
+
To apply the Apache License to your work, attach the following
|
| 181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 182 |
+
replaced with your own identifying information. (Don't include
|
| 183 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 184 |
+
comment syntax for the file format. We also recommend that a
|
| 185 |
+
file or class name and description of purpose be included on the
|
| 186 |
+
same "printed page" as the copyright notice for easier
|
| 187 |
+
identification within third-party archives.
|
| 188 |
+
|
| 189 |
+
Copyright 2023- The Outlines developers
|
| 190 |
+
|
| 191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 192 |
+
you may not use this file except in compliance with the License.
|
| 193 |
+
You may obtain a copy of the License at
|
| 194 |
+
|
| 195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 196 |
+
|
| 197 |
+
Unless required by applicable law or agreed to in writing, software
|
| 198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 200 |
+
See the License for the specific language governing permissions and
|
| 201 |
+
limitations under the License.
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/METADATA
ADDED
|
@@ -0,0 +1,503 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: outlines
|
| 3 |
+
Version: 0.1.11
|
| 4 |
+
Summary: Probabilistic Generative Model Programming
|
| 5 |
+
Author: Outlines Developers
|
| 6 |
+
License: Apache-2.0
|
| 7 |
+
Project-URL: homepage, https://github.com/dottxt-ai/outlines
|
| 8 |
+
Project-URL: documentation, https://dottxt-ai.github.io/outlines/
|
| 9 |
+
Project-URL: repository, https://github.com/dottxt-ai/outlines
|
| 10 |
+
Keywords: machine learning,deep learning,language models,structured generation
|
| 11 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 12 |
+
Classifier: Intended Audience :: Developers
|
| 13 |
+
Classifier: Intended Audience :: Information Technology
|
| 14 |
+
Classifier: Intended Audience :: Science/Research
|
| 15 |
+
Classifier: Operating System :: OS Independent
|
| 16 |
+
Classifier: Programming Language :: Python :: 3
|
| 17 |
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
| 18 |
+
Requires-Python: >=3.9
|
| 19 |
+
Description-Content-Type: text/markdown
|
| 20 |
+
License-File: LICENSE
|
| 21 |
+
Requires-Dist: interegular
|
| 22 |
+
Requires-Dist: jinja2
|
| 23 |
+
Requires-Dist: lark
|
| 24 |
+
Requires-Dist: nest_asyncio
|
| 25 |
+
Requires-Dist: numpy
|
| 26 |
+
Requires-Dist: cloudpickle
|
| 27 |
+
Requires-Dist: diskcache
|
| 28 |
+
Requires-Dist: pydantic>=2.0
|
| 29 |
+
Requires-Dist: referencing
|
| 30 |
+
Requires-Dist: jsonschema
|
| 31 |
+
Requires-Dist: requests
|
| 32 |
+
Requires-Dist: tqdm
|
| 33 |
+
Requires-Dist: typing_extensions
|
| 34 |
+
Requires-Dist: pycountry
|
| 35 |
+
Requires-Dist: airportsdata
|
| 36 |
+
Requires-Dist: torch
|
| 37 |
+
Requires-Dist: outlines_core==0.1.26
|
| 38 |
+
Provides-Extra: vllm
|
| 39 |
+
Requires-Dist: vllm; extra == "vllm"
|
| 40 |
+
Requires-Dist: transformers; extra == "vllm"
|
| 41 |
+
Requires-Dist: numpy<2; extra == "vllm"
|
| 42 |
+
Provides-Extra: transformers
|
| 43 |
+
Requires-Dist: transformers; extra == "transformers"
|
| 44 |
+
Requires-Dist: accelerate; extra == "transformers"
|
| 45 |
+
Requires-Dist: datasets; extra == "transformers"
|
| 46 |
+
Requires-Dist: numpy<2; extra == "transformers"
|
| 47 |
+
Provides-Extra: mlxlm
|
| 48 |
+
Requires-Dist: mlx-lm; extra == "mlxlm"
|
| 49 |
+
Requires-Dist: datasets; extra == "mlxlm"
|
| 50 |
+
Provides-Extra: openai
|
| 51 |
+
Requires-Dist: openai; extra == "openai"
|
| 52 |
+
Provides-Extra: llamacpp
|
| 53 |
+
Requires-Dist: llama-cpp-python; extra == "llamacpp"
|
| 54 |
+
Requires-Dist: transformers; extra == "llamacpp"
|
| 55 |
+
Requires-Dist: datasets; extra == "llamacpp"
|
| 56 |
+
Requires-Dist: numpy<2; extra == "llamacpp"
|
| 57 |
+
Provides-Extra: exllamav2
|
| 58 |
+
Requires-Dist: exllamav2; extra == "exllamav2"
|
| 59 |
+
Provides-Extra: test
|
| 60 |
+
Requires-Dist: pre-commit; extra == "test"
|
| 61 |
+
Requires-Dist: pytest; extra == "test"
|
| 62 |
+
Requires-Dist: pytest-benchmark; extra == "test"
|
| 63 |
+
Requires-Dist: pytest-cov; extra == "test"
|
| 64 |
+
Requires-Dist: pytest-mock; extra == "test"
|
| 65 |
+
Requires-Dist: coverage[toml]>=5.1; extra == "test"
|
| 66 |
+
Requires-Dist: diff-cover; extra == "test"
|
| 67 |
+
Requires-Dist: accelerate; extra == "test"
|
| 68 |
+
Requires-Dist: beartype<0.16.0; extra == "test"
|
| 69 |
+
Requires-Dist: responses; extra == "test"
|
| 70 |
+
Requires-Dist: llama-cpp-python; extra == "test"
|
| 71 |
+
Requires-Dist: mlx-lm>=0.19.2; (platform_machine == "arm64" and sys_platform == "darwin") and extra == "test"
|
| 72 |
+
Requires-Dist: huggingface_hub; extra == "test"
|
| 73 |
+
Requires-Dist: openai>=1.0.0; extra == "test"
|
| 74 |
+
Requires-Dist: datasets; extra == "test"
|
| 75 |
+
Requires-Dist: vllm; sys_platform != "darwin" and extra == "test"
|
| 76 |
+
Requires-Dist: transformers; extra == "test"
|
| 77 |
+
Requires-Dist: pillow; extra == "test"
|
| 78 |
+
Requires-Dist: exllamav2; extra == "test"
|
| 79 |
+
Requires-Dist: jax; extra == "test"
|
| 80 |
+
Provides-Extra: serve
|
| 81 |
+
Requires-Dist: vllm>=0.3.0; extra == "serve"
|
| 82 |
+
Requires-Dist: uvicorn; extra == "serve"
|
| 83 |
+
Requires-Dist: fastapi; extra == "serve"
|
| 84 |
+
Requires-Dist: pydantic>=2.0; extra == "serve"
|
| 85 |
+
|
| 86 |
+
<div align="center" style="margin-bottom: 1em;">
|
| 87 |
+
|
| 88 |
+
<img src="./docs/assets/images/logo.png" alt="Outlines Logo" width=500></img>
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
🗒️ *Make LLMs speak the language of every application.* 🗒️
|
| 92 |
+
|
| 93 |
+
Made with ❤👷️ by the team at [.txt](https://dottxt.co).
|
| 94 |
+
|
| 95 |
+
[![Documentation][documentation-badge]][documentation]
|
| 96 |
+
[![Contributors][contributors-badge]][contributors]
|
| 97 |
+
[![Downloads][downloads-badge]][pypistats]
|
| 98 |
+
[![Discord][discord-badge]][discord]
|
| 99 |
+
|
| 100 |
+
[Youtube channel][youtube-dottxt] | [.txt blog][blog-dottxt] | [Twitter][dottxt-twitter]
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
</div>
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
``` bash
|
| 107 |
+
pip install outlines
|
| 108 |
+
```
|
| 109 |
+
|
| 110 |
+
First time here? Go to our [setup guide](https://dottxt-ai.github.io/outlines/latest/welcome/)
|
| 111 |
+
|
| 112 |
+
## Features
|
| 113 |
+
|
| 114 |
+
- [x] 🤖 [Multiple model integrations](https://dottxt-ai.github.io/outlines/latest/installation): OpenAI, transformers, llama.cpp, exllama2, mamba
|
| 115 |
+
- [x] 🖍️ Simple and powerful prompting primitives based on the [Jinja templating engine](https://jinja.palletsprojects.com/)
|
| 116 |
+
- [x] 🚄 [Multiple choices](#multiple-choices), [type constraints](#type-constraint) and dynamic stopping
|
| 117 |
+
- [x] ⚡ Fast [regex-structured generation](#efficient-regex-structured-generation)
|
| 118 |
+
- [x] 🔥 Fast [JSON generation](#efficient-json-generation-following-a-pydantic-model) following a JSON schema or a Pydantic model
|
| 119 |
+
- [x] 📝 [Grammar-structured generation](#using-context-free-grammars-to-guide-generation)
|
| 120 |
+
- [x] 🐍 Interleave completions with loops, conditionals, and custom Python functions
|
| 121 |
+
- [x] 💾 Caching of generations
|
| 122 |
+
- [x] 🗂️ Batch inference
|
| 123 |
+
- [x] 🎲 Sample with the greedy, multinomial and beam search algorithms (and more to come!)
|
| 124 |
+
- [x] 🚀 [Serve with vLLM](https://dottxt-ai.github.io/outlines/latest/reference/serve/vllm), with official Docker image, [`outlinesdev/outlines`](https://hub.docker.com/r/outlinesdev/outlines)!
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
Outlines has new releases and features coming every week. Make sure to ⭐ star and 👀 watch this repository, follow [@dottxtai][dottxt-twitter] to stay up to date!
|
| 128 |
+
|
| 129 |
+
## Why should I use structured generation?
|
| 130 |
+
|
| 131 |
+
* It doesn't add any overhead during inference (cost-free)
|
| 132 |
+
* It allows Open Source models to beat closed source models ([Mistral](https://x.com/dottxtai/status/1797692104023363765), [GPT-4](https://x.com/dottxtai/status/1798443290913853770))
|
| 133 |
+
* [It speeds up inference](http://blog.dottxt.co/coalescence.html)
|
| 134 |
+
* [It improves the performance of base models (GSM8K)](http://blog.dottxt.co/performance-gsm8k.html)
|
| 135 |
+
* [It improves the performance of finetuned models (CoNNL)](https://predibase.com/blog/lorax-outlines-better-json-extraction-with-structured-generation-and-lora)
|
| 136 |
+
* [It improves model efficiency (less examples needed)](https://huggingface.co/blog/evaluation-structured-outputs)
|
| 137 |
+
|
| 138 |
+
## .txt company
|
| 139 |
+
|
| 140 |
+
<div align="center">
|
| 141 |
+
<img src="./docs/assets/images/dottxt.png" alt="Outlines Logo" width=100></img>
|
| 142 |
+
</div>
|
| 143 |
+
|
| 144 |
+
We started a company to keep pushing the boundaries of structured generation. Learn more about [.txt](https://twitter.com/dottxtai), and [give our .json API a try](https://h1xbpbfsf0w.typeform.com/to/ZgBCvJHF) if you need a hosted solution ✨
|
| 145 |
+
|
| 146 |
+
## Structured generation
|
| 147 |
+
|
| 148 |
+
The first step towards reliability of systems that include large language models
|
| 149 |
+
is to ensure that there is a well-defined interface between their output and
|
| 150 |
+
user-defined code. **Outlines** provides ways to control the generation of
|
| 151 |
+
language models to make their output more predictable.
|
| 152 |
+
|
| 153 |
+
### Multiple choices
|
| 154 |
+
|
| 155 |
+
You can reduce the completion to a choice between multiple possibilities:
|
| 156 |
+
|
| 157 |
+
``` python
|
| 158 |
+
import outlines
|
| 159 |
+
|
| 160 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 161 |
+
|
| 162 |
+
prompt = """You are a sentiment-labelling assistant.
|
| 163 |
+
Is the following review positive or negative?
|
| 164 |
+
|
| 165 |
+
Review: This restaurant is just awesome!
|
| 166 |
+
"""
|
| 167 |
+
|
| 168 |
+
generator = outlines.generate.choice(model, ["Positive", "Negative"])
|
| 169 |
+
answer = generator(prompt)
|
| 170 |
+
```
|
| 171 |
+
|
| 172 |
+
You can also pass these choices through en enum:
|
| 173 |
+
|
| 174 |
+
````python
|
| 175 |
+
from enum import Enum
|
| 176 |
+
|
| 177 |
+
import outlines
|
| 178 |
+
|
| 179 |
+
class Sentiment(str, Enum):
|
| 180 |
+
positive = "Positive"
|
| 181 |
+
negative = "Negative"
|
| 182 |
+
|
| 183 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 184 |
+
|
| 185 |
+
prompt = """You are a sentiment-labelling assistant.
|
| 186 |
+
Is the following review positive or negative?
|
| 187 |
+
|
| 188 |
+
Review: This restaurant is just awesome!
|
| 189 |
+
"""
|
| 190 |
+
|
| 191 |
+
generator = outlines.generate.choice(model, Sentiment)
|
| 192 |
+
answer = generator(prompt)
|
| 193 |
+
````
|
| 194 |
+
|
| 195 |
+
### Type constraint
|
| 196 |
+
|
| 197 |
+
You can instruct the model to only return integers or floats:
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
``` python
|
| 201 |
+
import outlines
|
| 202 |
+
|
| 203 |
+
model = outlines.models.transformers("WizardLM/WizardMath-7B-V1.1")
|
| 204 |
+
|
| 205 |
+
prompt = "<s>result of 9 + 9 = 18</s><s>result of 1 + 2 = "
|
| 206 |
+
answer = outlines.generate.format(model, int)(prompt)
|
| 207 |
+
print(answer)
|
| 208 |
+
# 3
|
| 209 |
+
|
| 210 |
+
prompt = "sqrt(2)="
|
| 211 |
+
generator = outlines.generate.format(model, float)
|
| 212 |
+
answer = generator(prompt, max_tokens=10)
|
| 213 |
+
print(answer)
|
| 214 |
+
# 1.41421356
|
| 215 |
+
```
|
| 216 |
+
|
| 217 |
+
### Efficient regex-structured generation
|
| 218 |
+
|
| 219 |
+
Outlines also comes with fast regex-structured generation. In fact, the `choice` and
|
| 220 |
+
`format` functions above all use regex-structured generation under the
|
| 221 |
+
hood:
|
| 222 |
+
|
| 223 |
+
``` python
|
| 224 |
+
import outlines
|
| 225 |
+
|
| 226 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 227 |
+
|
| 228 |
+
prompt = "What is the IP address of the Google DNS servers? "
|
| 229 |
+
|
| 230 |
+
generator = outlines.generate.text(model)
|
| 231 |
+
unstructured = generator(prompt, max_tokens=30)
|
| 232 |
+
|
| 233 |
+
generator = outlines.generate.regex(
|
| 234 |
+
model,
|
| 235 |
+
r"((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)",
|
| 236 |
+
)
|
| 237 |
+
structured = generator(prompt, max_tokens=30)
|
| 238 |
+
|
| 239 |
+
print(unstructured)
|
| 240 |
+
# What is the IP address of the Google DNS servers?
|
| 241 |
+
#
|
| 242 |
+
# Passive DNS servers are at DNS servers that are private.
|
| 243 |
+
# In other words, both IP servers are private. The database
|
| 244 |
+
# does not contain Chelsea Manning
|
| 245 |
+
|
| 246 |
+
print(structured)
|
| 247 |
+
# What is the IP address of the Google DNS servers?
|
| 248 |
+
# 2.2.6.1
|
| 249 |
+
```
|
| 250 |
+
|
| 251 |
+
Unlike other libraries, regex-structured generation in Outlines is almost as fast
|
| 252 |
+
as non-structured generation.
|
| 253 |
+
|
| 254 |
+
### Efficient JSON generation following a Pydantic model
|
| 255 |
+
|
| 256 |
+
Outlines allows to guide the generation process so the output is *guaranteed* to follow a [JSON schema](https://json-schema.org/) or [Pydantic model](https://docs.pydantic.dev/latest/):
|
| 257 |
+
|
| 258 |
+
```python
|
| 259 |
+
from enum import Enum
|
| 260 |
+
from pydantic import BaseModel, constr
|
| 261 |
+
|
| 262 |
+
import outlines
|
| 263 |
+
import torch
|
| 264 |
+
|
| 265 |
+
|
| 266 |
+
class Weapon(str, Enum):
|
| 267 |
+
sword = "sword"
|
| 268 |
+
axe = "axe"
|
| 269 |
+
mace = "mace"
|
| 270 |
+
spear = "spear"
|
| 271 |
+
bow = "bow"
|
| 272 |
+
crossbow = "crossbow"
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
class Armor(str, Enum):
|
| 276 |
+
leather = "leather"
|
| 277 |
+
chainmail = "chainmail"
|
| 278 |
+
plate = "plate"
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
class Character(BaseModel):
|
| 282 |
+
name: constr(max_length=10)
|
| 283 |
+
age: int
|
| 284 |
+
armor: Armor
|
| 285 |
+
weapon: Weapon
|
| 286 |
+
strength: int
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 290 |
+
|
| 291 |
+
# Construct structured sequence generator
|
| 292 |
+
generator = outlines.generate.json(model, Character)
|
| 293 |
+
|
| 294 |
+
# Draw a sample
|
| 295 |
+
seed = 789001
|
| 296 |
+
|
| 297 |
+
character = generator("Give me a character description", seed=seed)
|
| 298 |
+
|
| 299 |
+
print(repr(character))
|
| 300 |
+
# Character(name='Anderson', age=28, armor=<Armor.chainmail: 'chainmail'>, weapon=<Weapon.sword: 'sword'>, strength=8)
|
| 301 |
+
|
| 302 |
+
character = generator("Give me an interesting character description")
|
| 303 |
+
|
| 304 |
+
print(repr(character))
|
| 305 |
+
# Character(name='Vivian Thr', age=44, armor=<Armor.plate: 'plate'>, weapon=<Weapon.crossbow: 'crossbow'>, strength=125)
|
| 306 |
+
```
|
| 307 |
+
|
| 308 |
+
The method works with union types, optional types, arrays, nested schemas, etc. Some field constraints are [not supported yet](https://github.com/dottxt-ai/outlines/issues/215), but everything else should work.
|
| 309 |
+
|
| 310 |
+
### Efficient JSON generation following a JSON Schema
|
| 311 |
+
|
| 312 |
+
Sometimes you just want to be able to pass a JSON Schema instead of a Pydantic model. We've got you covered:
|
| 313 |
+
|
| 314 |
+
``` python
|
| 315 |
+
import outlines
|
| 316 |
+
|
| 317 |
+
schema = '''{
|
| 318 |
+
"title": "Character",
|
| 319 |
+
"type": "object",
|
| 320 |
+
"properties": {
|
| 321 |
+
"name": {
|
| 322 |
+
"title": "Name",
|
| 323 |
+
"maxLength": 10,
|
| 324 |
+
"type": "string"
|
| 325 |
+
},
|
| 326 |
+
"age": {
|
| 327 |
+
"title": "Age",
|
| 328 |
+
"type": "integer"
|
| 329 |
+
},
|
| 330 |
+
"armor": {"$ref": "#/definitions/Armor"},
|
| 331 |
+
"weapon": {"$ref": "#/definitions/Weapon"},
|
| 332 |
+
"strength": {
|
| 333 |
+
"title": "Strength",
|
| 334 |
+
"type": "integer"
|
| 335 |
+
}
|
| 336 |
+
},
|
| 337 |
+
"required": ["name", "age", "armor", "weapon", "strength"],
|
| 338 |
+
"definitions": {
|
| 339 |
+
"Armor": {
|
| 340 |
+
"title": "Armor",
|
| 341 |
+
"description": "An enumeration.",
|
| 342 |
+
"enum": ["leather", "chainmail", "plate"],
|
| 343 |
+
"type": "string"
|
| 344 |
+
},
|
| 345 |
+
"Weapon": {
|
| 346 |
+
"title": "Weapon",
|
| 347 |
+
"description": "An enumeration.",
|
| 348 |
+
"enum": ["sword", "axe", "mace", "spear", "bow", "crossbow"],
|
| 349 |
+
"type": "string"
|
| 350 |
+
}
|
| 351 |
+
}
|
| 352 |
+
}'''
|
| 353 |
+
|
| 354 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 355 |
+
generator = outlines.generate.json(model, schema)
|
| 356 |
+
character = generator("Give me a character description")
|
| 357 |
+
```
|
| 358 |
+
|
| 359 |
+
### Using context-free grammars to guide generation
|
| 360 |
+
|
| 361 |
+
Formal grammars rule the world, and Outlines makes them rule LLMs too. You can pass any context-free grammar in the EBNF format and Outlines will generate an output that is valid to this grammar:
|
| 362 |
+
|
| 363 |
+
``` python
|
| 364 |
+
import outlines
|
| 365 |
+
|
| 366 |
+
arithmetic_grammar = """
|
| 367 |
+
?start: expression
|
| 368 |
+
|
| 369 |
+
?expression: term (("+" | "-") term)*
|
| 370 |
+
|
| 371 |
+
?term: factor (("*" | "/") factor)*
|
| 372 |
+
|
| 373 |
+
?factor: NUMBER
|
| 374 |
+
| "-" factor
|
| 375 |
+
| "(" expression ")"
|
| 376 |
+
|
| 377 |
+
%import common.NUMBER
|
| 378 |
+
"""
|
| 379 |
+
|
| 380 |
+
model = outlines.models.transformers("WizardLM/WizardMath-7B-V1.1")
|
| 381 |
+
generator = outlines.generate.cfg(model, arithmetic_grammar)
|
| 382 |
+
sequence = generator("Alice had 4 apples and Bob ate 2. Write an expression for Alice's apples:")
|
| 383 |
+
|
| 384 |
+
print(sequence)
|
| 385 |
+
# (8-2)
|
| 386 |
+
```
|
| 387 |
+
|
| 388 |
+
This was a very simple grammar, and you can use `outlines.generate.cfg` to generate syntactically valid Python, SQL, and much more than this. Any kind of structured text, really. All you have to do is search for "X EBNF grammar" on the web, and take a look at the [Outlines `grammars` module](https://github.com/dottxt-ai/outlines/tree/main/outlines/grammars).
|
| 389 |
+
|
| 390 |
+
### Open functions
|
| 391 |
+
|
| 392 |
+
Outlines can infer the structure of the output from the signature of a function. The result is a dictionary, and can be passed directly to the function using the usual dictionary expansion syntax `**`:
|
| 393 |
+
|
| 394 |
+
```python
|
| 395 |
+
import outlines
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def add(a: int, b: int):
|
| 399 |
+
return a + b
|
| 400 |
+
|
| 401 |
+
model = outlines.models.transformers("WizardLM/WizardMath-7B-V1.1")
|
| 402 |
+
generator = outlines.generate.json(model, add)
|
| 403 |
+
result = generator("Return json with two integers named a and b respectively. a is odd and b even.")
|
| 404 |
+
|
| 405 |
+
print(add(**result))
|
| 406 |
+
# 3
|
| 407 |
+
```
|
| 408 |
+
|
| 409 |
+
A great advantage of passing functions directly to specify the structure is that the structure of the LLM will change with the function's definition. No need to change the code at several places!
|
| 410 |
+
|
| 411 |
+
You can also embed various functions into an enum to generate params:
|
| 412 |
+
|
| 413 |
+
```python
|
| 414 |
+
from enum import Enum
|
| 415 |
+
from functools import partial
|
| 416 |
+
|
| 417 |
+
import outlines
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def add(a: int, b: int) -> int:
|
| 421 |
+
return a + b
|
| 422 |
+
|
| 423 |
+
def mul(c: float, d: float) -> float:
|
| 424 |
+
return c * d
|
| 425 |
+
|
| 426 |
+
class Operation(Enum):
|
| 427 |
+
add = partial(add)
|
| 428 |
+
mul = partial(mul)
|
| 429 |
+
|
| 430 |
+
model = outlines.models.transformers("WizardLM/WizardMath-7B-V1.1")
|
| 431 |
+
generator = outlines.generate.json(model, add)
|
| 432 |
+
result = generator("Return json with two float named c and d respectively. c is negative and d greater than 1.0.")
|
| 433 |
+
|
| 434 |
+
print(result)
|
| 435 |
+
# {'c': -3.14, 'd': 1.5}
|
| 436 |
+
```
|
| 437 |
+
|
| 438 |
+
## Prompting
|
| 439 |
+
|
| 440 |
+
Building prompts can get messy. **Outlines** makes it easier to write and manage
|
| 441 |
+
prompts by encapsulating templates inside "template functions".
|
| 442 |
+
|
| 443 |
+
These functions make it possible to neatly separate the prompt logic from the
|
| 444 |
+
general program logic; they can be imported from other modules and libraries.
|
| 445 |
+
|
| 446 |
+
Template functions require no superfluous abstraction, they use the Jinja2
|
| 447 |
+
templating engine to help build complex prompts in a concise manner:
|
| 448 |
+
|
| 449 |
+
``` python
|
| 450 |
+
import outlines
|
| 451 |
+
|
| 452 |
+
examples = [
|
| 453 |
+
("The food was disgusting", "Negative"),
|
| 454 |
+
("We had a fantastic night", "Positive"),
|
| 455 |
+
("Recommended", "Positive"),
|
| 456 |
+
("The waiter was rude", "Negative")
|
| 457 |
+
]
|
| 458 |
+
|
| 459 |
+
@outlines.prompt
|
| 460 |
+
def labelling(to_label, examples):
|
| 461 |
+
"""You are a sentiment-labelling assistant.
|
| 462 |
+
|
| 463 |
+
{% for example in examples %}
|
| 464 |
+
{{ example[0] }} // {{ example[1] }}
|
| 465 |
+
{% endfor %}
|
| 466 |
+
{{ to_label }} //
|
| 467 |
+
"""
|
| 468 |
+
|
| 469 |
+
model = outlines.models.transformers("microsoft/Phi-3-mini-4k-instruct")
|
| 470 |
+
prompt = labelling("Just awesome", examples)
|
| 471 |
+
answer = outlines.generate.text(model)(prompt, max_tokens=100)
|
| 472 |
+
```
|
| 473 |
+
|
| 474 |
+
## Join us
|
| 475 |
+
|
| 476 |
+
- 💡 **Have an idea?** Come chat with us on [Discord][discord]
|
| 477 |
+
- 🔨 **Want to contribute?** Consult our [contribution guide](https://dottxt-ai.github.io/outlines/latest/community/contribute/).
|
| 478 |
+
- 🐞 **Found a bug?** Open an [issue](https://github.com/dottxt-ai/outlines/issues)
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
## Cite Outlines
|
| 482 |
+
|
| 483 |
+
```
|
| 484 |
+
@article{willard2023efficient,
|
| 485 |
+
title={Efficient Guided Generation for LLMs},
|
| 486 |
+
author={Willard, Brandon T and Louf, R{\'e}mi},
|
| 487 |
+
journal={arXiv preprint arXiv:2307.09702},
|
| 488 |
+
year={2023}
|
| 489 |
+
}
|
| 490 |
+
```
|
| 491 |
+
|
| 492 |
+
[documentation]: https://dottxt-ai.github.io/outlines/latest/welcome/
|
| 493 |
+
[documentation-badge]: https://img.shields.io/readthedocs/outlines
|
| 494 |
+
[contributors]: https://github.com/dottxt-ai/outlines/graphs/contributors
|
| 495 |
+
[contributors-badge]: https://img.shields.io/github/contributors/dottxt-ai/outlines?style=flat-square&logo=github&logoColor=white&color=ECEFF4
|
| 496 |
+
[dottxt-twitter]: https://twitter.com/dottxtai
|
| 497 |
+
[discord]: https://discord.gg/R9DSu34mGd
|
| 498 |
+
[discord-badge]: https://img.shields.io/discord/1182316225284554793?color=81A1C1&logo=discord&logoColor=white&style=flat-square
|
| 499 |
+
[downloads-badge]: https://img.shields.io/pypi/dm/outlines?color=89AC6B&logo=python&logoColor=white&style=flat-square
|
| 500 |
+
[pypistats]: https://pypistats.org/packages/outlines
|
| 501 |
+
[dottxt-twitter-badge]: https://img.shields.io/twitter/follow/dottxtai?style=social
|
| 502 |
+
[youtube-dottxt]: https://www.youtube.com/@dottxt-ai
|
| 503 |
+
[blog-dottxt]: https://blog.dottxt.co/
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/RECORD
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
outlines-0.1.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
outlines-0.1.11.dist-info/LICENSE,sha256=9xB47oqqPVZwSIdW8Zk7neOuZMlUagIy67vdWVxTddc,11354
|
| 3 |
+
outlines-0.1.11.dist-info/METADATA,sha256=90I6ySed9yjWM_A0cZZ7kYaG6CSh1DiTnGq-Q1s_jeM,17137
|
| 4 |
+
outlines-0.1.11.dist-info/RECORD,,
|
| 5 |
+
outlines-0.1.11.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
|
| 6 |
+
outlines-0.1.11.dist-info/top_level.txt,sha256=DRbCwvEBUKClPATvDaHzpX6gD7LgECM9WVYkEq0NHpY,9
|
| 7 |
+
outlines/__init__.py,sha256=sYuMGn7xxyuPhwq-M3M2WKjwGqFwEXG0xyJw6lw31Ng,495
|
| 8 |
+
outlines/__pycache__/__init__.cpython-311.pyc,,
|
| 9 |
+
outlines/__pycache__/_version.cpython-311.pyc,,
|
| 10 |
+
outlines/__pycache__/base.cpython-311.pyc,,
|
| 11 |
+
outlines/__pycache__/caching.cpython-311.pyc,,
|
| 12 |
+
outlines/__pycache__/function.cpython-311.pyc,,
|
| 13 |
+
outlines/__pycache__/grammars.cpython-311.pyc,,
|
| 14 |
+
outlines/__pycache__/prompts.cpython-311.pyc,,
|
| 15 |
+
outlines/__pycache__/samplers.cpython-311.pyc,,
|
| 16 |
+
outlines/_version.py,sha256=HreDwlLXV189L3kiBj3huM_kqWD1usijlC8LN1YXcCM,413
|
| 17 |
+
outlines/base.py,sha256=InRqZU2VeNPjpkb3wfCDnYZ5xW1wxSYeCNXCHTLz_Vg,10501
|
| 18 |
+
outlines/caching.py,sha256=WxfFldbINw0MBtsHhHI51nugsgH7dDpYyPf07A6Yv2E,5337
|
| 19 |
+
outlines/fsm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 20 |
+
outlines/fsm/__pycache__/__init__.cpython-311.pyc,,
|
| 21 |
+
outlines/fsm/__pycache__/guide.cpython-311.pyc,,
|
| 22 |
+
outlines/fsm/__pycache__/json_schema.cpython-311.pyc,,
|
| 23 |
+
outlines/fsm/__pycache__/parsing.cpython-311.pyc,,
|
| 24 |
+
outlines/fsm/__pycache__/types.cpython-311.pyc,,
|
| 25 |
+
outlines/fsm/guide.py,sha256=0DZwVei2g-3kA9Cn5NECwDalWB2ufKTwxJVvdXOVGQ0,8953
|
| 26 |
+
outlines/fsm/json_schema.py,sha256=eB0fMz3UKI-pHOsuYdVQZmsm2Jr1QIw_6DzkC83mB6Y,2535
|
| 27 |
+
outlines/fsm/parsing.py,sha256=ypJ52to1umo2wItiUqhxXDGW4fQf731mq5cGLrQAOeI,39516
|
| 28 |
+
outlines/fsm/types.py,sha256=XEhFaGaM6rrFKsXNXnGmvk1_5Jfht8nkqCcKBk2piDQ,2493
|
| 29 |
+
outlines/function.py,sha256=kN22C9c5IBoQ3KR5GwCFR0gyPzG2Ke5k6ZAPb6pF55U,3707
|
| 30 |
+
outlines/generate/__init__.py,sha256=aQs6Ga6r0n_KMzAY-d1NQhnGkQSWGdQXNCdJzMcbeGo,202
|
| 31 |
+
outlines/generate/__pycache__/__init__.cpython-311.pyc,,
|
| 32 |
+
outlines/generate/__pycache__/api.cpython-311.pyc,,
|
| 33 |
+
outlines/generate/__pycache__/cfg.cpython-311.pyc,,
|
| 34 |
+
outlines/generate/__pycache__/choice.cpython-311.pyc,,
|
| 35 |
+
outlines/generate/__pycache__/format.cpython-311.pyc,,
|
| 36 |
+
outlines/generate/__pycache__/fsm.cpython-311.pyc,,
|
| 37 |
+
outlines/generate/__pycache__/generator.cpython-311.pyc,,
|
| 38 |
+
outlines/generate/__pycache__/json.cpython-311.pyc,,
|
| 39 |
+
outlines/generate/__pycache__/regex.cpython-311.pyc,,
|
| 40 |
+
outlines/generate/__pycache__/text.cpython-311.pyc,,
|
| 41 |
+
outlines/generate/api.py,sha256=54ww0C759h2A6COktBcJeLPDXPH1Nn4l0Iv2i-gLH84,20666
|
| 42 |
+
outlines/generate/cfg.py,sha256=giAHsT-TAi4OnO_d3U15JJX1X194SKQrBqYgdxnFEw4,1686
|
| 43 |
+
outlines/generate/choice.py,sha256=MNJZ0Ig-ZvW_Ci1IazrMqJNkuqnYU7H0R7cvic9YbPc,1752
|
| 44 |
+
outlines/generate/format.py,sha256=d0tEbpdImunihJorf4cYc3KK3aeFrjuWI6G3KoO8Dqg,1435
|
| 45 |
+
outlines/generate/fsm.py,sha256=N7M6BUmEoN02gcVijV3kPUa3Bk9S_sGfFGt1I-lvCeY,1111
|
| 46 |
+
outlines/generate/generator.py,sha256=-EnFq8pb7fbfLPmqRFvMeXN-kA1l_mhwrGvDoRxKWx0,8811
|
| 47 |
+
outlines/generate/json.py,sha256=cFHVogIC_ltTjoPURCP2WaQjuqslRuzcR7GLy3dlgjA,4309
|
| 48 |
+
outlines/generate/regex.py,sha256=3PhYSiR2tpDLj3ty_fvjv7vMcU28Y9dgYiGsfRFOe8Q,1715
|
| 49 |
+
outlines/generate/text.py,sha256=8-DcHDtV4imaqKfG_f4hhYQ_wbPwhhCdjuPmHG_HVo4,1409
|
| 50 |
+
outlines/grammars.py,sha256=OXxQyKvthoQCfrwQuCHSSi4VYcb3GMAOYudC2DmvquU,396
|
| 51 |
+
outlines/grammars/arithmetic.lark,sha256=4aWsZ_IkS9nP7NGihdgPf0wWaP2tn0xb_jhFNF5ws50,293
|
| 52 |
+
outlines/grammars/common.lark,sha256=h6mPVV0vitrbCSVDUnL_GvQriCfwrN8EtWLFiss3K9Q,2243
|
| 53 |
+
outlines/grammars/json.lark,sha256=6d6owpAzgVkAOUSsINg6MLu81VV_HQknRsMsSXHYB-k,373
|
| 54 |
+
outlines/models/__init__.py,sha256=8vIXGlkrjOIeBYx21Uo0-3U6A4UyOBOMf9iK4Wswvcw,701
|
| 55 |
+
outlines/models/__pycache__/__init__.cpython-311.pyc,,
|
| 56 |
+
outlines/models/__pycache__/exllamav2.cpython-311.pyc,,
|
| 57 |
+
outlines/models/__pycache__/llamacpp.cpython-311.pyc,,
|
| 58 |
+
outlines/models/__pycache__/mlxlm.cpython-311.pyc,,
|
| 59 |
+
outlines/models/__pycache__/openai.cpython-311.pyc,,
|
| 60 |
+
outlines/models/__pycache__/tokenizer.cpython-311.pyc,,
|
| 61 |
+
outlines/models/__pycache__/transformers.cpython-311.pyc,,
|
| 62 |
+
outlines/models/__pycache__/transformers_vision.cpython-311.pyc,,
|
| 63 |
+
outlines/models/__pycache__/vllm.cpython-311.pyc,,
|
| 64 |
+
outlines/models/exllamav2.py,sha256=Mo8gpuQI7KQe77T-BZHXHOV3Kkucgvkqo7-TjJcpzV0,13295
|
| 65 |
+
outlines/models/llamacpp.py,sha256=mI_xD-DqfcADl9asF554qOKxpusekx65GEl1Ja-C-xY,14662
|
| 66 |
+
outlines/models/mlxlm.py,sha256=ieim5QadwNQXM6311RBXOoYh52EnRcJZSvPiEfLpxbU,8588
|
| 67 |
+
outlines/models/openai.py,sha256=Oa-HiCUf5tk8HL_UCMI9FJ4tz4F0gAnQgggE1EB28QU,9009
|
| 68 |
+
outlines/models/tokenizer.py,sha256=x6228TFhbcGe-XssA4SAAjaOBEZoAvFciQUpK22Y28U,996
|
| 69 |
+
outlines/models/transformers.py,sha256=xJblsZB8FoXfDxrhvJ7pW0Hj8HSLT9FndURPrZ7kO2M,15337
|
| 70 |
+
outlines/models/transformers_vision.py,sha256=t77kgdRa5DIRiPis126AOfTnKl3PswL3klouUlFR9Jk,5069
|
| 71 |
+
outlines/models/vllm.py,sha256=BRvkrYAC2gTMZ3vhcETXJYf_mlO1U49m3bMArGymyDU,7769
|
| 72 |
+
outlines/processors/__init__.py,sha256=fDMQ-pyBPaDB7Eb8pgwJ16eTUbPAm-w2Wf-Vn8BuCGY,158
|
| 73 |
+
outlines/processors/__pycache__/__init__.cpython-311.pyc,,
|
| 74 |
+
outlines/processors/__pycache__/base_logits_processor.cpython-311.pyc,,
|
| 75 |
+
outlines/processors/__pycache__/structured.cpython-311.pyc,,
|
| 76 |
+
outlines/processors/base_logits_processor.py,sha256=vFM2p65Mstk4YkO2ZC1xOON3YGj4KgWgjj_iFnROSQQ,5354
|
| 77 |
+
outlines/processors/structured.py,sha256=XOZ3hq_B9BbD6nRuOjdZYQvXYRIYY1s6PJFYzdwtV-c,8240
|
| 78 |
+
outlines/prompts.py,sha256=By6LodDBBDeh9xhCXqkxQqnD1pGNStK7JNJDmMylBMg,10071
|
| 79 |
+
outlines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 80 |
+
outlines/samplers.py,sha256=aQqVwEqgCoAVjr2qDkSk28hJXf4CQ8DT0LEJv73vQC4,10646
|
| 81 |
+
outlines/serve/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 82 |
+
outlines/serve/__pycache__/__init__.cpython-311.pyc,,
|
| 83 |
+
outlines/serve/__pycache__/serve.cpython-311.pyc,,
|
| 84 |
+
outlines/serve/serve.py,sha256=xZnXnos-mB7xurY_y2zQIRkUi9508QNxZERZTfbxosw,4940
|
| 85 |
+
outlines/types/__init__.py,sha256=0ZVfLELb_CZ6P9RTete561Uja8bgoGZ4S2shDy-iNhg,110
|
| 86 |
+
outlines/types/__pycache__/__init__.cpython-311.pyc,,
|
| 87 |
+
outlines/types/__pycache__/airports.cpython-311.pyc,,
|
| 88 |
+
outlines/types/__pycache__/countries.cpython-311.pyc,,
|
| 89 |
+
outlines/types/__pycache__/email.cpython-311.pyc,,
|
| 90 |
+
outlines/types/__pycache__/isbn.cpython-311.pyc,,
|
| 91 |
+
outlines/types/__pycache__/locales.cpython-311.pyc,,
|
| 92 |
+
outlines/types/__pycache__/phone_numbers.cpython-311.pyc,,
|
| 93 |
+
outlines/types/__pycache__/zip_codes.cpython-311.pyc,,
|
| 94 |
+
outlines/types/airports.py,sha256=L2rBblU02mkiXrQfm35XS-r4h0L8OySZ-rEpJJvw75s,241
|
| 95 |
+
outlines/types/countries.py,sha256=XWjvIEXkKNwHSdG4TILxfpSU3xHNJnTeMhvVLp1n_S4,748
|
| 96 |
+
outlines/types/email.py,sha256=aOc004pbeIY4p_Ssj5kWBYXfwAukHxVVY10lTj77byY,739
|
| 97 |
+
outlines/types/isbn.py,sha256=2HtRGX-eoOvGImOI0WL2LUAa7IuvJmGgr1Xb7JZOwi8,761
|
| 98 |
+
outlines/types/locales.py,sha256=rKj2OfDIgY4akyjMWOCWF7jB93kv3NzdQcihM4ojh-s,530
|
| 99 |
+
outlines/types/phone_numbers.py,sha256=l8MSwbzsQ2qjGzKN0vVH546IdaHTuT9OD9XzZE4zAp8,435
|
| 100 |
+
outlines/types/zip_codes.py,sha256=lGj2OBwX3LwLk7agw396WK17Aky4a5fZpLeZsNPkjAg,300
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (75.6.0)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
.venv/lib/python3.11/site-packages/outlines-0.1.11.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
outlines
|
.venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (522 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/options.cpython-311.pyc
ADDED
|
Binary file (276 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/partial_json_parser/__pycache__/playground.cpython-311.pyc
ADDED
|
Binary file (1.97 kB). View file
|
|
|